repo_name
stringlengths 5
100
| path
stringlengths 4
299
| copies
stringclasses 990
values | size
stringlengths 4
7
| content
stringlengths 666
1.03M
| license
stringclasses 15
values | hash
int64 -9,223,351,895,964,839,000
9,223,297,778B
| line_mean
float64 3.17
100
| line_max
int64 7
1k
| alpha_frac
float64 0.25
0.98
| autogenerated
bool 1
class |
---|---|---|---|---|---|---|---|---|---|---|
Spoken-tutorial/spoken-website | events/notification.py | 1 | 5034 |
from builtins import str
from django.core.mail import EmailMultiAlternatives
from django.template.loader import get_template
from django.template import Context
from events.models import *
from django.http import HttpResponse, HttpResponseRedirect
import time
def nemail(request):
    """Send the STP announcement email to every active organiser that has
    not yet been notified (tracked via ``OrganiserNotification``).

    Renders the text/html templates once, then loops over the organisers,
    recording each successful send in ``OrganiserNotification`` and pausing
    briefly after every 10 successful sends to avoid flooding the relay.
    Returns a plain "Done!" HttpResponse when the loop finishes.
    """
    plaintext = get_template('email-template/email-template.txt')
    htmly = get_template('email-template/email-template.html')
    # NOTE(review): Django >= 1.11 Template.render() expects a plain dict;
    # wrapping in Context() only works on older Django — confirm the
    # deployed Django version before changing this.
    d = Context({ 'username': 'username' })
    text_content = plaintext.render(d)
    html_content = htmly.render(d)
    # Active organisers (status=1) that have no notification record yet.
    organisers = Organiser.objects.filter(status=1).exclude(
        user_id__in=OrganiserNotification.objects.all().values_list('user_id'))
    sent = 0
    notsent = 0
    count = 0
    tot_count = organisers.count()
    subject = "Spoken Tutorial Software Training Program"
    # NOTE(review): this plain-text body is composed but never used — the
    # rendered templates above supply the actual email content. Kept for
    # reference; remove once confirmed obsolete.
    message = '''Dear Organiser,
<b>Many </b>thanks to you and your Institute for being a part of the Spoken Tutorial Software training program and contributing towards making it such a mega success. This is to inform you that we have introduced a separate interface in the Training Dashboard of our website, namely the Semester Training Planner (STP), that has to be completed prior to raising the Training Request. This ensures that all the batches belonging to the different departments and the specific semesters are able to take training in maximum possible relevant FOSS.The Institute will
1. Appoint Faculty coordinator/s (FC) - so as to cover maximum departments in the Institute.
2. As a first step in the Training process, each FC will Register/ Create a login ID
3. Second step, complete the STP with details -
Dept. name/s (single/multiple), Semester number (single semester), Semester start date, FOSS Course selection method -
i) Mapped with computer lab course hours,
vii) Unmapped but during computer lab course hours,
iii) Outside lab course hours/time-table.
N.B : Many of you have completed mapping of FOSS courses in your time-tables so this part should not be difficult to do.
4. Third step, FC will upload a Master Batch (all students in that Dept. and Year), .csv file of Student details -
i) Dept. name
ii) Year of joining
iii) First name, Last name, Valid e-mail ID, Gender
5. Fourth step, complete the Training Request form which is to be filled within 10 weeks of Semester start date in the case of FOSS courses that come with Online Assessment Tests. This is so that students get adequate time to completely revise the entire series of tutorials of the particular FOSS course.
6. In the fourth step, the FC will select from the Master Batch to create a list with the names of students who will learn the particular FOSS/s
7. In the fifth step, the FC will need to download the specified software, for that Click below.
Link : http://process.spoken-tutorial.org/images/1/1b/Download-Tutorials.pdf
And get the lab and systems ready for the training Click below
Link : http://process.spoken-tutorial.org/images/5/58/Machine-Readiness.pdf
IMPORTANT - Learner's Certificates will no longer be provided for FOSS courses that come with Online assessment Tests. For these courses, only Completion Certificate will be given on successfully completing and passing the test.
As before, the students must go through the instruction sheet and see the tutorials as directed in the instructions mentioned in it and also practice the commands and instruction as shown in the tutorial following the Side by Side method during the Training. Side by Side means that on the screen, we keep the terminal/console window open on the right hand side for the practice and the tutorial window open on the left hand side for the learning.
Here's wishing you the best and guaranteeing our continued support for offering the Spoken Tutorial Software training to all.
Regards,
Spoken Tutorial Team,
IIT Bombay.'''
    for organiser in organisers:
        to = [organiser.user.email]
        #to = ['[email protected]', '[email protected]']
        email = EmailMultiAlternatives(
            subject, text_content, '[email protected]',
            to = to,
            headers = {
                "Content-type" : "text/html"
            }
        )
        email.attach_alternative(html_content, "text/html")
        count = count + 1
        try:
            email.send(fail_silently=False)
            sent += 1
            # Record the notification so this organiser is excluded next run.
            OrganiserNotification.objects.create(user=organiser.user)
            # Throttle: short pause after every 10 successful sends.
            if sent % 10 == 0:
                time.sleep(5)
            print((to," => sent (", str(count),"/",str(tot_count),")"))
        except Exception as e:
            print(e)
            # BUG FIX: failures were never counted, so the final
            # "Total not sent mails" summary always printed 0.
            notsent += 1
            print((to," => not sent (",count,"/",tot_count,")"))
        #break
    print("--------------------------------")
    print(("Total sent mails:", sent))
    print(("Total not sent mails:", notsent))
    print("--------------------------------")
    return HttpResponse("Done!")
| gpl-3.0 | 6,011,057,536,180,010,000 | 54.318681 | 564 | 0.696464 | false |
sestrella/ansible | test/units/modules/network/fortios/test_fortios_firewall_profile_protocol_options.py | 21 | 9947 | # Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <https://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
import pytest
from mock import ANY
from ansible.module_utils.network.fortios.fortios import FortiOSHandler
try:
from ansible.modules.network.fortios import fortios_firewall_profile_protocol_options
except ImportError:
pytest.skip("Could not load required modules for testing", allow_module_level=True)
@pytest.fixture(autouse=True)
def connection_mock(mocker):
    """Auto-applied fixture: patch the module's Connection class so no
    real device connection is ever attempted during these tests."""
    patched_connection = mocker.patch(
        'ansible.modules.network.fortios.'
        'fortios_firewall_profile_protocol_options.Connection'
    )
    return patched_connection


fos_instance = FortiOSHandler(connection_mock)
def test_firewall_profile_protocol_options_creation(mocker):
    """A successful 'present' run calls set() with underscore keys mapped
    to their hyphenated FortiOS names and reports a change."""
    schema_mock = mocker.patch(
        'ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
    set_mock = mocker.patch(
        'ansible.module_utils.network.fortios.fortios.FortiOSHandler.set',
        return_value={
            'status': 'success', 'http_method': 'POST', 'http_status': 200})

    input_data = {
        'username': 'admin',
        'state': 'present',
        'firewall_profile_protocol_options': {
            'comment': 'Optional comments.',
            'name': 'default_name_4',
            'oversize_log': 'disable',
            'replacemsg_group': 'test_value_6',
            'rpc_over_http': 'enable',
            'switching_protocols_log': 'disable'
        },
        'vdom': 'root'}

    is_error, changed, response = (
        fortios_firewall_profile_protocol_options.fortios_firewall(
            input_data, fos_instance))

    set_mock.assert_called_with(
        'firewall', 'profile-protocol-options',
        data={
            'comment': 'Optional comments.',
            'name': 'default_name_4',
            'oversize-log': 'disable',
            'replacemsg-group': 'test_value_6',
            'rpc-over-http': 'enable',
            'switching-protocols-log': 'disable'
        },
        vdom='root')
    schema_mock.assert_not_called()
    assert not is_error
    assert changed
    assert response['status'] == 'success'
    assert response['http_status'] == 200
def test_firewall_profile_protocol_options_creation_fails(mocker):
    """When set() reports an HTTP 500 error, the module flags an error
    and does not claim any change was made."""
    schema_mock = mocker.patch(
        'ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
    set_mock = mocker.patch(
        'ansible.module_utils.network.fortios.fortios.FortiOSHandler.set',
        return_value={
            'status': 'error', 'http_method': 'POST', 'http_status': 500})

    input_data = {
        'username': 'admin',
        'state': 'present',
        'firewall_profile_protocol_options': {
            'comment': 'Optional comments.',
            'name': 'default_name_4',
            'oversize_log': 'disable',
            'replacemsg_group': 'test_value_6',
            'rpc_over_http': 'enable',
            'switching_protocols_log': 'disable'
        },
        'vdom': 'root'}

    is_error, changed, response = (
        fortios_firewall_profile_protocol_options.fortios_firewall(
            input_data, fos_instance))

    set_mock.assert_called_with(
        'firewall', 'profile-protocol-options',
        data={
            'comment': 'Optional comments.',
            'name': 'default_name_4',
            'oversize-log': 'disable',
            'replacemsg-group': 'test_value_6',
            'rpc-over-http': 'enable',
            'switching-protocols-log': 'disable'
        },
        vdom='root')
    schema_mock.assert_not_called()
    assert is_error
    assert not changed
    assert response['status'] == 'error'
    assert response['http_status'] == 500
def test_firewall_profile_protocol_options_removal(mocker):
    """A successful 'absent' run issues delete() for the object's mkey
    and reports a change."""
    schema_mock = mocker.patch(
        'ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
    delete_mock = mocker.patch(
        'ansible.module_utils.network.fortios.fortios.FortiOSHandler.delete',
        return_value={
            'status': 'success', 'http_method': 'POST', 'http_status': 200})

    input_data = {
        'username': 'admin',
        'state': 'absent',
        'firewall_profile_protocol_options': {
            'comment': 'Optional comments.',
            'name': 'default_name_4',
            'oversize_log': 'disable',
            'replacemsg_group': 'test_value_6',
            'rpc_over_http': 'enable',
            'switching_protocols_log': 'disable'
        },
        'vdom': 'root'}

    is_error, changed, response = (
        fortios_firewall_profile_protocol_options.fortios_firewall(
            input_data, fos_instance))

    delete_mock.assert_called_with(
        'firewall', 'profile-protocol-options', mkey=ANY, vdom='root')
    schema_mock.assert_not_called()
    assert not is_error
    assert changed
    assert response['status'] == 'success'
    assert response['http_status'] == 200
def test_firewall_profile_protocol_options_deletion_fails(mocker):
    """When delete() reports an HTTP 500 error, the module flags an
    error and reports no change."""
    schema_mock = mocker.patch(
        'ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
    delete_mock = mocker.patch(
        'ansible.module_utils.network.fortios.fortios.FortiOSHandler.delete',
        return_value={
            'status': 'error', 'http_method': 'POST', 'http_status': 500})

    input_data = {
        'username': 'admin',
        'state': 'absent',
        'firewall_profile_protocol_options': {
            'comment': 'Optional comments.',
            'name': 'default_name_4',
            'oversize_log': 'disable',
            'replacemsg_group': 'test_value_6',
            'rpc_over_http': 'enable',
            'switching_protocols_log': 'disable'
        },
        'vdom': 'root'}

    is_error, changed, response = (
        fortios_firewall_profile_protocol_options.fortios_firewall(
            input_data, fos_instance))

    delete_mock.assert_called_with(
        'firewall', 'profile-protocol-options', mkey=ANY, vdom='root')
    schema_mock.assert_not_called()
    assert is_error
    assert not changed
    assert response['status'] == 'error'
    assert response['http_status'] == 500
def test_firewall_profile_protocol_options_idempotent(mocker):
    """A 404 'error' response from set() is treated as idempotent:
    no failure is raised and no change is reported."""
    schema_mock = mocker.patch(
        'ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
    set_mock = mocker.patch(
        'ansible.module_utils.network.fortios.fortios.FortiOSHandler.set',
        return_value={
            'status': 'error', 'http_method': 'DELETE', 'http_status': 404})

    input_data = {
        'username': 'admin',
        'state': 'present',
        'firewall_profile_protocol_options': {
            'comment': 'Optional comments.',
            'name': 'default_name_4',
            'oversize_log': 'disable',
            'replacemsg_group': 'test_value_6',
            'rpc_over_http': 'enable',
            'switching_protocols_log': 'disable'
        },
        'vdom': 'root'}

    is_error, changed, response = (
        fortios_firewall_profile_protocol_options.fortios_firewall(
            input_data, fos_instance))

    set_mock.assert_called_with(
        'firewall', 'profile-protocol-options',
        data={
            'comment': 'Optional comments.',
            'name': 'default_name_4',
            'oversize-log': 'disable',
            'replacemsg-group': 'test_value_6',
            'rpc-over-http': 'enable',
            'switching-protocols-log': 'disable'
        },
        vdom='root')
    schema_mock.assert_not_called()
    assert not is_error
    assert not changed
    assert response['status'] == 'error'
    assert response['http_status'] == 404
def test_firewall_profile_protocol_options_filter_foreign_attributes(mocker):
    """Attributes not in the module schema must be stripped before the
    payload is sent to set()."""
    schema_mock = mocker.patch(
        'ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
    set_mock = mocker.patch(
        'ansible.module_utils.network.fortios.fortios.FortiOSHandler.set',
        return_value={
            'status': 'success', 'http_method': 'POST', 'http_status': 200})

    input_data = {
        'username': 'admin',
        'state': 'present',
        'firewall_profile_protocol_options': {
            'random_attribute_not_valid': 'tag',
            'comment': 'Optional comments.',
            'name': 'default_name_4',
            'oversize_log': 'disable',
            'replacemsg_group': 'test_value_6',
            'rpc_over_http': 'enable',
            'switching_protocols_log': 'disable'
        },
        'vdom': 'root'}

    is_error, changed, response = (
        fortios_firewall_profile_protocol_options.fortios_firewall(
            input_data, fos_instance))

    # 'random_attribute_not_valid' must not appear in the device payload.
    set_mock.assert_called_with(
        'firewall', 'profile-protocol-options',
        data={
            'comment': 'Optional comments.',
            'name': 'default_name_4',
            'oversize-log': 'disable',
            'replacemsg-group': 'test_value_6',
            'rpc-over-http': 'enable',
            'switching-protocols-log': 'disable'
        },
        vdom='root')
    schema_mock.assert_not_called()
    assert not is_error
    assert changed
    assert response['status'] == 'success'
    assert response['http_status'] == 200
| gpl-3.0 | 8,671,566,389,978,949,000 | 38.947791 | 142 | 0.645622 | false |
sqlalchemy/sqlalchemy | test/dialect/oracle/test_reflection.py | 3 | 27835 | # coding: utf-8
from sqlalchemy import exc
from sqlalchemy import FLOAT
from sqlalchemy import ForeignKey
from sqlalchemy import ForeignKeyConstraint
from sqlalchemy import func
from sqlalchemy import Identity
from sqlalchemy import Index
from sqlalchemy import inspect
from sqlalchemy import INTEGER
from sqlalchemy import Integer
from sqlalchemy import MetaData
from sqlalchemy import Numeric
from sqlalchemy import PrimaryKeyConstraint
from sqlalchemy import select
from sqlalchemy import testing
from sqlalchemy import text
from sqlalchemy import Unicode
from sqlalchemy import UniqueConstraint
from sqlalchemy.dialects.oracle.base import BINARY_DOUBLE
from sqlalchemy.dialects.oracle.base import BINARY_FLOAT
from sqlalchemy.dialects.oracle.base import DOUBLE_PRECISION
from sqlalchemy.dialects.oracle.base import NUMBER
from sqlalchemy.testing import assert_raises
from sqlalchemy.testing import AssertsCompiledSQL
from sqlalchemy.testing import eq_
from sqlalchemy.testing import fixtures
from sqlalchemy.testing import is_
from sqlalchemy.testing import is_true
from sqlalchemy.testing.engines import testing_engine
from sqlalchemy.testing.schema import Column
from sqlalchemy.testing.schema import Table
class MultiSchemaTest(fixtures.TestBase, AssertsCompiledSQL):
    """Reflection across the default schema and an alternate test schema,
    including Oracle synonyms that point between the two.

    setup_test_class builds parent/child tables plus several synonyms in
    the remote test schema and grants REFERENCES to public so local
    tables can reference them.
    """

    __only_on__ = "oracle"
    __backend__ = True

    @classmethod
    def setup_test_class(cls):
        # currently assuming full DBA privs for the user.
        # don't really know how else to go here unless
        # we connect as the other user.

        with testing.db.begin() as conn:
            for stmt in (
                """
            create table %(test_schema)s.parent(
                id integer primary key,
                data varchar2(50)
            );

            COMMENT ON TABLE %(test_schema)s.parent IS 'my table comment';

            create table %(test_schema)s.child(
                id integer primary key,
                data varchar2(50),
                parent_id integer references %(test_schema)s.parent(id)
            );
            create table local_table(
                id integer primary key,
                data varchar2(50)
            );
            create synonym %(test_schema)s.ptable for %(test_schema)s.parent;
            create synonym %(test_schema)s.ctable for %(test_schema)s.child;
            create synonym %(test_schema)s_pt for %(test_schema)s.parent;
            create synonym %(test_schema)s.local_table for local_table;
            -- can't make a ref from local schema to the
            -- remote schema's table without this,
            -- *and* cant give yourself a grant !
            -- so we give it to public. ideas welcome.
            grant references on %(test_schema)s.parent to public;
            grant references on %(test_schema)s.child to public;
            """
                % {"test_schema": testing.config.test_schema}
            ).split(";"):
                if stmt.strip():
                    conn.exec_driver_sql(stmt)

    @classmethod
    def teardown_test_class(cls):
        # Drop everything created in setup_test_class, in dependency order.
        with testing.db.begin() as conn:
            for stmt in (
                """
            drop table %(test_schema)s.child;
            drop table %(test_schema)s.parent;
            drop table local_table;
            drop synonym %(test_schema)s.ctable;
            drop synonym %(test_schema)s.ptable;
            drop synonym %(test_schema)s_pt;
            drop synonym %(test_schema)s.local_table;
            """
                % {"test_schema": testing.config.test_schema}
            ).split(";"):
                if stmt.strip():
                    conn.exec_driver_sql(stmt)

    def test_create_same_names_explicit_schema(self, metadata, connection):
        """Tables with the same names as the remote ones can be created in
        the default schema when that schema is named explicitly."""
        schema = testing.db.dialect.default_schema_name
        meta = metadata
        parent = Table(
            "parent",
            meta,
            Column("pid", Integer, primary_key=True),
            schema=schema,
        )
        child = Table(
            "child",
            meta,
            Column("cid", Integer, primary_key=True),
            Column("pid", Integer, ForeignKey("%s.parent.pid" % schema)),
            schema=schema,
        )
        meta.create_all(connection)
        connection.execute(parent.insert(), {"pid": 1})
        connection.execute(child.insert(), {"cid": 1, "pid": 1})
        eq_(connection.execute(child.select()).fetchall(), [(1, 1)])

    def test_reflect_alt_table_owner_local_synonym(self):
        """Reflect via a local synonym that targets the remote parent."""
        meta = MetaData()
        parent = Table(
            "%s_pt" % testing.config.test_schema,
            meta,
            autoload_with=testing.db,
            oracle_resolve_synonyms=True,
        )
        self.assert_compile(
            parent.select(),
            "SELECT %(test_schema)s_pt.id, "
            "%(test_schema)s_pt.data FROM %(test_schema)s_pt"
            % {"test_schema": testing.config.test_schema},
        )

    def test_reflect_alt_synonym_owner_local_table(self):
        """Reflect via a remote-schema synonym that targets a local table."""
        meta = MetaData()
        parent = Table(
            "local_table",
            meta,
            autoload_with=testing.db,
            oracle_resolve_synonyms=True,
            schema=testing.config.test_schema,
        )
        self.assert_compile(
            parent.select(),
            "SELECT %(test_schema)s.local_table.id, "
            "%(test_schema)s.local_table.data "
            "FROM %(test_schema)s.local_table"
            % {"test_schema": testing.config.test_schema},
        )

    def test_create_same_names_implicit_schema(self, metadata, connection):
        """Same-named tables work in the implicit (default) schema too."""
        meta = metadata
        parent = Table(
            "parent", meta, Column("pid", Integer, primary_key=True)
        )
        child = Table(
            "child",
            meta,
            Column("cid", Integer, primary_key=True),
            Column("pid", Integer, ForeignKey("parent.pid")),
        )
        meta.create_all(connection)
        connection.execute(parent.insert(), {"pid": 1})
        connection.execute(child.insert(), {"cid": 1, "pid": 1})
        eq_(connection.execute(child.select()).fetchall(), [(1, 1)])

    def test_reflect_alt_owner_explicit(self):
        """Reflect remote tables by explicit schema; the FK join between
        them must be discovered, and the table comment reflected."""
        meta = MetaData()
        parent = Table(
            "parent",
            meta,
            autoload_with=testing.db,
            schema=testing.config.test_schema,
        )
        child = Table(
            "child",
            meta,
            autoload_with=testing.db,
            schema=testing.config.test_schema,
        )
        self.assert_compile(
            parent.join(child),
            "%(test_schema)s.parent JOIN %(test_schema)s.child ON "
            "%(test_schema)s.parent.id = %(test_schema)s.child.parent_id"
            % {"test_schema": testing.config.test_schema},
        )
        with testing.db.connect() as conn:
            conn.execute(
                select(parent, child).select_from(parent.join(child))
            ).fetchall()

        # check table comment (#5146)
        eq_(parent.comment, "my table comment")

    def test_reflect_table_comment(self, metadata, connection):
        """get_table_comment must distinguish the remote-schema table's
        comment from a same-named local table's comment."""
        local_parent = Table(
            "parent",
            metadata,
            Column("q", Integer),
            comment="my local comment",
        )

        local_parent.create(connection)

        insp = inspect(connection)
        eq_(
            insp.get_table_comment(
                "parent", schema=testing.config.test_schema
            ),
            {"text": "my table comment"},
        )
        eq_(
            insp.get_table_comment(
                "parent",
            ),
            {"text": "my local comment"},
        )
        eq_(
            insp.get_table_comment(
                "parent", schema=connection.dialect.default_schema_name
            ),
            {"text": "my local comment"},
        )

    def test_reflect_local_to_remote(self, connection):
        """A local table whose FK targets the remote schema pulls the
        remote parent into the MetaData when reflected."""
        connection.exec_driver_sql(
            "CREATE TABLE localtable (id INTEGER "
            "PRIMARY KEY, parent_id INTEGER REFERENCES "
            "%(test_schema)s.parent(id))"
            % {"test_schema": testing.config.test_schema},
        )
        try:
            meta = MetaData()
            lcl = Table("localtable", meta, autoload_with=testing.db)
            parent = meta.tables["%s.parent" % testing.config.test_schema]
            self.assert_compile(
                parent.join(lcl),
                "%(test_schema)s.parent JOIN localtable ON "
                "%(test_schema)s.parent.id = "
                "localtable.parent_id"
                % {"test_schema": testing.config.test_schema},
            )
        finally:
            connection.exec_driver_sql("DROP TABLE localtable")

    def test_reflect_alt_owner_implicit(self):
        """Same as the explicit-owner test but relying on the FK target's
        schema being carried implicitly."""
        meta = MetaData()
        parent = Table(
            "parent",
            meta,
            autoload_with=testing.db,
            schema=testing.config.test_schema,
        )
        child = Table(
            "child",
            meta,
            autoload_with=testing.db,
            schema=testing.config.test_schema,
        )
        self.assert_compile(
            parent.join(child),
            "%(test_schema)s.parent JOIN %(test_schema)s.child "
            "ON %(test_schema)s.parent.id = "
            "%(test_schema)s.child.parent_id"
            % {"test_schema": testing.config.test_schema},
        )
        with testing.db.connect() as conn:
            conn.execute(
                select(parent, child).select_from(parent.join(child))
            ).fetchall()

    def test_reflect_alt_owner_synonyms(self, connection):
        """Local table referencing the remote table through its synonym;
        reflection must resolve the synonym to the real parent."""
        connection.exec_driver_sql(
            "CREATE TABLE localtable (id INTEGER "
            "PRIMARY KEY, parent_id INTEGER REFERENCES "
            "%s.ptable(id))" % testing.config.test_schema,
        )
        try:
            meta = MetaData()
            lcl = Table(
                "localtable",
                meta,
                autoload_with=connection,
                oracle_resolve_synonyms=True,
            )
            parent = meta.tables["%s.ptable" % testing.config.test_schema]
            self.assert_compile(
                parent.join(lcl),
                "%(test_schema)s.ptable JOIN localtable ON "
                "%(test_schema)s.ptable.id = "
                "localtable.parent_id"
                % {"test_schema": testing.config.test_schema},
            )
            connection.execute(
                select(parent, lcl).select_from(parent.join(lcl))
            ).fetchall()
        finally:
            connection.exec_driver_sql("DROP TABLE localtable")

    def test_reflect_remote_synonyms(self):
        """Both sides of the join reflected via remote-schema synonyms."""
        meta = MetaData()
        parent = Table(
            "ptable",
            meta,
            autoload_with=testing.db,
            schema=testing.config.test_schema,
            oracle_resolve_synonyms=True,
        )
        child = Table(
            "ctable",
            meta,
            autoload_with=testing.db,
            schema=testing.config.test_schema,
            oracle_resolve_synonyms=True,
        )
        self.assert_compile(
            parent.join(child),
            "%(test_schema)s.ptable JOIN "
            "%(test_schema)s.ctable "
            "ON %(test_schema)s.ptable.id = "
            "%(test_schema)s.ctable.parent_id"
            % {"test_schema": testing.config.test_schema},
        )
class ConstraintTest(fixtures.TablesTest):
    """Oracle-specific constraint behavior: no ON UPDATE CASCADE support,
    and implicit NOT NULL checks hidden unless requested."""

    __only_on__ = "oracle"
    __backend__ = True
    run_deletes = None

    @classmethod
    def define_tables(cls, metadata):
        Table("foo", metadata, Column("id", Integer, primary_key=True))

    def test_oracle_has_no_on_update_cascade(self, connection):
        """Creating an FK with onupdate='CASCADE' must emit SAWarning,
        whether declared inline on the Column or as a constraint."""
        inline_fk = Table(
            "bar",
            self.tables_test_metadata,
            Column("id", Integer, primary_key=True),
            Column(
                "foo_id", Integer, ForeignKey("foo.id", onupdate="CASCADE")
            ),
        )
        assert_raises(exc.SAWarning, inline_fk.create, connection)

        explicit_fk = Table(
            "bat",
            self.tables_test_metadata,
            Column("id", Integer, primary_key=True),
            Column("foo_id", Integer),
            ForeignKeyConstraint(["foo_id"], ["foo.id"], onupdate="CASCADE"),
        )
        assert_raises(exc.SAWarning, explicit_fk.create, connection)

    def test_reflect_check_include_all(self, connection):
        """The implicit NOT NULL check shows up only with include_all."""
        inspector = inspect(connection)
        eq_(inspector.get_check_constraints("foo"), [])
        sqltexts = [
            constraint["sqltext"]
            for constraint in inspector.get_check_constraints(
                "foo", include_all=True
            )
        ]
        eq_(sqltexts, ['"ID" IS NOT NULL'])
class SystemTableTablenamesTest(fixtures.TestBase):
    """get_table_names must skip tables in excluded tablespaces (SYSTEM
    by default) unless the engine is configured otherwise."""

    __only_on__ = "oracle"
    __backend__ = True

    def setup_test(self):
        with testing.db.begin() as conn:
            conn.exec_driver_sql("create table my_table (id integer)")
            conn.exec_driver_sql(
                "create global temporary table my_temp_table (id integer)",
            )
            conn.exec_driver_sql(
                "create table foo_table (id integer) tablespace SYSTEM"
            )

    def teardown_test(self):
        with testing.db.begin() as conn:
            conn.exec_driver_sql("drop table my_temp_table")
            conn.exec_driver_sql("drop table my_table")
            conn.exec_driver_sql("drop table foo_table")

    def test_table_names_no_system(self):
        """Default engine hides the SYSTEM-tablespace table."""
        inspector = inspect(testing.db)
        eq_(inspector.get_table_names(), ["my_table"])

    def test_temp_table_names_no_system(self):
        """Temp table enumeration is unaffected by tablespace filtering."""
        inspector = inspect(testing.db)
        eq_(inspector.get_temp_table_names(), ["my_temp_table"])

    def test_table_names_w_system(self):
        """Excluding only 'FOO' leaves the SYSTEM-tablespace table visible."""
        engine = testing_engine(options={"exclude_tablespaces": ["FOO"]})
        inspector = inspect(engine)
        reflected = set(inspector.get_table_names())
        eq_(
            reflected.intersection(["my_table", "foo_table"]),
            {"my_table", "foo_table"},
        )
class DontReflectIOTTest(fixtures.TestBase):
    """test that index overflow tables aren't included in
    table_names."""

    __only_on__ = "oracle"
    __backend__ = True

    def setup_test(self):
        # Index-organized table with an OVERFLOW segment; the overflow
        # segment gets its own system-generated table name.
        with testing.db.begin() as conn:
            conn.exec_driver_sql(
                """
            CREATE TABLE admin_docindex(
                    token char(20),
                    doc_id NUMBER,
                    token_frequency NUMBER,
                    token_offsets VARCHAR2(2000),
                    CONSTRAINT pk_admin_docindex PRIMARY KEY (token, doc_id))
                ORGANIZATION INDEX
                TABLESPACE users
                PCTTHRESHOLD 20
                OVERFLOW TABLESPACE users
            """,
            )

    def teardown_test(self):
        with testing.db.begin() as conn:
            conn.exec_driver_sql("drop table admin_docindex")

    def test_reflect_all(self, connection):
        """Only the IOT itself is reflected, never its overflow table."""
        reflected_meta = MetaData()
        reflected_meta.reflect(connection)
        eq_(
            {tbl.name for tbl in reflected_meta.tables.values()},
            {"admin_docindex"},
        )
def all_tables_compression_missing():
    """Return True when basic table compression is unavailable here.

    Compression needs Enterprise Edition, except that it also works in
    Oracle Database 18c Express Edition and later.
    """
    with testing.db.connect() as conn:
        banner = conn.exec_driver_sql("select * from v$version").scalar()
        if "Enterprise Edition" not in banner and (
            testing.db.dialect.server_version_info < (18,)
        ):
            return True
        return False
def all_tables_compress_for_missing():
    """Return True when COMPRESS FOR (OLTP compression) is unavailable,
    i.e. the server is not an Enterprise Edition."""
    with testing.db.connect() as conn:
        banner = conn.exec_driver_sql("select * from v$version").scalar()
        return "Enterprise Edition" not in banner
class TableReflectionTest(fixtures.TestBase):
    """Round-trip reflection of Oracle table compression options."""

    __only_on__ = "oracle"
    __backend__ = True

    @testing.fails_if(all_tables_compression_missing)
    def test_reflect_basic_compression(self, metadata, connection):
        Table(
            "test_compress",
            metadata,
            Column("data", Integer, primary_key=True),
            oracle_compress=True,
        )
        metadata.create_all(connection)

        reflected = Table(
            "test_compress", MetaData(), autoload_with=connection
        )
        # Don't hardcode the exact value, but it must be non-empty
        assert reflected.dialect_options["oracle"]["compress"]

    @testing.fails_if(all_tables_compress_for_missing)
    def test_reflect_oltp_compression(self, metadata, connection):
        Table(
            "test_compress",
            metadata,
            Column("data", Integer, primary_key=True),
            oracle_compress="OLTP",
        )
        metadata.create_all(connection)

        reflected = Table(
            "test_compress", MetaData(), autoload_with=connection
        )
        assert reflected.dialect_options["oracle"]["compress"] == "OLTP"
class RoundTripIndexTest(fixtures.TestBase):
    """Create tables with various index configurations, then verify that
    reflection reproduces them (including Oracle index compression)."""

    __only_on__ = "oracle"
    __backend__ = True

    def test_no_pk(self, metadata, connection):
        """Unique indexes on a PK-less table reflect as plain indexes."""
        Table(
            "sometable",
            metadata,
            Column("id_a", Unicode(255)),
            Column("id_b", Unicode(255)),
            Index("pk_idx_1", "id_a", "id_b", unique=True),
            Index("pk_idx_2", "id_b", "id_a", unique=True),
        )
        metadata.create_all(connection)

        insp = inspect(connection)
        eq_(
            insp.get_indexes("sometable"),
            [
                {
                    "name": "pk_idx_1",
                    "column_names": ["id_a", "id_b"],
                    "dialect_options": {},
                    "unique": True,
                },
                {
                    "name": "pk_idx_2",
                    "column_names": ["id_b", "id_a"],
                    "dialect_options": {},
                    "unique": True,
                },
            ],
        )

    @testing.combinations((True,), (False,), argnames="explicit_pk")
    def test_include_indexes_resembling_pk(
        self, metadata, connection, explicit_pk
    ):
        """Unique indexes covering the same columns as the PK (in another
        order) must still be reflected, with or without an explicit
        PrimaryKeyConstraint."""
        t = Table(
            "sometable",
            metadata,
            Column("id_a", Unicode(255), primary_key=True),
            Column("id_b", Unicode(255), primary_key=True),
            Column("group", Unicode(255), primary_key=True),
            Column("col", Unicode(255)),
            # Oracle won't let you do this unless the indexes have
            # the columns in different order
            Index("pk_idx_1", "id_b", "id_a", "group", unique=True),
            Index("pk_idx_2", "id_b", "group", "id_a", unique=True),
        )
        if explicit_pk:
            t.append_constraint(
                PrimaryKeyConstraint(
                    "id_a", "id_b", "group", name="some_primary_key"
                )
            )
        metadata.create_all(connection)

        insp = inspect(connection)
        eq_(
            insp.get_indexes("sometable"),
            [
                {
                    "name": "pk_idx_1",
                    "column_names": ["id_b", "id_a", "group"],
                    "dialect_options": {},
                    "unique": True,
                },
                {
                    "name": "pk_idx_2",
                    "column_names": ["id_b", "group", "id_a"],
                    "dialect_options": {},
                    "unique": True,
                },
            ],
        )

    def test_reflect_fn_index(self, metadata, connection):
        """test reflection of a functional index.

        it appears this emitted a warning at some point but does not right
        now.  the returned data is not exactly correct, but this is what
        it's likely been doing for many years.

        """

        s_table = Table(
            "sometable",
            metadata,
            Column("group", Unicode(255), primary_key=True),
            Column("col", Unicode(255)),
        )

        Index("data_idx", func.upper(s_table.c.col))

        metadata.create_all(connection)

        eq_(
            inspect(connection).get_indexes("sometable"),
            [
                {
                    "column_names": [],
                    "dialect_options": {},
                    "name": "data_idx",
                    "unique": False,
                }
            ],
        )

    def test_basic(self, metadata, connection):
        """Full round trip: create, reflect into a mirror, drop, recreate
        from the mirror, re-reflect, and compare every index/constraint
        (including oracle_compress settings)."""
        s_table = Table(
            "sometable",
            metadata,
            Column("id_a", Unicode(255), primary_key=True),
            Column("id_b", Unicode(255), primary_key=True, unique=True),
            Column("group", Unicode(255), primary_key=True),
            Column("col", Unicode(255)),
            UniqueConstraint("col", "group"),
        )

        # "group" is a keyword, so lower case
        normalind = Index("tableind", s_table.c.id_b, s_table.c.group)
        Index(
            "compress1", s_table.c.id_a, s_table.c.id_b, oracle_compress=True
        )
        Index(
            "compress2",
            s_table.c.id_a,
            s_table.c.id_b,
            s_table.c.col,
            oracle_compress=1,
        )

        metadata.create_all(connection)

        mirror = MetaData()
        mirror.reflect(connection)

        metadata.drop_all(connection)
        mirror.create_all(connection)

        inspect = MetaData()
        inspect.reflect(connection)

        def obj_definition(obj):
            # Hashable identity for an index/constraint: class + column
            # names + uniqueness flag.
            return (
                obj.__class__,
                tuple([c.name for c in obj.columns]),
                getattr(obj, "unique", None),
            )

        # find what the primary k constraint name should be
        primaryconsname = connection.scalar(
            text(
                """SELECT constraint_name
               FROM all_constraints
               WHERE table_name = :table_name
               AND owner = :owner
               AND constraint_type = 'P' """
            ),
            dict(
                table_name=s_table.name.upper(),
                owner=testing.db.dialect.default_schema_name.upper(),
            ),
        )

        reflectedtable = inspect.tables[s_table.name]

        # make a dictionary of the reflected objects:

        reflected = dict(
            [
                (obj_definition(i), i)
                for i in reflectedtable.indexes | reflectedtable.constraints
            ]
        )

        # assert we got primary key constraint and its name, Error
        # if not in dict

        assert (
            reflected[
                (PrimaryKeyConstraint, ("id_a", "id_b", "group"), None)
            ].name.upper()
            == primaryconsname.upper()
        )

        # Error if not in dict

        eq_(reflected[(Index, ("id_b", "group"), False)].name, normalind.name)
        assert (Index, ("id_b",), True) in reflected
        assert (Index, ("col", "group"), True) in reflected

        idx = reflected[(Index, ("id_a", "id_b"), False)]
        assert idx.dialect_options["oracle"]["compress"] == 2

        idx = reflected[(Index, ("id_a", "id_b", "col"), False)]
        assert idx.dialect_options["oracle"]["compress"] == 1

        eq_(len(reflectedtable.constraints), 1)
        eq_(len(reflectedtable.indexes), 5)
class DBLinkReflectionTest(fixtures.TestBase):
    """Reflection through a local synonym pointing over a database link."""

    __requires__ = ("oracle_test_dblink",)
    __only_on__ = "oracle"
    __backend__ = True

    @classmethod
    def setup_test_class(cls):
        from sqlalchemy.testing import config

        cls.dblink = config.file_config.get("sqla_testing", "oracle_db_link")

        # note that the synonym here is still not totally functional
        # when accessing via a different username as we do with the
        # multiprocess test suite, so testing here is minimal
        with testing.db.begin() as conn:
            conn.exec_driver_sql(
                "create table test_table "
                "(id integer primary key, data varchar2(50))"
            )
            conn.exec_driver_sql(
                "create synonym test_table_syn "
                "for test_table@%s" % cls.dblink
            )

    @classmethod
    def teardown_test_class(cls):
        with testing.db.begin() as conn:
            conn.exec_driver_sql("drop synonym test_table_syn")
            conn.exec_driver_sql("drop table test_table")

    def test_reflection(self):
        """test the resolution of the synonym/dblink."""
        meta = MetaData()
        reflected = Table(
            "test_table_syn",
            meta,
            autoload_with=testing.db,
            oracle_resolve_synonyms=True,
        )
        eq_(list(reflected.c.keys()), ["id", "data"])
        eq_(list(reflected.primary_key), [reflected.c.id])
class TypeReflectionTest(fixtures.TestBase):
    """Round-trip type reflection: emit columns of given types, reflect
    the table back, and verify class + selected attributes match."""

    __only_on__ = "oracle"
    __backend__ = True

    def _run_test(self, metadata, connection, specs, attributes):
        # ``specs``: list of (type to create, expected reflected type);
        # ``attributes``: type attribute names that must round-trip.
        columns = [Column("c%i" % (i + 1), t[0]) for i, t in enumerate(specs)]

        m = metadata
        Table("oracle_types", m, *columns)
        m.create_all(connection)
        m2 = MetaData()
        table = Table("oracle_types", m2, autoload_with=connection)
        for i, (reflected_col, spec) in enumerate(zip(table.c, specs)):
            expected_spec = spec[1]
            reflected_type = reflected_col.type
            is_(type(reflected_type), type(expected_spec))
            for attr in attributes:
                eq_(
                    getattr(reflected_type, attr),
                    getattr(expected_spec, attr),
                    "Column %s: Attribute %s value of %s does not "
                    "match %s for type %s"
                    % (
                        "c%i" % (i + 1),
                        attr,
                        getattr(reflected_type, attr),
                        getattr(expected_spec, attr),
                        spec[0],
                    ),
                )

    def test_integer_types(self, metadata, connection):
        """Integer and generic Numeric come back as INTEGER."""
        specs = [(Integer, INTEGER()), (Numeric, INTEGER())]
        self._run_test(metadata, connection, specs, [])

    def test_number_types(
        self,
        metadata,
        connection,
    ):
        """NUMBER precision/scale must survive the round trip."""
        specs = [(Numeric(5, 2), NUMBER(5, 2)), (NUMBER, NUMBER())]
        self._run_test(metadata, connection, specs, ["precision", "scale"])

    def test_float_types(
        self,
        metadata,
        connection,
    ):
        """Float family round-trips; binary precision not yet reflected."""
        specs = [
            (DOUBLE_PRECISION(), FLOAT()),
            # when binary_precision is supported
            # (DOUBLE_PRECISION(), oracle.FLOAT(binary_precision=126)),
            (BINARY_DOUBLE(), BINARY_DOUBLE()),
            (BINARY_FLOAT(), BINARY_FLOAT()),
            (FLOAT(5), FLOAT()),
            # when binary_precision is supported
            # (FLOAT(5), oracle.FLOAT(binary_precision=5),),
            (FLOAT(), FLOAT()),
            # when binary_precision is supported
            # (FLOAT(5), oracle.FLOAT(binary_precision=126),),
        ]
        self._run_test(metadata, connection, specs, ["precision"])
class IdentityReflectionTest(fixtures.TablesTest):
__only_on__ = "oracle"
__backend__ = True
__requires__ = ("identity_columns",)
@classmethod
def define_tables(cls, metadata):
Table("t1", metadata, Column("id1", Integer, Identity(on_null=True)))
Table("t2", metadata, Column("id2", Integer, Identity(order=True)))
def test_reflect_identity(self):
insp = inspect(testing.db)
common = {
"always": False,
"start": 1,
"increment": 1,
"on_null": False,
"maxvalue": 10 ** 28 - 1,
"minvalue": 1,
"cycle": False,
"cache": 20,
"order": False,
}
for col in insp.get_columns("t1") + insp.get_columns("t2"):
if col["name"] == "id1":
is_true("identity" in col)
exp = common.copy()
exp["on_null"] = True
eq_(col["identity"], exp)
if col["name"] == "id2":
is_true("identity" in col)
exp = common.copy()
exp["order"] = True
eq_(col["identity"], exp)
| mit | 3,258,350,939,267,259,000 | 31.593677 | 79 | 0.53113 | false |
vitmod/enigma2-test | PowerTimer.py | 1 | 37250 | import os
from boxbranding import getMachineBrand, getMachineName
import xml.etree.cElementTree
from datetime import datetime
from time import ctime, time, strftime, localtime, mktime
from bisect import insort
from enigma import eActionMap, quitMainloop
from Components.config import config
from Components.TimerSanityCheck import TimerSanityCheck
from Screens.MessageBox import MessageBox
import Screens.Standby
from Tools import Directories, Notifications
from Tools.XMLTools import stringToXML
import timer
import NavigationInstance
#global variables begin
DSsave = False		# a repeating DEEPSTANDBY timer was shifted; its original begin/end are saved
RSsave = False		# a repeating RESTART (GUI) timer was shifted; original times saved
RBsave = False		# a repeating REBOOT timer was shifted; original times saved
aeDSsave = False	# an after-event DEEPSTANDBY was shifted; original times saved
wasTimerWakeup = False	# True when the box was powered up by a timer (marker file in /tmp)
netbytes = 0		# last sampled rx+tx byte counter, used by getNetworkTraffic()
#+++
debug = False		# enables verbose print tracing of the timer state machine
#+++
#global variables end
#----------------------------------------------------------------------------------------------------
#Timer shutdown, reboot and restart priority
#1. wakeup
#2. wakeuptostandby -> (same as 1.)
#3. deepstandby -> DSsave
#4. deepstandby after event -> aeDSsave
#5. reboot system -> RBsave
#6. restart gui -> RSsave
#7. standby
#8. autostandby
#9. nothing (no function, only for suppress autodeepstandby timer)
#10. autodeepstandby
#-for overlapping timers, or when the next timer starts within 15 minutes, only the highest-priority timer is executed (among timers of the same type, the next one is executed)
#-autodeepstandby timer is only effective if no other timer is active or current time is in the time window
#-priority for repeated timer: shift from begin and end time only temporary, end-action priority is higher as the begin-action
#----------------------------------------------------------------------------------------------------
#reset wakeupstatus
def resetTimerWakeup():
global wasTimerWakeup
if os.path.exists("/tmp/was_powertimer_wakeup"):
os.remove("/tmp/was_powertimer_wakeup")
wasTimerWakeup = False
# parses an event, and gives out a (begin, end, name, duration, eit)-tuple.
# begin and end will be corrected
def parseEvent(ev):
	"""Return the (begin, end) pair of an EPG event; end = begin + duration."""
	start = ev.getBeginTime()
	return start, start + ev.getDuration()
class AFTEREVENT:
	"""Integer constants for the action taken after a power timer ends."""
	NONE = 0
	WAKEUP = 1
	WAKEUPTOSTANDBY = 2
	STANDBY = 3
	DEEPSTANDBY = 4
	def __init__(self):
		pass
class TIMERTYPE:
	"""Integer constants for the kind of action a power timer performs."""
	NONE = 0
	WAKEUP = 1
	WAKEUPTOSTANDBY = 2
	AUTOSTANDBY = 3
	AUTODEEPSTANDBY = 4
	STANDBY = 5
	DEEPSTANDBY = 6
	REBOOT = 7
	RESTART = 8
	def __init__(self):
		pass
# please do not translate log messages
class PowerTimerEntry(timer.TimerEntry, object):
	"""One scheduled power action (wakeup, standby, deepstandby, reboot,
	restart, or the auto* variants).

	The entry is driven through the inherited state machine
	(StatePrepared -> StateRunning -> StateEnded) by activate().  Repeating
	timers that collide with higher-priority timers are temporarily
	shifted; the module-level globals DSsave/RSsave/RBsave/aeDSsave
	remember that a shift happened so the original begin/end can be
	restored later.
	"""
	def __init__(self, begin, end, disabled = False, afterEvent = AFTEREVENT.NONE, timerType = TIMERTYPE.WAKEUP, checkOldTimers = False, autosleepdelay = 60):
		"""Create a power timer for the interval [begin, end].

		checkOldTimers: when True, a begin more than 14 days in the past
		is pulled forward to "now".  autosleepdelay is in minutes.
		"""
		timer.TimerEntry.__init__(self, int(begin), int(end))
		if checkOldTimers:
			# 1209600 s = 14 days: discard stale begin times from old saves
			if self.begin < time() - 1209600:
				self.begin = int(time())
		if self.end < self.begin:
			self.end = self.begin
		self.dontSave = False
		self.disabled = disabled
		self.timer = None
		self.__record_service = None
		self.start_prepare = 0
		self.timerType = timerType
		self.afterEvent = afterEvent
		self.autoincrease = False
		self.autoincreasetime = 3600 * 24 # 1 day
		# auto-sleep configuration ('yes'/'no' style strings come from the setup screen / XML)
		self.autosleepinstandbyonly = 'no'
		self.autosleepdelay = autosleepdelay
		self.autosleeprepeat = 'once'
		self.autosleepwindow = 'no'
		self.autosleepbegin = self.begin
		self.autosleepend = self.end
		self.log_entries = []
		self.resetState()
		#check autopowertimer
		if (self.timerType == TIMERTYPE.AUTOSTANDBY or self.timerType == TIMERTYPE.AUTODEEPSTANDBY) and not self.disabled and time() > 3600 and self.begin > time():
			self.begin = int(time())	#the begin is in the future -> set to current time = no start delay of this timer
	def __repr__(self):
		"""Debug representation: timer type, begin time, disabled flag."""
		timertype = {
			TIMERTYPE.NONE: "nothing",
			TIMERTYPE.WAKEUP: "wakeup",
			TIMERTYPE.WAKEUPTOSTANDBY: "wakeuptostandby",
			TIMERTYPE.AUTOSTANDBY: "autostandby",
			TIMERTYPE.AUTODEEPSTANDBY: "autodeepstandby",
			TIMERTYPE.STANDBY: "standby",
			TIMERTYPE.DEEPSTANDBY: "deepstandby",
			TIMERTYPE.REBOOT: "reboot",
			TIMERTYPE.RESTART: "restart"
			}[self.timerType]
		if not self.disabled:
			return "PowerTimerEntry(type=%s, begin=%s)" % (timertype, ctime(self.begin))
		else:
			return "PowerTimerEntry(type=%s, begin=%s Disabled)" % (timertype, ctime(self.begin))
	def log(self, code, msg):
		"""Append a (timestamp, code, message) tuple to this entry's log."""
		self.log_entries.append((int(time()), code, msg))
	def do_backoff(self):
		"""Push the next retry further into the future.

		Fixed 300 s when in standby after a non-timer wakeup or while any
		shift-save flag is set; otherwise grows 300 -> 600 -> 900 s (cap).
		"""
		if Screens.Standby.inStandby and not wasTimerWakeup or RSsave or RBsave or aeDSsave or DSsave:
			self.backoff = 300
		else:
			if self.backoff == 0:
				self.backoff = 300
			else:
				self.backoff += 300
				if self.backoff > 900:
					self.backoff = 900
		self.log(10, "backoff: retry in %d minutes" % (int(self.backoff)/60))
	def activate(self):
		"""Advance this timer one state; return True to accept the new state,
		False to stay in the current state and retry later (backoff).

		This is the heart of the power timer: it resolves conflicts with
		recordings and with other power timers (see getPriorityCheck) and
		triggers the actual standby/shutdown/reboot/restart notifications.
		"""
		global RSsave, RBsave, DSsave, aeDSsave, wasTimerWakeup
		breakPT = shiftPT = False
		now = time()
		next_state = self.state + 1
		self.log(5, "activating state %d" % next_state)
		if next_state == self.StatePrepared and (self.timerType == TIMERTYPE.AUTOSTANDBY or self.timerType == TIMERTYPE.AUTODEEPSTANDBY):
			# recompute today's auto-sleep window from the stored wall-clock times
			if self.autosleepwindow == 'yes':
				ltm = localtime(now)
				asb = strftime("%H:%M", localtime(self.autosleepbegin)).split(':')
				ase = strftime("%H:%M", localtime(self.autosleepend)).split(':')
				self.autosleepbegin = int(mktime(datetime(ltm.tm_year, ltm.tm_mon, ltm.tm_mday, int(asb[0]), int(asb[1])).timetuple()))
				self.autosleepend = int(mktime(datetime(ltm.tm_year, ltm.tm_mon, ltm.tm_mday, int(ase[0]), int(ase[1])).timetuple()))
				if self.autosleepend <= self.autosleepbegin:
					self.autosleepbegin -= 86400
			if self.getAutoSleepWindow():
				self.begin = self.end = int(now) + int(self.autosleepdelay)*60
			# any key press restarts the auto-sleep countdown (see keyPressed)
			eActionMap.getInstance().bindAction('', -0x7FFFFFFF, self.keyPressed)
		if (next_state == self.StateRunning or next_state == self.StateEnded) and NavigationInstance.instance.PowerTimer is None:
			#TODO: running/ended timer at system start has no nav instance
			#First fix: crash in getPriorityCheck (NavigationInstance.instance.PowerTimer...)
			#Second fix: suppress the message (A finished powertimer wants to ...)
			if debug: print "*****NavigationInstance.instance.PowerTimer is None*****", self.timerType, self.state, ctime(self.begin), ctime(self.end)
			return True
		elif next_state == self.StateRunning and abs(self.begin - now) >= 60: return True
		elif next_state == self.StateEnded and abs(self.end - now) >= 60: return True
		if next_state == self.StatePrepared:
			self.log(6, "prepare ok, waiting for begin")
			if self.begin <= now:
				self.next_activation = int(now) + 20
			else:
				self.next_activation = self.begin
			self.backoff = 0
			return True
		elif next_state == self.StateRunning:
			# remember whether this boot was caused by a timer (marker files from /tmp)
			if os.path.exists("/tmp/was_powertimer_wakeup") and not wasTimerWakeup:
				wasTimerWakeup = int(open("/tmp/was_powertimer_wakeup", "r").read()) and True or False
			elif os.path.exists("/tmp/was_rectimer_wakeup") and not wasTimerWakeup:
				wasTimerWakeup = int(open("/tmp/was_rectimer_wakeup", "r").read()) and True or False
			# if this timer has been cancelled, just go to "end" state.
			if self.cancelled:
				return True
			if self.failed:
				return True
			if self.timerType == TIMERTYPE.NONE:
				return True
			elif self.timerType == TIMERTYPE.WAKEUP:
				if Screens.Standby.inStandby:
					Screens.Standby.inStandby.Power()
				return True
			elif self.timerType == TIMERTYPE.WAKEUPTOSTANDBY:
				return True
			elif self.timerType == TIMERTYPE.STANDBY:
				if debug: print "self.timerType == TIMERTYPE.STANDBY:"
				prioPT = [TIMERTYPE.WAKEUP,TIMERTYPE.RESTART,TIMERTYPE.REBOOT,TIMERTYPE.DEEPSTANDBY]
				prioPTae = [AFTEREVENT.WAKEUP,AFTEREVENT.DEEPSTANDBY]
				shiftPT,breakPT = self.getPriorityCheck(prioPT,prioPTae)
				if not Screens.Standby.inStandby and not breakPT: # not already in standby
					Notifications.AddNotificationWithCallback(self.sendStandbyNotification, MessageBox, _("A finished powertimer wants to set your\n%s %s to standby. Do that now?") % (getMachineBrand(), getMachineName()), timeout = 180)
				return True
			elif self.timerType == TIMERTYPE.AUTOSTANDBY:
				if debug: print "self.timerType == TIMERTYPE.AUTOSTANDBY:"
				if not self.getAutoSleepWindow():
					return False
				if not Screens.Standby.inStandby: # not already in standby
					Notifications.AddNotificationWithCallback(self.sendStandbyNotification, MessageBox, _("A finished powertimer wants to set your\n%s %s to standby. Do that now?") % (getMachineBrand(), getMachineName()), timeout = 180)
					if self.autosleeprepeat == "once":
						eActionMap.getInstance().unbindAction('', self.keyPressed)
						return True
					else:
						# repeating auto-standby: re-arm for another delay period
						self.begin = self.end = int(now) + int(self.autosleepdelay)*60
				else:
					self.begin = self.end = int(now) + int(self.autosleepdelay)*60
			elif self.timerType == TIMERTYPE.AUTODEEPSTANDBY:
				if debug: print "self.timerType == TIMERTYPE.AUTODEEPSTANDBY:"
				if not self.getAutoSleepWindow():
					return False
				# postpone while there is network traffic, another power timer or a
				# recording running/starting within 15 minutes, or we are not in
				# standby although the configuration requires it
				if self.getNetworkTraffic() or NavigationInstance.instance.PowerTimer.isProcessing() or abs(NavigationInstance.instance.PowerTimer.getNextPowerManagerTime() - now) <= 900 \
					or NavigationInstance.instance.RecordTimer.isRecording() or abs(NavigationInstance.instance.RecordTimer.getNextRecordingTime() - now) <= 900 or abs(NavigationInstance.instance.RecordTimer.getNextZapTime() - now) <= 900 \
					or ((self.autosleepinstandbyonly == 'yesACnetwork' or self.autosleepinstandbyonly == 'yes') and not Screens.Standby.inStandby):
					self.do_backoff()
					# retry
					self.begin = self.end = int(now) + self.backoff
					return False
				elif not Screens.Standby.inTryQuitMainloop: # not a shutdown messagebox is open
					if self.autosleeprepeat == "once":
						self.disabled = True
					if Screens.Standby.inStandby: # in standby
						print "[PowerTimer] quitMainloop #1"
						quitMainloop(1)
						return True
					else:
						Notifications.AddNotificationWithCallback(self.sendTryQuitMainloopNotification, MessageBox, _("A finished powertimer wants to shutdown your %s %s.\nDo that now?") % (getMachineBrand(), getMachineName()), timeout = 180)
						if self.autosleeprepeat == "once":
							eActionMap.getInstance().unbindAction('', self.keyPressed)
							return True
						else:
							self.begin = self.end = int(now) + int(self.autosleepdelay)*60
			elif self.timerType == TIMERTYPE.RESTART:
				if debug: print "self.timerType == TIMERTYPE.RESTART:"
				#check priority
				prioPT = [TIMERTYPE.RESTART,TIMERTYPE.REBOOT,TIMERTYPE.DEEPSTANDBY]
				prioPTae = [AFTEREVENT.DEEPSTANDBY]
				shiftPT,breakPT = self.getPriorityCheck(prioPT,prioPTae)
				#a timer with higher priority was shifted - no execution of current timer
				if RBsave or aeDSsave or DSsave:
					if debug: print "break#1"
					breakPT = True
				#a timer with lower priority was shifted - shift now current timer and wait for restore the saved time values from other timer
				if False:
					# NOTE(review): placeholder - RESTART has no lower-priority timer type
					if debug: print "shift#1"
					breakPT = False
					shiftPT = True
				#shift or break
				if shiftPT or breakPT \
					or NavigationInstance.instance.RecordTimer.isRecording() or abs(NavigationInstance.instance.RecordTimer.getNextRecordingTime() - now) <= 900 or abs(NavigationInstance.instance.RecordTimer.getNextZapTime() - now) <= 900:
					if self.repeated and not RSsave:
						self.savebegin = self.begin
						self.saveend = self.end
						RSsave = True
					if not breakPT:
						self.do_backoff()
						#check difference begin to end before shift begin time
						if RSsave and self.end - self.begin > 3 and self.end - now - self.backoff <= 240: breakPT = True
					#breakPT
					if breakPT:
						if self.repeated and RSsave:
							try:
								self.begin = self.savebegin
								self.end = self.saveend
							except:
								pass
						RSsave = False
						return True
					# retry
					oldbegin = self.begin
					self.begin = int(now) + self.backoff
					if abs(self.end - oldbegin) <= 3:
						self.end = self.begin
					else:
						if not self.repeated and self.end < self.begin + 300:
							self.end = self.begin + 300
					return False
				elif not Screens.Standby.inTryQuitMainloop: # not a shutdown messagebox is open
					if self.repeated and RSsave:
						try:
							self.begin = self.savebegin
							self.end = self.saveend
						except:
							pass
					if Screens.Standby.inStandby: # in standby
						print "[PowerTimer] quitMainloop #4"
						quitMainloop(3)
					else:
						Notifications.AddNotificationWithCallback(self.sendTryToRestartNotification, MessageBox, _("A finished powertimer wants to restart the user interface.\nDo that now?"), timeout = 180)
				RSsave = False
				return True
			elif self.timerType == TIMERTYPE.REBOOT:
				if debug: print "self.timerType == TIMERTYPE.REBOOT:"
				#check priority
				prioPT = [TIMERTYPE.REBOOT,TIMERTYPE.DEEPSTANDBY]
				prioPTae = [AFTEREVENT.DEEPSTANDBY]
				shiftPT,breakPT = self.getPriorityCheck(prioPT,prioPTae)
				#a timer with higher priority was shifted - no execution of current timer
				if aeDSsave or DSsave:
					if debug: print "break#1"
					breakPT = True
				#a timer with lower priority was shifted - shift now current timer and wait for restore the saved time values from other timer
				if RSsave:
					if debug: print "shift#1"
					breakPT = False
					shiftPT = True
				#shift or break
				if shiftPT or breakPT \
					or NavigationInstance.instance.RecordTimer.isRecording() or abs(NavigationInstance.instance.RecordTimer.getNextRecordingTime() - now) <= 900 or abs(NavigationInstance.instance.RecordTimer.getNextZapTime() - now) <= 900:
					if self.repeated and not RBsave:
						self.savebegin = self.begin
						self.saveend = self.end
						RBsave = True
					if not breakPT:
						self.do_backoff()
						#check difference begin to end before shift begin time
						if RBsave and self.end - self.begin > 3 and self.end - now - self.backoff <= 240: breakPT = True
					#breakPT
					if breakPT:
						if self.repeated and RBsave:
							try:
								self.begin = self.savebegin
								self.end = self.saveend
							except:
								pass
						RBsave = False
						return True
					# retry
					oldbegin = self.begin
					self.begin = int(now) + self.backoff
					if abs(self.end - oldbegin) <= 3:
						self.end = self.begin
					else:
						if not self.repeated and self.end < self.begin + 300:
							self.end = self.begin + 300
					return False
				elif not Screens.Standby.inTryQuitMainloop: # not a shutdown messagebox is open
					if self.repeated and RBsave:
						try:
							self.begin = self.savebegin
							self.end = self.saveend
						except:
							pass
					if Screens.Standby.inStandby: # in standby
						print "[PowerTimer] quitMainloop #3"
						quitMainloop(2)
					else:
						Notifications.AddNotificationWithCallback(self.sendTryToRebootNotification, MessageBox, _("A finished powertimer wants to reboot your %s %s.\nDo that now?") % (getMachineBrand(), getMachineName()), timeout = 180)
				RBsave = False
				return True
			elif self.timerType == TIMERTYPE.DEEPSTANDBY:
				if debug: print "self.timerType == TIMERTYPE.DEEPSTANDBY:"
				#check priority
				prioPT = [TIMERTYPE.WAKEUP,TIMERTYPE.WAKEUPTOSTANDBY,TIMERTYPE.DEEPSTANDBY]
				prioPTae = [AFTEREVENT.WAKEUP,AFTEREVENT.WAKEUPTOSTANDBY,AFTEREVENT.DEEPSTANDBY]
				shiftPT,breakPT = self.getPriorityCheck(prioPT,prioPTae)
				#a timer with higher priority was shifted - no execution of current timer
				if False:
					# NOTE(review): placeholder - DEEPSTANDBY is the highest-priority begin action
					if debug: print "break#1"
					breakPT = True
				#a timer with lower priority was shifted - shift now current timer and wait for restore the saved time values from other timer
				if RSsave or RBsave or aeDSsave:
					if debug: print "shift#1"
					breakPT = False
					shiftPT = True
				#shift or break
				if shiftPT or breakPT \
					or NavigationInstance.instance.RecordTimer.isRecording() or abs(NavigationInstance.instance.RecordTimer.getNextRecordingTime() - now) <= 900 or abs(NavigationInstance.instance.RecordTimer.getNextZapTime() - now) <= 900:
					if self.repeated and not DSsave:
						self.savebegin = self.begin
						self.saveend = self.end
						DSsave = True
					if not breakPT:
						self.do_backoff()
						#check difference begin to end before shift begin time
						if DSsave and self.end - self.begin > 3 and self.end - now - self.backoff <= 240: breakPT = True
					#breakPT
					if breakPT:
						if self.repeated and DSsave:
							try:
								self.begin = self.savebegin
								self.end = self.saveend
							except:
								pass
						DSsave = False
						return True
					# retry
					oldbegin = self.begin
					self.begin = int(now) + self.backoff
					if abs(self.end - oldbegin) <= 3:
						self.end = self.begin
					else:
						if not self.repeated and self.end < self.begin + 300:
							self.end = self.begin + 300
					return False
				elif not Screens.Standby.inTryQuitMainloop: # not a shutdown messagebox is open
					if self.repeated and DSsave:
						try:
							self.begin = self.savebegin
							self.end = self.saveend
						except:
							pass
					if Screens.Standby.inStandby: # in standby
						print "[PowerTimer] quitMainloop #2"
						quitMainloop(1)
					else:
						Notifications.AddNotificationWithCallback(self.sendTryQuitMainloopNotification, MessageBox, _("A finished powertimer wants to shutdown your %s %s.\nDo that now?") % (getMachineBrand(), getMachineName()), timeout = 180)
				DSsave = False
				return True
		elif next_state == self.StateEnded:
			if self.afterEvent == AFTEREVENT.WAKEUP:
				if Screens.Standby.inStandby:
					Screens.Standby.inStandby.Power()
			elif self.afterEvent == AFTEREVENT.STANDBY:
				if not Screens.Standby.inStandby: # not already in standby
					Notifications.AddNotificationWithCallback(self.sendStandbyNotification, MessageBox, _("A finished powertimer wants to set your\n%s %s to standby. Do that now?") % (getMachineBrand(), getMachineName()), timeout = 180)
			elif self.afterEvent == AFTEREVENT.DEEPSTANDBY:
				if debug: print "self.afterEvent == AFTEREVENT.DEEPSTANDBY:"
				#check priority
				prioPT = [TIMERTYPE.WAKEUP,TIMERTYPE.WAKEUPTOSTANDBY,TIMERTYPE.DEEPSTANDBY]
				prioPTae = [AFTEREVENT.WAKEUP,AFTEREVENT.WAKEUPTOSTANDBY,AFTEREVENT.DEEPSTANDBY]
				shiftPT,breakPT = self.getPriorityCheck(prioPT,prioPTae)
				#a timer with higher priority was shifted - no execution of current timer
				if DSsave:
					if debug: print "break#1"
					breakPT = True
				#a timer with lower priority was shifted - shift now current timer and wait for restore the saved time values
				if RSsave or RBsave:
					if debug: print "shift#1"
					breakPT = False
					shiftPT = True
				#shift or break
				runningPT = False
				#option: check other powertimer is running (current disabled)
				#runningPT = NavigationInstance.instance.PowerTimer.isProcessing(exceptTimer = TIMERTYPE.NONE, endedTimer = self.timerType)
				if shiftPT or breakPT or runningPT \
					or NavigationInstance.instance.RecordTimer.isRecording() or abs(NavigationInstance.instance.RecordTimer.getNextRecordingTime() - now) <= 900 or abs(NavigationInstance.instance.RecordTimer.getNextZapTime() - now) <= 900:
					if self.repeated and not aeDSsave:
						self.savebegin = self.begin
						self.saveend = self.end
						aeDSsave = True
					if not breakPT: self.do_backoff()
					#breakPT
					if breakPT:
						if self.repeated and aeDSsave:
							try:
								self.begin = self.savebegin
								self.end = self.saveend
							except:
								pass
						aeDSsave = False
						return True
					# retry
					self.end = int(now) + self.backoff
					return False
				elif not Screens.Standby.inTryQuitMainloop: # not a shutdown messagebox is open
					if self.repeated and aeDSsave:
						try:
							self.begin = self.savebegin
							self.end = self.saveend
						except:
							pass
					if Screens.Standby.inStandby: # in standby
						print "[PowerTimer] quitMainloop #5"
						quitMainloop(1)
					else:
						Notifications.AddNotificationWithCallback(self.sendTryQuitMainloopNotification, MessageBox, _("A finished powertimer wants to shutdown your %s %s.\nDo that now?") % (getMachineBrand(), getMachineName()), timeout = 180)
				aeDSsave = False
			NavigationInstance.instance.PowerTimer.saveTimer()
			return True
	def setAutoincreaseEnd(self, entry = None):
		"""Try to extend this timer's end time (autoincrease feature).

		Without an entry the end grows by autoincreasetime; with an entry it
		is clipped to 30 s before that entry's begin.  Returns False when no
		extension is possible.
		"""
		if not self.autoincrease:
			return False
		if entry is None:
			new_end = int(time()) + self.autoincreasetime
		else:
			new_end = entry.begin - 30
		dummyentry = PowerTimerEntry(self.begin, new_end, disabled=True, afterEvent = self.afterEvent, timerType = self.timerType)
		dummyentry.disabled = self.disabled
		timersanitycheck = TimerSanityCheck(NavigationInstance.instance.PowerManager.timer_list, dummyentry)
		if not timersanitycheck.check():
			simulTimerList = timersanitycheck.getSimulTimerList()
			if simulTimerList is not None and len(simulTimerList) > 1:
				new_end = simulTimerList[1].begin
				new_end -= 30	# leave 30 seconds of prepare time
		if new_end <= time():
			return False
		self.end = new_end
		return True
	def sendStandbyNotification(self, answer):
		"""MessageBox callback: enter standby when the user confirmed."""
		if answer:
			Notifications.AddNotification(Screens.Standby.Standby)
	def sendTryQuitMainloopNotification(self, answer):
		"""MessageBox callback: shut down (deep standby) when confirmed."""
		if answer:
			Notifications.AddNotification(Screens.Standby.TryQuitMainloop, 1)
	def sendTryToRebootNotification(self, answer):
		"""MessageBox callback: reboot the system when confirmed."""
		if answer:
			Notifications.AddNotification(Screens.Standby.TryQuitMainloop, 2)
	def sendTryToRestartNotification(self, answer):
		"""MessageBox callback: restart the GUI when confirmed."""
		if answer:
			Notifications.AddNotification(Screens.Standby.TryQuitMainloop, 3)
	def keyPressed(self, key, tag):
		"""Global key hook for auto timers: restart the inactivity countdown."""
		if self.getAutoSleepWindow():
			self.begin = self.end = int(time()) + int(self.autosleepdelay)*60
	def getAutoSleepWindow(self):
		"""Return True when "now" is inside the configured auto-sleep window
		(or no window is configured); otherwise shift begin/end into the next
		window and return False."""
		now = time()
		if self.autosleepwindow == 'yes':
			if now < self.autosleepbegin and now < self.autosleepend:
				self.begin = self.autosleepbegin + int(self.autosleepdelay)*60
				self.end = self.autosleepend
				return False
			elif now > self.autosleepbegin and now > self.autosleepend:
				# window already passed for today: roll it forward day by day
				while self.autosleepend < now:
					self.autosleepend += 86400
				while self.autosleepbegin + 86400 < self.autosleepend:
					self.autosleepbegin += 86400
				self.begin = self.autosleepbegin + int(self.autosleepdelay)*60
				self.end = self.autosleepend
				return False
		return True
	def getPriorityCheck(self,prioPT,prioPTae):
		"""Check other power timers due within the next 15 minutes.

		prioPT / prioPTae list the timer types / after-events that outrank
		this timer.  Returns (shiftPT, breakPT): shiftPT - this timer should
		be shifted; breakPT - a higher-priority timer wins, skip this one.
		"""
		shiftPT = breakPT = False
		nextPTlist = NavigationInstance.instance.PowerTimer.getNextPowerManagerTime(getNextTimerTyp = True)
		for entry in nextPTlist:
			#check timers within next 15 mins will started or ended
			if abs(entry[0] - time()) > 900:
				continue
			#faketime
			if entry[1] is None and entry[2] is None and entry[3] is None:
				if debug: print "shift#2 - entry is faketime", ctime(entry[0]), entry
				shiftPT = True
				continue
			#is timer in list itself?
			if entry[0] == self.begin and entry[1] == self.timerType and entry[2] is None and entry[3] == self.state \
				or entry[0] == self.end and entry[1] is None and entry[2] == self.afterEvent and entry[3] == self.state:
				if debug: print "entry is itself", ctime(entry[0]), entry
				nextPTitself = True
			else:
				nextPTitself = False
			if (entry[1] in prioPT or entry[2] in prioPTae) and not nextPTitself:
				if debug: print "break#2 <= 900", ctime(entry[0]), entry
				breakPT = True
				break
		return shiftPT, breakPT
	def getNextActivation(self):
		"""Return the unix time at which this timer needs servicing next."""
		if self.state == self.StateEnded or self.state == self.StateFailed:
			return self.end
		next_state = self.state + 1
		return {self.StatePrepared: self.start_prepare,
			self.StateRunning: self.begin,
			self.StateEnded: self.end }[next_state]
	def getNextWakeup(self, getNextStbPowerOn = False):
		"""Return the next wakeup-relevant time for this timer.

		With getNextStbPowerOn the next box power-on time is computed,
		including the next repeat day for repeated timers (the repeat mask
		is decoded from the bit string of 128+self.repeated); -1 means this
		timer never powers the box on.
		"""
		next_state = self.state + 1
		if getNextStbPowerOn:
			if next_state == 3 and (self.timerType == TIMERTYPE.WAKEUP or self.timerType == TIMERTYPE.WAKEUPTOSTANDBY or self.afterEvent == AFTEREVENT.WAKEUP or self.afterEvent == AFTEREVENT.WAKEUPTOSTANDBY):
				if self.begin > time() and (self.timerType == TIMERTYPE.WAKEUP or self.timerType == TIMERTYPE.WAKEUPTOSTANDBY):	#timer start time is later as now - begin time was changed while running timer
					return self.begin
				if self.afterEvent == AFTEREVENT.WAKEUP or self.afterEvent == AFTEREVENT.WAKEUPTOSTANDBY:
					return self.end
				next_day = 0
				count_day = 0
				wd_timer = datetime.fromtimestamp(self.begin).isoweekday()*-1
				wd_repeated = bin(128+self.repeated)
				for s in range(wd_timer-1,-8,-1):
					count_day +=1
					if int(wd_repeated[s]):
						next_day = s
						break
				if next_day == 0:
					for s in range(-1,wd_timer-1,-1):
						count_day +=1
						if int(wd_repeated[s]):
							next_day = s
							break
				return self.begin + 86400 * count_day
			elif next_state < 3 and (self.timerType == TIMERTYPE.WAKEUP or self.timerType == TIMERTYPE.WAKEUPTOSTANDBY):
				return self.begin
			elif next_state < 3 and (self.afterEvent == AFTEREVENT.WAKEUP or self.afterEvent == AFTEREVENT.WAKEUPTOSTANDBY):
				return self.end
			else:
				return -1
		if self.state == self.StateEnded or self.state == self.StateFailed:
			return self.end
		return {self.StatePrepared: self.start_prepare,
			self.StateRunning: self.begin,
			self.StateEnded: self.end}[next_state]
	def timeChanged(self):
		"""Recompute start_prepare and reset the backoff after begin/end changed."""
		old_prepare = self.start_prepare
		self.start_prepare = self.begin - self.prepare_time
		self.backoff = 0
		if int(old_prepare) > 60 and int(old_prepare) != int(self.start_prepare):
			self.log(15, "time changed, start prepare is now: %s" % ctime(self.start_prepare))
	def getNetworkTraffic(self):
		"""Return True when more than 1 MiB was transferred on eth*/wlan*
		interfaces since the last call (read from /proc/net/dev).

		Only sampled while in standby with autosleepinstandbyonly set to
		'yesACnetwork'; otherwise always returns False.
		"""
		global netbytes
		oldbytes = netbytes
		newbytes = 0
		if Screens.Standby.inStandby and self.autosleepinstandbyonly == 'yesACnetwork':
			try:
				if os.path.exists('/proc/net/dev'):
					f = open('/proc/net/dev', 'r')
					temp = f.readlines()
					f.close()
					for lines in temp:
						lisp = lines.split()
						if lisp[0].endswith(':') and (lisp[0].startswith('eth') or lisp[0].startswith('wlan')):
							# column 1 = received bytes, column 9 = transmitted bytes
							newbytes += int(lisp[1]) + int(lisp[9])
					netbytes = newbytes
					print '[PowerTimer] Receive/Transmit Bytes : ', str(newbytes - oldbytes), '(' + str(int((newbytes - oldbytes)/1024/1024)) + ' MBytes)'
					if (newbytes - oldbytes) > 1048576:
						return True
			except:
				print '[PowerTimer] Receive/Transmit Bytes: Error reading values! Use "cat /proc/net/dev" for testing on command line.'
		return False
def createTimer(xml):
	"""Build a PowerTimerEntry from one <timer> element of pm_timers.xml."""
	timertype_map = {
		"nothing": TIMERTYPE.NONE,
		"wakeup": TIMERTYPE.WAKEUP,
		"wakeuptostandby": TIMERTYPE.WAKEUPTOSTANDBY,
		"autostandby": TIMERTYPE.AUTOSTANDBY,
		"autodeepstandby": TIMERTYPE.AUTODEEPSTANDBY,
		"standby": TIMERTYPE.STANDBY,
		"deepstandby": TIMERTYPE.DEEPSTANDBY,
		"reboot": TIMERTYPE.REBOOT,
		"restart": TIMERTYPE.RESTART
		}
	afterevent_map = {
		"nothing": AFTEREVENT.NONE,
		"wakeup": AFTEREVENT.WAKEUP,
		"wakeuptostandby": AFTEREVENT.WAKEUPTOSTANDBY,
		"standby": AFTEREVENT.STANDBY,
		"deepstandby": AFTEREVENT.DEEPSTANDBY
		}
	timertype = timertype_map[str(xml.get("timertype") or "wakeup")]
	begin = int(xml.get("begin"))
	end = int(xml.get("end"))
	repeated = xml.get("repeated").encode("utf-8")
	disabled = long(xml.get("disabled") or "0")
	afterevent = afterevent_map[str(xml.get("afterevent") or "nothing")]
	entry = PowerTimerEntry(begin, end, disabled, afterevent, timertype)
	entry.repeated = int(repeated)
	# optional auto-sleep attributes fall back to sensible defaults
	entry.autosleepinstandbyonly = str(xml.get("autosleepinstandbyonly") or "no")
	entry.autosleepdelay = int(str(xml.get("autosleepdelay") or "0"))
	entry.autosleeprepeat = str(xml.get("autosleeprepeat") or "once")
	entry.autosleepwindow = str(xml.get("autosleepwindow") or "no")
	entry.autosleepbegin = int(xml.get("autosleepbegin") or begin)
	entry.autosleepend = int(xml.get("autosleepend") or end)
	# restore the persisted log entries
	for log_node in xml.findall("log"):
		entry.log_entries.append((
			int(log_node.get("time")),
			int(log_node.get("code")),
			log_node.text.strip().encode("utf-8")
			))
	return entry
class PowerTimer(timer.Timer):
	def __init__(self):
		"""Create the power timer manager and load pm_timers.xml from the
		config directory; a missing/unreadable file is tolerated."""
		timer.Timer.__init__(self)
		self.Filename = Directories.resolveFilename(Directories.SCOPE_CONFIG, "pm_timers.xml")
		try:
			self.loadTimer()
		except IOError:
			print "unable to load timers from file!"
	def doActivate(self, w):
		"""Advance timer entry w one state and re-sort it into the proper list.

		Called by the base Timer machinery when w's activation time is
		reached; repeated timers are re-armed, finished ones move to
		processed_timers (trimmed per config.recording.keep_timers).
		"""
		# when activating a timer which has already passed,
		# simply abort the timer. don't run trough all the stages.
		if w.shouldSkip():
			w.state = PowerTimerEntry.StateEnded
		else:
			# when active returns true, this means "accepted".
			# otherwise, the current state is kept.
			# the timer entry itself will fix up the delay then.
			if w.activate():
				w.state += 1
		try:
			self.timer_list.remove(w)
		except:
			print '[PowerManager]: Remove list failed'
		# did this timer reached the last state?
		if w.state < PowerTimerEntry.StateEnded:
			# no, sort it into active list
			insort(self.timer_list, w)
		else:
			# yes. Process repeated, and re-add.
			if w.repeated:
				w.processRepeated()
				w.state = PowerTimerEntry.StateWaiting
				self.addTimerEntry(w)
			else:
				# Remove old timers as set in config
				self.cleanupDaily(config.recording.keep_timers.value)
				insort(self.processed_timers, w)
		self.stateChanged(w)
	def loadTimer(self):
		"""Parse pm_timers.xml and register every <timer> element.

		A corrupt file is renamed to *_old and reported via a popup; a
		missing file is silently ignored.  One popup is shown if any timer
		overlap is detected while re-adding.
		"""
		# TODO: PATH!
		if not Directories.fileExists(self.Filename):
			return
		try:
			file = open(self.Filename, 'r')
			doc = xml.etree.cElementTree.parse(file)
			file.close()
		except SyntaxError:
			from Tools.Notifications import AddPopup
			from Screens.MessageBox import MessageBox
			AddPopup(_("The timer file (pm_timers.xml) is corrupt and could not be loaded."), type = MessageBox.TYPE_ERROR, timeout = 0, id = "TimerLoadFailed")
			print "pm_timers.xml failed to load!"
			try:
				import os
				os.rename(self.Filename, self.Filename + "_old")
			except (IOError, OSError):
				print "renaming broken timer failed"
			return
		except IOError:
			print "pm_timers.xml not found!"
			return
		root = doc.getroot()
		# put out a message when at least one timer overlaps
		checkit = True
		# NOTE(review): the loop variable 'timer' shadows the imported 'timer'
		# module for the remainder of this method (harmless here)
		for timer in root.findall("timer"):
			newTimer = createTimer(timer)
			if (self.record(newTimer, True, dosave=False) is not None) and (checkit == True):
				from Tools.Notifications import AddPopup
				from Screens.MessageBox import MessageBox
				AddPopup(_("Timer overlap in pm_timers.xml detected!\nPlease recheck it!"), type = MessageBox.TYPE_ERROR, timeout = 0, id = "TimerLoadFailed")
				checkit = False # at moment it is enough when the message is displayed one time
	def saveTimer(self):
		"""Serialize all timers (active + processed) to pm_timers.xml.

		The file is written to Filename + ".writing" first, fsynced, and
		then atomically renamed over the real file.
		"""
		savedays = 3600 * 24 * 7	# log entries older than 7 days are not saved
		list = ['<?xml version="1.0" ?>\n', '<timers>\n']
		# NOTE(review): 'list' shadows the builtin and 'timer' the imported
		# module within this method (kept as-is, harmless here)
		for timer in self.timer_list + self.processed_timers:
			if timer.dontSave:
				continue
			list.append('<timer')
			list.append(' timertype="' + str(stringToXML({
				TIMERTYPE.NONE: "nothing",
				TIMERTYPE.WAKEUP: "wakeup",
				TIMERTYPE.WAKEUPTOSTANDBY: "wakeuptostandby",
				TIMERTYPE.AUTOSTANDBY: "autostandby",
				TIMERTYPE.AUTODEEPSTANDBY: "autodeepstandby",
				TIMERTYPE.STANDBY: "standby",
				TIMERTYPE.DEEPSTANDBY: "deepstandby",
				TIMERTYPE.REBOOT: "reboot",
				TIMERTYPE.RESTART: "restart"
				}[timer.timerType])) + '"')
			list.append(' begin="' + str(int(timer.begin)) + '"')
			list.append(' end="' + str(int(timer.end)) + '"')
			list.append(' repeated="' + str(int(timer.repeated)) + '"')
			list.append(' afterevent="' + str(stringToXML({
				AFTEREVENT.NONE: "nothing",
				AFTEREVENT.WAKEUP: "wakeup",
				AFTEREVENT.WAKEUPTOSTANDBY: "wakeuptostandby",
				AFTEREVENT.STANDBY: "standby",
				AFTEREVENT.DEEPSTANDBY: "deepstandby"
				}[timer.afterEvent])) + '"')
			list.append(' disabled="' + str(int(timer.disabled)) + '"')
			list.append(' autosleepinstandbyonly="' + str(timer.autosleepinstandbyonly) + '"')
			list.append(' autosleepdelay="' + str(timer.autosleepdelay) + '"')
			list.append(' autosleeprepeat="' + str(timer.autosleeprepeat) + '"')
			list.append(' autosleepwindow="' + str(timer.autosleepwindow) + '"')
			list.append(' autosleepbegin="' + str(int(timer.autosleepbegin)) + '"')
			list.append(' autosleepend="' + str(int(timer.autosleepend)) + '"')
			list.append('>\n')
			for ltime, code, msg in timer.log_entries:
				if ltime > time() - savedays:
					list.append('<log')
					list.append(' code="' + str(code) + '"')
					list.append(' time="' + str(ltime) + '"')
					list.append('>')
					list.append(str(stringToXML(msg)))
					list.append('</log>\n')
			list.append('</timer>\n')
		list.append('</timers>\n')
		file = open(self.Filename + ".writing", "w")
		for x in list:
			file.write(x)
		file.flush()
		os.fsync(file.fileno())
		file.close()
		os.rename(self.Filename + ".writing", self.Filename)
def isProcessing(self, exceptTimer = None, endedTimer = None):
    """Return True if any relevant timer is currently running.

    Auto-standby / auto-deepstandby timers never count as "processing",
    and timers whose type matches ``exceptTimer`` or ``endedTimer`` are
    skipped as well (both default to None, i.e. no extra exclusion).
    """
    # Timer types that must not be treated as "processing".
    ignored = (TIMERTYPE.AUTOSTANDBY, TIMERTYPE.AUTODEEPSTANDBY,
               exceptTimer, endedTimer)
    # Replaces the old manual flag-and-break loop with an equivalent any().
    return any(timer.isRunning()
               for timer in self.timer_list
               if timer.timerType not in ignored)
def getNextZapTime(self):
    """Return the begin time of the first timer that has not started yet.

    The timer list is scanned in order; the first entry whose begin time
    lies in the future (>= now) is returned, or -1 when none is pending.
    """
    now = time()
    for entry in self.timer_list:
        if entry.begin >= now:
            return entry.begin
    return -1
def getNextPowerManagerTimeOld(self, getNextStbPowerOn = False):
    """Collect the upcoming action times of all non-auto power timers.

    Returns a sorted list of 4-tuples
    (next_action_time, timertype_or_None, afterevent_or_None, timer_state);
    the placeholder (-1, None, None, None) is kept when nothing is pending.

    :param getNextStbPowerOn: forwarded to timer.getNextWakeup()
    """
    now = int(time())
    # Seed with a "nothing pending" placeholder; replaced by the first
    # real entry found below.
    nextPTlist = [(-1,None,None,None)]
    for timer in self.timer_list:
        # Auto(-deep)standby timers are handled elsewhere and skipped here.
        if timer.timerType != TIMERTYPE.AUTOSTANDBY and timer.timerType != TIMERTYPE.AUTODEEPSTANDBY:
            next_act = timer.getNextWakeup(getNextStbPowerOn)
            # Skip actions that already lie in the past (3 s grace period).
            if next_act + 3 < now:
                continue
            if getNextStbPowerOn and debug:
                print "[powertimer] next stb power up", strftime("%a, %Y/%m/%d %H:%M", localtime(next_act))
            next_timertype = next_afterevent = None
            if nextPTlist[0][0] == -1:
                # First real entry: classify (30 s tolerance) whether the
                # action matches the timer's begin (-> timer type) or its
                # end (-> after-event), then replace the placeholder.
                if abs(next_act - timer.begin) <= 30:
                    next_timertype = timer.timerType
                elif abs(next_act - timer.end) <= 30:
                    next_afterevent = timer.afterEvent
                nextPTlist = [(next_act,next_timertype,next_afterevent,timer.state)]
            else:
                # Subsequent entries are appended with the same classification.
                if abs(next_act - timer.begin) <= 30:
                    next_timertype = timer.timerType
                elif abs(next_act - timer.end) <= 30:
                    next_afterevent = timer.afterEvent
                nextPTlist.append((next_act,next_timertype,next_afterevent,timer.state))
    # Sort by action time so the earliest action comes first.
    nextPTlist.sort()
    return nextPTlist
def getNextPowerManagerTime(self, getNextStbPowerOn = False, getNextTimerTyp = False):
global DSsave, RSsave, RBsave, aeDSsave
nextrectime = self.getNextPowerManagerTimeOld(getNextStbPowerOn)
faketime = int(time()) + 300
if getNextTimerTyp:
#check entrys and plausibility of shift state (manual canceled timer has shift/save state not reset)
tt = ae = []
now = time()
if debug: print "+++++++++++++++"
for entry in nextrectime:
if entry[0] < now + 900: tt.append(entry[1])
if entry[0] < now + 900: ae.append(entry[2])
if debug: print ctime(entry[0]), entry
if not TIMERTYPE.RESTART in tt: RSsave = False
if not TIMERTYPE.REBOOT in tt: RBsave = False
if not TIMERTYPE.DEEPSTANDBY in tt: DSsave = False
if not AFTEREVENT.DEEPSTANDBY in ae: aeDSsave = False
if debug: print "RSsave=%s, RBsave=%s, DSsave=%s, aeDSsave=%s, wasTimerWakeup=%s" %(RSsave, RBsave, DSsave, aeDSsave, wasTimerWakeup)
if debug: print "+++++++++++++++"
###
if config.timeshift.isRecording.value:
if 0 < nextrectime[0][0] < faketime:
return nextrectime
else:
nextrectime.append((faketime,None,None,None))
nextrectime.sort()
return nextrectime
else:
return nextrectime
else:
if config.timeshift.isRecording.value:
if 0 < nextrectime[0][0] < faketime:
return nextrectime[0][0]
else:
return faketime
else:
return nextrectime[0][0]
def isNextPowerManagerAfterEventActionAuto(self):
    """Return True if any timer wakes up to standby, either as its timer
    type or as its after-event action.

    The previous implementation computed ``now = time()`` and ``t = None``
    without ever using them; both dead locals are removed.
    """
    return any(timer.timerType == TIMERTYPE.WAKEUPTOSTANDBY
               or timer.afterEvent == AFTEREVENT.WAKEUPTOSTANDBY
               for timer in self.timer_list)
def record(self, entry, ignoreTSC=False, dosave=True):  # called by loadTimer with dosave=False
    """Register a timer entry with this timer container.

    :param entry: the timer entry to add
    :param ignoreTSC: accepted for API compatibility but not used here
    :param dosave: when True, persist the timer list right away
    :return: always None
    """
    entry.timeChanged()
    print "[PowerTimer]",str(entry)
    # Back-reference so the entry can reach its owning container.
    entry.Timer = self
    self.addTimerEntry(entry)
    if dosave:
        self.saveTimer()
    return None
def removeEntry(self, entry):
    """Abort a timer entry and move it out of the active timer list.

    The entry is disabled, its end time is pulled to "now" via abort(),
    any other timer gets the chance to auto-increase into the freed slot,
    and the persisted timer file is rewritten.
    """
    print "[PowerTimer] Remove",str(entry)
    # avoid re-enqueuing
    entry.repeated = False
    # abort timer.
    # this sets the end time to current time, so timer will be stopped.
    entry.autoincrease = False
    entry.abort()
    if entry.state != entry.StateEnded:
        self.timeChanged(entry)
    # print "state: ", entry.state
    # print "in processed: ", entry in self.processed_timers
    # print "in running: ", entry in self.timer_list
    # disable timer first
    # NOTE(review): 3 is presumably StateEnded -- confirm against the
    # timer entry's state constants.
    if entry.state != 3:
        entry.disable()
    # autoincrease instanttimer if possible
    if not entry.dontSave:
        for x in self.timer_list:
            if x.setAutoincreaseEnd():
                self.timeChanged(x)
    # now the timer should be in the processed_timers list. remove it from there.
    if entry in self.processed_timers:
        self.processed_timers.remove(entry)
    self.saveTimer()
def shutdown(self):
    """Persist the current timer list to disk (see saveTimer)."""
    self.saveTimer()
| gpl-2.0 | 4,261,229,192,709,240,300 | 37.010204 | 225 | 0.694013 | false |
bentley-historical-library/migration-tools | ead/agents/update_corpnames.py | 2 | 1639 | '''
first things first, import what we need'''
# lxml is a powerful xml document parser, you'll need to download it
from lxml import etree
# os provides a portable way of using operating system dependent functionality
import os
from os.path import join
# tqdm adds a progress meter to loops, you'll need to install it
from tqdm import *
'''
setup'''
# where are the eads?
ead_folder = 'C:/Users/eckardm/GitHub/vandura/Real_Masters_all'
# ead_folder = 'C:/Users/Public/Documents/Real_Masters_all'
'''
now add any sources in the dictionary to names that are missing them'''
# go through each of the files in the ead folder
for filename in tqdm(os.listdir(ead_folder)):
    # but only do the ones that are actually eads (we can tell because they are xml files)
    if filename.endswith('.xml'):
        # create an etree (part of lxml) tree that we can parse
        ead_tree = etree.parse(join(ead_folder, filename))
        # go through each of the elements at that xpath
        for name in ead_tree.xpath('//corpname'):
            # only consider corpnames inside <controlaccess> or <origination>
            if 'controlaccess' in ead_tree.getpath(name) or 'origination' in ead_tree.getpath(name):
                # tag names that have no source yet; '--' appears to mark
                # subdivided headings, which are left untouched.
                # NOTE(review): name.text can be None for empty elements,
                # which would make the "'--' not in name.text" test raise
                # a TypeError -- confirm against the data.
                if not name.get('source') and '--' not in name.text:
                    name.attrib['source'] = 'lcnaf'
        # rewrite the (possibly modified) EAD file in place
        with open(os.path.join(ead_folder, filename), mode="w") as see_i_am_making_all_things_new:
            # and write it
            see_i_am_making_all_things_new.write(etree.tostring(ead_tree, xml_declaration=True, encoding='utf-8', pretty_print=True))
dubourg/openturns | python/test/t_MarginalTransformationGradient_std.py | 2 | 3551 | #! /usr/bin/env python
from __future__ import print_function
from openturns import *
from math import *
TESTPREAMBLE()
# Fix the RNG seed so the printed reference output is reproducible.
RandomGenerator.SetSeed(0)
try:
    # Source marginals: the transformation maps from these distributions.
    coll1 = DistributionCollection(0)
    coll1.add(Normal(1.0, 2.5))
    coll1.add(Gamma(1.5, 3.0))
    # Evaluation points: component-wise 25% and 75% quantiles of coll1.
    pointLow = NumericalPoint(0)
    pointLow.add(coll1[0].computeQuantile(0.25)[0])
    pointLow.add(coll1[1].computeQuantile(0.25)[0])
    pointHigh = NumericalPoint(0)
    pointHigh.add(coll1[0].computeQuantile(0.75)[0])
    pointHigh.add(coll1[1].computeQuantile(0.75)[0])
    # Target marginals for the general (coll1 -> coll2) transformation.
    coll2 = DistributionCollection(0)
    coll2.add(Gamma(2.5, 2.0))
    coll2.add(Normal(3.0, 1.5))
    # First, check the old constructor
    evaluation = MarginalTransformationEvaluation(coll1)
    transformation = MarginalTransformationGradient(evaluation)
    print("transformation=", repr(transformation))
    # Analytic gradient vs. centered finite differences at both points.
    print("transformation.gradient(", repr(pointLow), ")=",
          repr(transformation.gradient(pointLow)))
    print("finite difference gradient(", repr(pointLow), ")=", repr(
        CenteredFiniteDifferenceGradient(1.0e-5, evaluation).gradient(pointLow)))
    print("transformation.gradient(", repr(pointHigh), ")=",
          repr(transformation.gradient(pointHigh)))
    print("finite difference gradient(", repr(pointHigh), ")=", repr(
        CenteredFiniteDifferenceGradient(1.0e-5, evaluation).gradient(pointHigh)))
    print("input dimension=", transformation.getInputDimension())
    print("output dimension=", transformation.getOutputDimension())
    # Second, check the constructor for old inverse transformation
    evaluation = MarginalTransformationEvaluation(
        coll1, MarginalTransformationEvaluation.TO)
    transformation = MarginalTransformationGradient(evaluation)
    print("transformation=", repr(transformation))
    # For the TO direction the inputs live in the unit hypercube.
    uLow = NumericalPoint(coll1.getSize(), 0.25)
    uHigh = NumericalPoint(coll1.getSize(), 0.75)
    print("transformation.gradient(", repr(uLow), ")=",
          repr(transformation.gradient(uLow)))
    print("finite difference gradient(", repr(uLow), ")=", repr(
        CenteredFiniteDifferenceGradient(1.0e-5, evaluation).gradient(uLow)))
    print("transformation.gradient(", repr(uHigh), ")=",
          repr(transformation.gradient(uHigh)))
    print("finite difference gradient(", repr(uHigh), ")=", repr(
        CenteredFiniteDifferenceGradient(1.0e-5, evaluation).gradient(uHigh)))
    print("input dimension=", transformation.getInputDimension())
    print("output dimension=", transformation.getOutputDimension())
    # Third, check the constructor for the new transformation
    evaluation = MarginalTransformationEvaluation(coll1, coll2)
    transformation = MarginalTransformationGradient(evaluation)
    print("transformation=", repr(transformation))
    print("transformation.gradient(", repr(pointLow), ")=",
          repr(transformation.gradient(pointLow)))
    print("finite difference gradient(", repr(pointLow), ")=", repr(
        CenteredFiniteDifferenceGradient(1.0e-5, evaluation).gradient(pointLow)))
    print("transformation.gradient(", repr(pointHigh), ")=",
          repr(transformation.gradient(pointHigh)))
    print("finite difference gradient(", repr(pointHigh), ")=", repr(
        CenteredFiniteDifferenceGradient(1.0e-5, evaluation).gradient(pointHigh)))
    print("input dimension=", transformation.getInputDimension())
    print("output dimension=", transformation.getOutputDimension())
# NOTE(review): a bare "except:" also swallows SystemExit and
# KeyboardInterrupt; "except Exception:" would be safer.
except:
    import sys
    print("t_MarginalTransformationGradient_std.py",
          sys.exc_info()[0], sys.exc_info()[1])
| gpl-3.0 | 658,573,793,307,330,400 | 46.346667 | 82 | 0.710786 | false |
Architektor/PySnip | venv/lib/python2.7/site-packages/twisted/conch/test/test_window.py | 50 | 2115 |
"""
Tests for the insults windowing module, L{twisted.conch.insults.window}.
"""
from twisted.trial.unittest import TestCase
from twisted.conch.insults.window import TopWindow, ScrolledArea, TextOutput
class TopWindowTests(TestCase):
    """
    Tests for L{TopWindow}, the root window container class.
    """
    def test_paintScheduling(self):
        """
        Verify that L{TopWindow.repaint} schedules an actual paint to occur
        using the scheduling object passed to its initializer.
        """
        performedPaints = []
        scheduledCalls = []
        window = TopWindow(lambda: performedPaints.append(None),
                           scheduledCalls.append)

        # Creating the window must not paint or schedule anything by itself.
        self.assertEqual(performedPaints, [])
        self.assertEqual(scheduledCalls, [])

        # The first repaint request schedules (but does not perform) a paint.
        window.repaint()
        self.assertEqual(performedPaints, [])
        self.assertEqual(len(scheduledCalls), 1)

        # Further requests while one is pending must not schedule again.
        window.repaint()
        self.assertEqual(performedPaints, [])
        self.assertEqual(len(scheduledCalls), 1)

        # Running the scheduled call performs exactly one paint.
        scheduledCalls.pop()()
        self.assertEqual(len(performedPaints), 1)
        self.assertEqual(scheduledCalls, [])

        # Once the pending paint completed, new requests schedule again.
        window.repaint()
        self.assertEqual(len(performedPaints), 1)
        self.assertEqual(len(scheduledCalls), 1)
class ScrolledAreaTests(TestCase):
    """
    Tests for L{ScrolledArea}, a widget which creates a viewport containing
    another widget and can reposition that viewport using scrollbars.
    """
    def test_parent(self):
        """
        The parent of the widget passed to L{ScrolledArea} is set to a new
        L{Viewport} created by the L{ScrolledArea} which itself has the
        L{ScrolledArea} instance as its parent.
        """
        inner = TextOutput()
        container = ScrolledArea(inner)
        # The wrapped widget is reparented into the container's viewport...
        self.assertIs(inner.parent, container._viewport)
        # ...and that viewport's parent is the ScrolledArea itself.
        self.assertIs(container._viewport.parent, container)
| gpl-3.0 | -4,307,036,491,197,730,300 | 30.567164 | 79 | 0.643499 | false |
Mk555/Packet-Sniffer | main.py | 1 | 3549 | #!/usr/bin/python
#####################################
# Libs
#####################################
from scapy.all import *
import getpass
import argparse
import os
import sys
import time
import sqlite3 as sqlite
#####################################
# Arguments
#####################################
# Get the current username
user = getpass.getuser()
# Check if the user is root
if user != "root":
    # Sniffing raw packets requires root privileges.
    print "You need to be root to use me."
    sys.exit(1);
parser = argparse.ArgumentParser()
# NOTE(review): type=bool makes any non-empty string truthy
# ("--verbose False" is True); action='store_true' would behave as
# users expect.
parser.add_argument('--verbose', type=bool, default=False)
args = parser.parse_args()
#####################################
# Variables
#####################################
## Create a Packet Count var
cptPck = 0
# Placeholder for a shared DB connection (each helper opens its own).
connexion = None
####################################
# CLASS
####################################
class bcolors:
    '''ANSI escape sequences used to colour terminal output.'''
    HEADER = '\033[95m'
    OKBLUE = '\033[94m'
    OKGREEN = '\033[92m'
    WARNING = '\033[93m'
    FAIL = '\033[91m'
    ENDC = '\033[0m'  # reset to the default style
    BOLD = '\033[1m'
    UNDERLINE = '\033[4m'
#####################################
# Functions
#####################################
def initConn():
    """Open (and create if missing) the SQLite database and print its version.

    Connects to ``servers.db`` in the current directory; SQLite creates the
    file automatically when it does not exist.  The dead pre-initializations
    of ``dbVersion`` and ``cursor`` from the original were removed.
    """
    connexion = sqlite.connect("servers.db")
    with connexion:
        # Init the cursor
        cursor = connexion.cursor()
        # Simple round-trip query to prove the connection works.
        cursor.execute('SELECT SQLITE_VERSION()')
        dbVersion = cursor.fetchone()
        # fetchone() returns a 1-tuple; "%s" % one_tuple formats its element.
        print("Connected to the database. (v %s)" % dbVersion)
# End function
#####################################
def checkIpPresence( ipToCheck ):
    """Return True if ``ipToCheck`` is already stored in the Servers table.

    Security fix: the previous version built the SQL statement by string
    concatenation with attacker-controlled packet data (destination IPs),
    which is an SQL-injection vector; this version uses a parameterized
    query instead.
    """
    connexion = sqlite.connect("servers.db")
    with connexion:
        cursor = connexion.cursor()
        # Parameter binding instead of string concatenation.
        cursor.execute("SELECT count(*) FROM Servers WHERE ip = ?", (ipToCheck,))
        nbOccurences = cursor.fetchone()[0]
    return nbOccurences > 0
# End function
#####################################
def addIpToDb( ipToAdd ):
    """Insert ``ipToAdd`` into the Servers table (second column left empty).

    Security fix: uses a parameterized query instead of concatenating the
    attacker-controlled IP string into the SQL statement.
    """
    connexion = sqlite.connect("servers.db")
    with connexion:
        cursor = connexion.cursor()
        cursor.execute("INSERT INTO servers VALUES (?, ?)", (ipToAdd, ''))
# End function
#####################################
def addContentPackToDb( contentPack ):
    """Store the repr() of a captured packet in the packets table.

    Security fix: packet payloads are untrusted; the old hand-built SQL
    string broke (or could be injected into) whenever the repr contained
    quotes.  A parameterized query handles any payload safely.
    """
    connexion = sqlite.connect("servers.db")
    with connexion:
        cursor = connexion.cursor()
        # repr() gives a printable, escaped form of the raw packet.
        stringPack = '%r' % contentPack
        cursor.execute("INSERT INTO packets (packContent) VALUES (?)", (stringPack,))
# End function
#####################################
# Capture packets
def captureStandart( packet ):
    '''Scapy callback: count the packet, record its destination IP (once)
    and store the packet's repr in the database.'''
    global cptPck
    cptPck += 1
    contentPack = ""  # NOTE(review): unused leftover variable
    # DEBUG
    #print(bcolors.HEADER + "\nPacket #" + str(cptPck) + "" + bcolors.ENDC)
    #print( bcolors.OKGREEN + "src : " + packet[0][1].src + bcolors.ENDC + bcolors.OKBLUE + "\ndst : " + packet[0][1].dst + "\n" + bcolors.ENDC)
    if( checkIpPresence( packet[0][1].dst )):
        cptPck = cptPck  # no-op: destination already known
    else:
        addIpToDb( packet[0][1].dst )
    try:
        addContentPackToDb( packet[0][1] )
    except sqlite.Error, e:
        # Fall back to storing an empty payload when the repr breaks the DB.
        addContentPackToDb( "" )
    if args.verbose:
        # Live counter on a single line (carriage return, no newline).
        sys.stdout.write( "\r" + str(cptPck) + " Packets sniffed." )
        sys.stdout.flush()
# End function
#####################################
#####################################
# Code
#####################################
# Make sure the database connection works before capturing.
initConn()
# Blocks until interrupted, calling captureStandart for every TCP packet.
sniff(filter="tcp", prn=captureStandart)
print("END !! \n")
| gpl-2.0 | -1,599,799,470,134,683,600 | 16.745 | 148 | 0.524655 | false |
rubendibattista/ovh-python-email-manager | ovhem/em.py | 1 | 5365 | import ovh
import ConfigParser
import string
import warnings
from random import choice
from prettytable import PrettyTable
class EmailManager :
''' This class uses the ovh Python API and provide some
functionalities to interact with email accounts
Arguments:
niceoutput Optional. If True (default), prints out better looking tables
Properties:
client: ovh.Client() object
Methods:
list_emails List all the domain-associated email accounts
add_emails Add the emails from the dictionary given as argument
remove_emails Remove the emails listed in the dictionary given as argument
'''
client = ovh.Client()
parser = ConfigParser.SafeConfigParser()
parser.read('ovh.conf')
DOMAIN = parser.get('ovh-eu', 'domain')
def __init__(self,niceoutput = True):
''' Constructor. Checks for token validity and if not present or invalid prompt the user
for getting it '''
self.niceoutput = niceoutput
if not(self.__check_token()):
self.__get_token()
def __check_token(self):
print 'Checking Token...'
try:
self.client.get('/me/api/credential')
return True
except ovh.APIError as e:
print "API Error ({0})\n".format(e)
return False
def __get_token(self):
access_rules = [
{'method': 'GET', 'path': '/me/api/credential'},
{'method': 'GET', 'path': '/email/domain*'},
{'method': 'POST', 'path': '/email/domain*'},
{'method': 'PUT', 'path': '/email/domain*'},
{'method': 'DELETE', 'path': '/email/domain*'}
]
validation = self.client.request_consumerkey(access_rules)
print "To access OVH Api you must validate. Please visit the following\
link:\n %s" % validation['validationUrl']
raw_input('Press Enter when done...')
self.parser.set('ovh-eu', 'consumer_key', validation['consumerKey'])
with open('ovh.conf','wb') as configfile:
self.parser.write(configfile)
def __get_emails(self):
accounts=self.client.get('/email/domain/{0}/account'.format(self.DOMAIN))
accountData = []
for account in accounts:
accountData.append(self.client.get('/email/domain/{0}/account/{1}'.format(self.DOMAIN,\
account)))
return accountData
def list_emails(self):
accounts=self.__get_emails()
if not(self.niceoutput):
for account in accounts:
print account['accountName']+'@'+account['domain']
else:
tab = PrettyTable(["Account Name","Description","Size","Blocked"])
tab.align["City name"] = "c"
for account in accounts:
tab.add_row([
account['accountName']+'@'+account['domain'],
account['description'],
account['size'],
account['isBlocked']
])
print tab
def add_emails(self,emails):
print 'Adding emails...'
for i,email in enumerate(emails):
# If password is not set
if not(email['password']):
password = self.__mkpassword()
emails[i]['password'] = password
email['password'] = password
self.__add_email(email['address'], email['password'], email['description'])
return emails
def remove_emails(self,emails):
print 'Removing emails...'
for email in emails:
self.__remove_email(email['address'])
def __add_email(self,email,password,desc=None):
#Checking if email already present
accounts = self.__get_emails()
if email in [account['accountName']+'@'+account['domain'] for account in accounts]:
warnings.warn('{email} is already there!'.format(email=email),RuntimeWarning)
else:
self.client.post('/email/domain/{0}/account'.format(self.DOMAIN),
accountName=email.split('@')[0],
description = desc,
password = password,
size = 5E9
)
print email+' added!'
def __remove_email(self,email):
#Checking if email is present
accounts = self.__get_emails()
if not(email in [account['accountName']+'@'+account['domain'] for account in accounts]):
warnings.warn('{email} cannot be deleted: not present!'.format(email=email),\
RuntimeWarning)
else:
self.client.delete('/email/domain/{0}/account/{1}'.format(self.DOMAIN,email.split('@')[0]))
print email+' removed!'
def __mkpassword(self,size=18):
chars = string.ascii_letters+string.digits
return ''.join(choice(chars) for _ in range(size))
| bsd-2-clause | -799,074,262,545,124,100 | 37.597122 | 103 | 0.522274 | false |
discourse-lab/pocores | src/pocores/preferences.py | 1 | 1856 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from discoursegraphs.readwrite.conll import traverse_dependencies_up
def check_parallelism(pocores, antecedent_id, anaphora_id,
                      deprel_attr=None):
    """
    Check whether an antecedent and an anaphora share the same (relevant)
    syntactic role.

    Parameters
    ----------
    pocores : Pocores
        an instance of the Pocores class
    antecedent_id : str
        the node ID of the antecedent
    anaphora_id : str
        the node ID of the anaphora
    deprel_attr : str or None
        attribute under which the dependency relation is stored; defaults
        to the document's ``deprel_attr``

    Returns
    -------
    parallel : bool
        True iff both words carry the same role and that role is one of
        "SB", "OA", "DA" (presumably subject / accusative object / dative
        object -- confirm against the tagset).
    """
    attr = pocores.document.deprel_attr if deprel_attr is None else deprel_attr
    antecedent_role = pocores.node_attrs(antecedent_id)[attr]
    anaphora_role = pocores.node_attrs(anaphora_id)[attr]
    return antecedent_role == anaphora_role and antecedent_role in ("SB", "OA", "DA")
def check_role(pocores, antecedent_id, role, deprel_attr=None):
    """
    Check whether a given word carries a certain syntactic role.

    Parameters
    ----------
    pocores : Pocores
        an instance of the Pocores class
    antecedent_id : str
        the node ID of the antecedent
    role : str
        the syntactic role to test for
    deprel_attr : str or None
        attribute under which the dependency relation is stored; defaults
        to the document's ``deprel_attr``

    Returns
    -------
    bool
        True iff the word's dependency relation equals ``role``.
    """
    if deprel_attr is None:
        deprel_attr = pocores.document.deprel_attr
    return pocores.node_attrs(antecedent_id)[deprel_attr] == role
def get_chain_length(pocores, antecedent_id):
    """
    Return how many mentions of the given word's discourse referent are
    already known (i.e. the length of its coreference chain).
    """
    # mentions maps a word to the first mention of its referent; entities
    # maps that first mention to the full chain.
    return len(pocores.entities[pocores.mentions[antecedent_id]])
def get_depth(pocores, token_id):
    """
    Return the number of dependency edges between the given word and the
    root of its sentence.
    """
    path_to_root = traverse_dependencies_up(
        pocores.document, token_id, node_attr=pocores.document.lemma_attr)
    return len(list(path_to_root))
| agpl-3.0 | 76,191,262,284,637,330 | 26.294118 | 85 | 0.633621 | false |
acabey/flash-dump-tool | lib/patcher.py | 1 | 3173 | #!/usr/bin/env python3
import struct
import sys
import logging
logging.basicConfig()
LOGGER = logging.getLogger('patcher')
def patch(originaldata, patchset):
    """
    Apply KXAM patches to a target buffer and return the patched bytes.

    Patch stream format (big-endian), repeated until a 0xFFFFFFFF
    sentinel offset:
        4 byte offset into the target
        4 byte count of 32-bit words in the payload
        4 * count bytes of patch payload

    Two bugs in the previous version are fixed here:
      * the sentinel was checked one record too late, so 0xFFFFFFFF was
        mis-parsed as a patch offset before the loop stopped;
      * a slice of ``count`` bytes was replaced with a single bytes
        object instead of writing the ``4 * count`` payload bytes.
    The input buffer is no longer mutated in place.

    :param originaldata: target contents (bytes, bytearray or list of ints)
    :param patchset: raw KXAM patch stream
    :return: patched contents as bytes
    """
    log = logging.getLogger('patcher')
    patched = bytearray(originaldata)
    pos = 0
    while True:
        offsetbytes = bytes(patchset[pos:pos + 4])
        # Stop on the sentinel, and also on a truncated stream instead of
        # unpacking garbage.
        if len(offsetbytes) < 4 or offsetbytes == b'\xFF\xFF\xFF\xFF':
            break
        patchoffset = struct.unpack('>I', offsetbytes)[0]
        log.debug('patch offset: %s', hex(patchoffset))
        pos += 4
        patchcount = struct.unpack('>I', bytes(patchset[pos:pos + 4]))[0]
        log.debug('patch count : %s', hex(patchcount))
        log.debug('payload size: %s', hex(patchcount * 4))
        pos += 4
        payload = bytes(patchset[pos:pos + 4 * patchcount])
        log.debug('payload     : %s', payload)
        pos += 4 * patchcount
        # Overwrite exactly 4 * count bytes in place.
        patched[patchoffset:patchoffset + 4 * patchcount] = payload
    return bytes(patched)
"""
There are two implementations of the actual patching algorithm here because writing directly to files is much lighter on MemoryStream
There is no reason to completely load the files into RAM before modifying when they can be modified InitializeComponent
"""
def main(argv):
    """Apply a KXAM patch file to a target binary in place.

    :param argv: sys.argv-style list: [program, target, patchfile]

    Fixes over the previous version:
      * argv bounds were off by one (``len(argv) > 0`` guarded ``argv[1]``),
        so running without arguments raised IndexError;
      * after printing the usage message the function kept going with
        ``None`` paths -- it now returns;
      * the local variable ``patch`` shadowed the module-level patch()
        function;
      * the read loop now stops on EOF as well as on the sentinel instead
        of relying on the always-true ``patchfile.readable()``.
    """
    # argv[0] is the program name, so real arguments start at index 1.
    target = argv[1] if len(argv) > 1 else None
    patchpath = argv[2] if len(argv) > 2 else None
    if not (target and patchpath):
        print('Usage: applypatch.py target.bin patches.kxam')
        return
    # Patch format (big endian):
    #   4 byte offset
    #   4 byte count
    #   4 byte * count patch payload
    # terminated by a 0xFFFFFFFF sentinel offset.
    with open(patchpath, 'rb') as patchfile:
        with open(target, 'r+b') as targetfile:
            while True:
                patchoffsetbytes = patchfile.read(4)
                if len(patchoffsetbytes) < 4 or patchoffsetbytes == b'\xFF\xFF\xFF\xFF':
                    break
                patchoffset = struct.unpack('>I', patchoffsetbytes)[0]
                patchcount = struct.unpack('>I', patchfile.read(4))[0]
                patchpayloadbytes = patchfile.read(4 * patchcount)
                print('Writing patch of length ' + str(hex(patchcount)) + ' to offset ' + str(hex(patchoffset)))
                targetfile.seek(patchoffset, 0)
                targetfile.write(patchpayloadbytes)
    print('Successfully wrote patches')
if __name__ == '__main__':
    # Run as a script: forward the command-line arguments.
    main(sys.argv)
| gpl-3.0 | -7,047,104,009,182,040,000 | 29.219048 | 133 | 0.626536 | false |
Swiftea/Swiftea-Crawler | crawler/crawling/searches.py | 2 | 2692 | #!/usr/bin/env python3
"""Define several functions SiteInformations."""
from urllib.parse import urlparse
from re import compile as compile_regex
from crawler.swiftea_bot.data import BAD_EXTENTIONS, DIR_STATS
regex = compile_regex(r'(\w+|\d+)') # used in site_informations.py
def clean_text(text):
    """Collapse every run of whitespace (tabs, newlines, blanks) into a
    single space and strip the ends.

    :param text: text to clean_text
    :type text: str
    :return: cleaned text
    """
    words = text.split()
    return ' '.join(words)
def get_base_url(url):
    """Get the base url (scheme + netloc) of an url using urlparse.

    :param url: url
    :type url: str
    :return: base url of given url
    """
    parts = urlparse(url)
    return '{}://{}'.format(parts.scheme, parts.netloc)
def is_homepage(url):
    """Check if url is the homepage.

    A homepage has exactly two '/' (the scheme separator only) and two '.'
    when the host starts with 'www.', otherwise one '.'.

    Bug fix: the old expression ``a and (b) or c`` parsed, by operator
    precedence, as ``(a and b) or c`` -- so any URL containing a single
    dot (e.g. 'http://a.com/b') was wrongly reported as a homepage.

    :param url: url to check
    :type url: str
    :return: True or False
    """
    if url.count('/') != 2:
        return False
    if '//www.' in url:
        return url.count('.') == 2
    return url.count('.') == 1
def clean_link(url, base_url=None):
    """Clean a link.

    Rebuild url with base url, pass mailto and javascript,
    remove anchors, pass if more than 5 queries, pass if more than 255 chars,
    remove /index.xxx, remove last /.

    :param url: links to clean
    :type url: str
    :param base_url: base url for rebuilding, can be None if the link is
        already absolute
    :return: cleaned link, or None when the link must be discarded
    """
    new = url.strip()  # Link to add in new list of links
    # Discard unusable links: bad file extensions, bare '/', anchors,
    # mailto:, javascript: and empty strings.
    if (not new.endswith(BAD_EXTENTIONS) and
        new != '/' and
        new != '#' and
        not new.startswith('mailto:') and
        'javascript:' not in new and
        new != ''):
        # Make relative links absolute using base_url.
        if not new.startswith('http') and not new.startswith('www'):
            if new.startswith('//'):
                # Protocol-relative link: assume http.
                new = 'http:' + new
            elif new.startswith('/'):
                new = base_url + new
            elif new.startswith(':'):
                new = 'http' + new
            else:
                new = base_url + '/' + new
        # Keep scheme://netloc/path only (drops fragments and params).
        infos_url = urlparse(new)
        new = infos_url.scheme + '://' + infos_url.netloc + infos_url.path
        if new.endswith('/'):
            new = new[:-1]
        # Strip a trailing /index.<ext> component.
        nb_index = new.find('/index.')
        if nb_index != -1:
            new = new[:nb_index]
        # Re-append the query string (it was dropped by the rebuild above).
        if infos_url.query != '':
            new += '?' + infos_url.query
        # Final sanity limits: minimal length, at most 5 query separators,
        # at most 255 characters.
        if len(new) > 8 and new.count('&') < 5 and len(new) <= 255:
            return new
        return None
    return None
def capitalize(text):
    """Upper-case only the first letter of the given text (the rest is
    left untouched, unlike str.capitalize).

    :param text: text
    :type text: str
    :return: text
    """
    if not text:
        return ''
    return text[0].upper() + text[1:]
def stats_links(stats):
    """Append the number of links found in a webpage to the stats file.

    :param stats: number of links in a webpage
    :type stats: int
    """
    with open(DIR_STATS + 'stat_links', 'a') as stats_file:
        # One count per line.
        stats_file.write('{}\n'.format(stats))
| gpl-3.0 | -6,023,395,482,706,254,000 | 21.813559 | 96 | 0.643388 | false |
alishakiba/kaggle_diabetic | blend.py | 4 | 7140 | """Blend features extracted with Conv Nets and make predictions/submissions."""
from __future__ import division, print_function
from datetime import datetime
from glob import glob
import click
import numpy as np
import pandas as pd
import theano
from lasagne import init
from lasagne.updates import adam
from lasagne.nonlinearities import rectify
from lasagne.layers import DenseLayer, InputLayer, FeaturePoolLayer
from nolearn.lasagne import BatchIterator
from sklearn.metrics import confusion_matrix
from sklearn.preprocessing import StandardScaler
import yaml
import data
import nn
import util
np.random.seed(9)  # fixed seed for reproducible sampling

# Learning-rate end points.  END_LR is not referenced in this file chunk;
# the actual per-epoch rates come from SCHEDULE below.
START_LR = 0.0005
END_LR = START_LR * 0.001
# Regularization strengths passed to nn.get_objective below.
L1 = 2e-5
L2 = 0.005
# Training length; PATIENCE and POWER are defined here but not referenced
# in this file chunk.
N_ITER = 100
PATIENCE = 20
POWER = 0.5
# Hidden layer sizes of the blend network (halved by the maxout pooling).
N_HIDDEN_1 = 32
N_HIDDEN_2 = 32
BATCH_SIZE = 128
# Epoch -> learning rate; the value 'stop' terminates training.
SCHEDULE = {
    60: START_LR / 10.0,
    80: START_LR / 100.0,
    90: START_LR / 1000.0,
    N_ITER: 'stop'
}
class BlendNet(nn.Net):
    """nn.Net variant whose train/test split is fixed to the project split."""

    def set_split(self, files, labels):
        """Override train/test split method to use our default split."""
        def split(X, y, eval_size):
            # Closure over the files/labels captured above: the split
            # indices are derived from them, not from X/y.
            if eval_size:
                tr, te = data.split_indices(files, labels, eval_size)
                return X[tr], X[te], y[tr], y[te]
            else:
                # No eval set requested: return empty test slices.
                return X, X[len(X):], y, y[len(y):]
        setattr(self, 'train_test_split', split)
class ResampleIterator(BatchIterator):
    """Batch iterator that randomly mixes three batch-sampling strategies:
    class-balanced resampling, uniform random sampling and plain
    sequential slices.
    """

    def __init__(self, batch_size, resample_prob=0.2, shuffle_prob=0.5):
        # resample_prob: chance of a class-balanced batch;
        # shuffle_prob - resample_prob: chance of a uniformly random batch;
        # otherwise a sequential slice is used.
        self.resample_prob = resample_prob
        self.shuffle_prob = shuffle_prob
        super(ResampleIterator, self).__init__(batch_size)

    def __iter__(self):
        n_samples = self.X.shape[0]
        bs = self.batch_size
        # Index pool that oversamples minority classes (built once per pass).
        indices = data.balance_per_class_indices(self.y.ravel())
        # Ceil-divide so the final partial batch is included.
        for i in range((n_samples + bs - 1) // bs):
            r = np.random.rand()
            if r < self.resample_prob:
                sl = indices[np.random.randint(0, n_samples, size=bs)]
            elif r < self.shuffle_prob:
                sl = np.random.randint(0, n_samples, size=bs)
            else:
                sl = slice(i * bs, (i + 1) * bs)
            Xb = self.X[sl]
            if self.y is not None:
                yb = self.y[sl]
            else:
                yb = None
            yield self.transform(Xb, yb)
def get_estimator(n_features, files, labels, eval_size=0.1):
    """Build the small maxout MLP used to blend extracted CNN features.

    :param n_features: dimensionality of the input feature vectors
    :param files: image files (only used to derive the train/eval split)
    :param labels: regression targets
    :param eval_size: fraction held out for validation (0.0 disables it)
    :return: an initialized (unfitted) BlendNet
    """
    layers = [
        (InputLayer, {'shape': (None, n_features)}),
        (DenseLayer, {'num_units': N_HIDDEN_1, 'nonlinearity': rectify,
                      'W': init.Orthogonal('relu'),
                      'b': init.Constant(0.01)}),
        # pool_size=2 turns the preceding ReLU layer into maxout units.
        (FeaturePoolLayer, {'pool_size': 2}),
        (DenseLayer, {'num_units': N_HIDDEN_2, 'nonlinearity': rectify,
                      'W': init.Orthogonal('relu'),
                      'b': init.Constant(0.01)}),
        (FeaturePoolLayer, {'pool_size': 2}),
        # Single linear output unit: the task is treated as regression.
        (DenseLayer, {'num_units': 1, 'nonlinearity': None}),
    ]
    args = dict(
        update=adam,
        # Shared variable so nn.Schedule can change the rate per epoch.
        update_learning_rate=theano.shared(util.float32(START_LR)),
        batch_iterator_train=ResampleIterator(BATCH_SIZE),
        batch_iterator_test=BatchIterator(BATCH_SIZE),
        objective=nn.get_objective(l1=L1, l2=L2),
        eval_size=eval_size,
        # kappa is only meaningful when an eval set exists.
        custom_score=('kappa', util.kappa) if eval_size > 0.0 else None,
        on_epoch_finished=[
            nn.Schedule('update_learning_rate', SCHEDULE),
        ],
        regression=True,
        max_epochs=N_ITER,
        verbose=1,
    )
    net = BlendNet(layers, **args)
    net.set_split(files, labels)
    return net
@click.command()
@click.option('--cnf', default='configs/c_512_4x4_32.py', show_default=True,
              help="Path or name of configuration module.")
@click.option('--predict', is_flag=True, default=False, show_default=True,
              help="Make predictions on test set features after training.")
@click.option('--per_patient', is_flag=True, default=False, show_default=True,
              help="Blend features of both patient eyes.")
@click.option('--features_file', default=None, show_default=True,
              help="Read features from specified file.")
@click.option('--n_iter', default=1, show_default=True,
              help="Number of times to fit and average.")
@click.option('--blend_cnf', default='blend.yml', show_default=True,
              help="Blending configuration file.")
@click.option('--test_dir', default=None, show_default=True,
              help="Override directory with test set images.")
def fit(cnf, predict, per_patient, features_file, n_iter, blend_cnf, test_dir):
    """Fit blending network(s) on extracted features; report validation
    kappa or write a submission file when --predict is given."""
    config = util.load_module(cnf).config
    image_files = data.get_image_files(config.get('train_dir'))
    names = data.get_names(image_files)
    labels = data.get_labels(names).astype(np.float32)[:, np.newaxis]
    if features_file is not None:
        runs = {'run': [features_file]}
    else:
        # NOTE(review): yaml.load without an explicit Loader is
        # unsafe/deprecated, and the file handle is never closed;
        # yaml.safe_load with a "with open(...)" would be preferable.
        runs = data.parse_blend_config(yaml.load(open(blend_cnf)))
    # One scaler per feature run, fit on that run's training features.
    scalers = {run: StandardScaler() for run in runs}
    tr, te = data.split_indices(image_files, labels)
    y_preds = []
    for i in range(n_iter):
        print("iteration {} / {}".format(i + 1, n_iter))
        for run, files in runs.items():
            print("fitting features for run {}".format(run))
            X = data.load_features(files)
            X = scalers[run].fit_transform(X)
            X = data.per_patient_reshape(X) if per_patient else X
            # Train on the full set (eval_size=0.0) when predicting.
            est = get_estimator(X.shape[1], image_files, labels,
                                eval_size=0.0 if predict else 0.1)
            est.fit(X, labels)
            if not predict:
                y_pred = est.predict(X[te]).ravel()
                y_preds.append(y_pred)
                # Average all fits so far, then round/clip to valid levels.
                y_pred = np.mean(y_preds, axis=0)
                y_pred = np.clip(np.round(y_pred).astype(int),
                                 np.min(labels), np.max(labels))
                print("kappa after run {}, iter {}: {}".format(
                    run, i, util.kappa(labels[te], y_pred)))
                print("confusion matrix")
                print(confusion_matrix(labels[te], y_pred))
            else:
                # Score the test-set features with the freshly fit model.
                X = data.load_features(files, test=True)
                X = scalers[run].transform(X)
                X = data.per_patient_reshape(X) if per_patient else X
                y_pred = est.predict(X).ravel()
                y_preds.append(y_pred)
    if predict:
        # Average the per-run/per-iteration predictions and write the
        # Kaggle submission CSV (columns: image, level).
        y_pred = np.mean(y_preds, axis=0)
        y_pred = np.clip(np.round(y_pred),
                         np.min(labels), np.max(labels)).astype(int)
        submission_filename = util.get_submission_filename()
        image_files = data.get_image_files(test_dir or config.get('test_dir'))
        names = data.get_names(image_files)
        image_column = pd.Series(names, name='image')
        level_column = pd.Series(y_pred, name='level')
        predictions = pd.concat([image_column, level_column], axis=1)
        print("tail of predictions file")
        print(predictions.tail())
        predictions.to_csv(submission_filename, index=False)
        print("saved predictions to {}".format(submission_filename))
if __name__ == '__main__':
    # click parses the command-line arguments when invoked as a script.
    fit()
| mit | 3,866,865,463,629,590,500 | 35.615385 | 79 | 0.584594 | false |
mne-tools/mne-tools.github.io | 0.21/_downloads/e9c2029d538a27854bee8b41e5e8eca3/plot_limo_data.py | 3 | 13302 | """
.. _ex-limo-data:
=============================================================
Single trial linear regression analysis with the LIMO dataset
=============================================================
Here we explore the structure of the data contained in the
`LIMO dataset`_.
This example replicates and extends some of the main analysis
and tools integrated in `LIMO MEEG`_, a MATLAB toolbox originally designed
to interface with EEGLAB_.
In summary, the example:
- Fetches epoched data files for a single subject of the LIMO dataset [1]_.
If the LIMO files are not found on disk, the
fetcher :func:`mne.datasets.limo.load_data()` will automatically download
the files from a remote repository.
- During import, information about the data (i.e., sampling rate, number of
epochs per condition, number and name of EEG channels per subject, etc.) is
extracted from the LIMO :file:`.mat` files stored on disk and added to the
epochs structure as metadata.
- Fits linear models on the single subject's data and visualizes inferential
measures to evaluate the significance of the estimated effects.
References
----------
.. [1] Guillaume, Rousselet. (2016). LIMO EEG Dataset, [dataset].
University of Edinburgh, Centre for Clinical Brain Sciences.
https://doi.org/10.7488/ds/1556.
.. [2] Rousselet, G. A., Gaspar, C. M., Pernet, C. R., Husk, J. S.,
Bennett, P. J., & Sekuler, A. B. (2010). Healthy aging delays scalp EEG
sensitivity to noise in a face discrimination task.
Frontiers in psychology, 1, 19. https://doi.org/10.3389/fpsyg.2010.00019
.. [3] Rousselet, G. A., Pernet, C. R., Bennett, P. J., & Sekuler, A. B.
(2008). Parametric study of EEG sensitivity to phase noise during face
processing. BMC neuroscience, 9(1), 98.
https://doi.org/10.1186/1471-2202-9-98
.. _LIMO dataset: https://datashare.is.ed.ac.uk/handle/10283/2189?show=full
.. _LIMO MEEG: https://github.com/LIMO-EEG-Toolbox
.. _EEGLAB: https://sccn.ucsd.edu/eeglab/index.php
.. _Fig 1: https://bmcneurosci.biomedcentral.com/articles/10.1186/1471-2202-9-98/figures/1
.. _least squares: https://docs.scipy.org/doc/scipy/reference/generated/scipy.linalg.lstsq.html
""" # noqa: E501
# Authors: Jose C. Garcia Alanis <[email protected]>
#
# License: BSD (3-clause)
import numpy as np
import matplotlib.pyplot as plt
from mne.datasets.limo import load_data
from mne.stats import linear_regression
from mne.viz import plot_events, plot_compare_evokeds
from mne import combine_evoked
print(__doc__)
# subject to use
subj = 1
###############################################################################
# About the data
# --------------
#
# In the original LIMO experiment (see [2]_), participants performed a
# two-alternative forced choice task, discriminating between two face stimuli.
# The same two faces were used during the whole experiment,
# with varying levels of noise added, making the faces more or less
# discernible to the observer (see `Fig 1`_ in [3]_ for a similar approach).
#
# The presented faces varied across a noise-signal (or phase-coherence)
# continuum spanning from 0 to 85% in increasing steps of 5%.
# In other words, faces with high phase-coherence (e.g., 85%) were easy to
# identify, while faces with low phase-coherence (e.g., 5%) were hard to
# identify and by extension very hard to discriminate.
#
#
# Load the data
# -------------
#
# We'll begin by loading the data from subject 1 of the LIMO dataset.
# This step can take a little while if you're loading the data for the
# first time.
limo_epochs = load_data(subject=subj)
###############################################################################
# Note that the result of the loading process is an
# :class:`mne.EpochsArray` containing the data ready to interface
# with MNE-Python.
print(limo_epochs)
###############################################################################
# Visualize events
# ----------------
#
# We can visualise the distribution of the face events contained in the
# ``limo_epochs`` structure. Events should appear clearly grouped, as the
# epochs are ordered by condition.
fig = plot_events(limo_epochs.events, event_id=limo_epochs.event_id)
fig.suptitle("Distribution of events in LIMO epochs")
###############################################################################
# As it can be seen above, conditions are coded as ``Face/A`` and ``Face/B``.
# Information about the phase-coherence of the presented faces is stored in the
# epochs metadata. This information can be easily accessed by calling
# ``limo_epochs.metadata``. As shown below, the epochs metadata also contains
# information about the presented faces for convenience.
print(limo_epochs.metadata.head())
###############################################################################
# Now let's take a closer look at the information in the epochs
# metadata.
# We want to include all columns in the summary table
epochs_summary = limo_epochs.metadata.describe(include='all').round(3)
print(epochs_summary)
###############################################################################
# The first column of the summary table above provides more or less the same
# information as the ``print(limo_epochs)`` command we ran before. There are
# 1055 faces (i.e., epochs), subdivided in 2 conditions (i.e., Face A and
# Face B) and, for this particular subject, there are more epochs for the
# condition Face B.
#
# In addition, we can see in the second column that the values for the
# phase-coherence variable range from -1.619 to 1.642. This is because the
# phase-coherence values are provided as a z-scored variable in the LIMO
# dataset. Note that they have a mean of zero and a standard deviation of 1.
#
#
# Visualize condition ERPs
# ------------------------
#
# Let's plot the ERPs evoked by Face A and Face B, to see how similar they are.
# only show -250 to 500 ms
ts_args = dict(xlim=(-0.25, 0.5))
# plot evoked response for face A
limo_epochs['Face/A'].average().plot_joint(times=[0.15],
                                           title='Evoked response: Face A',
                                           ts_args=ts_args)
# and face B
limo_epochs['Face/B'].average().plot_joint(times=[0.15],
                                           title='Evoked response: Face B',
                                           ts_args=ts_args)
###############################################################################
# We can also compute the difference wave contrasting Face A and Face B.
# Although, looking at the evoked responses above, we shouldn't expect great
# differences among these face-stimuli.
# Face A minus Face B
difference_wave = combine_evoked([limo_epochs['Face/A'].average(),
                                  limo_epochs['Face/B'].average()],
                                 weights=[1, -1])
# plot difference wave
difference_wave.plot_joint(times=[0.15], title='Difference Face A - Face B')
###############################################################################
# As expected, no clear pattern appears when contrasting
# Face A and Face B. However, we could narrow our search a little bit more.
# Since this is a "visual paradigm" it might be best to look at electrodes
# located over the occipital lobe, as differences between stimuli (if any)
# might be easier to spot over visual areas.
# Create a dictionary containing the evoked responses
conditions = ["Face/A", "Face/B"]
evokeds = {condition: limo_epochs[condition].average()
           for condition in conditions}
# concentrate analysis on occipital electrodes (e.g. B11)
pick = evokeds["Face/A"].ch_names.index('B11')
# compare evoked responses
plot_compare_evokeds(evokeds, picks=pick, ylim=dict(eeg=(-15, 7.5)))
###############################################################################
# We do see a difference between Face A and B, but it is pretty small.
#
#
# Visualize effect of stimulus phase-coherence
# --------------------------------------------
#
# Since phase-coherence
# determined whether a face stimulus could be easily identified,
# one could expect that faces with high phase-coherence should evoke stronger
# activation patterns along occipital electrodes.
phase_coh = limo_epochs.metadata['phase-coherence']
# get levels of phase coherence
levels = sorted(phase_coh.unique())
# create labels for levels of phase coherence (i.e., 0 - 85%)
labels = ["{0:.2f}".format(i) for i in np.arange(0., 0.90, 0.05)]
# create dict of evokeds for each level of phase-coherence
evokeds = {label: limo_epochs[phase_coh == level].average()
           for level, label in zip(levels, labels)}
# pick channel to plot
electrodes = ['C22', 'B11']
# create figures
for electrode in electrodes:
    fig, ax = plt.subplots(figsize=(8, 4))
    plot_compare_evokeds(evokeds,
                         axes=ax,
                         ylim=dict(eeg=(-20, 15)),
                         picks=electrode,
                         cmap=("Phase coherence", "magma"))
###############################################################################
# As shown above, there are some considerable differences between the
# activation patterns evoked by stimuli with low vs. high phase-coherence at
# the chosen electrodes.
#
#
# Prepare data for linear regression analysis
# --------------------------------------------
#
# Before we test the significance of these differences using linear
# regression, we'll interpolate missing channels that were
# dropped during preprocessing of the data.
# Furthermore, we'll drop the EOG channels (marked by the "EXG" prefix)
# present in the data:
limo_epochs.interpolate_bads(reset_bads=True)
limo_epochs.drop_channels(['EXG1', 'EXG2', 'EXG3', 'EXG4'])
###############################################################################
# Define predictor variables and design matrix
# --------------------------------------------
#
# To run the regression analysis,
# we need to create a design matrix containing information about the
# variables (i.e., predictors) we want to use for prediction of brain
# activity patterns. For this purpose, we'll use the information we have in
# ``limo_epochs.metadata``: phase-coherence and Face A vs. Face B.
# name of predictors + intercept
predictor_vars = ['face a - face b', 'phase-coherence', 'intercept']
# create design matrix
design = limo_epochs.metadata[['phase-coherence', 'face']].copy()
design['face a - face b'] = np.where(design['face'] == 'A', 1, -1)
design['intercept'] = 1
design = design[predictor_vars]
###############################################################################
# Now we can set up the linear model to be used in the analysis using
# MNE-Python's :func:`~mne.stats.linear_regression` function.
reg = linear_regression(limo_epochs,
                        design_matrix=design,
                        names=predictor_vars)
###############################################################################
# Extract regression coefficients
# -------------------------------
#
# The results are stored within the object ``reg``,
# which is a dictionary of evoked objects containing
# multiple inferential measures for each predictor in the design matrix.
print('predictors are:', list(reg))
print('fields are:', [field for field in getattr(reg['intercept'], '_fields')])
###############################################################################
# Plot model results
# ------------------
#
# Now we can access and plot the results of the linear regression analysis by
# calling :samp:`reg['{<name of predictor>}'].{<measure of interest>}` and
# using the
# :meth:`~mne.Evoked.plot_joint` method just as we would do with any other
# evoked object.
# Below we can see a clear effect of phase-coherence, with higher
# phase-coherence (i.e., better "face visibility") having a negative effect on
# the activity measured at occipital electrodes around 200 to 250 ms following
# stimulus onset.
reg['phase-coherence'].beta.plot_joint(ts_args=ts_args,
                                       title='Effect of Phase-coherence',
                                       times=[0.23])
###############################################################################
# We can also plot the corresponding T values.
# use unit=False and scale=1 to keep values at their original
# scale (i.e., avoid conversion to micro-volt).
ts_args = dict(xlim=(-0.25, 0.5),
               unit=False)
topomap_args = dict(scalings=dict(eeg=1),
                    average=0.05)
# sphinx_gallery_thumbnail_number = 9
fig = reg['phase-coherence'].t_val.plot_joint(ts_args=ts_args,
                                              topomap_args=topomap_args,
                                              times=[0.23])
fig.axes[0].set_ylabel('T-value')
###############################################################################
# Conversely, there appears to be no (or very small) systematic effects when
# comparing Face A and Face B stimuli. This is largely consistent with the
# difference wave approach presented above.
ts_args = dict(xlim=(-0.25, 0.5))
reg['face a - face b'].beta.plot_joint(ts_args=ts_args,
                                       title='Effect of Face A vs. Face B',
                                       times=[0.23])
| bsd-3-clause | 1,804,015,382,770,537,700 | 40.69906 | 95 | 0.604947 | false |
bhdouglass/agui | agui/backends/pyside/extras/message.py | 1 | 1667 | # -*- Mode: Python; coding: utf-8; indent-tabs-mode: nil; tab-width: 4 -*-
### BEGIN LICENSE
# Copyright (C) 2014 Brian Douglass [email protected]
# This program is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 3, as published
# by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranties of
# MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
### END LICENSE
from agui.backends.pyside.imports import *
from agui.aextras import AMessage
class Message(AMessage):
    """PySide implementation of the AMessage extra.

    Shows informational dialogs and asks yes/no questions using
    ``QtGui.QMessageBox``.
    """

    def message(self, window_title, title, message, icon, parent=None):
        """Show a dialog with *title* in bold above the message body."""
        body = "<b>%s</b><br/><br/>%s" % (title, message)
        self.message_alt(window_title, body, icon, parent)

    def message_alt(self, window_title, message, icon, parent=None):
        """Show a non-modal informational dialog with a Close button."""
        # Keep a reference on self so the non-modal dialog is not
        # garbage-collected as soon as this method returns.
        # NoIcon is passed explicitly: the constructor's first argument
        # must be a QMessageBox.Icon value, not None.
        self.dialog = QtGui.QMessageBox(
            QtGui.QMessageBox.NoIcon, window_title, message,
            QtGui.QMessageBox.Close, parent=parent)
        # QMessageBox has no setPixmap(); custom icons go through
        # setIconPixmap().
        self.dialog.setIconPixmap(icon.icon().pixmap(32, 32))
        self.dialog.show()

    def yes_no(self, window_title, message, icon=None, parent=None):
        """Ask a modal yes/no question; return ``self.yes`` or ``self.no``."""
        # question() takes the parent widget as its first positional
        # argument; the old code passed `self` there *and* parent=parent,
        # which raised a TypeError.
        ans = QtGui.QMessageBox.question(
            parent, window_title, message,
            QtGui.QMessageBox.Yes | QtGui.QMessageBox.No,
            QtGui.QMessageBox.Yes)
        # Was `QtGui.MessageBox.Yes` (AttributeError) before.
        if ans == QtGui.QMessageBox.Yes:
            return self.yes
        return self.no
| gpl-3.0 | -8,745,910,696,645,562,000 | 44.054054 | 153 | 0.709058 | false |
TsarFox/chandere2 | chandere/errors.py | 2 | 1423 | # Copyright (C) 2017 Jakob Kreuze, All Rights Reserved.
#
# This file is part of Chandere.
#
# Chandere is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation, either version 3 of the License, or (at your
# option) any later version.
#
# Chandere is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with Chandere. If not, see <http://www.gnu.org/licenses/>.
"""Custom exceptions and functions for error detection."""
class ChandereError(Exception):
    """Signal an error specific to Chandere.

    Typically caught at the entry point, where its contents are
    displayed without a traceback.
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
def check_http_status(code: int, url=None):
    """Check an HTTP status code, raising a ChandereError for any
    status other than 200. When *url* is given it is included in the
    error message.
    """
    if code == 200:
        return
    message = "Encountered HTTP/1.1 {}".format(code)
    if url is not None:
        message += " while fetching '{}'.".format(url)
    raise ChandereError(message)
| gpl-3.0 | -8,284,206,975,727,391,000 | 36.447368 | 74 | 0.702038 | false |
mapillary/OpenSfM | opensfm/actions/export_geocoords.py | 2 | 5081 | import logging
import os
import numpy as np
import pyproj
from opensfm import io
from opensfm.dataset import DataSet, UndistortedDataSet
logger = logging.getLogger(__name__)
def run_dataset(
    data: DataSet, proj, transformation, image_positions, reconstruction, dense, output
):
    """Export reconstructions in geographic coordinates.

    Args:
        proj: PROJ.4 projection string
        transformation: print coordinate transformation matrix
        image_positions: export image positions
        reconstruction: export reconstruction.json
        dense: export dense point cloud (depthmaps/merged.ply)
        output: path of the output file relative to the dataset
    """
    if not (transformation or image_positions or reconstruction or dense):
        logger.info("Nothing to do. At least one of the options: ")
        logger.info(" --transformation, --image-positions, --reconstruction, --dense")
        # Bail out early: no export was requested, so loading the
        # reference and computing the transform would be wasted work
        # (previously execution fell through here).
        return

    reference = data.load_reference()
    projection = pyproj.Proj(proj)
    # 4x4 affine mapping reconstruction coordinates to geocoords.
    t = _get_transformation(reference, projection)

    if transformation:
        output = output or "geocoords_transformation.txt"
        output_path = os.path.join(data.data_path, output)
        _write_transformation(t, output_path)

    if image_positions:
        reconstructions = data.load_reconstruction()
        output = output or "image_geocoords.tsv"
        output_path = os.path.join(data.data_path, output)
        _transform_image_positions(reconstructions, t, output_path)

    if reconstruction:
        reconstructions = data.load_reconstruction()
        for r in reconstructions:
            _transform_reconstruction(r, t)
        output = output or "reconstruction.geocoords.json"
        data.save_reconstruction(reconstructions, output)

    if dense:
        output = output or "undistorted/depthmaps/merged.geocoords.ply"
        output_path = os.path.join(data.data_path, output)
        udata = data.undistorted_dataset()
        _transform_dense_point_cloud(udata, t, output_path)
def _get_transformation(reference, projection):
    """Get the linear transform from reconstruction coords to geocoords."""
    # Map the three unit basis points and the origin through the
    # nonlinear projection; the differences give the linear part and
    # the projected origin gives the translation.
    basis = [[1, 0, 0], [0, 1, 0], [0, 0, 1], [0, 0, 0]]
    images = [_transform(point, reference, projection) for point in basis]
    origin = images[3]

    matrix = np.zeros((4, 4))
    matrix[3, 3] = 1.0
    for col in range(3):
        for row in range(3):
            matrix[row, col] = images[col][row] - origin[row]
        matrix[col, 3] = origin[col]
    return matrix
def _write_transformation(transformation, filename):
    """Write the 4x4 transformation matrix to a text file, one row per line."""
    with io.open_wt(filename) as fout:
        for row in transformation:
            line = u" ".join(str(value) for value in row) + u"\n"
            fout.write(line)
def _transform(point, reference, projection):
"""Transform on point from local coords to a proj4 projection."""
lat, lon, altitude = reference.to_lla(point[0], point[1], point[2])
easting, northing = projection(lon, lat)
return [easting, northing, altitude]
def _transform_image_positions(reconstructions, transformation, output):
A, b = transformation[:3, :3], transformation[:3, 3]
rows = ["Image\tX\tY\tZ"]
for r in reconstructions:
for shot in r.shots.values():
o = shot.pose.get_origin()
to = np.dot(A, o) + b
row = [shot.id, to[0], to[1], to[2]]
rows.append("\t".join(map(str, row)))
text = "\n".join(rows + [""])
with open(output, "w") as fout:
fout.write(text)
def _transform_reconstruction(reconstruction, transformation):
"""Apply a transformation to a reconstruction in-place."""
A, b = transformation[:3, :3], transformation[:3, 3]
A1 = np.linalg.inv(A)
for shot in reconstruction.shots.values():
R = shot.pose.get_rotation_matrix()
shot.pose.set_rotation_matrix(np.dot(R, A1))
shot.pose.set_origin(np.dot(A, shot.pose.get_origin()) + b)
for point in reconstruction.points.values():
point.coordinates = list(np.dot(A, point.coordinates) + b)
def _transform_dense_point_cloud(udata: UndistortedDataSet, transformation, output_path):
    """Apply a transformation to the merged point cloud.

    Reads the undistorted dataset's merged PLY file, transforms point
    positions with the full affine and normals with the linear part
    only, and writes the result to ``output_path``.
    """
    A, b = transformation[:3, :3], transformation[:3, 3]
    input_path = udata.point_cloud_file()
    with io.open_rt(input_path) as fin:
        with io.open_wt(output_path) as fout:
            for i, line in enumerate(fin):
                if i < 13:
                    # First 13 lines are copied verbatim — assumed to be
                    # the fixed-size PLY header written by the merger.
                    fout.write(line)
                else:
                    x, y, z, nx, ny, nz, red, green, blue = line.split()
                    # list(...) is required: under Python 3, np.dot()
                    # on a bare map object raises instead of computing.
                    x, y, z = np.dot(A, list(map(float, [x, y, z]))) + b
                    nx, ny, nz = np.dot(A, list(map(float, [nx, ny, nz])))
                    fout.write(
                        "{} {} {} {} {} {} {} {} {}\n".format(
                            x, y, z, nx, ny, nz, red, green, blue
                        )
                    )
| bsd-2-clause | 220,917,560,449,082,400 | 35.292857 | 89 | 0.596339 | false |
brownhead/galah | galah/base/crypto/passcrypt.py | 3 | 2374 | from collections import namedtuple
import os
import pbkdf2
# Names of the supported password hashing algorithms.
algorithms = ("pbkdf2", )

# A password hash together with every parameter used to create it, so
# the same hash can be recomputed later for verification.
HashSeal = namedtuple("HashSeal", ("hash", "algorithm", "salt", "cost_factor"))
def hash(password, algorithm, salt, cost_factor, **kwargs):
    """
    Returns a hashed password (not a HashSeal) in binary format.

    **kwargs are passed to the underlying hash function unchanged.

    """
    # NOTE(review): this deliberately shadows the builtin hash() within
    # this module; external callers use it as passcrypt.hash.
    if algorithm != "pbkdf2":
        raise ValueError("algorithm: Specified algorithm is not recognized.")
    return pbkdf2.pbkdf2_bin(str(password), salt, cost_factor, **kwargs)
def seal(password, algorithm = "pbkdf2", salt = None, cost_factor = 1000):
    "Returns a HashSeal of the given password."

    # Generate a fresh random 4-byte salt when the caller supplies none.
    chosen_salt = os.urandom(4) if salt is None else salt

    digest = hash(password, algorithm, chosen_salt, cost_factor)
    return HashSeal(digest, algorithm, chosen_salt, cost_factor)
def check_seal(password, seal):
    "Returns True if a password is the password used to create the given seal."

    def _constant_time_equals(a, b):
        # Deliberately avoids short-circuiting so the comparison takes
        # the same time whether a mismatch occurs early or late.
        if len(a) != len(b):
            return False
        equal = True
        for left, right in zip(a, b):
            if left != right:
                equal = False
        return equal

    # Recompute the hash using the parameters stored in the seal. Note
    # its important that password is a string (not unicode) because
    # ord() needs to work with it.
    candidate = hash(password, seal.algorithm, seal.salt, seal.cost_factor)
    return _constant_time_equals(candidate, seal.hash)
def serialize_seal(seal):
    """Serialize a HashSeal into a single ';'-delimited string.

    Binary fields (hash, salt) are hex-encoded (Python 2 hex codec).
    """
    parts = [
        seal.hash.encode("hex"),
        seal.algorithm,
        seal.salt.encode("hex"),
        str(seal.cost_factor),
    ]
    return ";".join(parts)
def deserialize_seal(seal):
    """Parse a string produced by serialize_seal back into a HashSeal."""
    fields = seal.split(";")
    # Decode each field back to the type it had before serialization.
    fields[3] = int(fields[3])
    fields[2] = str(fields[2].decode("hex"))
    fields[1] = unicode(fields[1])
    fields[0] = str(fields[0].decode("hex"))
    return HashSeal(*fields)
if __name__ == "__main__":
    # Smoke test (Python 2 print syntax): seal a password, verify it,
    # and check that serialization round-trips losslessly.
    test_seal = seal("test")
    print test_seal
    assert check_seal("test", test_seal)
    print serialize_seal(test_seal), deserialize_seal(serialize_seal(test_seal))
    assert deserialize_seal(serialize_seal(test_seal)) == test_seal
| mit | -2,179,239,984,026,829,800 | 27.95122 | 80 | 0.609099 | false |
maas/maas | src/maasserver/forms/script.py | 1 | 24637 | # Copyright 2017-2019 Canonical Ltd. This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).
"""Script form."""
from datetime import timedelta
import json
from json import JSONDecodeError
import pipes
import re
from django.core.exceptions import ValidationError
from django.forms import (
BooleanField,
CharField,
DurationField,
FileField,
Form,
ModelForm,
)
import yaml
from maasserver.audit import create_audit_event
from maasserver.enum import ENDPOINT
from maasserver.fields import VersionedTextFileField
from maasserver.forms.parameters import ParametersForm
from maasserver.utils.forms import set_form_error
from metadataserver.enum import HARDWARE_TYPE, SCRIPT_PARALLEL, SCRIPT_TYPE
from metadataserver.models import Script
from metadataserver.models.script import (
translate_hardware_type,
translate_script_parallel,
translate_script_type,
)
from provisioningserver.events import EVENT_TYPES
class ScriptForm(ModelForm):
script_type = CharField(
label="Script type",
required=False,
help_text="Script type",
initial=str(SCRIPT_TYPE.TESTING),
)
hardware_type = CharField(
label="Hardware type",
required=False,
help_text="The hardware type the script configures or tests.",
initial=str(HARDWARE_TYPE.NODE),
)
parallel = CharField(
label="Parallel",
required=False,
help_text="Whether the script may run in parallel with other scripts.",
initial=str(SCRIPT_PARALLEL.DISABLED),
)
packages = CharField(
label="Packages",
required=False,
help_text="Packages to be installed with script.",
initial="",
)
timeout = DurationField(
label="Timeout",
required=False,
help_text="Timeout",
initial=timedelta(0),
)
script = VersionedTextFileField(label="Script", help_text="Script content")
comment = CharField(
label="Comment",
required=False,
help_text="Description of change",
initial="",
)
for_hardware = CharField(
label="For hardware",
required=False,
help_text="Hardware identifiers this script requires to run.",
initial="",
)
apply_configured_networking = BooleanField(required=False)
    class Meta:
        # Model fields exposed through this form; everything else on the
        # Script model is managed internally.
        model = Script
        fields = (
            "name",
            "title",
            "description",
            "tags",
            "script_type",
            "hardware_type",
            "parallel",
            "packages",
            "timeout",
            "destructive",
            "script",
            "for_hardware",
            "may_reboot",
            "recommission",
            "apply_configured_networking",
        )
    def __init__(self, instance=None, data=None, edit_default=False, **kwargs):
        """Initialize the form.

        :param instance: existing Script being edited, or None when
            creating a new script.
        :param data: submitted form data; a copy is adapted for API
            aliases before validation.
        :param edit_default: allow editing MAAS-supplied default scripts
            (normally forbidden, see is_valid()).
        """
        self.edit_default = edit_default
        # VersionedTextFileField expects the content under "data" on
        # creation, but under "new_data" when updating an existing file.
        if instance is None:
            script_data_key = "data"
        else:
            script_data_key = "new_data"
        data = data.copy()
        # Fold a standalone comment into the script field's payload so
        # the versioned file records the description of the change.
        if "comment" in data and "script" in data:
            script_data = {
                "comment": data.get("comment"),
                script_data_key: data.get("script"),
            }
            data["script"] = script_data
            data.pop("comment")
        # Alias type to script_type to allow for consistent naming in the API.
        if "type" in data and "script_type" not in data:
            data["script_type"] = data["type"]
            # self.data is a QueryDict. pop returns a list containing the value
            # while directly accessing it returns just the value.
            data.pop("type")
        super().__init__(instance=instance, data=data, **kwargs)
        # name and script are mandatory only when creating a new script.
        if instance is None:
            for field in ["name", "script"]:
                self.fields[field].required = True
        else:
            for field in ["name", "script"]:
                self.fields[field].required = False
            self.fields["script"].initial = instance.script
        # Reading the embedded YAML must happen at the end of initialization
        # so the fields set are validated.
        if "script" in self.data:
            self._read_script()
def _validate_results(self, results={}):
valid = True
if isinstance(results, list):
for result in results:
if not isinstance(result, str):
set_form_error(
self,
"results",
"Each result in a result list must be a string.",
)
valid = False
elif isinstance(results, dict):
for result in results.values():
if not isinstance(result, dict):
set_form_error(
self,
"results",
"Each result in a result dictionary must be a "
"dictionary.",
)
elif "title" not in result:
set_form_error(
self,
"results",
"title must be included in a result dictionary.",
)
valid = False
else:
for key in ["title", "description"]:
if key in result and not isinstance(result[key], str):
set_form_error(
self, "results", "%s must be a string." % key
)
valid = False
else:
set_form_error(
self,
"results",
"results must be a list of strings or a dictionary of "
"dictionaries.",
)
valid = False
return valid
    def _clean_script(self, parsed_yaml):
        """Merge values from the script's embedded YAML into form data.

        Embedded values are only applied where the user did not supply
        their own; conflicting values raise a form error.
        """
        # Tags and timeout may not be updated from new embedded YAML. This
        # allows users to receive updated scripts from an upstream maintainer,
        # such as Canonical, while maintaining user defined tags and timeout.

        # Tags must be a comma separated string for the form.
        tags = parsed_yaml.pop("tags", None)
        if (
            tags is not None
            and self.instance.id is None
            and "tags" not in self.data
        ):
            tags_valid = True
            if isinstance(tags, str):
                self.data["tags"] = tags
            elif isinstance(tags, list):
                for tag in tags:
                    if not isinstance(tag, str):
                        tags_valid = False
                        continue
                if tags_valid:
                    self.data["tags"] = ",".join(tags)
            else:
                tags_valid = False
            if not tags_valid:
                set_form_error(
                    self,
                    "tags",
                    "Embedded tags must be a string of comma seperated "
                    "values, or a list of strings.",
                )

        # Timeout must be a string for the form.
        timeout = parsed_yaml.pop("timeout", None)
        if (
            timeout is not None
            and self.instance.id is None
            and "timeout" not in self.data
        ):
            self.data["timeout"] = str(timeout)

        # Packages and for_hardware must be a JSON string for the form.
        for key in ["packages", "for_hardware"]:
            value = parsed_yaml.pop(key, None)
            if value is not None and key not in self.data:
                self.data[key] = json.dumps(value)

        # Remaining embedded keys either fill in missing form data or
        # must match it exactly.
        for key, value in parsed_yaml.items():
            if key in self.fields:
                error = False
                if key not in self.data:
                    self.data[key] = value
                elif key == "script_type":
                    # The deprecated Commissioning API always sets the
                    # script_type to commissioning as it has always only
                    # accepted commissioning scripts while the form sets
                    # the default type to testing. If the YAML matches the
                    # type allow it.
                    try:
                        if translate_script_type(
                            value
                        ) != translate_script_type(self.data[key]):
                            error = True
                    except ValidationError:
                        error = True
                elif value != self.data[key]:
                    # Only allow form data for fields defined in the YAML if
                    # the data matches.
                    error = True
                if error:
                    set_form_error(
                        self,
                        key,
                        "May not override values defined in embedded YAML.",
                    )
    def _read_script(self):
        """Read embedded YAML configuration in a script.

        Search for supported MAAS script metadata in the script and
        read the values. Leading '#' are ignored. If the values are
        fields they will be entered in the form.
        """
        # Matches the "--- Start/End MAAS <version> script metadata ---"
        # delimiter comments, capturing the version number.
        yaml_delim = re.compile(
            r"\s*#\s*-+\s*(Start|End) MAAS (?P<version>\d+\.\d+) "
            r"script metadata\s+-+",
            re.I,
        )
        found_version = None
        yaml_content = ""
        # __init__ may have wrapped the script content in a dict to
        # carry the change comment for the versioned file field.
        if isinstance(self.data["script"], dict):
            if "new_data" in self.data["script"]:
                script = self.data["script"]["new_data"]
            else:
                script = self.data["script"]["data"]
        else:
            script = self.data["script"]
        script_splitlines = script.splitlines()
        if len(script_splitlines) >= 1 and not script_splitlines[0].startswith(
            "#!/"
        ):
            set_form_error(self, "script", "Must start with shebang.")

        for line in script_splitlines[1:]:
            m = yaml_delim.search(line)
            if m is not None:
                if found_version is None and m.group("version") == "1.0":
                    # Found the start of the embedded YAML
                    found_version = m.group("version")
                    continue
                elif found_version == m.group("version"):
                    # Found the end of the embedded YAML
                    break
            elif found_version is not None and line.strip() != "":
                # Capture all lines inbetween the deliminator
                if "#" not in line:
                    set_form_error(self, "script", 'Missing "#" on YAML line.')
                    return
                yaml_content += "%s\n" % line.split("#", 1)[1]

        try:
            parsed_yaml = yaml.safe_load(yaml_content)
        except yaml.YAMLError as err:
            set_form_error(self, "script", "Invalid YAML: %s" % err)
            return
        if not isinstance(parsed_yaml, dict):
            return
        # results and parameters go straight onto the model instance;
        # the remaining keys are merged into the form data.
        self.instance.results = parsed_yaml.pop("results", {})
        self.instance.parameters = parsed_yaml.pop("parameters", {})
        self._clean_script(parsed_yaml)
    def clean_packages(self):
        """Decode and validate the JSON ``packages`` field.

        Returns the decoded dictionary, normalizing single package
        entries into lists. Validation failures are recorded as form
        errors via set_form_error().
        """
        if self.cleaned_data["packages"] == "":
            return self.instance.packages
        else:
            packages = json.loads(self.cleaned_data["packages"])
            # Automatically convert into a list in case only one package is
            # needed.
            for key in ["apt", "snap", "url"]:
                if key in packages and not isinstance(packages[key], list):
                    packages[key] = [packages[key]]
            for key in ["apt", "url"]:
                if key in packages:
                    for package in packages[key]:
                        if not isinstance(package, str):
                            set_form_error(
                                self,
                                "packages",
                                "Each %s package must be a string." % key,
                            )
            # Snap entries may be plain names or dictionaries with
            # name/channel/mode options.
            if "snap" in packages:
                for package in packages["snap"]:
                    if isinstance(package, dict):
                        if "name" not in package or not isinstance(
                            package["name"], str
                        ):
                            set_form_error(
                                self,
                                "packages",
                                "Snap package name must be defined.",
                            )
                        if "channel" in package and package["channel"] not in [
                            "stable",
                            "edge",
                            "beta",
                            "candidate",
                        ]:
                            set_form_error(
                                self,
                                "packages",
                                "Snap channel must be stable, edge, beta, "
                                "or candidate.",
                            )
                        if "mode" in package and package["mode"] not in [
                            "classic",
                            "dev",
                            "jail",
                        ]:
                            set_form_error(
                                self,
                                "packages",
                                "Snap mode must be classic, dev, or jail.",
                            )
                    elif not isinstance(package, str):
                        set_form_error(
                            self, "packages", "Snap package must be a string."
                        )
            return packages
    def clean_for_hardware(self):
        """Convert from JSON and validate for_hardware input."""
        if self.cleaned_data["for_hardware"] == "":
            return self.instance.for_hardware
        try:
            for_hardware = json.loads(self.cleaned_data["for_hardware"])
        except JSONDecodeError:
            # Not JSON; fall back to treating the raw value as a comma
            # separated string.
            for_hardware = self.cleaned_data["for_hardware"]
        if isinstance(for_hardware, str):
            for_hardware = for_hardware.split(",")
        if not isinstance(for_hardware, list):
            set_form_error(self, "for_hardware", "Must be a list or string")
            # NOTE(review): implicitly returns None here (cleaned value
            # becomes None); the form is already invalid at this point.
            return
        # Accept modalias, PCI/USB IDs, and DMI vendor/product/version
        # identifiers (case-insensitive).
        regex = re.compile(
            r"^modalias:.+|pci:[\da-f]{4}:[\da-f]{4}|"
            r"usb:[\da-f]{4}:[\da-f]{4}|"
            r"system_vendor:.*|"
            r"system_product:.*|"
            r"system_version:.*|"
            r"mainboard_vendor:.*|"
            r"mainboard_product:.*$",
            re.I,
        )
        for hw_id in for_hardware:
            if regex.search(hw_id) is None:
                set_form_error(
                    self,
                    "for_hardware",
                    "Hardware identifier '%s' must be a modalias, PCI ID, "
                    "USB ID, system vendor, system product, system version, "
                    "mainboard vendor, or mainboard product." % hw_id,
                )
        return for_hardware
    def clean(self):
        """Fill in missing fields from the existing instance and
        translate the enum-like fields to their numeric values."""
        cleaned_data = super().clean()
        # If a field wasn't passed in keep the old values when updating.
        if self.instance.id is not None:
            for field in self._meta.fields:
                if field not in self.data:
                    cleaned_data[field] = getattr(self.instance, field)
        script_type = cleaned_data["script_type"]
        if script_type == "":
            cleaned_data["script_type"] = self.instance.script_type
        else:
            try:
                cleaned_data["script_type"] = translate_script_type(
                    script_type
                )
            except ValidationError as e:
                set_form_error(self, "script_type", e)
        hardware_type = cleaned_data["hardware_type"]
        if hardware_type == "":
            cleaned_data["hardware_type"] = self.instance.hardware_type
        else:
            try:
                cleaned_data["hardware_type"] = translate_hardware_type(
                    hardware_type
                )
            except ValidationError as e:
                set_form_error(self, "hardware_type", e)
        parallel = cleaned_data["parallel"]
        if parallel == "":
            cleaned_data["parallel"] = self.instance.parallel
        else:
            try:
                cleaned_data["parallel"] = translate_script_parallel(parallel)
            except ValidationError as e:
                set_form_error(self, "parallel", e)
        return cleaned_data
def is_valid(self):
valid = super().is_valid()
if valid and self.instance.default and not self.edit_default:
for field in self.Meta.fields:
if field in ["tags", "timeout"]:
continue
if field in self.data:
set_form_error(
self,
field,
"Not allowed to change on default scripts.",
)
valid = False
name = self.data.get("name")
# none is used to tell the API to not run testing_scripts during
# commissioning.
if name is not None and name.lower() == "none":
set_form_error(self, "name", '"none" is a reserved name.')
valid = False
# The name can't be a digit as MAAS allows scripts to be selected by
# id.
if name is not None and name.isdigit():
set_form_error(self, "name", "Cannot be a number.")
valid = False
if name is not None and pipes.quote(name) != name:
set_form_error(
self,
"name",
"Name '%s' contains disallowed characters, e.g. space or quotes."
% name,
)
valid = False
# If comment and script exist __init__ combines both fields into a dict
# to pass to VersionedTextFileField.
if "comment" in self.data:
set_form_error(
self,
"comment",
'"comment" may only be used when specifying a "script" '
"as well.",
)
valid = False
if "script" in self.data:
if not self._validate_results(self.instance.results):
valid = False
if "parameters" in self.data:
params_form = ParametersForm(data=self.data.get("parameters"))
if not params_form.is_valid():
valid = False
if (
not valid
and self.instance.script_id is not None
and self.initial.get("script") != self.instance.script_id
and self.instance.script.id is not None
):
# If form validation failed cleanup any new VersionedTextFile
# created by the VersionedTextFileField.
self.instance.script.delete()
return valid
def save(self, *args, **kwargs):
request = kwargs.pop("request", None)
endpoint = kwargs.pop("endpoint", None)
script = super().save(*args, **kwargs)
# Create audit event log if endpoint and request supplied.
if request is not None and endpoint is not None:
create_audit_event(
EVENT_TYPES.SETTINGS,
endpoint,
request,
None,
description="Saved script '%s'." % script.name,
)
return script
class CommissioningScriptForm(Form):
"""CommissioningScriptForm for the UI
The CommissioningScriptForm accepts a commissioning script from the
settings page in the UI. This form handles accepting the file upload
and setting the script_type to commissioning if no script_script is
set in the embedded YAML. The ScriptForm above validates the script
itself.
"""
content = FileField(label="Commissioning script", allow_empty_file=False)
def __init__(self, instance=None, *args, **kwargs):
super().__init__(*args, **kwargs)
self._form = None
def clean_content(self):
content = self.cleaned_data["content"]
script_name = content.name
script_content = content.read().decode()
try:
script = Script.objects.get(name=script_name)
except Script.DoesNotExist:
form = ScriptForm(data={"script": script_content})
# If the form isn't valid due to the name it may be because the
# embedded YAML doesn't define a name. Try again defining it.
if not form.is_valid() and "name" in form.errors:
form = ScriptForm(
data={"name": script_name, "script": script_content}
)
else:
form = ScriptForm(data={"script": script_content}, instance=script)
self._form = form
return content
def is_valid(self):
valid = super().is_valid()
# If content is empty self.clean_content isn't run.
if self._form is not None and not self._form.is_valid():
# This form only has content so all errors must be on that field.
if "content" not in self.errors:
self.errors["content"] = []
for key, errors in self._form.errors.items():
for error in errors:
self.errors["content"].append("%s: %s" % (key, error))
return False
else:
return valid
def save(self, request, *args, **kwargs):
script = self._form.save(
*args,
**kwargs,
commit=False,
request=request,
endpoint=ENDPOINT.UI
)
# If the embedded script data did not set a script type,
# set it to commissioning.
if "script_type" not in self._form.data:
script.script_type = SCRIPT_TYPE.COMMISSIONING
script.save()
class TestingScriptForm(Form):
"""TestingScriptForm for the UI
The TestingScriptForm accepts a test script from the
settings page in the UI. This form handles accepting the file upload
and setting the script_type to test if no script_script is
set in the embedded YAML. The ScriptForm above validates the script
itself.
"""
content = FileField(label="Test script", allow_empty_file=False)
def __init__(self, instance=None, *args, **kwargs):
super().__init__(*args, **kwargs)
self._form = None
def clean_content(self):
content = self.cleaned_data["content"]
script_name = content.name
script_content = content.read().decode()
try:
script = Script.objects.get(name=script_name)
except Script.DoesNotExist:
form = ScriptForm(data={"script": script_content})
# If the form isn't valid due to the name it may be because the
# embedded YAML doesn't define a name. Try again defining it.
if not form.is_valid() and "name" in form.errors:
form = ScriptForm(
data={"name": script_name, "script": script_content}
)
else:
form = ScriptForm(data={"script": script_content}, instance=script)
self._form = form
return content
def is_valid(self):
valid = super().is_valid()
# If content is empty self.clean_content isn't run.
if self._form is not None and not self._form.is_valid():
# This form only has content so all errors must be on that field.
if "content" not in self.errors:
self.errors["content"] = []
for key, errors in self._form.errors.items():
for error in errors:
self.errors["content"].append("%s: %s" % (key, error))
return False
else:
return valid
def save(self, request, *args, **kwargs):
script = self._form.save(
*args,
**kwargs,
commit=False,
request=request,
endpoint=ENDPOINT.UI
)
# If the embedded script data did not set a script type,
# set it to testing.
if "script_type" not in self._form.data:
script.script_type = SCRIPT_TYPE.TESTING
script.save()
| agpl-3.0 | -8,405,805,228,826,134,000 | 35.124633 | 81 | 0.507367 | false |
eskwire/evesrp | evesrp/migrate/versions/5795c29b2c7a_.py | 2 | 2180 | """Change RelativeModifier.value to a Numeric/Decimal
Revision ID: 5795c29b2c7a
Revises: 19506187e7aa
Create Date: 2014-07-23 14:43:45.748696
"""
# revision identifiers, used by Alembic.
revision = '5795c29b2c7a'
down_revision = '19506187e7aa'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.sql import update, select, table, column
from decimal import Decimal
def upgrade():
relative_modifier = table('relative_modifier',
column('id', sa.Integer),
column('value', sa.Float),
column('numeric_value', sa.Numeric(precision=8, scale=5)))
op.add_column('relative_modifier',
sa.Column('numeric_value', sa.Numeric(precision=8, scale=5)))
conn = op.get_bind()
sel = select([relative_modifier.c.id, relative_modifier.c.value])
results = conn.execute(sel)
q = Decimal(10) ** -5
for id_, float_value in results:
decimal_value = Decimal(float_value).quantize(q)
up = update(relative_modifier).where(relative_modifier.c.id == id_)\
.values({'numeric_value': decimal_value})
conn.execute(up)
op.drop_column('relative_modifier', 'value')
op.alter_column('relative_modifier', 'numeric_value', nullable=True,
new_column_name='value', existing_type=sa.Numeric(precision=8,
scale=5))
def downgrade():
relative_modifier = table('relative_modifier',
column('id', sa.Integer),
column('value', sa.Numeric(precision=8, scale=5)),
column('float_value', sa.Float))
op.add_column('relative_modifier', sa.Column('float_value', sa.Float))
conn = op.get_bind()
sel = select([relative_modifier.c.id, relative_modifier.c.value])
results = conn.execute(sel)
for id_, decimal_value in results:
float_value = float(decimal_value)
up = update(relative_modifier).where(relative_modifier.c.id == id_)\
.values({'float_value': float_value})
conn.execute(up)
op.drop_column('relative_modifier', 'value')
op.alter_column('relative_modifier', 'float_value', nullable=True,
new_column_name='value', existing_type=sa.Float)
| bsd-2-clause | -8,314,331,209,944,756,000 | 37.245614 | 76 | 0.647706 | false |
jakobkolb/MayaSim | Experiments/mayasim_X0_old_video.py | 1 | 7670 | """
This experiment is dedicated to look at single runs with different parameters
for ecosystem and agriculture benefits and make videos from them to grasp what
actually happens.
In addition, I would like to plot some macro trajectories of these runs and
compare them to see, if there are some more signatures, that can be discovered.
One result of these experiments is (so far) that settlements rarely die back.
Population peaks, but settlement number stays at its peak values.
"""
import getpass
import glob
import itertools as it
import numpy as np
import os
import shutil
import sys
import imageio
import pandas as pd
from Python.pymofa import experiment_handling as eh
from Python.MayaSim.model import Model
from Python.MayaSim.model import Parameters
from Python.MayaSim.visuals.custom_visuals import SnapshotVisuals as Visuals
def progress(count, total, suffix=''):
bar_len = 60
filled_len = int(round(bar_len * count / float(total)))
percents = round(100.0 * count / float(total), 1)
bar = '#' * filled_len + '-' * (bar_len - filled_len)
sys.stdout.write('[%s] %s%s ...%s\r' % (bar, percents, '%', suffix))
sys.stdout.flush() # As suggested by Rom Ruben
# Experiment with ecosystem benefits calculated as sum over cells in influence
def RUN_FUNC(r_eco, r_agg, mode, N, t_max, filename):
"""
run function sets up experiment for different parameters of ecosysten and
agriculture income. Each experiment saves snapshots of its state (in terms
of images) to the folder named 'filename'
These images are collected afterwards.
Parameters
----------
r_eco : float
weight of ecosystem income in gdp/capita
r_agg : float
weight of agriculture income in gdp/capita
mode : string
one of ['sum', 'mean'] sets the mode for income calculation
from land use
N : int
number of initial settlements
t_max : int
number of time steps to run the model
filename : string, path like
path that is used to save the model output
Returns
-------
"""
if os.path.exists(filename):
shutil.rmtree(filename)
os.makedirs(filename)
location = filename
m = Model(n=N, output_data_location=location, interactive_output=True)
m.output_level = 'trajectory'
m.population_control = False
m.eco_income_mode = mode
m.crop_income_mode = mode
m.r_es_sum = r_eco
m.r_bca_sum = r_agg
m.precipitation_modulation = True
res = {}
res["initials"] = pd.DataFrame({"Settlement X Possitions":
m.settlement_positions[0],
"Settlement Y Possitions":
m.settlement_positions[1],
"Population": m.population})
res["Parameters"] = pd.Series({key:
getattr(m, key)
for key in dir(Parameters)
if not key.startswith('__')
and not callable(key)})
# Saves a series of image_{:03d}.jpg images of the run to this location
# as well as the trajectory of macro quantities
m.run(t_max)
return 1
# define evaluation function for video assembly
def resaving(handle):
def make_video(filename):
init_data = np.load(filename[0] + '/init_frame.pkl')
init_data['location'] = SAVE_PATH_RAW
vis = Visuals(**init_data)
filenames = np.sort(glob.glob(filename[0] + '/frame*'))
writer = imageio.get_writer(filename[0] + '/run.mp4', fps=10)
for i, f in enumerate(filenames):
fig = vis.update_plots(**np.load(f))
fig.set_size_inches([944 / fig.dpi, 672 / fig.dpi])
fig.canvas.draw()
data = np.fromstring(fig.canvas.tostring_rgb(), dtype=np.uint8,
sep='')
data = data.reshape(fig.canvas.get_width_height()[::-1] + (3,))
writer.append_data(data)
progress(i, len(filenames))
writer.close()
return open(filename[0] + '/run.mp4').read()
EVA1 = {"video":
make_video}
EVA2 = {"trajectory":
lambda fnames: pd.concat([np.load(f + '/trajectory.pkl')
for f in fnames])}
handle.resave(EVA2, NAME2)
handle.resave(EVA1, NAME1)
# check which machine we are on and set paths accordingly
if getpass.getuser() == "kolb":
SAVE_PATH_RAW = "/p/tmp/kolb/Mayasim/output_data/X6"
SAVE_PATH_RES = "/home/kolb/Mayasim/output_data/X6"
elif getpass.getuser() == "jakob":
SAVE_PATH_RAW = "/home/jakob/PhD/Project_MayaSim/Python/output_data/raw/X6"
SAVE_PATH_RES = "/home/jakob/PhD/Project_MayaSim/Python/output_data/X6"
else:
SAVE_PATH_RAW = "./RAW"
SAVE_PATH_RES = "./RES"
print(SAVE_PATH_RAW)
print(SAVE_PATH_RES)
# get parameters from command line and set sub experiment
# and testing mode accordingly
if len(sys.argv) > 1:
sub_experiment = int(sys.argv[1])
else:
sub_experiment = 0
if len(sys.argv) > 2:
testing = bool(sys.argv[2])
else:
testing = False
# set experiment parameters
N, tmax, r_eco, r_bca = [30], [500], [0.2], [0.0002]
r_bca_m, r_eco_m = [1.1], [10.]
r_bcas = [0.1, 0.15, 0.2, 0.25, 0.3]
r_ecos = [0.0001, 0.00015, 0.0002, 0.00025]
# set up experiment specific paths and parameter combinations
if testing:
tmax, r_bcas, r_ecos = [10], [0.2], [.0002]
SAVE_PATH_RAW += '_test'
SAVE_PATH_RES += '_test'
if sub_experiment == 0:
PARAM_COMBS = list(it.product(r_ecos, r_bcas, ['sum'], N, tmax))
SAVE_PATH_RAW += '_eco_sum'
SAVE_PATH_RES += '_eco_sum'
elif sub_experiment == 1:
PARAM_COMBS = list(it.product(r_eco_m, r_bca_m, ['mean'], N, tmax))
SAVE_PATH_RAW += '_eco_mean'
SAVE_PATH_RES += '_eco_mean'
NAME1 = "video"
NAME2 = "trajectory"
INDEX = {0: "r_eco", 1: "r_bca"}
SAMPLE_SIZE = 1
# make folders, run experiment and data collection
if sub_experiment in [0, 1]:
for path in [SAVE_PATH_RES, SAVE_PATH_RAW]:
if not os.path.exists(path):
try:
os.makedirs(path)
except OSError:
print(path, ' already existed')
path += "/"
handle = eh.experiment_handling(SAMPLE_SIZE,
PARAM_COMBS,
INDEX, SAVE_PATH_RAW,
SAVE_PATH_RES)
#handle.compute(RUN_FUNC)
resaving(handle)
# unpack video collection and save in separate files,
# analyse trajectory data.
elif sub_experiment == 2:
for experiment in ['_eco_sum', '_eco_mean']:
data_found = False
loc = SAVE_PATH_RES + experiment + '/' + NAME1
try:
data = np.load(loc)
data_found = True
except IOError:
print(loc + ' does not exist')
if data_found:
names = data.index.names
values = data.index.values
for index in values:
filename = '{}={}_{}={}.mp4'.format(names[0], index[0],
names[1], index[1])
with open(SAVE_PATH_RES + experiment
+ '/' + filename, 'wb') as of:
of.write(data.xs(index)[0])
data_found = False
loc = SAVE_PATH_RES + experiment + '/' + NAME2
try:
data = np.load(loc)
data_found = True
except IOError:
print(loc + ' does not exist')
if data_found:
print data
| gpl-3.0 | -1,500,722,050,968,249,300 | 29.68 | 79 | 0.580443 | false |
thismachinechills/cheapbook | send.py | 1 | 1217 | from email.message import EmailMessage
from smtplib import SMTP
from base import SENDER_EMAIL, EMAIL_USERNAME, SMTP_SERVER, EMAIL_PASSWORD, SMTP_PORT, MacBook, RECEIVER_EMAIL
def create_msg(recv: str, subject: str, body: str, sender: str=SENDER_EMAIL) -> EmailMessage:
msg = EmailMessage()
msg['To'] = recv
msg['From'] = sender
msg['Subject'] = subject
msg.set_content(body)
return msg
def macbook_msg(macbook: MacBook,
recv: str=RECEIVER_EMAIL,
sender: str=SENDER_EMAIL) -> EmailMessage:
title = "[%s] %s" % (macbook.price, macbook.title)
body = "%s\n%s" % (macbook.link, macbook.specs)
return create_msg(recv, title, body, sender)
def send_msg_ssl(msg: EmailMessage, username: str=EMAIL_USERNAME,
passwd: str=EMAIL_PASSWORD, server: str=SMTP_SERVER,
port: int=SMTP_PORT):
with SMTP(server, port) as smtp_server:
smtp_server.ehlo()
smtp_server.starttls()
smtp_server.login(username, passwd)
smtp_server.send_message(msg)
def send_macbook_msg(macbook: MacBook) -> None:
print("-> Sending", macbook)
send_msg_ssl(macbook_msg(macbook))
print("-> Sent", macbook)
| agpl-3.0 | -446,865,695,134,209,000 | 28.682927 | 110 | 0.64585 | false |
vertix/deep_rl | game2048.py | 1 | 3288 | import numpy as np
import matplotlib.pyplot as plt
from matplotlib import colors
import random
import sys
import theano
import base
class Game2048(base.Environment):
def __init__(self, seed=None):
self._state = np.zeros([4, 4], dtype=theano.config.floatX)
# Set up the start position
self._state[1, 1] = 1
self._state[2, 2] = 1
if seed is not None:
random.seed(seed)
def num_of_actions(self):
return 4
def GetState(self):
return self._state
@staticmethod
def _StackRow(row):
"""Takes the row and joins it from higher indexes to lower"""
clean_row = row[row != 0]
result = []
i = 0
reward = 0
while i < clean_row.size:
if (i == clean_row.size - 1) or clean_row[i] != clean_row[i + 1]:
# No joining happens
result.append(clean_row[i])
i += 1
else: # Two consequtive blocks join
result.append(clean_row[i] + 1)
reward = 2. ** (clean_row[i] + 1)
i += 2
return np.array(result + [0] * (4 - len(result)), dtype=row.dtype), reward
def ProcessAction(self, action):
"""Performs one step given selected action. Returns step reward."""
if action < 0 or action > 3:
return
reward = 0.
if action == 0: # up
for i in range(4):
self._state[:, i], rew = Game2048._StackRow(self._state[:, i])
reward += rew
elif action == 1: # down
for i in range(4):
self._state[::-1, i], rew = Game2048._StackRow(self._state[::-1, i])
reward += rew
elif action == 2: # left
for i in range(4):
self._state[i, :], rew = Game2048._StackRow(self._state[i, :])
reward += rew
elif action == 3: # right
for i in range(4):
self._state[i, ::-1], rew = Game2048._StackRow(self._state[i, ::-1])
reward += rew
else:
return 0.
empty_cells = []
for x in range(4):
for y in range(4):
if self._state[x, y] == 0:
empty_cells.append((x, y))
if not empty_cells:
self._state = None # Terminal state
else:
cell = random.choice(empty_cells)
self._state[cell] = random.choice([1, 1, 1, 1, 1, 1, 1, 1, 1, 2])
print self._state, reward
return reward
if __name__ == "__main__":
game = Game2048()
state = game.GetState()
fig = plt.figure()
ax = fig.add_subplot(111)
cmap = colors.ListedColormap(['black', 'red', 'green', 'blue', 'yellow',
'orange', 'lime', 'white'])
bounds = [-0.5, 0.5, 1.5, 2.5, 3.5, 4.5, 5.5, 6.5]
norm = colors.BoundaryNorm(bounds, cmap.N)
window = ax.imshow(state, cmap=cmap, norm=norm, interpolation='none')
def OnKeyPress(event, env):
action = 0
if event.key == 'up':
action = 0
elif event.key == 'down':
action = 1
elif event.key == 'left':
action = 2
elif event.key == 'right':
action = 3
elif event.key == 'q':
sys.exit()
else:
return
print 'Action %d' % action
env.ProcessAction(action)
window.set_data(env.GetState())
fig.canvas.draw()
cid = fig.canvas.mpl_connect('key_press_event',
lambda e: OnKeyPress(e, game))
plt.show()
| mit | -2,232,923,184,935,911,200 | 25.95082 | 78 | 0.545012 | false |
formiano/enigma2-4.4 | lib/python/Components/TimerList.py | 6 | 6715 | from enigma import eListboxPythonMultiContent, eListbox, gFont, RT_HALIGN_LEFT, RT_HALIGN_RIGHT, RT_VALIGN_TOP, RT_VALIGN_BOTTOM, getDesktop
from HTMLComponent import HTMLComponent
from GUIComponent import GUIComponent
from Tools.FuzzyDate import FuzzyTime
from Tools.LoadPixmap import LoadPixmap
from timer import TimerEntry
from Tools.Directories import resolveFilename, SCOPE_ACTIVE_SKIN
class TimerList(HTMLComponent, GUIComponent, object):
#
# | <Name of the Timer> <Service> <orb.pos>|
# | <state> <start, end> |
#
def buildTimerEntry(self, timer, processed):
screenwidth = getDesktop(0).size().width()
height = self.l.getItemSize().height()
width = self.l.getItemSize().width()
res = [ None ]
x = (2*width) // 3
if screenwidth and screenwidth == 1920:
res.append((eListboxPythonMultiContent.TYPE_TEXT, 50, 2, x-24, 35, 3, RT_HALIGN_LEFT|RT_VALIGN_TOP, timer.name))
else:
res.append((eListboxPythonMultiContent.TYPE_TEXT, 26, 2, x-24, 25, 1, RT_HALIGN_LEFT|RT_VALIGN_TOP, timer.name))
text = ("%s %s") % (timer.service_ref.getServiceName(), self.getOrbitalPos(timer.service_ref))
if screenwidth and screenwidth == 1920:
res.append((eListboxPythonMultiContent.TYPE_TEXT, x, 0, width-x-2, 35, 3, RT_HALIGN_RIGHT|RT_VALIGN_TOP, text))
else:
res.append((eListboxPythonMultiContent.TYPE_TEXT, x, 0, width-x-2, 25, 0, RT_HALIGN_RIGHT|RT_VALIGN_TOP, text))
days = ( _("Mon"), _("Tue"), _("Wed"), _("Thu"), _("Fri"), _("Sat"), _("Sun") )
begin = FuzzyTime(timer.begin)
if timer.repeated:
repeatedtext = []
flags = timer.repeated
for x in (0, 1, 2, 3, 4, 5, 6):
if flags & 1 == 1:
repeatedtext.append(days[x])
flags >>= 1
repeatedtext = ", ".join(repeatedtext)
if self.iconRepeat:
if screenwidth and screenwidth == 1920:
res.append((eListboxPythonMultiContent.TYPE_PIXMAP_ALPHABLEND, 5, 3, 30, 30, self.iconRepeat))
else:
res.append((eListboxPythonMultiContent.TYPE_PIXMAP_ALPHABLEND, 2, 2, 20, 20, self.iconRepeat))
else:
repeatedtext = begin[0] # date
if timer.justplay:
text = repeatedtext + ((" %s "+ _("(ZAP)")) % (begin[1]))
else:
text = repeatedtext + ((" %s ... %s (%d " + _("mins") + ")") % (begin[1], FuzzyTime(timer.end)[1], (timer.end - timer.begin) / 60))
if screenwidth and screenwidth == 1920:
res.append((eListboxPythonMultiContent.TYPE_TEXT, 148, 37, width-150, 35, 3, RT_HALIGN_RIGHT|RT_VALIGN_BOTTOM, text))
else:
res.append((eListboxPythonMultiContent.TYPE_TEXT, 148, 24, width-150, 25, 1, RT_HALIGN_RIGHT|RT_VALIGN_BOTTOM, text))
icon = None
if not processed:
if timer.state == TimerEntry.StateWaiting:
state = _("waiting")
icon = self.iconWait
elif timer.state == TimerEntry.StatePrepared:
state = _("about to start")
icon = self.iconPrepared
elif timer.state == TimerEntry.StateRunning:
if timer.justplay:
state = _("zapped")
icon = self.iconZapped
else:
state = _("recording...")
icon = self.iconRecording
elif timer.state == TimerEntry.StateEnded:
state = _("done!")
icon = self.iconDone
else:
state = _("<unknown>")
icon = None
else:
state = _("done!")
icon = self.iconDone
if timer.disabled:
state = _("disabled")
icon = self.iconDisabled
if timer.failed:
state = _("failed")
icon = self.iconFailed
if screenwidth and screenwidth == 1920:
res.append((eListboxPythonMultiContent.TYPE_TEXT, 50, 40, 150, 35, 3, RT_HALIGN_LEFT|RT_VALIGN_TOP, state))
else:
res.append((eListboxPythonMultiContent.TYPE_TEXT, 26, 24, 90, 20, 1, RT_HALIGN_LEFT|RT_VALIGN_TOP, state))
if icon:
if screenwidth and screenwidth == 1920:
res.append((eListboxPythonMultiContent.TYPE_PIXMAP_ALPHABLEND, 5, 40, 30, 30, icon))
else:
res.append((eListboxPythonMultiContent.TYPE_PIXMAP_ALPHABLEND, 2, 25, 20, 20, icon))
if timer.isAutoTimer:
if screenwidth and screenwidth == 1920:
res.append((eListboxPythonMultiContent.TYPE_PIXMAP_ALPHABLEND, 5, 3, 30, 30, self.iconAutoTimer))
else:
res.append((eListboxPythonMultiContent.TYPE_PIXMAP_ALPHABLEND, 2, 2, 20, 20, self.iconAutoTimer))
line = LoadPixmap(resolveFilename(SCOPE_ACTIVE_SKIN, "div-h.png"))
res.append((eListboxPythonMultiContent.TYPE_PIXMAP_ALPHABLEND, 0, height-2, width, 2, line))
return res
def __init__(self, list):
GUIComponent.__init__(self)
self.l = eListboxPythonMultiContent()
self.l.setBuildFunc(self.buildTimerEntry)
self.l.setFont(0, gFont("Regular", 20))
self.l.setFont(1, gFont("Regular", 18))
self.l.setFont(2, gFont("Regular", 16))
self.l.setFont(3, gFont("Regular", 28))
self.l.setFont(4, gFont("Regular", 28))
self.l.setFont(5, gFont("Regular", 24))
self.l.setItemHeight(50)
self.l.setList(list)
self.iconWait = LoadPixmap(resolveFilename(SCOPE_ACTIVE_SKIN, "icons/timer_wait.png"))
self.iconRecording = LoadPixmap(resolveFilename(SCOPE_ACTIVE_SKIN, "icons/timer_rec.png"))
self.iconPrepared = LoadPixmap(resolveFilename(SCOPE_ACTIVE_SKIN, "icons/timer_prep.png"))
self.iconDone = LoadPixmap(resolveFilename(SCOPE_ACTIVE_SKIN, "icons/timer_done.png"))
self.iconRepeat = LoadPixmap(resolveFilename(SCOPE_ACTIVE_SKIN, "icons/timer_rep.png"))
self.iconZapped = LoadPixmap(resolveFilename(SCOPE_ACTIVE_SKIN, "icons/timer_zap.png"))
self.iconDisabled = LoadPixmap(resolveFilename(SCOPE_ACTIVE_SKIN, "icons/timer_off.png"))
self.iconFailed = LoadPixmap(resolveFilename(SCOPE_ACTIVE_SKIN, "icons/timer_failed.png"))
self.iconAutoTimer = LoadPixmap(resolveFilename(SCOPE_ACTIVE_SKIN, "icons/timer_autotimer.png"))
def getCurrent(self):
cur = self.l.getCurrentSelection()
return cur and cur[0]
GUI_WIDGET = eListbox
def postWidgetCreate(self, instance):
instance.setContent(self.l)
self.instance = instance
instance.setWrapAround(True)
def moveToIndex(self, index):
self.instance.moveSelectionTo(index)
def getCurrentIndex(self):
return self.instance.getCurrentIndex()
currentIndex = property(getCurrentIndex, moveToIndex)
currentSelection = property(getCurrent)
def moveDown(self):
self.instance.moveSelection(self.instance.moveDown)
def invalidate(self):
self.l.invalidate()
def entryRemoved(self, idx):
self.l.entryRemoved(idx)
def getOrbitalPos(self, ref):
refstr = None
if hasattr(ref, 'sref'):
refstr = str(ref.sref)
else:
refstr = str(ref)
if '%3a//' in refstr:
return "%s" % _("Stream")
op = int(refstr.split(':', 10)[6][:-4] or "0",16)
if op == 0xeeee:
return "%s" % _("DVB-T")
if op == 0xffff:
return "%s" % _("DVB_C")
direction = 'E'
if op > 1800:
op = 3600 - op
direction = 'W'
return ("%d.%d\xc2\xb0%s") % (op // 10, op % 10, direction)
| gpl-2.0 | -493,889,607,817,011,000 | 36.305556 | 140 | 0.689799 | false |
vityurkiv/Ox | modules/tensor_mechanics/tests/drucker_prager/small_deform3.py | 23 | 3585 | #!/usr/bin/env python
import os
import sys
import numpy as np
import matplotlib.pyplot as plt
def expected(scheme, sqrtj2):
cohesion = 10
friction_degrees = 35
tip_smoother = 8
friction = friction_degrees * np.pi / 180.0
if (scheme == "native"):
aaa = cohesion
bbb = np.tan(friction)
elif (scheme == "outer_tip"):
aaa = 2 * np.sqrt(3) * cohesion * np.cos(friction) / (3.0 - np.sin(friction))
bbb = 2 * np.sin(friction) / np.sqrt(3) / (3.0 - np.sin(friction))
elif (scheme == "inner_tip"):
aaa = 2 * np.sqrt(3) * cohesion * np.cos(friction) / (3.0 + np.sin(friction))
bbb = 2 * np.sin(friction) / np.sqrt(3) / (3.0 + np.sin(friction))
elif (scheme == "lode_zero"):
aaa = cohesion * np.cos(friction)
bbb = np.sin(friction) / 3.0
elif (scheme == "inner_edge"):
aaa = 3 * cohesion * np.cos(friction) / np.sqrt(9.0 + 3.0 * np.power(np.sin(friction), 2))
bbb = np.sin(friction) / np.sqrt(9.0 + 3.0 * np.power(np.sin(friction), 2))
return (aaa - np.sqrt(tip_smoother * tip_smoother + sqrtj2 * sqrtj2)) / bbb
def sigma_mean(stress):
return (stress[0] + stress[3] + stress[5])/3.0
def sigma_bar(stress):
mean = sigma_mean(stress)
return np.sqrt(0.5 * (np.power(stress[0] - mean, 2) + 2*stress[1]*stress[1] + 2*stress[2]*stress[2] + np.power(stress[3] - mean, 2) + 2*stress[4]*stress[4] + np.power(stress[5] - mean, 2)))
def third_inv(stress):
mean = sigma_mean(stress)
return (stress[0] - mean)*(stress[3] - mean)*(stress[5] - mean)
def lode_angle(stress):
bar = sigma_bar(stress)
third = third_inv(stress)
return np.arcsin(-1.5 * np.sqrt(3.0) * third / np.power(bar, 3)) / 3.0
def moose_result(fn):
f = open(fn)
x = []
y = []
for line in f:
if not line.strip():
continue
line = line.strip()
if line.startswith("time") or line.startswith("0"):
continue
line = map(float, line.split(","))
if line[1] < -1E-10:
continue # this is an elastic deformation
trace = 3.0 * sigma_mean(line[3:])
bar = sigma_bar(line[3:])
x.append(trace)
y.append(bar)
f.close()
return (x, y)
plt.figure()
sqrtj2 = np.arange(0, 30, 0.25)
plt.plot(expected("native", sqrtj2), sqrtj2, 'k-', label = 'expected (native)')
mr = moose_result("gold/small_deform3_native.csv")
plt.plot(mr[0], mr[1], 'k^', label = 'MOOSE (native)')
plt.plot(expected("outer_tip", sqrtj2), sqrtj2, 'g-', label = 'expected (outer_tip)')
mr = moose_result("gold/small_deform3_outer_tip.csv")
plt.plot(mr[0], mr[1], 'g^', label = 'MOOSE (outer_tip)')
plt.plot(expected("inner_tip", sqrtj2), sqrtj2, 'b-', label = 'expected (inner_tip)')
mr = moose_result("gold/small_deform3_inner_tip.csv")
plt.plot(mr[0], mr[1], 'b^', label = 'MOOSE (inner_tip)')
plt.plot(expected("lode_zero", sqrtj2), sqrtj2, 'c-', label = 'expected (lode_zero)')
mr = moose_result("gold/small_deform3_lode_zero.csv")
plt.plot(mr[0], mr[1], 'c^', label = 'MOOSE (lode_zero)')
plt.plot(expected("inner_edge", sqrtj2), sqrtj2, 'r-', label = 'expected (inner_edge)')
mr = moose_result("gold/small_deform3_inner_edge.csv")
plt.plot(mr[0], mr[1], 'r^', label = 'MOOSE (inner_edge)')
legend = plt.legend(bbox_to_anchor=(1.16, 0.95))
for label in legend.get_texts():
label.set_fontsize('small')
plt.xlabel("Tr(stress)")
plt.ylabel("sqrt(J2)")
plt.title("Drucker-Prager yield function on meridional plane")
plt.axis([-25, 15, 0, 25])
plt.savefig("small_deform3.png")
sys.exit(0)
| lgpl-2.1 | -6,270,647,653,232,130,000 | 33.805825 | 193 | 0.600279 | false |
ghickman/django | tests/model_inheritance/tests.py | 19 | 17422 | from __future__ import unicode_literals
from operator import attrgetter
from django.core.exceptions import FieldError, ValidationError
from django.core.management import call_command
from django.db import connection
from django.test import TestCase, TransactionTestCase
from django.test.utils import CaptureQueriesContext
from django.utils import six
from .models import (
Base, Chef, CommonInfo, Copy, GrandChild, GrandParent, ItalianRestaurant,
MixinModel, ParkingLot, Place, Post, Restaurant, Student, SubBase,
Supplier, Title, Worker,
)
class ModelInheritanceTests(TestCase):
    def test_abstract(self):
        # Worker and Student both pick up the 'name' and 'age' fields (and
        # the __str__() method) from the abstract CommonInfo base, just as
        # with ordinary Python subclassing.  An abstract base factors out
        # shared declarations while leaving the concrete models completely
        # independent at the database level.
        worker = Worker.objects.create(name="Fred", age=35, job="Quarry worker")
        Worker.objects.create(name="Barney", age=34, job="Quarry worker")
        student = Student.objects.create(name="Pebbles", age=5, school_class="1B")
        self.assertEqual(six.text_type(worker), "Worker Fred")
        self.assertEqual(six.text_type(student), "Student Pebbles")

        # A child inherits its parent's Meta when it doesn't declare its
        # own, so Worker is ordered by name via CommonInfo.Meta.
        self.assertQuerysetEqual(
            Worker.objects.values("name"), [
                {"name": "Barney"},
                {"name": "Fred"},
            ],
            lambda o: o
        )

        # Student declares its own Meta, which completely replaces the
        # inherited one — no ordering applies to Students.
        self.assertEqual(Student._meta.ordering, [])

        # The abstract base itself is not a model: it has no manager.
        with self.assertRaises(AttributeError):
            CommonInfo.objects.all()

    def test_reverse_relation_for_different_hierarchy_tree(self):
        # p.supplier works for a Place 'p' (the parent of Supplier), but a
        # Restaurant cannot traverse that reverse relation, since it is not
        # part of the Place-Supplier hierarchy.
        self.assertQuerysetEqual(Place.objects.filter(supplier__name="foo"), [])
        with self.assertRaises(FieldError):
            Restaurant.objects.filter(supplier__name="foo")

    def test_model_with_distinct_accessors(self):
        # Post declares distinct reverse accessors for its Comment and Link
        # relations via a related_name template.
        post = Post.objects.create(title="Lorem Ipsum")
        post.attached_comment_set.create(content="Save $ on V1agr@", is_spam=True)
        post.attached_link_set.create(
            content="The Web framework for perfections with deadlines.",
            url="http://www.djangoproject.com/"
        )

        # The raw '%(class)s' template itself is never an attribute.
        with self.assertRaises(AttributeError):
            getattr(post, "attached_%(class)s_set")

    def test_meta_fields_and_ordering(self):
        # Multi-table inheritance concatenates the parent's fields, the
        # parent link, then the child's own fields, in that order.
        restaurant_fields = [
            "id", "name", "address", "place_ptr", "rating", "serves_hot_dogs",
            "serves_pizza", "chef",
        ]
        self.assertEqual(
            [f.name for f in Restaurant._meta.fields],
            restaurant_fields,
        )
        self.assertEqual(
            [f.name for f in ItalianRestaurant._meta.fields],
            restaurant_fields + ["restaurant_ptr", "serves_gnocchi"],
        )
        self.assertEqual(Restaurant._meta.ordering, ["-rating"])

    def test_custompk_m2m(self):
        base = Base.objects.create()
        base.titles.add(Title.objects.create(title="foof"))
        sub = SubBase.objects.create(sub_id=base.id)
        base = Base.objects.get(pk=sub.id)
        self.assertNotEqual(base.pk, sub.pk)
        # Low-level check: the m2m descriptor keys off the subclass pk.
        self.assertEqual(sub.titles.related_val, (sub.id,))
        # Higher-level check: the title "foof" added to the other Base must
        # not accidentally appear in the subclass's set.
        self.assertQuerysetEqual(sub.titles.all(), [])

    def test_update_parent_filtering(self):
        """
        Test that updating a field of a model subclass doesn't issue an UPDATE
        query constrained by an inner query.

        Refs #10399
        """
        supplier = Supplier.objects.create(
            name='Central market',
            address='610 some street',
        )
        # Capture the expected statement in a backend-agnostic way by
        # updating through the parent queryset directly.
        with CaptureQueriesContext(connection) as captured_queries:
            Place.objects.filter(pk=supplier.pk).update(name=supplier.name)
        expected_sql = captured_queries[0]['sql']
        # Saving the child instance must produce the same flat UPDATE.
        with CaptureQueriesContext(connection) as captured_queries:
            supplier.save(update_fields=('name',))
        for query in captured_queries:
            if 'UPDATE' in query['sql']:
                self.assertEqual(expected_sql, query['sql'])

    def test_eq(self):
        # Equality doesn't transfer across multi-table inheritance: a Place
        # and a Restaurant are never equal, even with matching pks.
        self.assertNotEqual(Place(id=1), Restaurant(id=1))
        self.assertNotEqual(Restaurant(id=1), Place(id=1))

    def test_mixin_init(self):
        # A non-model mixin's __init__ side effects survive model __init__.
        mixed = MixinModel()
        self.assertEqual(mixed.other_attr, 1)
class ModelInheritanceDataTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.restaurant = Restaurant.objects.create(
name="Demon Dogs",
address="944 W. Fullerton",
serves_hot_dogs=True,
serves_pizza=False,
rating=2,
)
chef = Chef.objects.create(name="Albert")
cls.italian_restaurant = ItalianRestaurant.objects.create(
name="Ristorante Miron",
address="1234 W. Ash",
serves_hot_dogs=False,
serves_pizza=False,
serves_gnocchi=True,
rating=4,
chef=chef,
)
def test_filter_inherited_model(self):
self.assertQuerysetEqual(
ItalianRestaurant.objects.filter(address="1234 W. Ash"), [
"Ristorante Miron",
],
attrgetter("name")
)
def test_update_inherited_model(self):
self.italian_restaurant.address = "1234 W. Elm"
self.italian_restaurant.save()
self.assertQuerysetEqual(
ItalianRestaurant.objects.filter(address="1234 W. Elm"), [
"Ristorante Miron",
],
attrgetter("name")
)
def test_parent_fields_available_for_filtering_in_child_model(self):
# Parent fields can be used directly in filters on the child model.
self.assertQuerysetEqual(
Restaurant.objects.filter(name="Demon Dogs"), [
"Demon Dogs",
],
attrgetter("name")
)
self.assertQuerysetEqual(
ItalianRestaurant.objects.filter(address="1234 W. Ash"), [
"Ristorante Miron",
],
attrgetter("name")
)
def test_filter_on_parent_returns_object_of_parent_type(self):
# Filters against the parent model return objects of the parent's type.
p = Place.objects.get(name="Demon Dogs")
self.assertIs(type(p), Place)
def test_parent_child_one_to_one_link(self):
# Since the parent and child are linked by an automatically created
# OneToOneField, you can get from the parent to the child by using the
# child's name.
self.assertEqual(
Place.objects.get(name="Demon Dogs").restaurant,
Restaurant.objects.get(name="Demon Dogs")
)
self.assertEqual(
Place.objects.get(name="Ristorante Miron").restaurant.italianrestaurant,
ItalianRestaurant.objects.get(name="Ristorante Miron")
)
self.assertEqual(
Restaurant.objects.get(name="Ristorante Miron").italianrestaurant,
ItalianRestaurant.objects.get(name="Ristorante Miron")
)
def test_parent_child_one_to_one_link_on_nonrelated_objects(self):
# This won't work because the Demon Dogs restaurant is not an Italian
# restaurant.
self.assertRaises(
ItalianRestaurant.DoesNotExist,
lambda: Place.objects.get(name="Demon Dogs").restaurant.italianrestaurant
)
def test_inherited_does_not_exist_exception(self):
# An ItalianRestaurant which does not exist is also a Place which does
# not exist.
self.assertRaises(
Place.DoesNotExist,
ItalianRestaurant.objects.get, name="The Noodle Void"
)
def test_inherited_multiple_objects_returned_exception(self):
# MultipleObjectsReturned is also inherited.
self.assertRaises(
Place.MultipleObjectsReturned,
Restaurant.objects.get, id__lt=12321
)
def test_related_objects_for_inherited_models(self):
# Related objects work just as they normally do.
s1 = Supplier.objects.create(name="Joe's Chickens", address="123 Sesame St")
s1.customers .set([self.restaurant, self.italian_restaurant])
s2 = Supplier.objects.create(name="Luigi's Pasta", address="456 Sesame St")
s2.customers.set([self.italian_restaurant])
# This won't work because the Place we select is not a Restaurant (it's
# a Supplier).
p = Place.objects.get(name="Joe's Chickens")
self.assertRaises(
Restaurant.DoesNotExist, lambda: p.restaurant
)
self.assertEqual(p.supplier, s1)
self.assertQuerysetEqual(
self.italian_restaurant.provider.order_by("-name"), [
"Luigi's Pasta",
"Joe's Chickens"
],
attrgetter("name")
)
self.assertQuerysetEqual(
Restaurant.objects.filter(provider__name__contains="Chickens"), [
"Ristorante Miron",
"Demon Dogs",
],
attrgetter("name")
)
self.assertQuerysetEqual(
ItalianRestaurant.objects.filter(provider__name__contains="Chickens"), [
"Ristorante Miron",
],
attrgetter("name"),
)
ParkingLot.objects.create(
name="Main St", address="111 Main St", main_site=s1
)
ParkingLot.objects.create(
name="Well Lit", address="124 Sesame St", main_site=self.italian_restaurant
)
self.assertEqual(
Restaurant.objects.get(lot__name="Well Lit").name,
"Ristorante Miron"
)
def test_update_works_on_parent_and_child_models_at_once(self):
# The update() command can update fields in parent and child classes at
# once (although it executed multiple SQL queries to do so).
rows = Restaurant.objects.filter(
serves_hot_dogs=True, name__contains="D"
).update(
name="Demon Puppies", serves_hot_dogs=False
)
self.assertEqual(rows, 1)
r1 = Restaurant.objects.get(pk=self.restaurant.pk)
self.assertFalse(r1.serves_hot_dogs)
self.assertEqual(r1.name, "Demon Puppies")
def test_values_works_on_parent_model_fields(self):
# The values() command also works on fields from parent models.
self.assertQuerysetEqual(
ItalianRestaurant.objects.values("name", "rating"), [
{"rating": 4, "name": "Ristorante Miron"},
],
lambda o: o
)
def test_select_related_works_on_parent_model_fields(self):
# select_related works with fields from the parent object as if they
# were a normal part of the model.
self.assertNumQueries(
2, lambda: ItalianRestaurant.objects.all()[0].chef
)
self.assertNumQueries(
1, lambda: ItalianRestaurant.objects.select_related("chef")[0].chef
)
def test_select_related_defer(self):
"""
#23370 - Should be able to defer child fields when using
select_related() from parent to child.
"""
qs = (Restaurant.objects
.select_related("italianrestaurant")
.defer("italianrestaurant__serves_gnocchi")
.order_by("rating"))
# Test that the field was actually deferred
with self.assertNumQueries(2):
objs = list(qs.all())
self.assertTrue(objs[1].italianrestaurant.serves_gnocchi)
# Test that model fields where assigned correct values
self.assertEqual(qs[0].name, 'Demon Dogs')
self.assertEqual(qs[0].rating, 2)
self.assertEqual(qs[1].italianrestaurant.name, 'Ristorante Miron')
self.assertEqual(qs[1].italianrestaurant.rating, 4)
def test_update_query_counts(self):
"""
Test that update queries do not generate non-necessary queries.
Refs #18304.
"""
with self.assertNumQueries(3):
self.italian_restaurant.save()
def test_filter_inherited_on_null(self):
# Refs #12567
Supplier.objects.create(
name="Central market",
address="610 some street",
)
self.assertQuerysetEqual(
Place.objects.filter(supplier__isnull=False), [
"Central market",
],
attrgetter("name")
)
self.assertQuerysetEqual(
Place.objects.filter(supplier__isnull=True).order_by("name"), [
"Demon Dogs",
"Ristorante Miron",
],
attrgetter("name")
)
def test_exclude_inherited_on_null(self):
# Refs #12567
Supplier.objects.create(
name="Central market",
address="610 some street",
)
self.assertQuerysetEqual(
Place.objects.exclude(supplier__isnull=False).order_by("name"), [
"Demon Dogs",
"Ristorante Miron",
],
attrgetter("name")
)
self.assertQuerysetEqual(
Place.objects.exclude(supplier__isnull=True), [
"Central market",
],
attrgetter("name")
)
class InheritanceSameModelNameTests(TransactionTestCase):
available_apps = ['model_inheritance']
def setUp(self):
# The Title model has distinct accessors for both
# model_inheritance.Copy and model_inheritance_same_model_name.Copy
# models.
self.title = Title.objects.create(title='Lorem Ipsum')
def test_inheritance_related_name(self):
self.assertEqual(
self.title.attached_model_inheritance_copy_set.create(
content='Save $ on V1agr@',
url='http://v1agra.com/',
title='V1agra is spam',
), Copy.objects.get(
content='Save $ on V1agr@',
))
def test_inheritance_with_same_model_name(self):
with self.modify_settings(
INSTALLED_APPS={'append': ['model_inheritance.same_model_name']}):
call_command('migrate', verbosity=0, run_syncdb=True)
from .same_model_name.models import Copy
copy = self.title.attached_same_model_name_copy_set.create(
content='The Web framework for perfectionists with deadlines.',
url='http://www.djangoproject.com/',
title='Django Rocks'
)
self.assertEqual(
copy,
Copy.objects.get(
content='The Web framework for perfectionists with deadlines.',
))
# We delete the copy manually so that it doesn't block the flush
# command under Oracle (which does not cascade deletions).
copy.delete()
def test_related_name_attribute_exists(self):
# The Post model doesn't have an attribute called 'attached_%(app_label)s_%(class)s_set'.
self.assertFalse(hasattr(self.title, 'attached_%(app_label)s_%(class)s_set'))
class InheritanceUniqueTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.grand_parent = GrandParent.objects.create(
email='[email protected]',
first_name='grand',
last_name='parent',
)
def test_unique(self):
grand_child = GrandChild(
email=self.grand_parent.email,
first_name='grand',
last_name='child',
)
msg = 'Grand parent with this Email already exists.'
with self.assertRaisesMessage(ValidationError, msg):
grand_child.validate_unique()
def test_unique_together(self):
grand_child = GrandChild(
email='[email protected]',
first_name=self.grand_parent.first_name,
last_name=self.grand_parent.last_name,
)
msg = 'Grand parent with this First name and Last name already exists.'
with self.assertRaisesMessage(ValidationError, msg):
grand_child.validate_unique()
| bsd-3-clause | 3,465,780,177,792,821,000 | 37.20614 | 97 | 0.602744 | false |
mcmaxwell/idea_digital_agency | idea/idea/settings.py | 1 | 6618 | """
Django settings for skelet project.
Generated by 'django-admin startproject' using Django 1.8.14.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
from django.utils.translation import ugettext_lazy as _
import cloudinary
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '4$6(ctt7gg0(l((l0qfp==40js#um4)5=vj&4#o+dv)2e4g4wl'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = False
ALLOWED_HOSTS = [
'*'
]
#SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
#if DEBUG is True:
# SECURE_SSL_REDIRECT = False
#else:
# SECURE_SSL_REDIRECT = True
#SESSION_COOKIE_SECURE = True
#CSRF_COOKIE_SECURE = True
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
#libs
'redactor',
'sorl.thumbnail',
'adminsortable2',
'modeltranslation',
'django_summernote',
# feincms
'feincms',
'mptt',
'feincms.module.page',
# local apps
'common',
'content',
'blog',
'cases',
#'vacansieses',
#'certificate',
#'services',
#'slider',
'info',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
#'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
'idea.helpers.AdminLocaleMiddleware'
)
ADMIN_LANGUAGE_CODE = 'ru-RU'
ROOT_URLCONF = 'idea.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [
os.path.join(BASE_DIR, 'idea/templates'),
],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.core.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
'django.core.context_processors.request',
'django.template.context_processors.i18n',
'info.context_processors.info',
],
},
},
]
WSGI_APPLICATION = 'idea.wsgi.application'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'idea',
'USER': 'admin',
'PASSWORD': '1qw23er4',
'HOST': 'localhost',
'PORT': '',
}
}
SUMMERNOTE_CONFIG = {
'attachment_filesize_limit': 10000000 * 1024,
'width': 800,
'height': 570,
'toolbar': [
['style', ['style']],
['para', ['ul', 'ol', 'paragraph']],
['font', ['bold', 'italic', 'underline',
'clear']],
['height', ['height']],
['insert', ['link', 'picture', 'hr']],
['view', ['fullscreen', 'codeview']],
['help', ['help']],
['insert', ['link']],
],
}
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'uk'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
LANGUAGES = [
('ru', _('Russian')),
('en', _('English')),
('uk', _('Ukrainian')),
]
LOCALE_PATHS = (
os.path.join(BASE_DIR, "locale/"),
)
EMAIL_HOST = 'smtp.yandex.ru'
EMAIL_HOST_USER = '[email protected]'
EMAIL_HOST_PASSWORD = '0987007a'
EMAIL_PORT = 465
EMAIL_USE_TLS = False
EMAIL_USE_SSL = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATIC_URL = '/static/'
#STATIC_URL = 'https://s3.eu-west-2.amazonaws.com/idea/static/'
STATIC_ROOT = os.path.join(BASE_DIR, '../builds/static/modules')
MEDIA_ROOT = os.path.join(BASE_DIR, 'media',)
MEDIA_URL = '/media/'
# MODULES OPTIONS
REDACTOR_OPTIONS = {'lang': 'ru'}
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
},
'require_debug_true': {
'()': 'django.utils.log.RequireDebugTrue'
}
},
'formatters': {
'main_formatter': {
'format': '%(levelname)s:%(name)s: %(message)s '
'(%(asctime)s; %(filename)s:%(lineno)d)',
'datefmt': "%Y-%m-%d %H:%M:%S",
},
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
},
'console': {
'level': 'DEBUG',
'filters': ['require_debug_true'],
'class': 'logging.StreamHandler',
'formatter': 'main_formatter',
},
'production_file': {
'level': 'INFO',
'class': 'logging.handlers.RotatingFileHandler',
'filename': os.path.join(BASE_DIR, '../production.log'),
'maxBytes': 1024 * 1024 * 5, # 5 MB
'backupCount': 7,
'formatter': 'main_formatter',
'filters': ['require_debug_false'],
},
'null': {
"class": 'logging.NullHandler',
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins', 'console'],
'level': 'ERROR',
'propagate': True,
},
'django': {
'handlers': ['null', ],
},
'py.warnings': {
'handlers': ['null', ],
},
'': {
'handlers': ['console', 'production_file',],
'level': "DEBUG",
},
}
}
try:
from local_settings import *
except ImportError:
pass
| mit | -6,166,163,307,230,685,000 | 23.879699 | 120 | 0.573738 | false |
lowtalker/splunk-sdk-python | tests/test_role.py | 2 | 4105 | #!/usr/bin/env python
#
# Copyright 2011-2014 Splunk, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"): you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import testlib
import logging
import splunklib.client as client
class RoleTestCase(testlib.SDKTestCase):
def setUp(self):
super(RoleTestCase, self).setUp()
self.role_name = testlib.tmpname()
self.role = self.service.roles.create(self.role_name)
def tearDown(self):
super(RoleTestCase, self).tearDown()
for role in self.service.roles:
if role.name.startswith('delete-me'):
self.service.roles.delete(role.name)
def check_role(self, role):
self.check_entity(role)
capabilities = role.service.capabilities
for capability in role.content.capabilities:
self.assertTrue(capability in capabilities)
def test_read(self):
for role in self.service.roles:
self.check_role(role)
role.refresh()
self.check_role(role)
def test_read_case_insensitive(self):
for role in self.service.roles:
a = self.service.roles[role.name.upper()]
b = self.service.roles[role.name.lower()]
self.assertEqual(a.name, b.name)
def test_create(self):
self.assertTrue(self.role_name in self.service.roles)
self.check_entity(self.role)
def test_delete(self):
self.assertTrue(self.role_name in self.service.roles)
self.service.roles.delete(self.role_name)
self.assertFalse(self.role_name in self.service.roles)
self.assertRaises(client.HTTPError, self.role.refresh)
def test_grant_and_revoke(self):
self.assertFalse('edit_user' in self.role.capabilities)
self.role.grant('edit_user')
self.role.refresh()
self.assertTrue('edit_user' in self.role.capabilities)
self.assertFalse('change_own_password' in self.role.capabilities)
self.role.grant('change_own_password')
self.role.refresh()
self.assertTrue('edit_user' in self.role.capabilities)
self.assertTrue('change_own_password' in self.role.capabilities)
self.role.revoke('edit_user')
self.role.refresh()
self.assertFalse('edit_user' in self.role.capabilities)
self.assertTrue('change_own_password' in self.role.capabilities)
self.role.revoke('change_own_password')
self.role.refresh()
self.assertFalse('edit_user' in self.role.capabilities)
self.assertFalse('change_own_password' in self.role.capabilities)
def test_invalid_grant(self):
self.assertRaises(client.NoSuchCapability, self.role.grant, 'i-am-an-invalid-capability')
def test_invalid_revoke(self):
self.assertRaises(client.NoSuchCapability, self.role.revoke, 'i-am-an-invalid-capability')
def test_revoke_capability_not_granted(self):
self.role.revoke('change_own_password')
def test_update(self):
kwargs = {}
if 'user' in self.role['imported_roles']:
kwargs['imported_roles'] = ''
else:
kwargs['imported_roles'] = ['user']
if self.role['srchJobsQuota'] is not None:
kwargs['srchJobsQuota'] = int(self.role['srchJobsQuota']) + 1
self.role.update(**kwargs)
self.role.refresh()
self.assertEqual(self.role['imported_roles'], kwargs['imported_roles'])
self.assertEqual(int(self.role['srchJobsQuota']), kwargs['srchJobsQuota'])
if __name__ == "__main__":
try:
import unittest2 as unittest
except ImportError:
import unittest
unittest.main()
| apache-2.0 | 2,148,482,591,509,390,000 | 35.651786 | 98 | 0.661632 | false |
hrayr-artunyan/shuup | shuup/campaigns/utils/sales_range.py | 2 | 1993 | # -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2016, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
from django.db.models import Q, Sum
from shuup.core.models import Payment
def get_total_sales(shop, customer):
aggregated_sales = Payment.objects.filter(
order__customer=customer,
order__shop=shop,
).aggregate(total_sales=Sum("amount_value"))
return aggregated_sales["total_sales"] or 0
def assign_to_group_based_on_sales(cls, shop, customer, sales_range=None):
total_sales = get_total_sales(shop, customer)
# Only ranges with sales bigger than min_value
query = Q(min_value__lte=total_sales)
# Ranges with max lower than sales or None
query &= Q(Q(max_value__gt=total_sales) | Q(max_value__isnull=True))
qs = cls.objects.active(shop)
if sales_range:
qs = qs.filter(pk=sales_range.pk)
matching_pks = set(qs.filter(query).values_list("pk", flat=True))
for sales_range in cls.objects.filter(pk__in=matching_pks):
sales_range.group.members.add(customer)
for sales_range in cls.objects.active(shop).exclude(pk__in=matching_pks):
sales_range.group.members.remove(customer)
def get_contacts_in_sales_range(shop, min_value, max_value):
total_sales = Payment.objects.filter(
order__shop=shop,
).values(
"order__customer_id"
).annotate(
total_sales=Sum("amount_value")
)
# Since https://github.com/django/django/commit/3bbaf84d6533fb61ac0038f2bbe52ee0d7b4fd10
# is introduced in Django 1.9a1 we can't filter total sales with min and max value
results = set()
for result in total_sales:
total_sales = result.get("total_sales")
if min_value <= total_sales and (max_value is None or max_value > total_sales):
results.add(result.get("order__customer_id"))
return results
| agpl-3.0 | -7,999,467,114,643,379,000 | 34.589286 | 92 | 0.680883 | false |
google/rekall | rekall-core/rekall/plugins/darwin/misc.py | 1 | 10140 | # Rekall Memory Forensics
#
# Copyright 2013 Google Inc. All Rights Reserved.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or (at
# your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""Miscelaneous information gathering plugins."""
from __future__ import division
from builtins import str
from past.utils import old_div
__author__ = "Michael Cohen <[email protected]>"
import hashlib
import re
from rekall import obj
from rekall.plugins import core
from rekall.plugins.darwin import common
from rekall.plugins.renderers import visual_aides
from rekall_lib import utils
class DarwinDMSG(common.AbstractDarwinCommand):
"""Print the kernel debug messages."""
__name = "dmesg"
def render(self, renderer):
renderer.table_header([
("Message", "message", "<80")])
# This is a circular buffer with the write pointer at the msg_bufx
# member.
msgbuf = self.profile.get_constant_object(
"_msgbufp",
target="Pointer",
target_args=dict(
target="msgbuf"
)
)
# Make sure the buffer is not too large.
size = min(msgbuf.msg_size, 0x400000)
if 0 < msgbuf.msg_bufx < size:
data = self.kernel_address_space.read(msgbuf.msg_bufc, size)
data = data[msgbuf.msg_bufx: size] + data[0:msgbuf.msg_bufx]
data = re.sub(b"\x00", b"", data)
for x in data.splitlines():
renderer.table_row(x)
class DarwinMachineInfo(common.AbstractDarwinCommand):
"""Show information about this machine."""
__name = "machine_info"
def render(self, renderer):
renderer.table_header([("Attribute", "attribute", "20"),
("Value", "value", "10")])
info = self.profile.get_constant_object(
"_machine_info", "machine_info")
for member in info.members:
renderer.table_row(member, info.m(member))
class DarwinMount(common.AbstractDarwinCommand):
"""Show mount points."""
__name = "mount"
def render(self, renderer):
renderer.table_header([
("Device", "device", "30"),
("Mount Point", "mount_point", "60"),
("Type", "type", "")])
mount_list = self.profile.get_constant_object(
"_mountlist", "mount")
for mount in mount_list.walk_list("mnt_list.tqe_next", False):
renderer.table_row(mount.mnt_vfsstat.f_mntonname,
mount.mnt_vfsstat.f_mntfromname,
mount.mnt_vfsstat.f_fstypename)
class DarwinPhysicalMap(common.AbstractDarwinCommand):
"""Prints the EFI boot physical memory map."""
__name = "phys_map"
def render(self, renderer):
renderer.table_header([
("Physical Start", "phys", "[addrpad]"),
("Physical End", "phys", "[addrpad]"),
("Virtual", "virt", "[addrpad]"),
("Pages", "pages", ">10"),
("Type", "type", "")])
boot_params = self.profile.get_constant_object(
"_PE_state", "PE_state").bootArgs
# Code from:
# xnu-1699.26.8/osfmk/i386/AT386/model_dep.c:560
memory_map = self.profile.Array(
boot_params.MemoryMap,
vm=self.physical_address_space,
target="EfiMemoryRange",
target_size=int(boot_params.MemoryMapDescriptorSize),
count=(old_div(boot_params.MemoryMapSize,
boot_params.MemoryMapDescriptorSize)))
runs = []
for memory_range in memory_map:
start = memory_range.PhysicalStart
end = (memory_range.PhysicalStart
+ 0x1000
* memory_range.NumberOfPages)
runs.append(dict(
value=utils.SmartUnicode(memory_range.Type),
start=start, end=end))
renderer.table_row(
start,
end,
memory_range.VirtualStart.cast("Pointer"),
memory_range.NumberOfPages,
memory_range.Type)
# Render a heatmap.
# Automatically lower resolution for large images.
resolution = 0x1000 * 0x10 # 16 pages - conservative start.
column_count = 12
end = runs[-1]["end"]
# Keep it under 200 rows.
while end / resolution / column_count > 200:
resolution *= 2
notes = ("Resolution: %(pages)d pages (%(mb).2f MB) per cell.\n"
"Note that colors of overlapping regions are blended "
"using a weighted average. Letters in cells indicate "
"which regions from the legend are present. They are "
"ordered proportionally, by their respective page "
"counts in each cell.") % dict(pages=old_div(resolution, 0x1000),
mb=old_div(resolution, 1024.0 ** 2))
legend = visual_aides.MapLegend(
notes=notes,
legend=[("Am", "kEfiACPIMemoryNVS", (0x00, 0xff, 0x00)),
("Ar", "kEfiACPIReclaimMemory", (0xc7, 0xff, 0x50)),
("Bc", "kEfiBootServicesCode", (0xff, 0xa5, 0x00)),
("Bd", "kEfiBootServicesData", (0xff, 0x00, 0x00)),
("M", "kEfiConventionalMemory", (0xff, 0xff, 0xff)),
("Ec", "kEfiLoaderCode", (0x00, 0xff, 0xff)),
("Ed", "kEfiLoaderData", (0x00, 0x00, 0xff)),
("I", "kEfiMemoryMappedIO", (0xff, 0xff, 0x00)),
("X", "kEfiReservedMemoryType", (0x00, 0x00, 0x00)),
("Rc", "kEfiRuntimeServicesCode", (0xff, 0x00, 0xff)),
("Rd", "kEfiRuntimeServicesData", (0xff, 0x00, 0x50))])
heatmap = visual_aides.RunBasedMap(
caption="Offset (p)",
legend=legend,
runs=runs,
resolution=resolution,
column_count=column_count)
renderer.table_header([
dict(name="Visual mapping", width=120, style="full"),
dict(name="Legend", orientation="vertical", style="full",
width=40)])
renderer.table_row(heatmap, legend)
class DarwinBootParameters(common.AbstractDarwinCommand):
"""Prints the kernel command line."""
name = "boot_cmdline"
table_header = [
dict(name="cmdline", type="str"),
]
def collect(self):
boot_args = self.profile.get_constant_object(
"_PE_state", "PE_state").bootArgs
yield dict(cmdline=boot_args.CommandLine.cast("String"))
class DarwinSetProcessContext(core.SetProcessContextMixin,
common.ProcessFilterMixin,
common.AbstractDarwinCommand):
"""A cc plugin for windows."""
class DarwinVtoP(core.VtoPMixin, common.ProcessFilterMixin,
common.AbstractDarwinCommand):
"""Describe virtual to physical translation on darwin platforms."""
class DarwinImageFingerprint(common.AbstractDarwinParameterHook):
"""Fingerprint the current image.
This parameter tries to get something unique about the image quickly. The
idea is that two different images (even of the same system at different
points in time) will have very different fingerprints. The fingerprint is
used as a key to cache persistent information about the system.
Live systems can not have a stable fingerprint and so return a NoneObject()
here.
We return a list of tuples:
(physical_offset, expected_data)
The list uniquely identifies the image. If one were to read all physical
offsets and find the expected_data at these locations, then we have a very
high level of confidence that the image is unique and matches the
fingerprint.
"""
name = "image_fingerprint"
def calculate(self):
if self.session.physical_address_space.volatile:
return obj.NoneObject("No fingerprint for volatile image.")
result = []
profile = self.session.profile
phys_as = self.session.physical_address_space
address_space = self.session.GetParameter("default_address_space")
label = profile.get_constant_object("_osversion", "String")
result.append((address_space.vtop(label.obj_offset), label.v()))
label = profile.get_constant_object("_version", "String")
result.append((address_space.vtop(label.obj_offset), label.v()))
label = profile.get_constant_object("_sched_tick", "String",
length=8, term=None)
result.append((address_space.vtop(label.obj_offset), label.v()))
catfish_offset = self.session.GetParameter("catfish_offset")
result.append((catfish_offset, phys_as.read(catfish_offset, 8)))
# List of processes should also be pretty unique.
for task in self.session.plugins.pslist().filter_processes():
name = task.name.cast("String", length=30)
task_name_offset = address_space.vtop(name.obj_offset)
result.append((task_name_offset, name.v()))
return dict(
hash=hashlib.sha1(utils.SmartStr(result)).hexdigest(),
tests=result)
class DarwinHighestUserAddress(common.AbstractDarwinParameterHook):
"""The highest address for user mode/kernel mode division."""
name = "highest_usermode_address"
def calculate(self):
return 0x800000000000
| gpl-2.0 | 2,952,863,158,964,822,500 | 35.344086 | 84 | 0.60286 | false |
ProteinDF/ProteinDF_pytools | scripts/pdf-info-orb.py | 1 | 2823 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2014 The ProteinDF development team.
# see also AUTHORS and README if provided.
#
# This file is a part of the ProteinDF software package.
#
# The ProteinDF is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# The ProteinDF is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ProteinDF. If not, see <http://www.gnu.org/licenses/>.
"""
output info of orb
"""
import os
import sys
import argparse
try:
import msgpack
except:
import msgpack_pure as msgpack
import proteindf_tools as pdf
def main():
# parse args
parser = argparse.ArgumentParser(description='output info of orb')
parser.add_argument('orb_index',
nargs='+',
type=int,
help='orbital index (start=0)')
group = parser.add_mutually_exclusive_group(required=True)
group.add_argument('-d', '--db',
nargs='?',
action='store',
const='pdfresults.db',
help='ProteinDF results file')
group.add_argument('-p', '--param',
nargs='?',
action='store',
const='pdfparam.mpac',
help='ProteinDF parameter file')
parser.add_argument("-v", "--verbose",
action="store_true",
default=False)
parser.add_argument('-D', '--debug',
action='store_true',
default=False)
args = parser.parse_args()
# setting
verbose = args.verbose
if args.debug:
logging.basicConfig(level=logging.DEBUG)
orb_index = args.orb_index
orb_info = None
if args.db:
entry = pdf.PdfArchive(args.db)
orb_info = pdf.OrbInfo(entry)
elif args.param:
pdfparam = pdf.load_pdfparam(args.param)
orb_info = pdf.OrbInfo(pdfparam)
num_of_orbitals = orb_info.get_num_of_orbitals()
for i in orb_index:
if i < num_of_orbitals:
atom = orb_info.get_atom(i)
print('%dth: %s, shell(%s;%s)' % (i,
str(atom),
orb_info.get_shell_type(i),
orb_info.get_basis_type(i)))
if __name__ == '__main__':
main()
| gpl-3.0 | 1,469,378,510,724,045,000 | 31.448276 | 71 | 0.563939 | false |
thecsapprentice/phoenix-plumage-client | RenderNode.py | 1 | 14936 | #!/usr/bin/env python
import logging
logging.basicConfig(level=logging.DEBUG)
logging.getLogger('pika').setLevel(logging.INFO)
LOG_FORMAT = ('%(levelname) -15s %(asctime)s %(name) -30s %(funcName) '
'-35s %(lineno) -5d: %(message)s')
LOGGER = logging.getLogger("Manager")
NODE_LOGGER = logging.getLogger("Process")
import pika
import time
import datetime
import subprocess
import json
import uuid
import socket
import sys
from requests_toolbelt import MultipartEncoder
import requests
import argparse
from node import RenderNode
import os
import traceback
import socket
import ConfigParser
import importlib
import io
import struct
class DateTimeEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, datetime.datetime):
encoded_object = list(obj.timetuple())[0:6]
else:
encoded_object =json.JSONEncoder.default(self, obj)
return encoded_object
class RenderWorker(object):
def __init__(self, comm_host, config ):
self._comm_host = comm_host
self._murl = config.get( 'settings', 'manager_url' );
# These are the communication members
self._connection = None
self.channel = None
self.active_queue = ""
self.active_queue_tag = None
self.queue_was_filled = False
# Load Renderer Modules
self._renderManager = RenderNode();
for module in config.options('modules'):
status = config.getboolean( 'modules', module )
if status:
print "Loading render module", module, "..."
mod = importlib.import_module(config.get(module,'module'))
renderer_args = dict( config.items(module))
renderer_args["scene_path"] = config.get('settings', 'scene_path' );
renderer = mod.BuildRenderer( renderer_args )
self._renderManager.register_renderer(renderer.NodeType(), renderer )
# periodic checks
self.last_render_check = datetime.datetime.now()
self.render_started = False
self._save_location = config.get( 'settings', 'save_cache_path' );
def initiate_broker_communications(self, ):
self._connection = None
while self._connection == None:
try:
LOGGER.info( "Connecting to Broker..." )
parameters = pika.URLParameters(self._comm_host)
self._connection = pika.BlockingConnection(parameters)
except Exception, e:
LOGGER.info(str(e))
LOGGER.info(traceback.format_exc())
time.sleep(5)
self.channel = self._connection.channel()
self.channel.queue_declare(queue='log_queue',
durable=True,
exclusive=False,
auto_delete=False)
LOGGER.info(' [*] Waiting for messages. To exit press CTRL+C')
self.channel.basic_qos(prefetch_count=1)
def spin_up_new_pid(self,config):
pass
def kill_pid(self, ):
pass
def check_pid_status(self, ):
pass
def send_status_update(self, ):
pass
def check_render(self, ):
self.last_render_check = datetime.datetime.now()
if self.render_started:
status = self._renderManager.status();
log_text = self._renderManager.last_log()
for line in log_text.split("\n"):
if line:
NODE_LOGGER.debug( line )
if status == "SUCCESS":
LOGGER.info("Render complete.")
render_info = self._renderManager.last_render_info()
self.channel.basic_publish(exchange='', routing_key="log_queue",
body = json.dumps( {"event":"render_finish",
"frame":render_info["frame"],
"scene":render_info["scene"],
"time":datetime.datetime.now(),
"uuid":render_info["uuid"],
"type":render_info["type"],
} , cls=DateTimeEncoder),
properties=pika.BasicProperties(
delivery_mode = 2,
app_id = socket.gethostname(),
)
)
# Save off the render to disk somewhere
url = self._murl+"/upload_render?uuid="+render_info["uuid"]
LOGGER.info("Uploading completed render to %s" % url)
last_render = self._renderManager.last_render()
all_files_sent = True
for label in last_render.keys():
file_list = {}
file_list[label] = ( label+"."+self._renderManager.extension(), last_render[label] )
print "Sending image {:s}".format( label+"."+self._renderManager.extension() )
try:
requests.post( url, files=file_list)
except Exception, e:
LOGGER.warning("Failed to upload the render %s: %s" % (label, str(e)) )
all_files_sent = False
if all_files_sent:
self.channel.basic_ack(delivery_tag = self._renderManager.tag)
else:
self.channel.basic_reject(delivery_tag=self._renderManager.tag, requeue=True);
self.render_started = False
elif status == "FAILURE":
LOGGER.info("Render failed.")
render_info = self._renderManager.last_render_info()
self.channel.basic_publish(exchange='', routing_key="log_queue",
body = json.dumps( {"event":"render_fail",
"frame":render_info["frame"],
"scene":render_info["scene"],
"time":datetime.datetime.now(),
"uuid":render_info["uuid"],
"type":render_info["type"],
} , cls=DateTimeEncoder),
properties=pika.BasicProperties(
delivery_mode = 2,
app_id = socket.gethostname(),
)
)
self.render_started = False
self.channel.basic_ack(delivery_tag = self._renderManager.tag)
else:
pass
def pull_and_update_queue(self,):
url = self._murl+"/available_jobs"
try:
res = requests.get( url )
except Exception, e:
LOGGER.warning("Failed to retreieve job priority list from manager: %s", str(e))
else:
if len(res.json()) == 0 and self.active_queue != "":
LOGGER.info("No jobs available. Disconnecting from the last queue.")
if self.active_queue_tag != None:
self.channel.basic_cancel( consumer_tag=self.active_queue_tag )
self.active_queue = ""
for job in res.json():
if self._renderManager.can_handle_render( job[1] ):
if self.active_queue != job[0]:
LOGGER.info( "New job has priority. Switching to queue: render_%s" % job[0] )
if self.active_queue_tag != None:
self.channel.basic_cancel( consumer_tag=self.active_queue_tag )
self.active_queue = job[0]
self.active_queue_tag = self.channel.basic_consume(self.callback, queue='render_%s' % self.active_queue )
break;
else:
print "Can't handle render jobs of type '%s', skipping to next job in queue..." % job[1]
def callback(self, ch, method, properties, body):
response_message = {}
response_message["status"] = ""
body_config = dict()
try:
body_config = json.loads( body );
except:
LOGGER.error("Failed to parse command: %s", str(body))
ch.basic_ack(delivery_tag = method.delivery_tag)
return;
if body_config["command"] == "render":
self.check_render();
if self._renderManager.status() != "RUNNING":
LOGGER.info("Caught a render job...")
try:
frame = body_config["frame"]
scene_file = body_config["scene"]
rendertype = body_config["type"]
uuid = body_config["uuid"]
except:
LOGGER.error("Render command was malformed. Discarding...")
ch.basic_ack(delivery_tag = method.delivery_tag)
else:
if not self._renderManager.can_handle_render(rendertype):
ch.basic_reject(delivery_tag = method.delivery_tag, requeue=True);
else:
#ch.basic_ack(delivery_tag = method.delivery_tag)
LOGGER.info("Rendering frame %s for scene %s of type %s", str(frame), scene_file, rendertype);
self._renderManager.render_single_frame_of_type( scene_file, frame, uuid, rendertype );
self._renderManager.tag = method.delivery_tag
self.render_started = True
self.channel.basic_publish(exchange='', routing_key="log_queue",
body = json.dumps( {"event":"render_start",
"frame":frame,
"scene":scene_file,
"time":datetime.datetime.now(),
"uuid":uuid,
"type":rendertype,
} , cls=DateTimeEncoder),
properties=pika.BasicProperties(
delivery_mode = 2,
app_id = socket.gethostname(),
)
)
else:
ch.basic_reject(delivery_tag = method.delivery_tag, requeue=True);
def run(self, ):
self.initiate_broker_communications()
self.send_status_update();
while True:
try:
self._connection.process_data_events();
except pika.exceptions.ConnectionClosed, e:
LOGGER.warning("Lost connection to management, killing any active processes")
self.kill_pid()
LOGGER.warning("Connection lost. Reconnecting...")
self.initiate_broker_communications()
except KeyboardInterrupt:
LOGGER.info("Recieved kill command from terminal, shutting down.")
self.check_render()
self.kill_pid()
break;
self.check_pid_status();
#if (datetime.now() - self.last_update).seconds >= 120:
# self.send_status_update();
if (datetime.datetime.now() - self.last_render_check).seconds >= 5:
self.check_render()
sys.stdout.flush()
sys.stderr.flush()
if not self.render_started:
self.pull_and_update_queue()
time.sleep(.5);
def valid_path(path):
if os.path.isdir( path ):
return path
else:
msg = '"%s" is not a valid path name.' % path
raise argparse.ArgumentTypeError(msg)
def valid_file(path):
if os.path.isfile( path ):
return path
else:
msg = '"%s" is not a valid file name.' % path
raise argparse.ArgumentTypeError(msg)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Plumage Render Node")
parser.add_argument("-c", "--conf_file", type=valid_file,
help="Specify config file", metavar="FILE")
args, remaining_argv = parser.parse_known_args()
defaults = {
"ampq_server" : "http://localhost:5672",
"ampq_user" : "guest",
"ampq_password": "guest",
"save_cache_path":"/tmp",
"manager_url":"http://localhost:8888",
"scene_path":"/tmp",
}
config = ConfigParser.SafeConfigParser()
if args.conf_file:
config.read([args.conf_file])
defaults = dict(config.items("settings"))
parser.set_defaults(**defaults)
parser.add_argument('-a','--ampq_server', type=str )
parser.add_argument('-U','--ampq_user', type=str )
parser.add_argument('-P','--ampq_password', type=str )
parser.add_argument('-S','--save_cache_path', type=valid_path)
parser.add_argument('-m','--manager_url', type=str)
parser.add_argument('-s','--scene_path', type=valid_path )
args = parser.parse_args(remaining_argv)
if not config.has_section('settings'):
config.add_section('settings');
print vars(args)
for item, value in vars(args).items():
config.set( 'settings', item, str(value) );
ampq_url = 'http://%s:%s@%s'%(config.get('settings','ampq_user'),
config.get('settings','ampq_password'),
config.get('settings','ampq_server') );
print "AMPQ:", ampq_url
worker = RenderWorker( ampq_url, config )
worker.run();
| bsd-3-clause | 4,707,114,049,887,521,000 | 41.674286 | 129 | 0.470809 | false |
axptwig/Wu-Bernstein-Distributed-Systems-Implementation | time_table.py | 2 | 1170 | import json
# A class to represent the time table's held by each node
class TimeTable(object):
def __init__(self, dim):
self.dim = dim
self.init_table()
def init_table(self):
self.table = []
for _ in range(self.dim):
self.table.append([0] * self.dim)
# When syncing with a new node, just update all of our clock times to
# the max of both tables
def sync(self, t2, self_id, sync_id):
assert len(t2.table) == len(self.table)
for i in range(self.dim):
self.table[self_id][i] = t2.table[sync_id][i]
for i in range(self.dim):
for j in range(self.dim):
self.table[i][j] = max(self.table[i][j], t2.table[i][j])
# Call this when a node performs a local "insert" or "delete" operation
def update(self, node, count):
self.table[node][node] = count
def get(self, i, j):
return self.table[i][j]
def to_JSON(self):
return json.dumps(self, default=lambda o: o.__dict__,
sort_keys=True)
@staticmethod
def load(js, dim):
a = TimeTable(dim)
a.__dict__ = js
return a | gpl-2.0 | -8,378,801,963,301,662,000 | 27.560976 | 75 | 0.563248 | false |
burnpanck/chaco | docs/source/sphinxext/docscrape.py | 5 | 14245 | """Extract reference documentation from the NumPy source tree.
"""
import inspect
import textwrap
import re
import pydoc
from StringIO import StringIO
from warnings import warn
class Reader(object):
"""A line-based string reader.
"""
def __init__(self, data):
"""
Parameters
----------
data : str
String with lines separated by '\n'.
"""
if isinstance(data,list):
self._str = data
else:
self._str = data.split('\n') # store string as list of lines
self.reset()
def __getitem__(self, n):
return self._str[n]
def reset(self):
self._l = 0 # current line nr
def read(self):
if not self.eof():
out = self[self._l]
self._l += 1
return out
else:
return ''
def seek_next_non_empty_line(self):
for l in self[self._l:]:
if l.strip():
break
else:
self._l += 1
def eof(self):
return self._l >= len(self._str)
def read_to_condition(self, condition_func):
start = self._l
for line in self[start:]:
if condition_func(line):
return self[start:self._l]
self._l += 1
if self.eof():
return self[start:self._l+1]
return []
def read_to_next_empty_line(self):
self.seek_next_non_empty_line()
def is_empty(line):
return not line.strip()
return self.read_to_condition(is_empty)
def read_to_next_unindented_line(self):
def is_unindented(line):
return (line.strip() and (len(line.lstrip()) == len(line)))
return self.read_to_condition(is_unindented)
def peek(self,n=0):
if self._l + n < len(self._str):
return self[self._l + n]
else:
return ''
def is_empty(self):
return not ''.join(self._str).strip()
class NumpyDocString(object):
def __init__(self,docstring):
docstring = docstring.split('\n')
# De-indent paragraph
try:
indent = min(len(s) - len(s.lstrip()) for s in docstring
if s.strip())
except ValueError:
indent = 0
for n,line in enumerate(docstring):
docstring[n] = docstring[n][indent:]
self._doc = Reader(docstring)
self._parsed_data = {
'Signature': '',
'Summary': '',
'Extended Summary': [],
'Parameters': [],
'Returns': [],
'Raises': [],
'Warns': [],
'Other Parameters': [],
'Attributes': [],
'Methods': [],
'See Also': [],
'Notes': [],
'References': '',
'Examples': '',
'index': {}
}
self._parse()
def __getitem__(self,key):
return self._parsed_data[key]
def __setitem__(self,key,val):
if not self._parsed_data.has_key(key):
warn("Unknown section %s" % key)
else:
self._parsed_data[key] = val
def _is_at_section(self):
self._doc.seek_next_non_empty_line()
if self._doc.eof():
return False
l1 = self._doc.peek().strip() # e.g. Parameters
if l1.startswith('.. index::'):
return True
l2 = self._doc.peek(1).strip() # ----------
return l2.startswith('-'*len(l1))
def _strip(self,doc):
i = 0
j = 0
for i,line in enumerate(doc):
if line.strip(): break
for j,line in enumerate(doc[::-1]):
if line.strip(): break
return doc[i:len(doc)-j]
def _read_to_next_section(self):
section = self._doc.read_to_next_empty_line()
while not self._is_at_section() and not self._doc.eof():
if not self._doc.peek(-1).strip(): # previous line was empty
section += ['']
section += self._doc.read_to_next_empty_line()
return section
def _read_sections(self):
while not self._doc.eof():
data = self._read_to_next_section()
name = data[0].strip()
if name.startswith('..'): # index section
yield name, data[1:]
elif len(data) < 2:
yield StopIteration
else:
yield name, self._strip(data[2:])
def _parse_param_list(self,content):
r = Reader(content)
params = []
while not r.eof():
header = r.read().strip()
if ' : ' in header:
arg_name, arg_type = header.split(' : ')[:2]
else:
arg_name, arg_type = header, ''
desc = r.read_to_next_unindented_line()
for n,line in enumerate(desc):
desc[n] = line.strip()
desc = desc #'\n'.join(desc)
params.append((arg_name,arg_type,desc))
return params
def _parse_see_also(self, content):
"""
func_name : Descriptive text
continued text
another_func_name : Descriptive text
func_name1, func_name2, func_name3
"""
functions = []
current_func = None
rest = []
for line in content:
if not line.strip(): continue
if ':' in line:
if current_func:
functions.append((current_func, rest))
r = line.split(':', 1)
current_func = r[0].strip()
r[1] = r[1].strip()
if r[1]:
rest = [r[1]]
else:
rest = []
elif not line.startswith(' '):
if current_func:
functions.append((current_func, rest))
current_func = None
rest = []
if ',' in line:
for func in line.split(','):
func = func.strip()
if func:
functions.append((func, []))
elif line.strip():
current_func = line.strip()
elif current_func is not None:
rest.append(line.strip())
if current_func:
functions.append((current_func, rest))
return functions
def _parse_index(self, section, content):
"""
.. index: default
:refguide: something, else, and more
"""
def strip_each_in(lst):
return [s.strip() for s in lst]
out = {}
section = section.split('::')
if len(section) > 1:
out['default'] = strip_each_in(section[1].split(','))[0]
for line in content:
line = line.split(':')
if len(line) > 2:
out[line[1]] = strip_each_in(line[2].split(','))
return out
def _parse_summary(self):
"""Grab signature (if given) and summary"""
if self._is_at_section():
return
summary = self._doc.read_to_next_empty_line()
summary_str = " ".join([s.strip() for s in summary]).strip()
if re.compile('^([\w., ]+=)?\s*[\w\.]+\(.*\)$').match(summary_str):
self['Signature'] = summary_str
if not self._is_at_section():
self['Summary'] = self._doc.read_to_next_empty_line()
else:
self['Summary'] = summary
if not self._is_at_section():
self['Extended Summary'] = self._read_to_next_section()
def _parse(self):
self._doc.reset()
self._parse_summary()
for (section,content) in self._read_sections():
if not section.startswith('..'):
section = ' '.join([s.capitalize() for s in section.split(' ')])
if section in ('Parameters', 'Attributes', 'Methods',
'Returns', 'Raises', 'Warns'):
self[section] = self._parse_param_list(content)
elif section.startswith('.. index::'):
self['index'] = self._parse_index(section, content)
elif section == 'See Also':
self['See Also'] = self._parse_see_also(content)
else:
self[section] = content
# string conversion routines
def _str_header(self, name, symbol='-'):
return [name, len(name)*symbol]
def _str_indent(self, doc, indent=4):
out = []
for line in doc:
out += [' '*indent + line]
return out
def _str_signature(self):
if self['Signature']:
return [self['Signature'].replace('*','\*')] + ['']
else:
return ['']
def _str_summary(self):
if self['Summary']:
return self['Summary'] + ['']
else:
return []
def _str_extended_summary(self):
if self['Extended Summary']:
return self['Extended Summary'] + ['']
else:
return []
def _str_param_list(self, name):
out = []
if self[name]:
out += self._str_header(name)
for param,param_type,desc in self[name]:
out += ['%s : %s' % (param, param_type)]
out += self._str_indent(desc)
out += ['']
return out
def _str_section(self, name):
out = []
if self[name]:
out += self._str_header(name)
out += self[name]
out += ['']
return out
def _str_see_also(self, func_role):
if not self['See Also']: return []
out = []
out += self._str_header("See Also")
last_had_desc = True
for func, desc in self['See Also']:
if func_role:
link = ':%s:`%s`' % (func_role, func)
else:
link = "`%s`_" % func
if desc or last_had_desc:
out += ['']
out += [link]
else:
out[-1] += ", %s" % link
if desc:
out += self._str_indent(desc)
last_had_desc = True
else:
last_had_desc = False
out += ['']
return out
def _str_index(self):
idx = self['index']
out = []
out += ['.. index:: %s' % idx.get('default','')]
for section, references in idx.iteritems():
if section == 'default':
continue
out += [' :%s: %s' % (section, ', '.join(references))]
return out
def __str__(self, func_role=''):
out = []
out += self._str_signature()
out += self._str_summary()
out += self._str_extended_summary()
for param_list in ('Parameters','Returns','Raises'):
out += self._str_param_list(param_list)
out += self._str_see_also(func_role)
for s in ('Notes','References','Examples'):
out += self._str_section(s)
out += self._str_index()
return '\n'.join(out)
def indent(str,indent=4):
indent_str = ' '*indent
if str is None:
return indent_str
lines = str.split('\n')
return '\n'.join(indent_str + l for l in lines)
def header(text, style='-'):
return text + '\n' + style*len(text) + '\n'
class FunctionDoc(NumpyDocString):
def __init__(self, func, role='func'):
self._f = func
self._role = role # e.g. "func" or "meth"
try:
NumpyDocString.__init__(self,inspect.getdoc(func) or '')
except ValueError, e:
print '*'*78
print "ERROR: '%s' while parsing `%s`" % (e, self._f)
print '*'*78
#print "Docstring follows:"
#print doclines
#print '='*78
if not self['Signature']:
func, func_name = self.get_func()
try:
# try to read signature
argspec = inspect.getargspec(func)
argspec = inspect.formatargspec(*argspec)
argspec = argspec.replace('*','\*')
signature = '%s%s' % (func_name, argspec)
except TypeError, e:
signature = '%s()' % func_name
self['Signature'] = signature
def get_func(self):
func_name = getattr(self._f, '__name__', self.__class__.__name__)
if hasattr(self._f, '__class__') or inspect.isclass(self._f):
func = getattr(self._f, '__call__', self._f.__init__)
else:
func = self._f
return func, func_name
def __str__(self):
out = ''
func, func_name = self.get_func()
signature = self['Signature'].replace('*', '\*')
roles = {'func': 'function',
'meth': 'method'}
if self._role:
if not roles.has_key(self._role):
print "Warning: invalid role %s" % self._role
out += '.. %s:: %s\n \n\n' % (roles.get(self._role,''),
func_name)
out += super(FunctionDoc, self).__str__(func_role=self._role)
return out
class ClassDoc(NumpyDocString):
def __init__(self,cls,modulename='',func_doc=FunctionDoc):
if not inspect.isclass(cls):
raise ValueError("Initialise using a class. Got %r" % cls)
self._cls = cls
if modulename and not modulename.endswith('.'):
modulename += '.'
self._mod = modulename
self._name = cls.__name__
self._func_doc = func_doc
NumpyDocString.__init__(self, pydoc.getdoc(cls))
@property
def methods(self):
return [name for name,func in inspect.getmembers(self._cls)
if not name.startswith('_') and callable(func)]
def __str__(self):
out = ''
out += super(ClassDoc, self).__str__()
out += "\n\n"
#for m in self.methods:
# print "Parsing `%s`" % m
# out += str(self._func_doc(getattr(self._cls,m), 'meth')) + '\n\n'
# out += '.. index::\n single: %s; %s\n\n' % (self._name, m)
return out
| bsd-3-clause | -3,429,747,056,001,722,400 | 28.677083 | 80 | 0.471955 | false |
scmsqhn/changhongmall | app/mylog/logtools.py | 1 | 1872 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Date : 2017-08-21 17:03:43
# @Author : qin_hai_ning ([email protected])
# @Link : https://www.triboys.com/
# @Version : $Id$
import os
import time
import datetime
import json
import traceback
def p(i):
print "[x] ", str(i)
print "\r\n"
def sav_log(intxt):
print intxt
try:
logtxt = open('/var/www/chmall/log/tmplog.txt','a+')
formattxt = obtain_log_txt_json(intxt)
logtxt.write(formattxt)
logtxt.write("\r\n")
print "sav_log"
except Exception as e:
traceback.print_exc()
raise
else:
pass
finally:
pass
def sav_2_file(intxt, filepath):
p("sav_2_file")
try:
logtxt = open(filepath,'a+')
formattxt = obtain_log_txt_json_2(intxt)
logtxt.write(formattxt)
logtxt.write("\r\n")
print "sav_log"
except Exception as e:
traceback.print_exc()
raise
else:
pass
finally:
pass
def obtain_log_txt(intxt):
title = "[x] "
dummy = " "
now = datetime.datetime.now()
otherStyleTime = now.strftime(u"%Y-%m-%d %H:%M:%S")
otherStyleTime = otherStyleTime.decode('utf-8')
output = str(title) + str(dummy) + str(otherStyleTime) + str(dummy) + str(intxt) + str(dummy) + '\r\n'
return output
def obtain_log_txt_json(intxt):
now = datetime.datetime.now()
otherStyleTime = now.strftime(u"%Y-%m-%d %H:%M:%S")
otherStyleTime = otherStyleTime.decode('utf-8')
print intxt
injson = json.loads(intxt)
for i in injson:
print i
print injson
injson["time"] = otherStyleTime
output = json.dumps(injson)
return output
def obtain_log_txt_json_2(injson):
now = datetime.datetime.now()
otherStyleTime = now.strftime(u"%Y-%m-%d %H:%M:%S")
otherStyleTime = otherStyleTime.decode('utf-8')
injson["time"] = otherStyleTime
output = json.dumps(injson)
print output
return output
| mit | -7,137,370,883,715,607,000 | 20.285714 | 103 | 0.637286 | false |
spapadim/OctoPrint | src/octoprint/filemanager/__init__.py | 7 | 15591 | # coding=utf-8
from __future__ import absolute_import
__author__ = "Gina Häußge <[email protected]>"
__license__ = 'GNU Affero General Public License http://www.gnu.org/licenses/agpl.html'
__copyright__ = "Copyright (C) 2014 The OctoPrint Project - Released under terms of the AGPLv3 License"
import logging
import os
import octoprint.plugin
import octoprint.util
from octoprint.events import eventManager, Events
from .destinations import FileDestinations
from .analysis import QueueEntry, AnalysisQueue
from .storage import LocalFileStorage
from .util import AbstractFileWrapper, StreamWrapper, DiskFileWrapper
# NOTE(review): module level placeholder mapping, never populated or read within
# this chunk of the module - presumably kept for API/backwards compatibility,
# verify against the rest of the package before removing
extensions = dict(
)
def full_extension_tree():
	"""Builds the full extension tree for supported file types.

	The tree maps type categories (e.g. ``model``, ``machinecode``) to either
	nested sub categories (dicts) or lists of file extensions (lower case,
	without leading dot). The builtin entries are merged with any additional
	entries contributed by plugins via the
	``octoprint.filemanager.extension_tree`` hook, using
	:func:`octoprint.util.dict_merge`.

	Returns:
	    dict: the merged extension tree
	"""
	result = dict(
		# extensions for 3d model files
		model=dict(
			stl=["stl"]
		),
		# extensions for printable machine code
		machinecode=dict(
			gcode=["gcode", "gco", "g"]
		)
	)

	extension_tree_hooks = octoprint.plugin.plugin_manager().get_hooks("octoprint.filemanager.extension_tree")
	for name, hook in extension_tree_hooks.items():
		try:
			hook_result = hook()
			if hook_result is None or not isinstance(hook_result, dict):
				# hooks may decline to contribute anything
				continue
			result = octoprint.util.dict_merge(result, hook_result)
		except Exception:
			# only catch regular errors - a bare except here would also swallow
			# SystemExit and KeyboardInterrupt raised from within a hook
			logging.getLogger(__name__).exception("Exception while retrieving additional extension tree entries from hook {name}".format(name=name))

	return result
def get_extensions(type, subtree=None):
	"""Returns the list of extensions registered for *type*, searching the
	extension tree (or the given *subtree*) recursively. Returns ``None`` if
	the type is not present anywhere in the tree."""
	tree = subtree if subtree else full_extension_tree()

	for node_name, node in tree.items():
		if node_name == type:
			# found the requested category, flatten everything below it
			return get_all_extensions(subtree=node)
		if isinstance(node, dict):
			nested = get_extensions(type, subtree=node)
			if nested:
				return nested

	return None
def get_all_extensions(subtree=None):
	"""Collects every file extension found anywhere in the extension tree
	(or the given *subtree*). A dict is walked recursively, a list or tuple
	is returned as-is, anything else yields an empty list."""
	if not subtree:
		subtree = full_extension_tree()

	if isinstance(subtree, dict):
		collected = []
		for node in subtree.values():
			if isinstance(node, dict):
				collected += get_all_extensions(node)
			elif isinstance(node, (list, tuple)):
				collected += node
		return collected
	elif isinstance(subtree, (list, tuple)):
		# leaf node handed in directly - return unchanged (tuple stays tuple)
		return subtree

	return []
def get_path_for_extension(extension, subtree=None):
	"""Returns the path of tree keys leading to *extension* within the
	extension tree (or the given *subtree*), e.g. ``["machinecode", "gcode"]``,
	or ``None`` if the extension is unknown."""
	if not subtree:
		subtree = full_extension_tree()

	for node_name, node in subtree.items():
		if isinstance(node, (list, tuple)) and extension in node:
			return [node_name]
		if isinstance(node, dict):
			tail = get_path_for_extension(extension, subtree=node)
			if tail:
				return [node_name] + tail

	return None
def valid_extension(extension, type=None):
	"""Checks whether *extension* (lower case, without leading dot) is known,
	optionally restricted to the given *type* category of the extension tree.

	Returns:
	    bool: True if the extension is valid (for the given type), False otherwise
	"""
	if not type:
		return extension in get_all_extensions()

	extensions = get_extensions(type)
	if not extensions:
		# unknown type or type without extensions - the previous implementation
		# fell through here and implicitly returned None, make that an explicit
		# (and equally falsy) False
		return False
	return extension in extensions
def valid_file_type(filename, type=None):
	"""Checks whether *filename*'s extension is known, optionally restricted
	to the given *type* category of the extension tree."""
	extension = os.path.splitext(filename)[1][1:].lower()
	return valid_extension(extension, type=type)
def get_file_type(filename):
	"""Returns the extension tree path for *filename*'s extension (see
	:func:`get_path_for_extension`), or ``None`` if the extension is unknown."""
	extension = os.path.splitext(filename)[1][1:].lower()
	return get_path_for_extension(extension)
class NoSuchStorage(Exception):
	"""Raised for operations referring to an unknown storage destination."""
	pass
class FileManager(object):
def __init__(self, analysis_queue, slicing_manager, printer_profile_manager, initial_storage_managers=None):
self._logger = logging.getLogger(__name__)
self._analysis_queue = analysis_queue
self._analysis_queue.register_finish_callback(self._on_analysis_finished)
self._storage_managers = dict()
if initial_storage_managers:
self._storage_managers.update(initial_storage_managers)
self._slicing_manager = slicing_manager
self._printer_profile_manager = printer_profile_manager
import threading
self._slicing_jobs = dict()
self._slicing_jobs_mutex = threading.Lock()
self._slicing_progress_callbacks = []
self._last_slicing_progress = None
self._progress_plugins = []
self._preprocessor_hooks = dict()
def initialize(self):
self.reload_plugins()
def worker():
self._logger.info("Adding backlog items from all storage types to analysis queue...".format(**locals()))
for storage_type, storage_manager in self._storage_managers.items():
self._determine_analysis_backlog(storage_type, storage_manager)
import threading
thread = threading.Thread(target=worker)
thread.daemon = True
thread.start()
	def reload_plugins(self):
		"""Refreshes the cached plugin lookups: registered
		:class:`~octoprint.plugin.ProgressPlugin` implementations and
		``octoprint.filemanager.preprocessor`` hooks."""
		self._progress_plugins = octoprint.plugin.plugin_manager().get_implementations(octoprint.plugin.ProgressPlugin)
		self._preprocessor_hooks = octoprint.plugin.plugin_manager().get_hooks("octoprint.filemanager.preprocessor")
	def register_slicingprogress_callback(self, callback):
		"""Registers *callback* for slicing progress updates, delivered through
		its ``sendSlicingProgress`` method (see :meth:`on_slicing_progress`)."""
		self._slicing_progress_callbacks.append(callback)
	def unregister_slicingprogress_callback(self, callback):
		"""Removes a previously registered slicing progress callback.

		Raises:
		    ValueError: if *callback* was never registered (``list.remove`` semantics)
		"""
		self._slicing_progress_callbacks.remove(callback)
def _determine_analysis_backlog(self, storage_type, storage_manager):
counter = 0
for entry, path, printer_profile in storage_manager.analysis_backlog:
file_type = get_file_type(path)[-1]
# we'll use the default printer profile for the backlog since we don't know better
queue_entry = QueueEntry(entry, file_type, storage_type, path, self._printer_profile_manager.get_default())
self._analysis_queue.enqueue(queue_entry, high_priority=False)
counter += 1
self._logger.info("Added {counter} items from storage type \"{storage_type}\" to analysis queue".format(**locals()))
	def add_storage(self, storage_type, storage_manager):
		"""Registers *storage_manager* under *storage_type* and enqueues its
		analysis backlog."""
		self._storage_managers[storage_type] = storage_manager
		self._determine_analysis_backlog(storage_type, storage_manager)
def remove_storage(self, type):
if not type in self._storage_managers:
return
del self._storage_managers[type]
	@property
	def slicing_enabled(self):
		"""Whether slicing is currently available, as reported by the slicing manager."""
		return self._slicing_manager.slicing_enabled
	@property
	def registered_slicers(self):
		"""The slicers registered with the slicing manager."""
		return self._slicing_manager.registered_slicers
	@property
	def default_slicer(self):
		"""The default slicer configured in the slicing manager."""
		return self._slicing_manager.default_slicer
	def slice(self, slicer_name, source_location, source_path, dest_location, dest_path,
	          position=None, profile=None, printer_profile_id=None, overrides=None, callback=None, callback_args=None):
		"""Slices the source file into machine code at the destination.

		Fires ``SLICING_STARTED`` immediately, then hands the job to the slicing
		manager with a completion handler that fires ``SLICING_DONE``,
		``SLICING_FAILED`` or ``SLICING_CANCELLED`` and stores the result via
		:meth:`add_file`. A slicing job already targeting the same destination
		is cancelled first. The slicer output goes to a temporary file which is
		always removed once the job finishes.

		Arguments:
		    slicer_name (str): name of the slicer to use
		    source_location/source_path: storage location and path of the model file
		    dest_location/dest_path: storage location and path for the sliced result
		    position: position override handed to the slicer
		    profile: slicing profile to use
		    printer_profile_id: id of the printer profile to slice for
		    overrides: slicing profile overrides
		    callback/callback_args: invoked (with *callback_args*) after a
		        successful slice and file import
		"""
		absolute_source_path = self.path_on_disk(source_location, source_path)

		def stlProcessed(source_location, source_path, tmp_path, dest_location, dest_path, start_time, printer_profile_id, callback, callback_args, _error=None, _cancelled=False, _analysis=None):
			# completion handler invoked by the slicing manager - fires the
			# outcome event, imports the result on success and always cleans up
			# the temp file and the job bookkeeping
			try:
				if _error:
					eventManager().fire(Events.SLICING_FAILED, {"stl": source_path, "gcode": dest_path, "reason": _error})
				elif _cancelled:
					eventManager().fire(Events.SLICING_CANCELLED, {"stl": source_path, "gcode": dest_path})
				else:
					source_meta = self.get_metadata(source_location, source_path)
					hash = source_meta["hash"]

					import io
					# link the generated machine code back to its source model
					links = [("model", dict(name=source_path))]
					_, stl_name = self.split_path(source_location, source_path)
					# prepend a comment line identifying the source model and its
					# hash, followed by the slicer output from the temp file
					file_obj = StreamWrapper(os.path.basename(dest_path),
					                         io.BytesIO(u";Generated from {stl_name} {hash}\n".format(**locals()).encode("ascii", "replace")),
					                         io.FileIO(tmp_path, "rb"))

					printer_profile = self._printer_profile_manager.get(printer_profile_id)
					self.add_file(dest_location, dest_path, file_obj, links=links, allow_overwrite=True, printer_profile=printer_profile, analysis=_analysis)

					end_time = time.time()
					eventManager().fire(Events.SLICING_DONE, {"stl": source_path, "gcode": dest_path, "time": end_time - start_time})

					if callback is not None:
						if callback_args is None:
							callback_args = ()
						callback(*callback_args)
			finally:
				# temp file and job entries must go away regardless of outcome
				os.remove(tmp_path)

				source_job_key = (source_location, source_path)
				dest_job_key = (dest_location, dest_path)

				with self._slicing_jobs_mutex:
					if source_job_key in self._slicing_jobs:
						del self._slicing_jobs[source_job_key]
					if dest_job_key in self._slicing_jobs:
						del self._slicing_jobs[dest_job_key]

		slicer = self._slicing_manager.get_slicer(slicer_name)

		import time
		start_time = time.time()
		eventManager().fire(Events.SLICING_STARTED, {"stl": source_path, "gcode": dest_path, "progressAvailable": slicer.get_slicer_properties()["progress_report"] if slicer else False})

		# slicer output goes to a named temp file which stlProcessed removes;
		# delete=False since the slicer (possibly another process) writes to it
		import tempfile
		f = tempfile.NamedTemporaryFile(suffix=".gco", delete=False)
		temp_path = f.name
		f.close()

		with self._slicing_jobs_mutex:
			source_job_key = (source_location, source_path)
			dest_job_key = (dest_location, dest_path)

			if dest_job_key in self._slicing_jobs:
				# an earlier job already targets this destination, cancel it
				job_slicer_name, job_absolute_source_path, job_temp_path = self._slicing_jobs[dest_job_key]

				self._slicing_manager.cancel_slicing(job_slicer_name, job_absolute_source_path, job_temp_path)
				del self._slicing_jobs[dest_job_key]

			# track the job under both its source and destination key
			self._slicing_jobs[dest_job_key] = self._slicing_jobs[source_job_key] = (slicer_name, absolute_source_path, temp_path)

		args = (source_location, source_path, temp_path, dest_location, dest_path, start_time, printer_profile_id, callback, callback_args)
		self._slicing_manager.slice(slicer_name,
		                            absolute_source_path,
		                            temp_path,
		                            profile,
		                            stlProcessed,
		                            position=position,
		                            callback_args=args,
		                            overrides=overrides,
		                            printer_profile_id=printer_profile_id,
		                            on_progress=self.on_slicing_progress,
		                            on_progress_args=(slicer_name, source_location, source_path, dest_location, dest_path))
	def on_slicing_progress(self, slicer, source_location, source_path, dest_location, dest_path, _progress=None):
		"""Progress callback handed to the slicing manager.

		Converts *_progress* (a fraction) to an integer percentage and, if it
		changed since the last push, forwards it to all registered slicing
		progress callbacks and - on a separate thread - to all ProgressPlugin
		implementations. Falsy progress values (``None`` and also 0) are
		ignored - NOTE(review): 0% is deliberately(?) never pushed, verify.
		"""
		if not _progress:
			return

		progress_int = int(_progress * 100)
		if self._last_slicing_progress != progress_int:
			self._last_slicing_progress = progress_int
			for callback in self._slicing_progress_callbacks:
				# a broken callback must not prevent updates to the others
				try: callback.sendSlicingProgress(slicer, source_location, source_path, dest_location, dest_path, progress_int)
				except: self._logger.exception("Exception while pushing slicing progress")

			if progress_int:
				def call_plugins(slicer, source_location, source_path, dest_location, dest_path, progress):
					# notify plugins off the slicer's callback thread, so a slow
					# plugin cannot stall the slicing job
					for plugin in self._progress_plugins:
						try:
							plugin.on_slicing_progress(slicer, source_location, source_path, dest_location, dest_path, progress)
						except:
							self._logger.exception("Exception while sending slicing progress to plugin %s" % plugin._identifier)

				import threading
				thread = threading.Thread(target=call_plugins, args=(slicer, source_location, source_path, dest_location, dest_path, progress_int))
				thread.daemon = False
				thread.start()
def get_busy_files(self):
    # Keys of the running-job table identify files currently being sliced;
    # each key is a (location, path) tuple.
    return self._slicing_jobs.keys()

def file_exists(self, destination, path):
    # Delegate the existence check to the storage backend registered for
    # *destination* (raises NoSuchStorage if none is configured).
    return self._storage(destination).file_exists(path)
def list_files(self, destinations=None, path=None, filter=None, recursive=None):
    """Return a dict mapping each requested destination to its file listing.

    *destinations* may be falsy (meaning: all configured storages), a single
    destination name, or an iterable of names. The remaining keyword
    arguments are passed through to each storage's ``list_files``.
    """
    if not destinations:
        destinations = self._storage_managers.keys()
    if isinstance(destinations, basestring):
        # basestring already covers both str and unicode on Python 2, so the
        # former (str, unicode, basestring) tuple was redundant
        destinations = [destinations]
    return dict((dst, self._storage_managers[dst].list_files(path=path, filter=filter, recursive=recursive))
                for dst in destinations)
def add_file(self, destination, path, file_object, links=None, allow_overwrite=False, printer_profile=None, analysis=None):
    """Store *file_object* under *path* on *destination* and schedule analysis.

    Registered preprocessor hooks may replace the file object before it is
    persisted. Returns the storage-local path of the added file.
    """
    if printer_profile is None:
        printer_profile = self._printer_profile_manager.get_current_or_default()

    for hook in self._preprocessor_hooks.values():
        try:
            hook_file_object = hook(path, file_object, links=links, printer_profile=printer_profile, allow_overwrite=allow_overwrite)
        except Exception:
            # was a bare "except:" -- keep ignoring broken hooks, but do not
            # swallow SystemExit/KeyboardInterrupt any more
            self._logger.exception("Error when calling preprocessor hook {}, ignoring".format(hook))
            continue
        if hook_file_object is not None:
            # a hook may transparently substitute the file object
            file_object = hook_file_object

    file_path = self._storage(destination).add_file(path, file_object, links=links, printer_profile=printer_profile, allow_overwrite=allow_overwrite)
    absolute_path = self._storage(destination).path_on_disk(file_path)

    if analysis is None:
        # no pre-supplied analysis: queue the file for high priority analysis
        file_type = get_file_type(absolute_path)
        if file_type:
            queue_entry = QueueEntry(file_path, file_type[-1], destination, absolute_path, printer_profile)
            self._analysis_queue.enqueue(queue_entry, high_priority=True)
    else:
        self._add_analysis_result(destination, path, analysis)

    eventManager().fire(Events.UPDATED_FILES, dict(type="printables"))
    return file_path
def remove_file(self, destination, path):
    # Delete the file and notify listeners that the file list changed.
    self._storage(destination).remove_file(path)
    eventManager().fire(Events.UPDATED_FILES, dict(type="printables"))

def add_folder(self, destination, path, ignore_existing=True):
    # Create a folder (optionally tolerating an existing one), broadcast the
    # change and return the storage-local folder path.
    folder_path = self._storage(destination).add_folder(path, ignore_existing=ignore_existing)
    eventManager().fire(Events.UPDATED_FILES, dict(type="printables"))
    return folder_path

def remove_folder(self, destination, path, recursive=True):
    # Delete a folder (recursively by default) and broadcast the change.
    self._storage(destination).remove_folder(path, recursive=recursive)
    eventManager().fire(Events.UPDATED_FILES, dict(type="printables"))

def get_metadata(self, destination, path):
    # Fetch the stored metadata for *path* from the storage backend.
    return self._storage(destination).get_metadata(path)

def add_link(self, destination, path, rel, data):
    # Attach a link of relation *rel* with payload *data* to *path*.
    self._storage(destination).add_link(path, rel, data)

def remove_link(self, destination, path, rel, data):
    # Remove a previously attached link from *path*.
    self._storage(destination).remove_link(path, rel, data)
def log_print(self, destination, path, timestamp, print_time, success, printer_profile):
    """Record a finished print job in the file's history, if storage exists."""
    history_entry = dict(timestamp=timestamp, success=success, printerProfile=printer_profile)
    if success:
        # the print duration is only recorded for successful jobs
        history_entry["printTime"] = print_time
    try:
        self._storage(destination).add_history(path, history_entry)
        eventManager().fire(Events.METADATA_STATISTICS_UPDATED, dict(storage=destination, path=path))
    except NoSuchStorage:
        # if there's no storage configured where to log the print, we'll just not log it
        pass
def set_additional_metadata(self, destination, path, key, data, overwrite=False, merge=False):
    # Store arbitrary extra metadata under *key* for the given file.
    self._storage(destination).set_additional_metadata(path, key, data, overwrite=overwrite, merge=merge)

def remove_additional_metadata(self, destination, path, key):
    # Drop the extra metadata entry stored under *key*.
    self._storage(destination).remove_additional_metadata(path, key)

def path_on_disk(self, destination, path):
    # Translate a storage-local path into an absolute filesystem path.
    return self._storage(destination).path_on_disk(path)

def sanitize(self, destination, path):
    # Sanitize a full path according to the destination's storage rules.
    return self._storage(destination).sanitize(path)

def sanitize_name(self, destination, name):
    # Sanitize a single file name.
    return self._storage(destination).sanitize_name(name)

def sanitize_path(self, destination, path):
    # Sanitize a folder path.
    return self._storage(destination).sanitize_path(path)

def split_path(self, destination, path):
    # Split a storage path as defined by the backend.
    return self._storage(destination).split_path(path)

def join_path(self, destination, *path):
    # Join path segments using the backend's path rules.
    return self._storage(destination).join_path(*path)

def path_in_storage(self, destination, path):
    # Map an arbitrary path to its storage-internal representation.
    return self._storage(destination).path_in_storage(path)
def _storage(self, destination):
    """Return the storage manager for *destination* or raise NoSuchStorage."""
    if destination not in self._storage_managers:
        # "destination not in ..." replaces the unidiomatic "not destination in ...";
        # explicit keyword argument replaces the fragile .format(**locals())
        raise NoSuchStorage("No storage configured for destination {destination}".format(destination=destination))
    return self._storage_managers[destination]
def _add_analysis_result(self, destination, path, result):
    """Attach an analysis *result* to *path*'s metadata, if the storage exists."""
    storage_manager = self._storage_managers.get(destination)
    if storage_manager is None:
        # unknown destination: silently ignore, same as the original
        # "if not destination in ..." guard but with a single dict lookup
        return
    storage_manager.set_additional_metadata(path, "analysis", result)
def _on_analysis_finished(self, entry, result):
    # Analysis-queue callback: persist the result on the analyzed file.
    self._add_analysis_result(entry.location, entry.path, result)
| agpl-3.0 | 2,879,504,384,667,062,000 | 36.745763 | 189 | 0.718584 | false |
aisk/grumpy | compiler/stmt_test.py | 1 | 19368 | # coding=utf-8
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for StatementVisitor."""
from __future__ import unicode_literals
import re
import subprocess
import textwrap
import unittest
import pythonparser
from pythonparser import ast
from grumpy.compiler import block
from grumpy.compiler import shard_test
from grumpy.compiler import stmt
from grumpy.compiler import util
class StatementVisitorTest(unittest.TestCase):
def testAssertNoMsg(self):
self.assertEqual((0, 'AssertionError()\n'), _GrumpRun(textwrap.dedent("""\
try:
assert False
except AssertionError as e:
print repr(e)""")))
def testAssertMsg(self):
want = (0, "AssertionError('foo',)\n")
self.assertEqual(want, _GrumpRun(textwrap.dedent("""\
try:
assert False, 'foo'
except AssertionError as e:
print repr(e)""")))
def testBareAssert(self):
# Assertion errors at the top level of a block should raise:
# https://github.com/google/grumpy/issues/18
want = (0, 'ok\n')
self.assertEqual(want, _GrumpRun(textwrap.dedent("""\
def foo():
assert False
try:
foo()
except AssertionError:
print 'ok'
else:
print 'bad'""")))
def testAssignAttribute(self):
self.assertEqual((0, '123\n'), _GrumpRun(textwrap.dedent("""\
e = Exception()
e.foo = 123
print e.foo""")))
def testAssignName(self):
self.assertEqual((0, 'bar\n'), _GrumpRun(textwrap.dedent("""\
foo = 'bar'
print foo""")))
def testAssignMultiple(self):
self.assertEqual((0, 'baz baz\n'), _GrumpRun(textwrap.dedent("""\
foo = bar = 'baz'
print foo, bar""")))
def testAssignSubscript(self):
self.assertEqual((0, "{'bar': None}\n"), _GrumpRun(textwrap.dedent("""\
foo = {}
foo['bar'] = None
print foo""")))
def testAssignTuple(self):
self.assertEqual((0, 'a b\n'), _GrumpRun(textwrap.dedent("""\
baz = ('a', 'b')
foo, bar = baz
print foo, bar""")))
def testAugAssign(self):
self.assertEqual((0, '42\n'), _GrumpRun(textwrap.dedent("""\
foo = 41
foo += 1
print foo""")))
def testAugAssignBitAnd(self):
self.assertEqual((0, '3\n'), _GrumpRun(textwrap.dedent("""\
foo = 7
foo &= 3
print foo""")))
def testAugAssignUnsupportedOp(self):
expected = 'augmented assignment op not implemented'
self.assertRaisesRegexp(util.ParseError, expected,
_ParseAndVisit, 'foo **= bar')
def testClassDef(self):
self.assertEqual((0, "<type 'type'>\n"), _GrumpRun(textwrap.dedent("""\
class Foo(object):
pass
print type(Foo)""")))
def testClassDefWithVar(self):
self.assertEqual((0, 'abc\n'), _GrumpRun(textwrap.dedent("""\
class Foo(object):
bar = 'abc'
print Foo.bar""")))
def testDeleteAttribute(self):
self.assertEqual((0, 'False\n'), _GrumpRun(textwrap.dedent("""\
class Foo(object):
bar = 42
del Foo.bar
print hasattr(Foo, 'bar')""")))
def testDeleteClassLocal(self):
self.assertEqual((0, 'False\n'), _GrumpRun(textwrap.dedent("""\
class Foo(object):
bar = 'baz'
del bar
print hasattr(Foo, 'bar')""")))
def testDeleteGlobal(self):
self.assertEqual((0, 'False\n'), _GrumpRun(textwrap.dedent("""\
foo = 42
del foo
print 'foo' in globals()""")))
def testDeleteLocal(self):
self.assertEqual((0, 'ok\n'), _GrumpRun(textwrap.dedent("""\
def foo():
bar = 123
del bar
try:
print bar
raise AssertionError
except UnboundLocalError:
print 'ok'
foo()""")))
def testDeleteNonexistentLocal(self):
self.assertRaisesRegexp(
util.ParseError, 'cannot delete nonexistent local',
_ParseAndVisit, 'def foo():\n del bar')
def testDeleteSubscript(self):
self.assertEqual((0, '{}\n'), _GrumpRun(textwrap.dedent("""\
foo = {'bar': 'baz'}
del foo['bar']
print foo""")))
def testExprCall(self):
self.assertEqual((0, 'bar\n'), _GrumpRun(textwrap.dedent("""\
def foo():
print 'bar'
foo()""")))
def testExprNameGlobal(self):
self.assertEqual((0, ''), _GrumpRun(textwrap.dedent("""\
foo = 42
foo""")))
def testExprNameLocal(self):
self.assertEqual((0, ''), _GrumpRun(textwrap.dedent("""\
foo = 42
def bar():
foo
bar()""")))
def testFor(self):
self.assertEqual((0, '1\n2\n3\n'), _GrumpRun(textwrap.dedent("""\
for i in (1, 2, 3):
print i""")))
def testForBreak(self):
self.assertEqual((0, '1\n'), _GrumpRun(textwrap.dedent("""\
for i in (1, 2, 3):
print i
break""")))
def testForContinue(self):
self.assertEqual((0, '1\n2\n3\n'), _GrumpRun(textwrap.dedent("""\
for i in (1, 2, 3):
print i
continue
raise AssertionError""")))
def testForElse(self):
self.assertEqual((0, 'foo\nbar\n'), _GrumpRun(textwrap.dedent("""\
for i in (1,):
print 'foo'
else:
print 'bar'""")))
def testForElseBreakNotNested(self):
self.assertRaisesRegexp(
util.ParseError, "'continue' not in loop",
_ParseAndVisit, 'for i in (1,):\n pass\nelse:\n continue')
def testForElseContinueNotNested(self):
self.assertRaisesRegexp(
util.ParseError, "'continue' not in loop",
_ParseAndVisit, 'for i in (1,):\n pass\nelse:\n continue')
def testFunctionDecorator(self):
self.assertEqual((0, '<b>foo</b>\n'), _GrumpRun(textwrap.dedent("""\
def bold(fn):
return lambda: '<b>' + fn() + '</b>'
@bold
def foo():
return 'foo'
print foo()""")))
def testFunctionDecoratorWithArg(self):
self.assertEqual((0, '<b id=red>foo</b>\n'), _GrumpRun(textwrap.dedent("""\
def tag(name):
def bold(fn):
return lambda: '<b id=' + name + '>' + fn() + '</b>'
return bold
@tag('red')
def foo():
return 'foo'
print foo()""")))
def testFunctionDef(self):
self.assertEqual((0, 'bar baz\n'), _GrumpRun(textwrap.dedent("""\
def foo(a, b):
print a, b
foo('bar', 'baz')""")))
def testFunctionDefGenerator(self):
self.assertEqual((0, "['foo', 'bar']\n"), _GrumpRun(textwrap.dedent("""\
def gen():
yield 'foo'
yield 'bar'
print list(gen())""")))
def testFunctionDefGeneratorReturnValue(self):
self.assertRaisesRegexp(
util.ParseError, 'returning a value in a generator function',
_ParseAndVisit, 'def foo():\n yield 1\n return 2')
def testFunctionDefLocal(self):
self.assertEqual((0, 'baz\n'), _GrumpRun(textwrap.dedent("""\
def foo():
def bar():
print 'baz'
bar()
foo()""")))
def testIf(self):
self.assertEqual((0, 'foo\n'), _GrumpRun(textwrap.dedent("""\
if 123:
print 'foo'
if '':
print 'bar'""")))
def testIfElif(self):
self.assertEqual((0, 'foo\nbar\n'), _GrumpRun(textwrap.dedent("""\
if True:
print 'foo'
elif False:
print 'bar'
if False:
print 'foo'
elif True:
print 'bar'""")))
def testIfElse(self):
self.assertEqual((0, 'foo\nbar\n'), _GrumpRun(textwrap.dedent("""\
if True:
print 'foo'
else:
print 'bar'
if False:
print 'foo'
else:
print 'bar'""")))
def testImport(self):
self.assertEqual((0, "<type 'dict'>\n"), _GrumpRun(textwrap.dedent("""\
import sys
print type(sys.modules)""")))
def testImportConflictingPackage(self):
self.assertEqual((0, ''), _GrumpRun(textwrap.dedent("""\
import time
from __go__.time import Now""")))
def testImportNative(self):
self.assertEqual((0, '1 1000000000\n'), _GrumpRun(textwrap.dedent("""\
from __go__.time import Nanosecond, Second
print Nanosecond, Second""")))
def testImportGrump(self):
self.assertEqual((0, ''), _GrumpRun(textwrap.dedent("""\
from __go__.grumpy import Assert
Assert(__frame__(), True, 'bad')""")))
def testImportNativeModuleRaises(self):
regexp = r'for native imports use "from __go__\.xyz import \.\.\." syntax'
self.assertRaisesRegexp(util.ParseError, regexp, _ParseAndVisit,
'import __go__.foo')
def testImportNativeType(self):
self.assertEqual((0, "<type 'Duration'>\n"), _GrumpRun(textwrap.dedent("""\
from __go__.time import type_Duration as Duration
print Duration""")))
def testPrintStatement(self):
self.assertEqual((0, 'abc 123\nfoo bar\n'), _GrumpRun(textwrap.dedent("""\
print 'abc',
print '123'
print 'foo', 'bar'""")))
def testImportFromFuture(self):
    """Each __future__ import statement maps to the expected feature flags."""
    testcases = [
        ('from __future__ import print_function', stmt.FUTURE_PRINT_FUNCTION),
        ('from __future__ import generators', 0),
        ('from __future__ import generators, print_function',
         stmt.FUTURE_PRINT_FUNCTION),
    ]
    for i, (source, want_flags) in enumerate(testcases):
        node = pythonparser.parse(textwrap.dedent(source)).body[0]
        got = stmt.import_from_future(node)
        self.assertEqual(want_flags, got,
                         msg='#{}: want {}, got {}'.format(i, want_flags, got))
def testImportFromFutureParseError(self):
testcases = [
# NOTE: move this group to testImportFromFuture as they are implemented
# by grumpy
('from __future__ import absolute_import',
r'future feature \w+ not yet implemented'),
('from __future__ import division',
r'future feature \w+ not yet implemented'),
('from __future__ import unicode_literals',
r'future feature \w+ not yet implemented'),
('from __future__ import braces', 'not a chance'),
('from __future__ import nonexistant_feature',
r'future feature \w+ is not defined'),
]
for tc in testcases:
source, want_regexp = tc
mod = pythonparser.parse(source)
node = mod.body[0]
self.assertRaisesRegexp(util.ParseError, want_regexp,
stmt.import_from_future, node)
def testImportWildcardMemberRaises(self):
regexp = r'wildcard member import is not implemented: from foo import *'
self.assertRaisesRegexp(util.ParseError, regexp, _ParseAndVisit,
'from foo import *')
regexp = (r'wildcard member import is not '
r'implemented: from __go__.foo import *')
self.assertRaisesRegexp(util.ParseError, regexp, _ParseAndVisit,
'from __go__.foo import *')
def testVisitFuture(self):
testcases = [
('from __future__ import print_function',
stmt.FUTURE_PRINT_FUNCTION, 1),
("""\
"module docstring"
from __future__ import print_function
""", stmt.FUTURE_PRINT_FUNCTION, 3),
("""\
"module docstring"
from __future__ import print_function, with_statement
from __future__ import nested_scopes
""", stmt.FUTURE_PRINT_FUNCTION, 4),
]
for tc in testcases:
source, flags, lineno = tc
mod = pythonparser.parse(textwrap.dedent(source))
future_features = stmt.visit_future(mod)
self.assertEqual(future_features.parser_flags, flags)
self.assertEqual(future_features.future_lineno, lineno)
def testVisitFutureParseError(self):
testcases = [
# future after normal imports
"""\
import os
from __future__ import print_function
""",
# future after non-docstring expression
"""
asd = 123
from __future__ import print_function
"""
]
for source in testcases:
mod = pythonparser.parse(textwrap.dedent(source))
self.assertRaisesRegexp(util.ParseError, stmt.late_future,
stmt.visit_future, mod)
def testFutureFeaturePrintFunction(self):
want = "abc\n123\nabc 123\nabcx123\nabc 123 "
self.assertEqual((0, want), _GrumpRun(textwrap.dedent("""\
"module docstring is ok to proceed __future__"
from __future__ import print_function
print('abc')
print(123)
print('abc', 123)
print('abc', 123, sep='x')
print('abc', 123, end=' ')""")))
def testRaiseExitStatus(self):
self.assertEqual(1, _GrumpRun('raise Exception')[0])
def testRaiseInstance(self):
self.assertEqual((0, 'foo\n'), _GrumpRun(textwrap.dedent("""\
try:
raise RuntimeError('foo')
print 'bad'
except RuntimeError as e:
print e""")))
def testRaiseTypeAndArg(self):
self.assertEqual((0, 'foo\n'), _GrumpRun(textwrap.dedent("""\
try:
raise KeyError('foo')
print 'bad'
except KeyError as e:
print e""")))
def testRaiseAgain(self):
self.assertEqual((0, 'foo\n'), _GrumpRun(textwrap.dedent("""\
try:
try:
raise AssertionError('foo')
except AssertionError:
raise
except Exception as e:
print e""")))
def testRaiseTraceback(self):
self.assertEqual((0, ''), _GrumpRun(textwrap.dedent("""\
import sys
try:
try:
raise Exception
except:
e, _, tb = sys.exc_info()
raise e, None, tb
except:
e2, _, tb2 = sys.exc_info()
assert e is e2
assert tb is tb2""")))
def testReturn(self):
self.assertEqual((0, 'bar\n'), _GrumpRun(textwrap.dedent("""\
def foo():
return 'bar'
print foo()""")))
def testTryBareExcept(self):
self.assertEqual((0, ''), _GrumpRun(textwrap.dedent("""\
try:
raise AssertionError
except:
pass""")))
def testTryElse(self):
self.assertEqual((0, 'foo baz\n'), _GrumpRun(textwrap.dedent("""\
try:
print 'foo',
except:
print 'bar'
else:
print 'baz'""")))
def testTryMultipleExcept(self):
self.assertEqual((0, 'bar\n'), _GrumpRun(textwrap.dedent("""\
try:
raise AssertionError
except RuntimeError:
print 'foo'
except AssertionError:
print 'bar'
except:
print 'baz'""")))
def testTryFinally(self):
result = _GrumpRun(textwrap.dedent("""\
try:
print 'foo',
finally:
print 'bar'
try:
print 'foo',
raise Exception
finally:
print 'bar'"""))
self.assertEqual(1, result[0])
# Some platforms show "exit status 1" message so don't test strict equality.
self.assertIn('foo bar\nfoo bar\nException\n', result[1])
def testWhile(self):
self.assertEqual((0, '2\n1\n'), _GrumpRun(textwrap.dedent("""\
i = 2
while i:
print i
i -= 1""")))
def testWhileElse(self):
self.assertEqual((0, 'bar\n'), _GrumpRun(textwrap.dedent("""\
while False:
print 'foo'
else:
print 'bar'""")))
def testWith(self):
self.assertEqual((0, 'enter\n1\nexit\nenter\n2\nexit\n3\n'),
_GrumpRun(textwrap.dedent("""\
class ContextManager(object):
def __enter__(self):
print "enter"
def __exit__(self, exc_type, value, traceback):
print "exit"
a = ContextManager()
with a:
print 1
try:
with a:
print 2
raise RuntimeError
except RuntimeError:
print 3
""")))
def testWithAs(self):
self.assertEqual((0, '1 2 3\n'),
_GrumpRun(textwrap.dedent("""\
class ContextManager(object):
def __enter__(self):
return (1, (2, 3))
def __exit__(self, *args):
pass
with ContextManager() as [x, (y, z)]:
print x, y, z
""")))
def testWriteExceptDispatcherBareExcept(self):
    # A typed handler followed by a bare "except:" should emit two dispatch
    # labels, with the typed handler tested first.
    visitor = stmt.StatementVisitor(_MakeModuleBlock())
    handlers = [ast.ExceptHandler(type=ast.Name(id='foo')),
                ast.ExceptHandler(type=None)]
    self.assertEqual(visitor._write_except_dispatcher(  # pylint: disable=protected-access
        'exc', 'tb', handlers), [1, 2])
    expected = re.compile(r'ResolveGlobal\(.*foo.*\bIsInstance\(.*'
                          r'goto Label1.*goto Label2', re.DOTALL)
    self.assertRegexpMatches(visitor.writer.getvalue(), expected)

def testWriteExceptDispatcherBareExceptionNotLast(self):
    # A bare "except:" anywhere but last must be rejected as a parse error.
    visitor = stmt.StatementVisitor(_MakeModuleBlock())
    handlers = [ast.ExceptHandler(type=None),
                ast.ExceptHandler(type=ast.Name(id='foo'))]
    self.assertRaisesRegexp(util.ParseError, r"default 'except:' must be last",
                            visitor._write_except_dispatcher,  # pylint: disable=protected-access
                            'exc', 'tb', handlers)

def testWriteExceptDispatcherMultipleExcept(self):
    # Two typed handlers: generated code must test each type in order and
    # re-raise the exception when neither one matches.
    visitor = stmt.StatementVisitor(_MakeModuleBlock())
    handlers = [ast.ExceptHandler(type=ast.Name(id='foo')),
                ast.ExceptHandler(type=ast.Name(id='bar'))]
    self.assertEqual(visitor._write_except_dispatcher(  # pylint: disable=protected-access
        'exc', 'tb', handlers), [1, 2])
    expected = re.compile(
        r'ResolveGlobal\(.*foo.*\bif .*\bIsInstance\(.*\{.*goto Label1.*'
        r'ResolveGlobal\(.*bar.*\bif .*\bIsInstance\(.*\{.*goto Label2.*'
        r'\bRaise\(exc\.ToObject\(\), nil, tb\.ToObject\(\)\)', re.DOTALL)
    self.assertRegexpMatches(visitor.writer.getvalue(), expected)
def _MakeModuleBlock():
return block.ModuleBlock('__main__', 'grumpy', 'grumpy/lib', '<test>', '',
stmt.FutureFeatures())
def _ParseAndVisit(source):
mod = pythonparser.parse(source)
future_features = stmt.visit_future(mod)
b = block.ModuleBlock('__main__', 'grumpy', 'grumpy/lib', '<test>',
source, future_features)
visitor = stmt.StatementVisitor(b)
visitor.visit(mod)
return visitor
def _GrumpRun(cmd):
    """Pipe *cmd* through grumprun, returning (exit status, combined output)."""
    proc = subprocess.Popen(['grumprun'], stdin=subprocess.PIPE,
                            stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    output, _ = proc.communicate(cmd)
    return proc.returncode, output
if __name__ == '__main__':
shard_test.main()
| apache-2.0 | -3,171,470,073,996,481,000 | 29.939297 | 97 | 0.572955 | false |
rr-/szurubooru | server/szurubooru/tests/api/test_comment_deleting.py | 1 | 2270 | import pytest
from szurubooru import api, db, errors, model
from szurubooru.func import comments
@pytest.fixture(autouse=True)
def inject_config(config_injector):
    # Applied automatically to every test in this module: regular users may
    # delete their own comments, moderators may delete anyone's.
    config_injector(
        {
            "privileges": {
                "comments:delete:own": model.User.RANK_REGULAR,
                "comments:delete:any": model.User.RANK_MODERATOR,
            },
        }
    )
def test_deleting_own_comment(user_factory, comment_factory, context_factory):
    """A user may delete a comment they authored themselves."""
    author = user_factory()
    own_comment = comment_factory(user=author)
    db.session.add(own_comment)
    db.session.commit()
    ctx = context_factory(params={"version": 1}, user=author)
    result = api.comment_api.delete_comment(
        ctx, {"comment_id": own_comment.comment_id}
    )
    assert result == {}
    assert db.session.query(model.Comment).count() == 0
def test_deleting_someones_else_comment(
    user_factory, comment_factory, context_factory
):
    """A moderator may delete a comment written by somebody else."""
    author = user_factory(rank=model.User.RANK_REGULAR)
    moderator = user_factory(rank=model.User.RANK_MODERATOR)
    comment = comment_factory(user=author)
    db.session.add(comment)
    db.session.commit()
    ctx = context_factory(params={"version": 1}, user=moderator)
    api.comment_api.delete_comment(ctx, {"comment_id": comment.comment_id})
    assert db.session.query(model.Comment).count() == 0
def test_trying_to_delete_someones_else_comment_without_privileges(
    user_factory, comment_factory, context_factory
):
    """A regular user must not be able to delete another user's comment."""
    author = user_factory(rank=model.User.RANK_REGULAR)
    other_user = user_factory(rank=model.User.RANK_REGULAR)
    comment = comment_factory(user=author)
    db.session.add(comment)
    db.session.commit()
    ctx = context_factory(params={"version": 1}, user=other_user)
    with pytest.raises(errors.AuthError):
        api.comment_api.delete_comment(ctx, {"comment_id": comment.comment_id})
    # the comment must survive the failed attempt
    assert db.session.query(model.Comment).count() == 1
def test_trying_to_delete_non_existing(user_factory, context_factory):
    """Deleting an unknown comment id raises CommentNotFoundError."""
    ctx = context_factory(
        params={"version": 1},
        user=user_factory(rank=model.User.RANK_REGULAR),
    )
    with pytest.raises(comments.CommentNotFoundError):
        api.comment_api.delete_comment(ctx, {"comment_id": 1})
| gpl-3.0 | 2,521,386,937,692,045,300 | 30.971831 | 78 | 0.644934 | false |
khalibartan/Antidote-DM | Antidotes DM/youtube_dl/extractor/srgssr.py | 7 | 6606 | # coding: utf-8
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import (
ExtractorError,
parse_iso8601,
qualities,
)
class SRGSSRIE(InfoExtractor):
    # Matches both tp.srgssr.ch player URLs and internal "srgssr:bu:type:id"
    # scheme references (bu = business unit: srf/rts/rsi/rtr/swi).
    _VALID_URL = r'(?:https?://tp\.srgssr\.ch/p(?:/[^/]+)+\?urn=urn|srgssr):(?P<bu>srf|rts|rsi|rtr|swi):(?:[^:]+:)?(?P<type>video|audio):(?P<id>[0-9a-f\-]{36}|\d+)'

    # Known "block" reason codes returned by the API, mapped to user-facing
    # error messages.
    _ERRORS = {
        'AGERATING12': 'To protect children under the age of 12, this video is only available between 8 p.m. and 6 a.m.',
        'AGERATING18': 'To protect children under the age of 18, this video is only available between 11 p.m. and 5 a.m.',
        # 'ENDDATE': 'For legal reasons, this video was only available for a specified period of time.',
        'GEOBLOCK': 'For legal reasons, this video is only available in Switzerland.',
        'LEGAL': 'The video cannot be transmitted for legal reasons.',
        'STARTDATE': 'This video is not yet available. Please try again later.',
    }

    def get_media_data(self, bu, media_type, media_id):
        """Fetch the integration-layer JSON for a media item.

        Raises ExtractorError with a human-readable message when the item
        carries one of the known blocking reason codes.
        """
        media_data = self._download_json(
            'http://il.srgssr.ch/integrationlayer/1.0/ue/%s/%s/play/%s.json' % (bu, media_type, media_id),
            media_id)[media_type.capitalize()]

        if media_data.get('block') and media_data['block'] in self._ERRORS:
            raise ExtractorError('%s said: %s' % (
                self.IE_NAME, self._ERRORS[media_data['block']]), expected=True)

        return media_data

    def _real_extract(self, url):
        bu, media_type, media_id = re.match(self._VALID_URL, url).groups()

        if bu == 'rts':
            # RTS content has a dedicated extractor; delegate to it.
            return self.url_result('rts:%s' % media_id, 'RTS')

        media_data = self.get_media_data(bu, media_type, media_id)

        metadata = media_data['AssetMetadatas']['AssetMetadata'][0]
        title = metadata['title']
        description = metadata.get('description')
        # creation date may live on the media entry or on its metadata
        created_date = media_data.get('createdDate') or metadata.get('createdDate')
        timestamp = parse_iso8601(created_date)

        thumbnails = [{
            'id': image.get('id'),
            'url': image['url'],
        } for image in media_data.get('Image', {}).get('ImageRepresentations', {}).get('ImageRepresentation', [])]

        # Rank formats by declared quality label, lowest to highest.
        preference = qualities(['LQ', 'MQ', 'SD', 'HQ', 'HD'])
        formats = []
        for source in media_data.get('Playlists', {}).get('Playlist', []) + media_data.get('Downloads', {}).get('Download', []):
            protocol = source.get('@protocol')
            for asset in source['url']:
                asset_url = asset['text']
                quality = asset['@quality']
                format_id = '%s-%s' % (protocol, quality)
                if protocol == 'HTTP-HDS':
                    formats.extend(self._extract_f4m_formats(
                        asset_url + '?hdcore=3.4.0', media_id,
                        f4m_id=format_id, fatal=False))
                elif protocol == 'HTTP-HLS':
                    formats.extend(self._extract_m3u8_formats(
                        asset_url, media_id, 'mp4', 'm3u8_native',
                        m3u8_id=format_id, fatal=False))
                else:
                    ext = None
                    if protocol == 'RTMP':
                        # derive the container from the RTMP scheme prefix,
                        # e.g. "mp4:..." -> ext "mp4"
                        ext = self._search_regex(r'([a-z0-9]+):[^/]+', asset_url, 'ext')
                    formats.append({
                        'format_id': format_id,
                        'url': asset_url,
                        'preference': preference(quality),
                        'ext': ext,
                    })
        self._sort_formats(formats)

        return {
            'id': media_id,
            'title': title,
            'description': description,
            'timestamp': timestamp,
            'thumbnails': thumbnails,
            'formats': formats,
        }
class SRGSSRPlayIE(InfoExtractor):
    # Thin wrapper around SRGSSRIE: parses the public "play" site URLs and
    # re-dispatches them as internal srgssr: references.
    IE_DESC = 'srf.ch, rts.ch, rsi.ch, rtr.ch and swissinfo.ch play sites'
    _VALID_URL = r'https?://(?:(?:www|play)\.)?(?P<bu>srf|rts|rsi|rtr|swissinfo)\.ch/play/(?:tv|radio)/[^/]+/(?P<type>video|audio)/[^?]+\?id=(?P<id>[0-9a-f\-]{36}|\d+)'

    _TESTS = [{
        'url': 'http://www.srf.ch/play/tv/10vor10/video/snowden-beantragt-asyl-in-russland?id=28e1a57d-5b76-4399-8ab3-9097f071e6c5',
        'md5': '4cd93523723beff51bb4bee974ee238d',
        'info_dict': {
            'id': '28e1a57d-5b76-4399-8ab3-9097f071e6c5',
            'ext': 'm4v',
            'upload_date': '20130701',
            'title': 'Snowden beantragt Asyl in Russland',
            'timestamp': 1372713995,
        }
    }, {
        # No Speichern (Save) button
        'url': 'http://www.srf.ch/play/tv/top-gear/video/jaguar-xk120-shadow-und-tornado-dampflokomotive?id=677f5829-e473-4823-ac83-a1087fe97faa',
        'md5': '0a274ce38fda48c53c01890651985bc6',
        'info_dict': {
            'id': '677f5829-e473-4823-ac83-a1087fe97faa',
            'ext': 'flv',
            'upload_date': '20130710',
            'title': 'Jaguar XK120, Shadow und Tornado-Dampflokomotive',
            'description': 'md5:88604432b60d5a38787f152dec89cd56',
            'timestamp': 1373493600,
        },
    }, {
        'url': 'http://www.rtr.ch/play/radio/actualitad/audio/saira-tujetsch-tuttina-cuntinuar-cun-sedrun-muster-turissem?id=63cb0778-27f8-49af-9284-8c7a8c6d15fc',
        'info_dict': {
            'id': '63cb0778-27f8-49af-9284-8c7a8c6d15fc',
            'ext': 'mp3',
            'upload_date': '20151013',
            'title': 'Saira: Tujetsch - tuttina cuntinuar cun Sedrun Mustér Turissem',
            'timestamp': 1444750398,
        },
        'params': {
            # rtmp download
            'skip_download': True,
        },
    }, {
        'url': 'http://www.rts.ch/play/tv/-/video/le-19h30?id=6348260',
        'md5': '67a2a9ae4e8e62a68d0e9820cc9782df',
        'info_dict': {
            'id': '6348260',
            'display_id': '6348260',
            'ext': 'mp4',
            'duration': 1796,
            'title': 'Le 19h30',
            'description': '',
            'uploader': '19h30',
            'upload_date': '20141201',
            'timestamp': 1417458600,
            'thumbnail': 're:^https?://.*\.image',
            'view_count': int,
        },
        'params': {
            # m3u8 download
            'skip_download': True,
        }
    }]

    def _real_extract(self, url):
        bu, media_type, media_id = re.match(self._VALID_URL, url).groups()
        # other info can be extracted from url + '&layout=json'
        # "swissinfo" is shortened to the "swi" business-unit code expected
        # by SRGSSRIE; the other bu names are already three letters long.
        return self.url_result('srgssr:%s:%s:%s' % (bu[:3], media_type, media_id), 'SRGSSR')
| gpl-2.0 | 6,193,893,812,252,663,000 | 40.803797 | 168 | 0.534746 | false |
alirizakeles/zato | code/zato-common/src/zato/common/ipc/__init__.py | 1 | 3629 | # -*- coding: utf-8 -*-
"""
Copyright (C) 2016, Zato Source s.r.o. https://zato.io
Licensed under LGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
import os
from datetime import datetime
from tempfile import gettempdir
# ZeroMQ
import zmq.green as zmq
# Zato
from zato.common import DATA_FORMAT, NO_DEFAULT_VALUE
from zato.common.util import get_logger_for_class, make_repr, new_cid, spawn_greenlet
# ################################################################################################################################
class Request(object):
    """A single IPC request message exchanged between Zato processes."""

    def __init__(self, publisher_tag, publisher_pid, payload='', request_id=None):
        self.publisher_tag = publisher_tag
        self.publisher_pid = publisher_pid
        self.action = NO_DEFAULT_VALUE
        self.service = ''
        self._payload = payload
        # payload_type always holds the type's *name*, e.g. 'dict'
        self.payload_type = type(payload).__name__
        self.data_format = DATA_FORMAT.DICT
        self.request_id = request_id or 'ipc.{}'.format(new_cid())
        self.target_pid = None
        self.reply_to_tag = ''
        self.reply_to_fifo = ''
        self.in_reply_to = ''
        self.creation_time_utc = datetime.utcnow()

    @property
    def payload(self):
        return self._payload

    @payload.setter
    def payload(self, value):
        self._payload = value
        # Bug fix: keep payload_type consistent with __init__, which stores
        # the type's name (a string) -- the setter previously stored the type
        # object itself (type(self._payload)).
        self.payload_type = type(value).__name__

    def __repr__(self):
        return make_repr(self)
# ################################################################################################################################
class IPCBase(object):
    """ Base class for core IPC objects.
    """
    def __init__(self, name, pid):
        self.name = name
        self.pid = pid
        self.ctx = zmq.Context()
        # Socket setup runs in a greenlet so __init__ does not block;
        # NOTE(review): log_connected below may therefore run before the
        # sockets actually exist -- confirm the intended ordering.
        spawn_greenlet(self.set_up_sockets)
        self.keep_running = True
        self.logger = get_logger_for_class(self.__class__)
        self.log_connected()

    def __repr__(self):
        return make_repr(self)

    def set_up_sockets(self):
        # Subclasses create their ZeroMQ socket(s) here.
        raise NotImplementedError('Needs to be implemented in subclasses')

    def log_connected(self):
        # Subclasses log their connection details here.
        raise NotImplementedError('Needs to be implemented in subclasses')

    def close(self):
        # Subclasses release their socket(s) and context here.
        raise NotImplementedError('Needs to be implemented in subclasses')
# ################################################################################################################################
class IPCEndpoint(IPCBase):
    """ A participant in IPC conversations, i.e. either publisher or subscriber.
    """
    socket_method = None  # e.g. 'bind' or 'connect' -- set by subclasses
    socket_type = None    # e.g. 'pub' or 'sub' -- set by subclasses

    def __init__(self, name, pid):
        self.address = self.get_address(name)
        super(IPCEndpoint, self).__init__(name, pid)

    def get_address(self, address):
        # The ipc:// path suffix is the *opposite* of this endpoint's own
        # socket type ('sub' endpoints resolve to a '-pub' path and vice
        # versa) -- NOTE(review): confirm how pub/sub pairs are matched.
        ipc_socket_type = 'pub' if self.socket_type == 'sub' else 'sub'
        return 'ipc://{}'.format(os.path.join(gettempdir(), 'zato-ipc-{}-{}'.format(address, ipc_socket_type)))

    def set_up_sockets(self):
        self.socket = self.ctx.socket(getattr(zmq, self.socket_type.upper()))
        self.socket.setsockopt(zmq.LINGER, 0)  # drop pending messages on close
        getattr(self.socket, self.socket_method)(self.address)

    def log_connected(self):
        self.logger.info('Established %s/%s to %s (pid: %s)', self.socket_type, self.socket_method, self.address, self.pid)

    def close(self):
        self.keep_running = False
        self.socket.close()
        self.ctx.term()
# ################################################################################################################################
| gpl-3.0 | -7,792,049,101,765,687,000 | 32.601852 | 130 | 0.542574 | false |
diegocortassa/TACTIC | src/context/client/tactic-api-python-4.0.api04/Lib/xmllib.py | 25 | 35795 | """A parser for XML, using the derived class as static DTD."""
# Author: Sjoerd Mullender.
import re
import string
import warnings
# Emit a DeprecationWarning at import time (stacklevel 2 points at the
# importer), then drop the name so it is not re-exported by the module.
warnings.warn("The xmllib module is obsolete. Use xml.sax instead.",
              DeprecationWarning, 2)
del warnings
# Historical module version string, kept for backward compatibility.
version = '0.3'
class Error(RuntimeError):
    """Fatal parsing error raised for malformed or unsupported XML."""
    pass
# Regular expressions used for parsing
_S = '[ \t\r\n]+' # white space
_opS = '[ \t\r\n]*' # optional white space
_Name = '[a-zA-Z_:][-a-zA-Z0-9._:]*' # valid XML name
_QStr = "(?:'[^']*'|\"[^\"]*\")" # quoted XML string
illegal = re.compile('[^\t\r\n -\176\240-\377]') # illegal chars in content
interesting = re.compile('[]&<]')
amp = re.compile('&')
ref = re.compile('&(' + _Name + '|#[0-9]+|#x[0-9a-fA-F]+)[^-a-zA-Z0-9._:]')
entityref = re.compile('&(?P<name>' + _Name + ')[^-a-zA-Z0-9._:]')
charref = re.compile('&#(?P<char>[0-9]+[^0-9]|x[0-9a-fA-F]+[^0-9a-fA-F])')
space = re.compile(_S + '$')
newline = re.compile('\n')
attrfind = re.compile(
_S + '(?P<name>' + _Name + ')'
'(' + _opS + '=' + _opS +
'(?P<value>'+_QStr+'|[-a-zA-Z0-9.:+*%?!\(\)_#=~]+))?')
starttagopen = re.compile('<' + _Name)
starttagend = re.compile(_opS + '(?P<slash>/?)>')
starttagmatch = re.compile('<(?P<tagname>'+_Name+')'
'(?P<attrs>(?:'+attrfind.pattern+')*)'+
starttagend.pattern)
endtagopen = re.compile('</')
endbracket = re.compile(_opS + '>')
endbracketfind = re.compile('(?:[^>\'"]|'+_QStr+')*>')
tagfind = re.compile(_Name)
cdataopen = re.compile(r'<!\[CDATA\[')
cdataclose = re.compile(r'\]\]>')
# this matches one of the following:
# SYSTEM SystemLiteral
# PUBLIC PubidLiteral SystemLiteral
_SystemLiteral = '(?P<%s>'+_QStr+')'
_PublicLiteral = '(?P<%s>"[-\'\(\)+,./:=?;!*#@$_%% \n\ra-zA-Z0-9]*"|' \
"'[-\(\)+,./:=?;!*#@$_%% \n\ra-zA-Z0-9]*')"
_ExternalId = '(?:SYSTEM|' \
'PUBLIC'+_S+_PublicLiteral%'pubid'+ \
')'+_S+_SystemLiteral%'syslit'
doctype = re.compile('<!DOCTYPE'+_S+'(?P<name>'+_Name+')'
'(?:'+_S+_ExternalId+')?'+_opS)
xmldecl = re.compile('<\?xml'+_S+
'version'+_opS+'='+_opS+'(?P<version>'+_QStr+')'+
'(?:'+_S+'encoding'+_opS+'='+_opS+
"(?P<encoding>'[A-Za-z][-A-Za-z0-9._]*'|"
'"[A-Za-z][-A-Za-z0-9._]*"))?'
'(?:'+_S+'standalone'+_opS+'='+_opS+
'(?P<standalone>\'(?:yes|no)\'|"(?:yes|no)"))?'+
_opS+'\?>')
procopen = re.compile(r'<\?(?P<proc>' + _Name + ')' + _opS)
procclose = re.compile(_opS + r'\?>')
commentopen = re.compile('<!--')
commentclose = re.compile('-->')
doubledash = re.compile('--')
attrtrans = string.maketrans(' \r\n\t', ' ')
# definitions for XML namespaces
_NCName = '[a-zA-Z_][-a-zA-Z0-9._]*' # XML Name, minus the ":"
ncname = re.compile(_NCName + '$')
qname = re.compile('(?:(?P<prefix>' + _NCName + '):)?' # optional prefix
'(?P<local>' + _NCName + ')$')
xmlns = re.compile('xmlns(?::(?P<ncname>'+_NCName+'))?$')
# XML parser base class -- find tags and call handler functions.
# Usage: p = XMLParser(); p.feed(data); ...; p.close().
# The dtd is defined by deriving a class which defines methods with
# special names to handle tags: start_foo and end_foo to handle <foo>
# and </foo>, respectively. The data between tags is passed to the
# parser by calling self.handle_data() with some data as argument (the
# data may be split up in arbitrary chunks).
class XMLParser:
    """Event-driven XML parser (Python 2, obsolete stdlib module).

    Subclasses handle tags by defining ``start_foo``/``end_foo`` methods
    (or by overriding the ``elements`` mapping) and receive character data
    via ``handle_data``.  Feed input incrementally with ``feed`` and finish
    with ``close``.
    """
    attributes = {} # default, to be overridden
    elements = {} # default, to be overridden
    # parsing options, settable using keyword args in __init__
    __accept_unquoted_attributes = 0
    __accept_missing_endtag_name = 0
    __map_case = 0
    __accept_utf8 = 0
    __translate_attribute_references = 1
    # Interface -- initialize and reset this instance
    def __init__(self, **kw):
        self.__fixed = 0
        if 'accept_unquoted_attributes' in kw:
            self.__accept_unquoted_attributes = kw['accept_unquoted_attributes']
        if 'accept_missing_endtag_name' in kw:
            self.__accept_missing_endtag_name = kw['accept_missing_endtag_name']
        if 'map_case' in kw:
            self.__map_case = kw['map_case']
        if 'accept_utf8' in kw:
            self.__accept_utf8 = kw['accept_utf8']
        if 'translate_attribute_references' in kw:
            self.__translate_attribute_references = kw['translate_attribute_references']
        self.reset()
    def __fixelements(self):
        # Build self.elements from start_*/end_* methods found on the
        # instance and the whole class hierarchy.
        self.__fixed = 1
        self.elements = {}
        self.__fixdict(self.__dict__)
        self.__fixclass(self.__class__)
    def __fixclass(self, kl):
        # Recursively collect handlers from a class and its bases.
        self.__fixdict(kl.__dict__)
        for k in kl.__bases__:
            self.__fixclass(k)
    def __fixdict(self, dict):
        # Register start_*/end_* entries, earliest definition winning.
        for key in dict.keys():
            if key[:6] == 'start_':
                tag = key[6:]
                start, end = self.elements.get(tag, (None, None))
                if start is None:
                    self.elements[tag] = getattr(self, key), end
            elif key[:4] == 'end_':
                tag = key[4:]
                start, end = self.elements.get(tag, (None, None))
                if end is None:
                    self.elements[tag] = start, getattr(self, key)
    # Interface -- reset this instance. Loses all unprocessed data
    def reset(self):
        self.rawdata = ''
        self.stack = []
        self.nomoretags = 0
        self.literal = 0
        self.lineno = 1
        self.__at_start = 1
        self.__seen_doctype = None
        self.__seen_starttag = 0
        self.__use_namespaces = 0
        self.__namespaces = {'xml':None} # xml is implicitly declared
        # backward compatibility hack: if elements not overridden,
        # fill it in ourselves
        if self.elements is XMLParser.elements:
            self.__fixelements()
    # For derived classes only -- enter literal mode (CDATA) till EOF
    def setnomoretags(self):
        self.nomoretags = self.literal = 1
    # For derived classes only -- enter literal mode (CDATA)
    def setliteral(self, *args):
        self.literal = 1
    # Interface -- feed some data to the parser. Call this as
    # often as you want, with as little or as much text as you
    # want (may include '\n'). (This just saves the text, all the
    # processing is done by goahead().)
    def feed(self, data):
        self.rawdata = self.rawdata + data
        self.goahead(0)
    # Interface -- handle the remaining data
    def close(self):
        self.goahead(1)
        if self.__fixed:
            self.__fixed = 0
            # remove self.elements so that we don't leak
            del self.elements
    # Interface -- translate references
    def translate_references(self, data, all = 1):
        """Expand character and (if *all*) entity references in *data*."""
        if not self.__translate_attribute_references:
            return data
        i = 0
        while 1:
            res = amp.search(data, i)
            if res is None:
                return data
            s = res.start(0)
            res = ref.match(data, s)
            if res is None:
                self.syntax_error("bogus `&'")
                i = s+1
                continue
            i = res.end(0)
            str = res.group(1)
            rescan = 0
            if str[0] == '#':
                # Numeric character reference, decimal or hex.
                if str[1] == 'x':
                    str = chr(int(str[2:], 16))
                else:
                    str = chr(int(str[1:]))
                if data[i - 1] != ';':
                    self.syntax_error("`;' missing after char reference")
                    i = i-1
            elif all:
                if str in self.entitydefs:
                    str = self.entitydefs[str]
                    # Replacement text may itself contain references.
                    rescan = 1
                elif data[i - 1] != ';':
                    self.syntax_error("bogus `&'")
                    i = s + 1 # just past the &
                    continue
                else:
                    self.syntax_error("reference to unknown entity `&%s;'" % str)
                    str = '&' + str + ';'
            elif data[i - 1] != ';':
                self.syntax_error("bogus `&'")
                i = s + 1 # just past the &
                continue
            # when we get here, str contains the translated text and i points
            # to the end of the string that is to be replaced
            data = data[:s] + str + data[i:]
            if rescan:
                i = s
            else:
                i = s + len(str)
    # Interface - return a dictionary of all namespaces currently valid
    def getnamespace(self):
        nsdict = {}
        for t, d, nst in self.stack:
            nsdict.update(d)
        return nsdict
    # Internal -- handle data as far as reasonable. May leave state
    # and data to be processed by a subsequent call. If 'end' is
    # true, force handling all data as if followed by EOF marker.
    def goahead(self, end):
        """Main parsing loop: dispatch on tags, references and data."""
        rawdata = self.rawdata
        i = 0
        n = len(rawdata)
        while i < n:
            if i > 0:
                self.__at_start = 0
            if self.nomoretags:
                data = rawdata[i:n]
                self.handle_data(data)
                self.lineno = self.lineno + data.count('\n')
                i = n
                break
            res = interesting.search(rawdata, i)
            if res:
                j = res.start(0)
            else:
                j = n
            if i < j:
                data = rawdata[i:j]
                if self.__at_start and space.match(data) is None:
                    self.syntax_error('illegal data at start of file')
                self.__at_start = 0
                if not self.stack and space.match(data) is None:
                    self.syntax_error('data not in content')
                if not self.__accept_utf8 and illegal.search(data):
                    self.syntax_error('illegal character in content')
                self.handle_data(data)
                self.lineno = self.lineno + data.count('\n')
            i = j
            if i == n: break
            if rawdata[i] == '<':
                if starttagopen.match(rawdata, i):
                    if self.literal:
                        data = rawdata[i]
                        self.handle_data(data)
                        self.lineno = self.lineno + data.count('\n')
                        i = i+1
                        continue
                    k = self.parse_starttag(i)
                    if k < 0: break
                    self.__seen_starttag = 1
                    self.lineno = self.lineno + rawdata[i:k].count('\n')
                    i = k
                    continue
                if endtagopen.match(rawdata, i):
                    k = self.parse_endtag(i)
                    if k < 0: break
                    self.lineno = self.lineno + rawdata[i:k].count('\n')
                    i = k
                    continue
                if commentopen.match(rawdata, i):
                    if self.literal:
                        data = rawdata[i]
                        self.handle_data(data)
                        self.lineno = self.lineno + data.count('\n')
                        i = i+1
                        continue
                    k = self.parse_comment(i)
                    if k < 0: break
                    self.lineno = self.lineno + rawdata[i:k].count('\n')
                    i = k
                    continue
                if cdataopen.match(rawdata, i):
                    k = self.parse_cdata(i)
                    if k < 0: break
                    self.lineno = self.lineno + rawdata[i:k].count('\n')
                    i = k
                    continue
                res = xmldecl.match(rawdata, i)
                if res:
                    if not self.__at_start:
                        self.syntax_error("<?xml?> declaration not at start of document")
                    version, encoding, standalone = res.group('version',
                                                              'encoding',
                                                              'standalone')
                    if version[1:-1] != '1.0':
                        raise Error('only XML version 1.0 supported')
                    if encoding: encoding = encoding[1:-1]
                    if standalone: standalone = standalone[1:-1]
                    self.handle_xml(encoding, standalone)
                    i = res.end(0)
                    continue
                res = procopen.match(rawdata, i)
                if res:
                    k = self.parse_proc(i)
                    if k < 0: break
                    self.lineno = self.lineno + rawdata[i:k].count('\n')
                    i = k
                    continue
                res = doctype.match(rawdata, i)
                if res:
                    if self.literal:
                        data = rawdata[i]
                        self.handle_data(data)
                        self.lineno = self.lineno + data.count('\n')
                        i = i+1
                        continue
                    if self.__seen_doctype:
                        self.syntax_error('multiple DOCTYPE elements')
                    if self.__seen_starttag:
                        self.syntax_error('DOCTYPE not at beginning of document')
                    k = self.parse_doctype(res)
                    if k < 0: break
                    self.__seen_doctype = res.group('name')
                    if self.__map_case:
                        self.__seen_doctype = self.__seen_doctype.lower()
                    self.lineno = self.lineno + rawdata[i:k].count('\n')
                    i = k
                    continue
            elif rawdata[i] == '&':
                if self.literal:
                    data = rawdata[i]
                    self.handle_data(data)
                    i = i+1
                    continue
                res = charref.match(rawdata, i)
                if res is not None:
                    i = res.end(0)
                    if rawdata[i-1] != ';':
                        self.syntax_error("`;' missing in charref")
                        i = i-1
                    if not self.stack:
                        self.syntax_error('data not in content')
                    self.handle_charref(res.group('char')[:-1])
                    self.lineno = self.lineno + res.group(0).count('\n')
                    continue
                res = entityref.match(rawdata, i)
                if res is not None:
                    i = res.end(0)
                    if rawdata[i-1] != ';':
                        self.syntax_error("`;' missing in entityref")
                        i = i-1
                    name = res.group('name')
                    if self.__map_case:
                        name = name.lower()
                    if name in self.entitydefs:
                        # Splice the replacement text into the buffer and
                        # rescan from the insertion point.
                        self.rawdata = rawdata = rawdata[:res.start(0)] + self.entitydefs[name] + rawdata[i:]
                        n = len(rawdata)
                        i = res.start(0)
                    else:
                        self.unknown_entityref(name)
                    self.lineno = self.lineno + res.group(0).count('\n')
                    continue
            elif rawdata[i] == ']':
                if self.literal:
                    data = rawdata[i]
                    self.handle_data(data)
                    i = i+1
                    continue
                if n-i < 3:
                    break
                if cdataclose.match(rawdata, i):
                    self.syntax_error("bogus `]]>'")
                self.handle_data(rawdata[i])
                i = i+1
                continue
            else:
                raise Error('neither < nor & ??')
            # We get here only if incomplete matches but
            # nothing else
            break
        # end while
        if i > 0:
            self.__at_start = 0
        if end and i < n:
            data = rawdata[i]
            self.syntax_error("bogus `%s'" % data)
            if not self.__accept_utf8 and illegal.search(data):
                self.syntax_error('illegal character in content')
            self.handle_data(data)
            self.lineno = self.lineno + data.count('\n')
            self.rawdata = rawdata[i+1:]
            return self.goahead(end)
        self.rawdata = rawdata[i:]
        if end:
            if not self.__seen_starttag:
                self.syntax_error('no elements in file')
            if self.stack:
                self.syntax_error('missing end tags')
                while self.stack:
                    self.finish_endtag(self.stack[-1][0])
    # Internal -- parse comment, return length or -1 if not terminated
    def parse_comment(self, i):
        rawdata = self.rawdata
        if rawdata[i:i+4] != '<!--':
            raise Error('unexpected call to handle_comment')
        res = commentclose.search(rawdata, i+4)
        if res is None:
            return -1
        if doubledash.search(rawdata, i+4, res.start(0)):
            self.syntax_error("`--' inside comment")
        if rawdata[res.start(0)-1] == '-':
            self.syntax_error('comment cannot end in three dashes')
        if not self.__accept_utf8 and \
           illegal.search(rawdata, i+4, res.start(0)):
            self.syntax_error('illegal character in comment')
        self.handle_comment(rawdata[i+4: res.start(0)])
        return res.end(0)
    # Internal -- handle DOCTYPE tag, return length or -1 if not terminated
    def parse_doctype(self, res):
        rawdata = self.rawdata
        n = len(rawdata)
        name = res.group('name')
        if self.__map_case:
            name = name.lower()
        pubid, syslit = res.group('pubid', 'syslit')
        if pubid is not None:
            pubid = pubid[1:-1] # remove quotes
            pubid = ' '.join(pubid.split()) # normalize
        if syslit is not None: syslit = syslit[1:-1] # remove quotes
        j = k = res.end(0)
        if k >= n:
            return -1
        if rawdata[k] == '[':
            # Scan over an internal DTD subset, tracking quoting and
            # nesting so we find the matching closing bracket.
            level = 0
            k = k+1
            dq = sq = 0
            while k < n:
                c = rawdata[k]
                if not sq and c == '"':
                    dq = not dq
                elif not dq and c == "'":
                    sq = not sq
                elif sq or dq:
                    pass
                elif level <= 0 and c == ']':
                    res = endbracket.match(rawdata, k+1)
                    if res is None:
                        return -1
                    self.handle_doctype(name, pubid, syslit, rawdata[j+1:k])
                    return res.end(0)
                elif c == '<':
                    level = level + 1
                elif c == '>':
                    level = level - 1
                    if level < 0:
                        self.syntax_error("bogus `>' in DOCTYPE")
                k = k+1
        res = endbracketfind.match(rawdata, k)
        if res is None:
            return -1
        if endbracket.match(rawdata, k) is None:
            self.syntax_error('garbage in DOCTYPE')
        self.handle_doctype(name, pubid, syslit, None)
        return res.end(0)
    # Internal -- handle CDATA tag, return length or -1 if not terminated
    def parse_cdata(self, i):
        rawdata = self.rawdata
        if rawdata[i:i+9] != '<![CDATA[':
            raise Error('unexpected call to parse_cdata')
        res = cdataclose.search(rawdata, i+9)
        if res is None:
            return -1
        if not self.__accept_utf8 and \
           illegal.search(rawdata, i+9, res.start(0)):
            self.syntax_error('illegal character in CDATA')
        if not self.stack:
            self.syntax_error('CDATA not in content')
        self.handle_cdata(rawdata[i+9:res.start(0)])
        return res.end(0)
    __xml_namespace_attributes = {'ns':None, 'src':None, 'prefix':None}
    # Internal -- handle a processing instruction tag
    def parse_proc(self, i):
        rawdata = self.rawdata
        end = procclose.search(rawdata, i)
        if end is None:
            return -1
        j = end.start(0)
        if not self.__accept_utf8 and illegal.search(rawdata, i+2, j):
            self.syntax_error('illegal character in processing instruction')
        res = tagfind.match(rawdata, i+2)
        if res is None:
            raise Error('unexpected call to parse_proc')
        k = res.end(0)
        name = res.group(0)
        if self.__map_case:
            name = name.lower()
        if name == 'xml:namespace':
            self.syntax_error('old-fashioned namespace declaration')
            self.__use_namespaces = -1
            # namespace declaration
            # this must come after the <?xml?> declaration (if any)
            # and before the <!DOCTYPE> (if any).
            if self.__seen_doctype or self.__seen_starttag:
                self.syntax_error('xml:namespace declaration too late in document')
            attrdict, namespace, k = self.parse_attributes(name, k, j)
            if namespace:
                self.syntax_error('namespace declaration inside namespace declaration')
            for attrname in attrdict.keys():
                if not attrname in self.__xml_namespace_attributes:
                    self.syntax_error("unknown attribute `%s' in xml:namespace tag" % attrname)
            if not 'ns' in attrdict or not 'prefix' in attrdict:
                self.syntax_error('xml:namespace without required attributes')
            prefix = attrdict.get('prefix')
            if ncname.match(prefix) is None:
                self.syntax_error('xml:namespace illegal prefix value')
                return end.end(0)
            if prefix in self.__namespaces:
                self.syntax_error('xml:namespace prefix not unique')
            self.__namespaces[prefix] = attrdict['ns']
        else:
            if name.lower() == 'xml':
                self.syntax_error('illegal processing instruction target name')
            self.handle_proc(name, rawdata[k:j])
        return end.end(0)
    # Internal -- parse attributes between i and j
    def parse_attributes(self, tag, i, j):
        """Return (attrdict, namespace-decls, new index) for a start tag."""
        rawdata = self.rawdata
        attrdict = {}
        namespace = {}
        while i < j:
            res = attrfind.match(rawdata, i)
            if res is None:
                break
            attrname, attrvalue = res.group('name', 'value')
            if self.__map_case:
                attrname = attrname.lower()
            i = res.end(0)
            if attrvalue is None:
                self.syntax_error("no value specified for attribute `%s'" % attrname)
                attrvalue = attrname
            elif attrvalue[:1] == "'" == attrvalue[-1:] or \
                 attrvalue[:1] == '"' == attrvalue[-1:]:
                attrvalue = attrvalue[1:-1]
            elif not self.__accept_unquoted_attributes:
                self.syntax_error("attribute `%s' value not quoted" % attrname)
            res = xmlns.match(attrname)
            if res is not None:
                # namespace declaration
                ncname = res.group('ncname')
                namespace[ncname or ''] = attrvalue or None
                if not self.__use_namespaces:
                    self.__use_namespaces = len(self.stack)+1
                continue
            if '<' in attrvalue:
                self.syntax_error("`<' illegal in attribute value")
            if attrname in attrdict:
                self.syntax_error("attribute `%s' specified twice" % attrname)
            attrvalue = attrvalue.translate(attrtrans)
            attrdict[attrname] = self.translate_references(attrvalue)
        return attrdict, namespace, i
    # Internal -- handle starttag, return length or -1 if not terminated
    def parse_starttag(self, i):
        """Parse a start tag, resolve namespaces and invoke handlers."""
        rawdata = self.rawdata
        # i points to start of tag
        end = endbracketfind.match(rawdata, i+1)
        if end is None:
            return -1
        tag = starttagmatch.match(rawdata, i)
        if tag is None or tag.end(0) != end.end(0):
            self.syntax_error('garbage in starttag')
            return end.end(0)
        nstag = tagname = tag.group('tagname')
        if self.__map_case:
            nstag = tagname = nstag.lower()
        if not self.__seen_starttag and self.__seen_doctype and \
           tagname != self.__seen_doctype:
            self.syntax_error('starttag does not match DOCTYPE')
        if self.__seen_starttag and not self.stack:
            self.syntax_error('multiple elements on top level')
        k, j = tag.span('attrs')
        attrdict, nsdict, k = self.parse_attributes(tagname, k, j)
        self.stack.append((tagname, nsdict, nstag))
        if self.__use_namespaces:
            res = qname.match(tagname)
        else:
            res = None
        if res is not None:
            # Resolve the tag's namespace prefix against the innermost
            # declaration on the stack (or the global declarations).
            prefix, nstag = res.group('prefix', 'local')
            if prefix is None:
                prefix = ''
            ns = None
            for t, d, nst in self.stack:
                if prefix in d:
                    ns = d[prefix]
            if ns is None and prefix != '':
                ns = self.__namespaces.get(prefix)
            if ns is not None:
                nstag = ns + ' ' + nstag
            elif prefix != '':
                nstag = prefix + ':' + nstag # undo split
            self.stack[-1] = tagname, nsdict, nstag
        # translate namespace of attributes
        attrnamemap = {} # map from new name to old name (used for error reporting)
        for key in attrdict.keys():
            attrnamemap[key] = key
        if self.__use_namespaces:
            nattrdict = {}
            for key, val in attrdict.items():
                okey = key
                res = qname.match(key)
                if res is not None:
                    aprefix, key = res.group('prefix', 'local')
                    if self.__map_case:
                        key = key.lower()
                    if aprefix is not None:
                        ans = None
                        for t, d, nst in self.stack:
                            if aprefix in d:
                                ans = d[aprefix]
                        if ans is None:
                            ans = self.__namespaces.get(aprefix)
                        if ans is not None:
                            key = ans + ' ' + key
                        else:
                            key = aprefix + ':' + key
                nattrdict[key] = val
                attrnamemap[key] = okey
            attrdict = nattrdict
        attributes = self.attributes.get(nstag)
        if attributes is not None:
            # Validate attributes against the DTD-style declaration and
            # fill in declared defaults.
            for key in attrdict.keys():
                if not key in attributes:
                    self.syntax_error("unknown attribute `%s' in tag `%s'" % (attrnamemap[key], tagname))
            for key, val in attributes.items():
                if val is not None and not key in attrdict:
                    attrdict[key] = val
        method = self.elements.get(nstag, (None, None))[0]
        self.finish_starttag(nstag, attrdict, method)
        if tag.group('slash') == '/':
            self.finish_endtag(tagname)
        return tag.end(0)
    # Internal -- parse endtag
    def parse_endtag(self, i):
        """Parse an end tag; in literal mode only the matching tag ends it."""
        rawdata = self.rawdata
        end = endbracketfind.match(rawdata, i+1)
        if end is None:
            return -1
        res = tagfind.match(rawdata, i+2)
        if res is None:
            if self.literal:
                self.handle_data(rawdata[i])
                return i+1
            if not self.__accept_missing_endtag_name:
                self.syntax_error('no name specified in end tag')
            tag = self.stack[-1][0]
            k = i+2
        else:
            tag = res.group(0)
            if self.__map_case:
                tag = tag.lower()
            if self.literal:
                if not self.stack or tag != self.stack[-1][0]:
                    self.handle_data(rawdata[i])
                    return i+1
            k = res.end(0)
        if endbracket.match(rawdata, k) is None:
            self.syntax_error('garbage in end tag')
        self.finish_endtag(tag)
        return end.end(0)
    # Internal -- finish processing of start tag
    def finish_starttag(self, tagname, attrdict, method):
        if method is not None:
            self.handle_starttag(tagname, method, attrdict)
        else:
            self.unknown_starttag(tagname, attrdict)
    # Internal -- finish processing of end tag
    def finish_endtag(self, tag):
        """Pop the element stack down to *tag*, firing end handlers."""
        self.literal = 0
        if not tag:
            self.syntax_error('name-less end tag')
            found = len(self.stack) - 1
            if found < 0:
                self.unknown_endtag(tag)
                return
        else:
            found = -1
            for i in range(len(self.stack)):
                if tag == self.stack[i][0]:
                    found = i
            if found == -1:
                self.syntax_error('unopened end tag')
                return
        while len(self.stack) > found:
            if found < len(self.stack) - 1:
                self.syntax_error('missing close tag for %s' % self.stack[-1][2])
            nstag = self.stack[-1][2]
            method = self.elements.get(nstag, (None, None))[1]
            if method is not None:
                self.handle_endtag(nstag, method)
            else:
                self.unknown_endtag(nstag)
            if self.__use_namespaces == len(self.stack):
                self.__use_namespaces = 0
            del self.stack[-1]
    # Overridable -- handle xml processing instruction
    def handle_xml(self, encoding, standalone):
        pass
    # Overridable -- handle DOCTYPE
    def handle_doctype(self, tag, pubid, syslit, data):
        pass
    # Overridable -- handle start tag
    def handle_starttag(self, tag, method, attrs):
        method(attrs)
    # Overridable -- handle end tag
    def handle_endtag(self, tag, method):
        method()
    # Example -- handle character reference, no need to override
    def handle_charref(self, name):
        """Decode a numeric character reference into a data callback."""
        try:
            if name[0] == 'x':
                n = int(name[1:], 16)
            else:
                n = int(name)
        except ValueError:
            self.unknown_charref(name)
            return
        if not 0 <= n <= 255:
            # Only Latin-1 range is representable with Python 2 chr().
            self.unknown_charref(name)
            return
        self.handle_data(chr(n))
    # Definition of entities -- derived classes may override
    entitydefs = {'lt': '&#60;', # must use charref
                  'gt': '&#62;',
                  'amp': '&#38;', # must use charref
                  'quot': '&#34;',
                  'apos': '&#39;',
                  }
    # Example -- handle data, should be overridden
    def handle_data(self, data):
        pass
    # Example -- handle cdata, could be overridden
    def handle_cdata(self, data):
        pass
    # Example -- handle comment, could be overridden
    def handle_comment(self, data):
        pass
    # Example -- handle processing instructions, could be overridden
    def handle_proc(self, name, data):
        pass
    # Example -- handle relatively harmless syntax errors, could be overridden
    def syntax_error(self, message):
        raise Error('Syntax error at line %d: %s' % (self.lineno, message))
    # To be overridden -- handlers for unknown objects
    def unknown_starttag(self, tag, attrs): pass
    def unknown_endtag(self, tag): pass
    def unknown_charref(self, ref): pass
    def unknown_entityref(self, name):
        self.syntax_error("reference to unknown entity `&%s;'" % name)
class TestXMLParser(XMLParser):
    """Debugging subclass that prints every parser event it receives.

    Character data is buffered in ``self.testdata`` and flushed before
    each structural event so output stays in document order.
    """
    def __init__(self, **kw):
        self.testdata = ""
        XMLParser.__init__(self, **kw)
    def handle_xml(self, encoding, standalone):
        self.flush()
        print 'xml: encoding =',encoding,'standalone =',standalone
    def handle_doctype(self, tag, pubid, syslit, data):
        self.flush()
        print 'DOCTYPE:',tag, repr(data)
    def handle_data(self, data):
        self.testdata = self.testdata + data
        if len(repr(self.testdata)) >= 70:
            self.flush()
    def flush(self):
        # Print and reset the accumulated character data, if any.
        data = self.testdata
        if data:
            self.testdata = ""
            print 'data:', repr(data)
    def handle_cdata(self, data):
        self.flush()
        print 'cdata:', repr(data)
    def handle_proc(self, name, data):
        self.flush()
        print 'processing:',name,repr(data)
    def handle_comment(self, data):
        self.flush()
        r = repr(data)
        if len(r) > 68:
            r = r[:32] + '...' + r[-32:]
        print 'comment:', r
    def syntax_error(self, message):
        # Report instead of raising so parsing continues after errors.
        print 'error at line %d:' % self.lineno, message
    def unknown_starttag(self, tag, attrs):
        self.flush()
        if not attrs:
            print 'start tag: <' + tag + '>'
        else:
            print 'start tag: <' + tag,
            for name, value in attrs.items():
                print name + '=' + '"' + value + '"',
            print '>'
    def unknown_endtag(self, tag):
        self.flush()
        print 'end tag: </' + tag + '>'
    def unknown_entityref(self, ref):
        self.flush()
        print '*** unknown entity ref: &' + ref + ';'
    def unknown_charref(self, ref):
        self.flush()
        print '*** unknown char ref: &#' + ref + ';'
    def close(self):
        XMLParser.close(self)
        self.flush()
def test(args = None):
    """Command-line test driver: parse a file and print parser events.

    Options: -s use the silent base XMLParser, -t time the run (and feed
    the data in one chunk instead of char-by-char).  Reads ``test.xml``
    by default, the file named in *args*, or stdin for '-'.
    """
    import sys, getopt
    from time import time
    if not args:
        args = sys.argv[1:]
    opts, args = getopt.getopt(args, 'st')
    klass = TestXMLParser
    do_time = 0
    for o, a in opts:
        if o == '-s':
            klass = XMLParser
        elif o == '-t':
            do_time = 1
    if args:
        file = args[0]
    else:
        file = 'test.xml'
    if file == '-':
        f = sys.stdin
    else:
        try:
            f = open(file, 'r')
        except IOError, msg:
            print file, ":", msg
            sys.exit(1)
    data = f.read()
    if f is not sys.stdin:
        f.close()
    x = klass()
    t0 = time()
    try:
        if do_time:
            x.feed(data)
            x.close()
        else:
            # Feeding one character at a time exercises the parser's
            # incremental/partial-match handling.
            for c in data:
                x.feed(c)
            x.close()
    except Error, msg:
        t1 = time()
        print msg
        if do_time:
            print 'total time: %g' % (t1-t0)
        sys.exit(1)
    t1 = time()
    if do_time:
        print 'total time: %g' % (t1-t0)
# Run the command-line test driver when executed as a script.
if __name__ == '__main__':
    test()
| epl-1.0 | -8,053,630,126,297,608,000 | 36.489247 | 109 | 0.471323 | false |
proyectofedora-infra/kickstart-fedora-online | setup.py | 1 | 2320 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# setup.py
#
# Copyright 2014 Eduardo Echeverria <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
#
#
import sys
import os
import subprocess
from setuptools import setup
# Shell commands behind the "publish"/"publish_test" setup.py shortcuts.
PUBLISH_CMD = 'python setup.py register sdist upload'
TEST_PUBLISH_CMD = 'python setup.py register -r test sdist upload -r test'

if 'publish' in sys.argv:
    status = subprocess.call(PUBLISH_CMD, shell=True)
    sys.exit(status)

if 'publish_test' in sys.argv:
    status = subprocess.call(TEST_PUBLISH_CMD, shell=True)
    # BUG FIX: previously a bare sys.exit() that always exited 0 and
    # discarded the computed status; propagate the subprocess exit code
    # like the 'publish' branch does.
    sys.exit(status)
def read(fname):
    """Return the entire contents of the file *fname* as a string."""
    with open(fname) as fp:
        return fp.read()
setup(
    name='kickstart-fedora-online',
    version='0.1.0',
    # BUG FIX: the original was missing the comma after ``description``,
    # which made the whole file a SyntaxError (two adjacent keyword
    # arguments: description='...' long_description=...).
    description='Fedora iso creator online.',
    long_description=read('README.md'),
    author='Valentin Basel, Eduardo Echeverria',
    author_email='[email protected] | [email protected]',
    url='https://github.com/proyectofedora-infra/kickstart-fedora-online',
    install_requires=['Flask', 'pykickstart'],
    license=read('LICENSE'),
    zip_safe=False,
    packages=['kickstart'],
    keywords='kickstart, fedora, iso',
    classifiers=[
        'Development Status :: 2 - Pre-Alpha',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)',
        'Natural Language :: English',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
    ],
    entry_points={
        'console_scripts': [
            'kickstart = kickstart:main'
        ]
    }
)
| gpl-3.0 | -481,605,024,021,190,140 | 30.351351 | 85 | 0.678879 | false |
Atlas-Sailed-Co/oppia | extensions/rules/music_phrase.py | 20 | 4026 | # coding: utf-8
#
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Rules for MusicPhrase objects."""
__author__ = 'Michael Wagner'
from extensions.rules import base
NOTE_MAP = {'C4': 60, 'D4': 62, 'E4': 64, 'F4': 65, 'G4': 67, 'A4': 69,
'B4': 71, 'C5': 72, 'D5': 74, 'E5': 76, 'F5': 77, 'G5': 79,
'A5': 81}
def _get_midi_note_value(note):
if isinstance(note, dict):
if note['readableNoteName'] in NOTE_MAP:
return NOTE_MAP[note['readableNoteName']]
else:
raise Exception('Invalid music note %s.' % note)
def _convert_sequence_to_midi(sequence):
return [_get_midi_note_value(note) for note in sequence]
class Equals(base.MusicPhraseRule):
    description = 'is equal to {{x|MusicPhrase}}'

    def _evaluate(self, subject):
        """True when the phrase matches ``self.x`` note for note."""
        expected_midi = _convert_sequence_to_midi(self.x)
        actual_midi = _convert_sequence_to_midi(subject)
        return actual_midi == expected_midi
class IsLongerThan(base.MusicPhraseRule):
    description = 'has more than {{k|NonnegativeInt}} notes'

    def _evaluate(self, subject):
        """True when the phrase contains more than ``self.k`` notes."""
        midi_notes = _convert_sequence_to_midi(subject)
        return len(midi_notes) > self.k
class HasLengthInclusivelyBetween(base.MusicPhraseRule):
    description = ('has between {{a|NonnegativeInt}} and '
                   '{{b|NonnegativeInt}} notes, inclusive')

    def _evaluate(self, subject):
        """True when the note count lies in the closed range [a, b]."""
        num_notes = len(_convert_sequence_to_midi(subject))
        return num_notes >= self.a and num_notes <= self.b
class IsEqualToExceptFor(base.MusicPhraseRule):
    description = ('is equal to {{x|MusicPhrase}} '
                   'except for {{k|NonnegativeInt}} notes')

    def _evaluate(self, subject):
        """True when the phrase has the target's length and differs from
        it in at most ``self.k`` positions.
        """
        midi_target_sequence = _convert_sequence_to_midi(self.x)
        midi_user_sequence = _convert_sequence_to_midi(subject)
        if len(midi_user_sequence) != len(midi_target_sequence):
            return False
        # Count matching positions.  (The original also computed an unused
        # ``num_correct_notes_needed`` local, removed here.)
        num_correct_notes = sum(
            1 for target_note, user_note in zip(
                midi_target_sequence, midi_user_sequence)
            if target_note == user_note)
        return len(midi_target_sequence) - num_correct_notes <= self.k
class IsTranspositionOf(base.MusicPhraseRule):
    description = ('is a transposition of {{x|MusicPhrase}} '
                   'by {{y|Int}} semitones')

    def _evaluate(self, subject):
        """True when every note equals the target shifted up by ``self.y``."""
        if len(subject) != len(self.x):
            return False
        target_midi = _convert_sequence_to_midi(self.x)
        user_midi = _convert_sequence_to_midi(subject)
        return all(
            user_note - self.y == target_note
            for user_note, target_note in zip(user_midi, target_midi))
class IsTranspositionOfExceptFor(base.MusicPhraseRule):
    description = ('is a transposition of {{x|MusicPhrase}} '
                   'by {{y|Int}} semitones '
                   'except for {{k|NonnegativeInt}} notes')

    def _evaluate(self, subject):
        """True when, allowing up to ``self.k`` mismatches, the phrase is
        the target transposed by ``self.y`` semitones.
        """
        midi_target = _convert_sequence_to_midi(self.x)
        midi_user = _convert_sequence_to_midi(subject)
        if len(midi_user) != len(midi_target):
            return False
        num_matches = sum(
            1 for target_note, user_note in zip(midi_target, midi_user)
            if user_note - self.y == target_note)
        return len(midi_target) - num_matches <= self.k
| apache-2.0 | 2,392,818,603,576,730,000 | 34.946429 | 83 | 0.63388 | false |
Lillevik/python_irc_bot | functions.py | 1 | 3919 | import requests, json, datetime, threading
from datetime import datetime
import sqlite3
def get_name(msg):
    """Extract the sender's nick from a raw IRC PRIVMSG line.

    Returns the nick for PRIVMSG lines, None for other message types,
    and "Empty." when the line is missing or malformed.
    """
    try:
        first_line = msg[0]
    except IndexError:
        return "Empty."
    if "PRIVMSG" not in first_line:
        return None
    try:
        return first_line.split('!')[0].split(':')[1]
    except IndexError:
        return "Empty."
def get_message(msg):
    """Extract the message text from a raw IRC PRIVMSG line.

    Returns the trailing text for PRIVMSG lines, None for other message
    types, and "Empty." when the line is missing or malformed.
    """
    try:
        if "PRIVMSG" not in msg[0]:
            return None
        return msg[0].split(" :")[1]
    except IndexError:
        return "Empty."
def get_sender(msg, nick):
    """Return the reply target for *msg*: the channel if the message was
    sent to a channel, otherwise our own *nick* (private message).
    """
    try:
        target = msg[0].split("PRIVMSG")[1].split(" :")[0].split()[0]
    except IndexError:
        return nick
    return target if "#" in target else nick
def get_random_joke():
    """Fetch a random nerdy Chuck Norris joke from the icndb web API."""
    response = requests.get("http://api.icndb.com/jokes/random?limitTo=[nerdy]")
    return json.loads(response.text)['value']['joke']
def is_fine(msg):
    """Return True if *msg* contains none of the blacklisted characters.

    The blacklist covers lowercase letters, digits and most punctuation,
    so effectively only uppercase text (plus a few characters such as
    '.', space and ':') passes.
    """
    # Dirty but passable
    illegal = "qazwsxedcrfvtgbyhnujmik,lo-øpåæ'¨\+0987654321!\"^\#¤%&/()=?`<>*`'"
    # The original also had ``if not fine and '.' not in msg: fine = False``,
    # which only ever re-assigned False to False; that dead branch is
    # removed here.
    return not any(c in illegal for c in msg)
def is_valid_leet(msg):
    """Return True if *msg* counts as a 'leetable' message: either pure
    whitespace or text passing the is_fine() character filter.
    """
    return msg.isspace() or is_fine(msg)
def react_leet(msg, a, n):
    """Record nick *n* in list *a* when *msg* arrives exactly at 13:37
    wall-clock time and is a valid leet message.
    """
    if not msg:
        return
    now = datetime.now()
    if now.hour == 13 and now.minute == 37:
        if is_valid_leet(msg):
            print("{} is on leet. [{}:{}:{}]".format(n, now.hour, now.minute, now.second))
            a.append(n)
def print_split_lines(text):
    """Print every line in *text*, skipping IRC PING lines."""
    for line in text:
        if "PING" in line:
            continue
        print(line)
def update_streak_graph(serverid):
    """Snapshot every user's current leet streak for *serverid* into
    Graph_data (one row per user, stamped with today's date) and reset
    their cash to streak * 10.

    NOTE(review): presumably intended to run once per day; running it
    twice on the same date inserts duplicate Graph_data rows -- confirm
    against the caller's schedule.
    """
    conn = sqlite3.connect("leet.db")
    # Current standings for this server, joined with nicks.
    # Row layout: (nick, user_id, score, streak, server_id, cash)
    score_data = conn.cursor().execute("""
        SELECT User.nick, Score.user_id, Score.score, Score.streak, Score.server_id, Score.cash
        FROM Score
        JOIN User ON Score.user_id = User.id
        WHERE server_id = ?;""", (serverid,)).fetchall()
    now = datetime.now()
    for score in score_data:
        conn.execute("INSERT INTO Graph_data (day, streak, user_id, server_id) VALUES (?,?,?,?);",
                     (now.date(), score[3], score[1], serverid))
        # Cash is derived entirely from the streak: 10 per streak day.
        conn.execute("UPDATE Score SET cash = ? WHERE Score.server_id = ? AND Score.user_id = ?;", ((score[3] * 10), score[4], score[1]))
        conn.commit()
    conn.close()
def query_place_names(place_name):
    """Look up *place_name* (substring match) in the local places DB.

    Norwegian places ("noreg" table, ordered by priority) are searched
    first; if nothing matches, the world table ("verda") is used.

    :param place_name: substring to match against the place name.
    :return: (rows, place_type) -- up to 3 matching rows and either
        'norge' or 'verden' depending on which table produced them.
    """
    pattern = '%' + place_name + '%'
    conn = sqlite3.connect('places.db')
    try:
        rows = conn.execute(
            "SELECT Stadnamn, engelskXml, Kommune FROM noreg where Stadnamn LIKE ? ORDER BY Prioritet ASC LIMIT 3;",
            (pattern,)).fetchall()
        if rows:
            return rows, 'norge'
        rows = conn.execute(
            "SELECT StadnamnBokmal, engelskXml, LandsnamnBokmål FROM verda where StadnamnBokmal LIKE ? LIMIT 3;",
            (pattern,)).fetchall()
        return rows, 'verden'
    finally:
        # Bug fix: the connection used to leak; close it on every path.
        conn.close()
def get_help(message):
    """Answer the !help command.

    "!help" alone lists every known command; "!help <command>" returns
    that command's description.  Unknown commands now fall back to the
    full listing instead of crashing (previously raised KeyError).

    :param message: the raw chat message, starting with "!help".
    """
    commands = {
        "!help" : "Lists available commands.",
        "!roll" : "Responds with a number between 1-100.",
        "!forecast" : "Responds with the forecast for the next hour in Bergen, Norway",
        "!u [longurl]" : "Responds with a shortened url passed through the goo.gl api.",
        "!urls" : "Returns the last 5 urls for the sender channel or nick.",
        "!joke" : "Responds with a random joke from the chucknorris joke api, category nerdy.",
        "hello" : "Responds with hello"
    }
    overview = "Available commands are: " + ", ".join(commands.keys())
    words = message.split(" ")
    if len(words) > 1:
        # .get() keeps an unknown command from raising KeyError.
        return commands.get(words[1].rstrip(), overview)
    return overview
def run_bots(bots):
    """Start each bot's main loop and clock watcher on background threads.

    A failure to start one bot's threads is logged (with the bot's host
    for context) and does not prevent the remaining bots from starting.

    :param bots: iterable of bot objects exposing run_bot() and
        check_time().
    """
    for bot in bots:
        try:
            threading.Thread(target=bot.run_bot).start()
            threading.Thread(target=bot.check_time).start()
        except Exception:
            # Bug fix: the old bare `except:` also swallowed SystemExit
            # and KeyboardInterrupt; catch only real errors.
            print(bot.host)
            print("Error: unable to start thread")
| gpl-3.0 | 1,756,707,241,105,340,000 | 30.055556 | 158 | 0.582673 | false |
reillysiemens/pianodb | pianodb/routes.py | 1 | 1802 | import falcon
import msgpack
from pianodb.pianodb import update_db
class ValidatorComponent:
    """Falcon middleware enforcing token auth and a msgpack content type.

    NOTE(review): this hook is process_response, which falcon invokes
    *after* the resource handler has run -- confirm whether validation
    was meant to live in process_request instead.
    """
    def process_response(self, req, resp, resource):
        # Verify authentication
        # The expected token is carried on the resource object itself.
        if req.get_header('X-Auth-Token') != resource.token:
            raise falcon.HTTPUnauthorized(
                title='Authentication required',
                description='Missing or invalid authentication token')
        # Only msgpack request bodies are accepted by this API.
        if not req.content_type == 'application/msgpack':
            raise falcon.HTTPUnsupportedMediaType('Payload must be msgpack')
class SongFinish:
    """Falcon resource recording a finished Pandora song.

    The POST body is a msgpack map that must contain every field listed
    in ``song_finish_fields``; valid payloads are persisted through
    update_db() and acknowledged with 201 Created.
    """

    def __init__(self, token):
        # Auth token, checked by the ValidatorComponent middleware.
        self.token = token
        # Every key a songfinish payload must carry.
        self.song_finish_fields = (
            'artist',
            'title',
            'album',
            'coverArt',
            'stationName',
            'songDuration',
            'songPlayed',
            'rating',
            'detailUrl'
        )

    def on_post(self, req, resp):
        # TODO: What happens if we can't read from the stream?
        try:
            songfinish = msgpack.unpackb(req.stream.read(), encoding='utf-8')
        except msgpack.exceptions.UnpackValueError:
            msg = 'Could not unpack msgpack data'
            raise falcon.HTTPBadRequest('Bad request', msg)
        # Robustness fix: any non-dict payload (list, int, ...) is bad
        # input.  The old `.keys()` probe only caught objects without a
        # .keys attribute and let e.g. integer payloads escape as an
        # uncaught TypeError (HTTP 500).
        if not isinstance(songfinish, dict):
            msg = 'Invalid datatype'
            raise falcon.HTTPBadRequest('Bad request', msg)
        if not all(k in songfinish for k in self.song_finish_fields):
            msg = 'Missing required songfinish field'
            raise falcon.HTTPBadRequest('Bad request', msg)
        update_db(songfinish)
        resp.data = msgpack.packb({'created': True})
        resp.content_type = 'application/msgpack'
        resp.status = falcon.HTTP_201
serverdensity/sd-agent-core-plugins | etcd/check.py | 1 | 9487 | # (C) Datadog, Inc. 2015-2017
# (C) Cory G Watson <[email protected]> 2014-2015
# All rights reserved
# Licensed under Simplified BSD License (see LICENSE)
# 3rd party
import requests
# project
from checks import AgentCheck
from config import _is_affirmative
from util import headers
class Etcd(AgentCheck):
    """Agent check collecting health, store, self and leader metrics
    from one etcd node via its HTTP API (/health, /v2/stats/*).

    Reports two service checks: `etcd.can_connect` and `etcd.healthy`.
    """

    DEFAULT_TIMEOUT = 5

    SERVICE_CHECK_NAME = 'etcd.can_connect'
    HEALTH_SERVICE_CHECK_NAME = 'etcd.healthy'
    HEALTH_KEY = 'health'

    STORE_RATES = {
        'getsSuccess': 'etcd.store.gets.success',
        'getsFail': 'etcd.store.gets.fail',
        'setsSuccess': 'etcd.store.sets.success',
        'setsFail': 'etcd.store.sets.fail',
        'deleteSuccess': 'etcd.store.delete.success',
        'deleteFail': 'etcd.store.delete.fail',
        'updateSuccess': 'etcd.store.update.success',
        'updateFail': 'etcd.store.update.fail',
        'createSuccess': 'etcd.store.create.success',
        'createFail': 'etcd.store.create.fail',
        'compareAndSwapSuccess': 'etcd.store.compareandswap.success',
        'compareAndSwapFail': 'etcd.store.compareandswap.fail',
        'compareAndDeleteSuccess': 'etcd.store.compareanddelete.success',
        'compareAndDeleteFail': 'etcd.store.compareanddelete.fail',
        'expireCount': 'etcd.store.expire.count'
    }

    STORE_GAUGES = {
        'watchers': 'etcd.store.watchers'
    }

    SELF_GAUGES = {
        'sendPkgRate': 'etcd.self.send.pkgrate',
        'sendBandwidthRate': 'etcd.self.send.bandwidthrate',
        'recvPkgRate': 'etcd.self.recv.pkgrate',
        'recvBandwidthRate': 'etcd.self.recv.bandwidthrate'
    }

    SELF_RATES = {
        'recvAppendRequestCnt': 'etcd.self.recv.appendrequest.count',
        'sendAppendRequestCnt': 'etcd.self.send.appendrequest.count'
    }

    LEADER_COUNTS = {
        # Rates
        'fail': 'etcd.leader.counts.fail',
        'success': 'etcd.leader.counts.success',
    }

    LEADER_LATENCY = {
        # Gauges
        'current': 'etcd.leader.latency.current',
        'average': 'etcd.leader.latency.avg',
        'minimum': 'etcd.leader.latency.min',
        'maximum': 'etcd.leader.latency.max',
        'standardDeviation': 'etcd.leader.latency.stddev',
    }

    def check(self, instance):
        """Run one collection cycle against the configured etcd URL."""
        if 'url' not in instance:
            raise Exception('etcd instance missing "url" value.')

        # Load values from the instance config
        url = instance['url']
        instance_tags = instance.get('tags', [])

        # Load the ssl configuration and drop unset options.
        # Bug fix: the old code deleted keys from ssl_params while
        # iterating over it, which raises RuntimeError on Python 3.
        ssl_params = {
            'ssl_keyfile': instance.get('ssl_keyfile'),
            'ssl_certfile': instance.get('ssl_certfile'),
            'ssl_cert_validation': _is_affirmative(instance.get('ssl_cert_validation', True)),
            'ssl_ca_certs': instance.get('ssl_ca_certs'),
        }
        ssl_params = {key: value for key, value in ssl_params.items()
                      if value is not None}

        # Get a copy of tags for the CRIT statuses
        critical_tags = list(instance_tags)

        # Append the instance's URL in case there are more than one, that
        # way they can tell the difference!
        instance_tags.append("url:{0}".format(url))
        timeout = float(instance.get('timeout', self.DEFAULT_TIMEOUT))
        is_leader = False

        # Gather self health status
        sc_state = AgentCheck.UNKNOWN
        health_status = self._get_health_status(url, ssl_params, timeout, critical_tags)
        if health_status is not None:
            sc_state = AgentCheck.OK if self._is_healthy(health_status) else AgentCheck.CRITICAL
        self.service_check(self.HEALTH_SERVICE_CHECK_NAME, sc_state, tags=instance_tags)

        # Gather self metrics
        self_response = self._get_self_metrics(url, ssl_params, timeout, critical_tags)
        if self_response is not None:
            if self_response['state'] == 'StateLeader':
                is_leader = True
                instance_tags.append('etcd_state:leader')
            else:
                instance_tags.append('etcd_state:follower')

            for key in self.SELF_RATES:
                if key in self_response:
                    self.rate(self.SELF_RATES[key], self_response[key], tags=instance_tags)
                else:
                    self.log.warn("Missing key {0} in stats.".format(key))

            for key in self.SELF_GAUGES:
                if key in self_response:
                    self.gauge(self.SELF_GAUGES[key], self_response[key], tags=instance_tags)
                else:
                    self.log.warn("Missing key {0} in stats.".format(key))

        # Gather store metrics
        store_response = self._get_store_metrics(url, ssl_params, timeout, critical_tags)
        if store_response is not None:
            for key in self.STORE_RATES:
                if key in store_response:
                    self.rate(self.STORE_RATES[key], store_response[key], tags=instance_tags)
                else:
                    self.log.warn("Missing key {0} in stats.".format(key))

            for key in self.STORE_GAUGES:
                if key in store_response:
                    self.gauge(self.STORE_GAUGES[key], store_response[key], tags=instance_tags)
                else:
                    self.log.warn("Missing key {0} in stats.".format(key))

        # Gather leader metrics (only the leader reports followers)
        if is_leader:
            leader_response = self._get_leader_metrics(url, ssl_params, timeout, critical_tags)
            if leader_response is not None and len(leader_response.get("followers", {})) > 0:
                # Get the followers
                followers = leader_response.get("followers")
                for fol in followers:
                    # counts
                    for key in self.LEADER_COUNTS:
                        self.rate(self.LEADER_COUNTS[key],
                                  followers[fol].get("counts").get(key),
                                  tags=instance_tags + ['follower:{0}'.format(fol)])
                    # latency
                    for key in self.LEADER_LATENCY:
                        self.gauge(self.LEADER_LATENCY[key],
                                   followers[fol].get("latency").get(key),
                                   tags=instance_tags + ['follower:{0}'.format(fol)])

        # Service check: connectivity is OK only when both payloads arrived
        if self_response is not None and store_response is not None:
            self.service_check(self.SERVICE_CHECK_NAME, AgentCheck.OK,
                               tags=instance_tags)

    def _get_health_status(self, url, ssl_params, timeout, tags):
        """
        Don't send the "can connect" service check if we have troubles getting
        the health status
        """
        try:
            r = self._perform_request(url, "/health", ssl_params, timeout)
            # we don't use get() here so we can report a KeyError
            return r.json()[self.HEALTH_KEY]
        except Exception as e:
            self.log.debug("Can't determine health status: {}".format(e))
            return None

    def _get_self_metrics(self, url, ssl_params, timeout, tags):
        return self._get_json(url, "/v2/stats/self", ssl_params, timeout, tags)

    def _get_store_metrics(self, url, ssl_params, timeout, tags):
        return self._get_json(url, "/v2/stats/store", ssl_params, timeout, tags)

    def _get_leader_metrics(self, url, ssl_params, timeout, tags):
        return self._get_json(url, "/v2/stats/leader", ssl_params, timeout, tags)

    def _perform_request(self, url, path, ssl_params, timeout):
        """Issue a GET against the etcd API with the configured TLS options."""
        certificate = None
        if 'ssl_certfile' in ssl_params and 'ssl_keyfile' in ssl_params:
            certificate = (ssl_params['ssl_certfile'], ssl_params['ssl_keyfile'])

        verify = ssl_params.get('ssl_ca_certs', True) if ssl_params['ssl_cert_validation'] else False

        return requests.get(url + path, verify=verify, cert=certificate, timeout=timeout, headers=headers(self.agentConfig))

    def _get_json(self, url, path, ssl_params, timeout, tags):
        """GET url+path and decode JSON, emitting a CRITICAL can_connect
        service check (and re-raising) on timeout, error or non-200."""
        try:
            r = self._perform_request(url, path, ssl_params, timeout)
        except requests.exceptions.Timeout:
            self.service_check(self.SERVICE_CHECK_NAME, AgentCheck.CRITICAL,
                               message="Timeout when hitting %s" % url,
                               tags=tags + ["url:{0}".format(url)])
            raise
        except Exception as e:
            # Py3 compat fix: Exception has no .message attribute there;
            # str(e) carries the same information on both 2 and 3.
            self.service_check(self.SERVICE_CHECK_NAME, AgentCheck.CRITICAL,
                               message="Error hitting %s. Error: %s" % (url, str(e)),
                               tags=tags + ["url:{0}".format(url)])
            raise

        if r.status_code != 200:
            self.service_check(self.SERVICE_CHECK_NAME, AgentCheck.CRITICAL,
                               message="Got %s when hitting %s" % (r.status_code, url),
                               tags=tags + ["url:{0}".format(url)])
            raise Exception("Http status code {0} on url {1}".format(r.status_code, url))

        return r.json()

    def _is_healthy(self, status):
        """
        Version of etcd prior to 3.3 return this payload when you hit /health:
            {"health": "true"}
        which is wrong since the value is a `bool` on etcd.

        Version 3.3 fixed this issue in https://github.com/coreos/etcd/pull/8312
        but we need to support both.
        """
        if isinstance(status, bool):
            return status

        return status == "true"
| bsd-3-clause | 979,046,649,356,276,500 | 40.069264 | 124 | 0.578054 | false |
SnakeJenny/TensorFlow | tensorflow/python/ops/math_ops_test.py | 24 | 16171 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorflow.ops.math_ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import ops
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gradients
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import googletest
exp = np.exp
log = np.log
class ReduceTest(test_util.TensorFlowTestCase):
  """Covers reduce_sum axis handling: full reduce, explicit (possibly
  negative or duplicated) axes, and rejection of rank-2 axis tensors."""

  def testReduceAllDims(self):
    x = np.array([[1, 2, 3], [4, 5, 6]], dtype=np.int32)
    with self.test_session(use_gpu=True):
      y_tf = math_ops.reduce_sum(x).eval()
      self.assertEqual(y_tf, 21)

  def testReduceExplicitAxes(self):
    x = np.array([[1, 2, 3], [4, 5, 6]], dtype=np.int32)
    with self.test_session(use_gpu=True):
      # Negative and duplicated axes must behave like their canonical
      # positive forms.
      for axis in (0, -2, (0, 0), (0, -2)):
        self.assertAllEqual(math_ops.reduce_sum(x, axis=axis).eval(), [5, 7, 9])
      for axis in (1, -1, (1, 1), (1, -1)):
        self.assertAllEqual(math_ops.reduce_sum(x, axis=axis).eval(), [6, 15])
      for axis in (None, (0, 1), (-1, -2), (-2, -1, 0, 1)):
        self.assertEqual(math_ops.reduce_sum(x, axis=axis).eval(), 21)

  def testReduceInvalidAxis(self):
    x = np.array([[1, 2, 3], [4, 5, 6]], dtype=np.int32)
    # Axis tensors of rank > 1 are rejected at graph-construction time.
    axis = np.array([[0], [1]])
    with self.assertRaisesRegexp(ValueError, "must be at most rank 1"):
      math_ops.reduce_sum(x, axis)
class LogSumExpTest(test_util.TensorFlowTestCase):
  """reduce_logsumexp: agreement with a NumPy reference plus numerical
  stability for inputs whose naive exp/log would over- or underflow."""

  def testReduceLogSumExp(self):
    for dtype in [np.float16, np.float32, np.double]:
      x_np = np.random.rand(5, 5).astype(dtype)
      with self.test_session(use_gpu=True):
        y_tf_np = math_ops.reduce_logsumexp(x_np).eval()
        y_np = log(np.sum(exp(x_np)))
        self.assertAllClose(y_tf_np, y_np)

  def testReductionIndices(self):
    for dtype in [np.float16, np.float32, np.double]:
      x_np = np.random.rand(5, 5).astype(dtype)
      with self.test_session(use_gpu=True):
        y_tf = math_ops.reduce_logsumexp(x_np, reduction_indices=[0])
        y_np = log(np.sum(exp(x_np), axis=0))
        self.assertShapeEqual(y_np, y_tf)
        y_tf_np = y_tf.eval()
        self.assertAllClose(y_tf_np, y_np)

  def testReductionIndices2(self):
    # Same as above, but with a scalar (non-list) reduction index.
    for dtype in [np.float16, np.float32, np.double]:
      x_np = np.random.rand(5, 5).astype(dtype)
      with self.test_session(use_gpu=True):
        y_tf = math_ops.reduce_logsumexp(x_np, reduction_indices=0)
        y_np = log(np.sum(exp(x_np), axis=0))
        self.assertShapeEqual(y_np, y_tf)
        y_tf_np = y_tf.eval()
        self.assertAllClose(y_tf_np, y_np)

  def testKeepDims(self):
    for dtype in [np.float16, np.float32, np.double]:
      x_np = np.random.rand(5, 5).astype(dtype)
      with self.test_session(use_gpu=True):
        y_tf_np = math_ops.reduce_logsumexp(x_np, keep_dims=True).eval()
        self.assertEqual(y_tf_np.ndim, x_np.ndim)
        y_np = log(np.sum(exp(x_np), keepdims=True))
        self.assertAllClose(y_tf_np, y_np)

  def testOverflow(self):
    x = [1000, 1001, 1002, 1003]
    for dtype in [np.float16, np.float32, np.double]:
      x_np = np.array(x, dtype=dtype)
      max_np = np.max(x_np)
      # First prove the naive formula overflows in NumPy...
      with self.assertRaisesRegexp(RuntimeWarning,
                                   "overflow encountered in exp"):
        out = log(np.sum(exp(x_np)))
        if out == np.inf:
          raise RuntimeWarning("overflow encountered in exp")

      # ...then check TF matches the stable, max-shifted computation.
      with self.test_session(use_gpu=True):
        x_tf = constant_op.constant(x_np, shape=x_np.shape)

        y_tf_np = math_ops.reduce_logsumexp(x_tf).eval()
        y_np = log(np.sum(exp(x_np - max_np))) + max_np
        self.assertAllClose(y_tf_np, y_np)

  def testUnderflow(self):
    x = [-1000, -1001, -1002, -1003]
    for dtype in [np.float16, np.float32, np.double]:
      x_np = np.array(x, dtype=dtype)
      max_np = np.max(x_np)
      # Naive formula underflows to exp()=0, making log() diverge...
      with self.assertRaisesRegexp(RuntimeWarning,
                                   "divide by zero encountered in log"):
        out = log(np.sum(exp(x_np)))
        if out == -np.inf:
          raise RuntimeWarning("divide by zero encountered in log")

      # ...while the max-shifted reference stays finite.
      with self.test_session(use_gpu=True):
        x_tf = constant_op.constant(x_np, shape=x_np.shape)

        y_tf_np = math_ops.reduce_logsumexp(x_tf).eval()
        y_np = log(np.sum(exp(x_np - max_np))) + max_np
        self.assertAllClose(y_tf_np, y_np)
class RoundTest(test_util.TensorFlowTestCase):
  """math_ops.round against np.round for float and int dtypes."""

  def testRounding(self):
    x = [0.49, 0.7, -0.3, -0.8]
    # TODO(nolivia): Remove this when RoundOp is forwards compatible
    # x = np.arange(-5.0, 5.0, .25)
    for dtype in [np.float32, np.double, np.int32]:
      x_np = np.array(x, dtype=dtype)
      with self.test_session(use_gpu=True):
        x_tf = constant_op.constant(x_np, shape=x_np.shape)
        y_tf = math_ops.round(x_tf)
        y_tf_np = y_tf.eval()
        y_np = np.round(x_np)
        self.assertAllClose(y_tf_np, y_np, atol=1e-2)
class ModTest(test_util.TensorFlowTestCase):
  """math_ops.mod: floats compare against np.fmod (C truncation
  semantics), fixed-point against np.mod (floor semantics)."""

  def testFloat(self):
    x = [0.5, 0.7, 0.3]
    for dtype in [np.float32, np.double]:
      # Test scalar and vector versions.
      for denom in [x[0], [x[0]] * 3]:
        x_np = np.array(x, dtype=dtype)
        with self.test_session(use_gpu=True):
          x_tf = constant_op.constant(x_np, shape=x_np.shape)
          y_tf = math_ops.mod(x_tf, denom)
          y_tf_np = y_tf.eval()
          y_np = np.fmod(x_np, denom)
          self.assertAllClose(y_tf_np, y_np, atol=1e-2)

  def testFixed(self):
    x = [5, 10, 23]
    for dtype in [np.int32, np.int64]:
      # Test scalar and vector versions.
      for denom in [x[0], x]:
        x_np = np.array(x, dtype=dtype)
        with self.test_session(use_gpu=True):
          x_tf = constant_op.constant(x_np, shape=x_np.shape)
          y_tf = math_ops.mod(x_tf, denom)
          y_tf_np = y_tf.eval()
          y_np = np.mod(x_np, denom)
          self.assertAllClose(y_tf_np, y_np)
class SquaredDifferenceTest(test_util.TensorFlowTestCase):
  """Checks squared_difference against a NumPy reference (with
  broadcasting) for an integer and a half-precision dtype."""

  def testSquaredDifference(self):
    for dtype in [np.int32, np.float16]:
      lhs = np.array([[1, 2, 3], [4, 5, 6]], dtype=dtype)
      rhs = np.array([-3, -2, -1], dtype=dtype)
      expected = np.square(lhs - rhs)
      with self.test_session(use_gpu=True):
        actual = math_ops.squared_difference(lhs, rhs).eval()
        self.assertAllClose(expected, actual)
class ApproximateEqualTest(test_util.TensorFlowTestCase):
  """approximate_equal: scalar inputs at the default tolerance and
  elementwise comparison on 4-D arrays with an explicit tolerance."""

  def testApproximateEqual(self):
    for dtype in [np.float32, np.double]:
      x = dtype(1)
      y = dtype(1.00009)
      z = False
      with self.test_session(use_gpu=True):
        # Default tolerance is 0.00001
        z_tf = math_ops.approximate_equal(x, y).eval()
        self.assertAllEqual(z, z_tf)

    for dtype in [np.float32, np.double]:
      x = dtype(1)
      y = dtype(1.000009)
      z = True
      with self.test_session(use_gpu=True):
        # Default tolerance is 0.00001
        z_tf = math_ops.approximate_equal(x, y).eval()
        self.assertAllEqual(z, z_tf)

    for dtype in [np.float32, np.double]:
      x = np.array([[[[-1, 2.00009999], [-3, 4.01]]]], dtype=dtype)
      y = np.array([[[[-1.001, 2], [-3.00009, 4]]]], dtype=dtype)
      # Bug fix: `np.bool` was a deprecated alias for the builtin `bool`
      # and is removed in NumPy >= 1.24; use `bool` directly.
      z = np.array([[[[False, True], [True, False]]]], dtype=bool)
      with self.test_session(use_gpu=True):
        z_tf = math_ops.approximate_equal(x, y, tolerance=0.0001).eval()
        self.assertAllEqual(z, z_tf)
class ScalarMulTest(test_util.TensorFlowTestCase):
  """scalar_mul must accept variable refs, constants, dense tensors and
  IndexedSlices (scaling values while leaving indices untouched)."""

  def testAcceptsRefs(self):
    var = variables.Variable(10)
    result = math_ops.scalar_mul(3, var)
    init = variables.global_variables_initializer()
    with self.test_session(use_gpu=True) as sess:
      sess.run(init)
      self.assertEqual(30, result.eval())

  def testAcceptsConstant(self):
    const = constant_op.constant(10)
    result = math_ops.scalar_mul(3, const)
    with self.test_session(use_gpu=True):
      self.assertEqual(30, result.eval())

  def testAcceptsTensor(self):
    tensor = array_ops.ones([10, 10])
    result = math_ops.scalar_mul(3, tensor)
    expected = array_ops.ones([10, 10]) * 3

    with self.test_session(use_gpu=True):
      self.assertAllEqual(expected.eval(), result.eval())

  def testAcceptsIndexedSlices(self):
    values = constant_op.constant([2, 3, 5, 7, 0, -1], shape=[3, 2])
    indices = constant_op.constant([0, 2, 5])
    # Only .values is scaled; .indices must pass through unchanged.
    x = math_ops.scalar_mul(-3, ops.IndexedSlices(values, indices))
    with self.test_session(use_gpu=True):
      self.assertAllEqual(x.values.eval(), [[-6, -9], [-15, -21], [0, 3]])
      self.assertAllEqual(x.indices.eval(), [0, 2, 5])
class AccumulateNTest(test_util.TensorFlowTestCase):
  """accumulate_n against Python sum(), including the repeated-input
  case (the same tensor passed several times)."""

  def testFloat(self):
    np.random.seed(12345)
    x = [np.random.random((1, 2, 3, 4, 5)) - 0.5 for _ in range(5)]
    tf_x = ops.convert_n_to_tensor(x)
    with self.test_session(use_gpu=True):
      self.assertAllClose(sum(x), math_ops.accumulate_n(tf_x).eval())
      self.assertAllClose(x[0] * 5, math_ops.accumulate_n([tf_x[0]] * 5).eval())

  def testInt(self):
    np.random.seed(54321)
    x = [np.random.randint(-128, 128, (5, 4, 3, 2, 1)) for _ in range(6)]
    tf_x = ops.convert_n_to_tensor(x)
    with self.test_session(use_gpu=True):
      self.assertAllEqual(sum(x), math_ops.accumulate_n(tf_x).eval())
      self.assertAllEqual(x[0] * 6, math_ops.accumulate_n([tf_x[0]] * 6).eval())
class AddNTest(test_util.TensorFlowTestCase):
  """add_n against Python sum() for 1..9 inputs, plus a regression test
  for a buffer-forwarding bug triggered by many partial sums."""

  def testPartials(self):
    """Test that previously revealed a bug in buffer forwarding for AddN."""
    partials = []
    for _ in range(98):
      partials.append(math_ops.add_n([constant_op.constant(1)]))
    partials.append(
        math_ops.add_n([constant_op.constant(1), constant_op.constant(1)]))

    res = math_ops.add_n(partials) + constant_op.constant(0)
    with self.test_session(use_gpu=True):
      self.assertAllEqual(res.eval(), 100)

  def testFloat(self):
    np.random.seed(12345)
    for num_inputs in range(1, 10):
      x = [np.random.random((1, 2, 3, 4, 5)) - 0.5 for _ in range(num_inputs)]
      tf_x = ops.convert_n_to_tensor(x)
      with self.test_session(use_gpu=True):
        self.assertAllClose(sum(x), math_ops.add_n(tf_x).eval())
        self.assertAllClose(x[0] * num_inputs,
                            math_ops.add_n([tf_x[0]] * num_inputs).eval())

  def testInt(self):
    np.random.seed(54321)
    for num_inputs in range(1, 10):
      x = [
          np.random.randint(-128, 128, (5, 4, 3, 2, 1))
          for _ in range(num_inputs)
      ]
      tf_x = ops.convert_n_to_tensor(x)
      with self.test_session(use_gpu=True):
        self.assertAllEqual(sum(x), math_ops.add_n(tf_x).eval())
        self.assertAllEqual(x[0] * num_inputs,
                            math_ops.add_n([tf_x[0]] * num_inputs).eval())
class DivAndModTest(test_util.TensorFlowTestCase):
  """Division/modulus family: floormod, truncatemod, floor_div, realdiv,
  complex div, floordiv gradients, and the div*q + mod identity."""
  # TODO(aselle): Test more types before exposing new division operators.

  def intTestData(self):
    # Broadcasting pair: 20x1 numerators against 1x4 odd divisors.
    nums = np.arange(-10, 10, 1).reshape(20, 1)
    divs = np.arange(-3, 4, 2).reshape(1, 4)
    return nums, divs

  def floatTestData(self):
    # Broadcasting pair: 80x1 numerators against 1x12 negative divisors.
    nums = np.arange(-10, 10, .25).reshape(80, 1)
    divs = np.arange(-3, 0, .25).reshape(1, 12)
    return nums, divs

  def testFloorModInt(self):
    nums, divs = self.intTestData()
    with self.test_session():
      # TODO(aselle): Change test to use % after switch
      # tf_result = math_ops.floor_mod(nums, divs).eval()
      tf_result = math_ops.floormod(nums, divs).eval()
      np_result = nums % divs
      self.assertAllEqual(tf_result, np_result)

  def testFloorModFloat(self):
    nums, divs = self.floatTestData()
    with self.test_session():
      tf_result = math_ops.floormod(nums, divs).eval()
      np_result = nums % divs
      self.assertAllEqual(tf_result, np_result)
      # TODO(aselle): put this test in once % switched to floormod
      # tf2_result = (array_ops.constant(nums)
      #               % array_ops.constant(divs)).eval()
      # self.assertAllEqual(tf2_result, tf_result)

  def testTruncateModInt(self):
    nums, divs = self.intTestData()
    with self.test_session():
      tf_result = math_ops.truncatemod(nums, divs).eval()
      np_result = np.fmod(nums, divs)
      self.assertAllEqual(tf_result, np_result)

  def testTruncateModFloat(self):
    nums, divs = self.floatTestData()
    with self.test_session():
      tf_result = math_ops.truncatemod(nums, divs).eval()
      np_result = np.fmod(nums, divs)
      self.assertAllEqual(tf_result, np_result)

  def testDivideInt(self):
    nums, divs = self.intTestData()
    with self.test_session():
      tf_result = math_ops.floor_div(nums, divs).eval()
      np_result = nums // divs
      self.assertAllEqual(tf_result, np_result)
      # TODO(aselle): Put this test in once // is switched to floordiv
      # tf2_result = (array_ops.constant(nums)
      #               // array_ops.constant(divs)).eval()
      # self.assertAllEqual(tf2_result, tf_result)

  def testDivideName(self):
    with self.test_session():
      op = math_ops.divide(
          array_ops.constant(3), array_ops.constant(4), name="my_cool_divide")
      self.assertEqual(op.name, "my_cool_divide:0")

  def testRealDiv(self):
    nums, divs = self.floatTestData()
    with self.test_session():
      tf_result = math_ops.realdiv(nums, divs).eval()
      np_result = np.divide(nums, divs)
      self.assertAllEqual(tf_result, np_result)

  def testComplexDiv(self):
    # Smoke test: complex inputs must not raise.
    foo = array_ops.constant([1. + 3.j])
    with self.test_session():
      _ = math_ops.divide(foo, 1.).eval()
      _ = math_ops.div(foo, 2.).eval()

  def testFloorDivGrad(self):
    with self.test_session():
      a = variables.Variable(2.)
      b = variables.Variable(4.)
      with self.test_session() as sess:
        sess.run(variables.global_variables_initializer())
        c_grad = gradients.gradients(math_ops.divide(a, b), [a, b])
        self.assertAllEqual([x.eval() for x in c_grad], [.25, -.125])
        c_grad = gradients.gradients(math_ops.div(a, b), [a, b])
        self.assertAllEqual([x.eval() for x in c_grad], [.25, -.125])
        # floordiv has no gradient by design.
        c_grad = gradients.gradients(math_ops.floordiv(a, b), [a, b])
        self.assertAllEqual([None if x is None else x.eval()
                             for x in c_grad], [None, None])

  def testConsistent(self):
    nums, divs = self.intTestData()
    with self.test_session():
      tf_result = (math_ops.floor_div(nums, divs) * divs + math_ops.floormod(
          nums, divs)).eval()
      tf_nums = array_ops.constant(nums)
      tf_divs = array_ops.constant(divs)
      tf2_result = (tf_nums // tf_divs * tf_divs + tf_nums % tf_divs).eval()
      np_result = (nums // divs) * divs + (nums % divs)
      # consistency with numpy
      self.assertAllEqual(tf_result, np_result)
      # consistency with two forms of divide
      self.assertAllEqual(tf_result, tf2_result)
      # consistency for truncation form
      tf3_result = (math_ops.truncatediv(nums, divs) * divs +
                    math_ops.truncatemod(nums, divs)).eval()
      expanded_nums = np.reshape(
          np.tile(nums, divs.shape[1]), (nums.shape[0], divs.shape[1]))
      # Consistent with desire to get numerator
      self.assertAllEqual(tf3_result, expanded_nums)
      # Consistent with desire to get numerator
      self.assertAllEqual(tf_result, expanded_nums)
# Standard TensorFlow test entry point: run all test classes above.
if __name__ == "__main__":
  googletest.main()
| apache-2.0 | 3,454,466,222,381,870,000 | 36.871194 | 80 | 0.617958 | false |
brandsoulmates/incubator-airflow | airflow/contrib/operators/dataflow_operator.py | 22 | 9090 | # -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import copy
import re
import uuid
from airflow.contrib.hooks.gcs_hook import GoogleCloudStorageHook
from airflow.contrib.hooks.gcp_dataflow_hook import DataFlowHook
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
class DataFlowJavaOperator(BaseOperator):
    """
    Start a Java Cloud DataFlow batch job. The parameters of the operation
    will be passed to the job.

    It's a good practice to define dataflow_* parameters in the default_args of the dag
    like the project, zone and staging location.

    ```
    default_args = {
        'dataflow_default_options': {
            'project': 'my-gcp-project',
            'zone': 'europe-west1-d',
            'stagingLocation': 'gs://my-staging-bucket/staging/'
        }
    }
    ```

    You need to pass the path to your dataflow as a file reference with the ``jar``
    parameter, the jar needs to be a self executing jar. Use ``options`` to pass on
    options to your job.

    ```
    t1 = DataFlowOperation(
        task_id='datapflow_example',
        jar='{{var.value.gcp_dataflow_base}}pipeline/build/libs/pipeline-example-1.0.jar',
        options={
            'autoscalingAlgorithm': 'BASIC',
            'maxNumWorkers': '50',
            'start': '{{ds}}',
            'partitionType': 'DAY'
        },
        dag=my-dag)
    ```

    Both ``jar`` and ``options`` are templated so you can use variables in them.
    """
    template_fields = ['options', 'jar']
    ui_color = '#0273d4'

    @apply_defaults
    def __init__(
            self,
            jar,
            dataflow_default_options=None,
            options=None,
            gcp_conn_id='google_cloud_default',
            delegate_to=None,
            *args,
            **kwargs):
        """
        Create a new DataFlowJavaOperator. Note that both
        dataflow_default_options and options will be merged to specify pipeline
        execution parameter, and dataflow_default_options is expected to save
        high-level options, for instances, project and zone information, which
        apply to all dataflow operators in the DAG.

        For more detail on job submission have a look at the reference:

        https://cloud.google.com/dataflow/pipelines/specifying-exec-params

        :param jar: The reference to a self executing DataFlow jar.
        :type jar: string
        :param dataflow_default_options: Map of default job options.
        :type dataflow_default_options: dict
        :param options: Map of job specific options.
        :type options: dict
        :param gcp_conn_id: The connection ID to use connecting to Google Cloud
        Platform.
        :type gcp_conn_id: string
        :param delegate_to: The account to impersonate, if any.
            For this to work, the service account making the request must have
            domain-wide delegation enabled.
        :type delegate_to: string
        """
        super(DataFlowJavaOperator, self).__init__(*args, **kwargs)

        dataflow_default_options = dataflow_default_options or {}
        options = options or {}

        self.gcp_conn_id = gcp_conn_id
        self.delegate_to = delegate_to
        self.jar = jar
        self.dataflow_default_options = dataflow_default_options
        self.options = options

    def execute(self, context):
        """Materialize the jar locally (downloading it from GCS when the
        path is a gs:// URL) and launch the Dataflow job with the merged
        default + task-specific options."""
        bucket_helper = GoogleCloudBucketHelper(
            self.gcp_conn_id, self.delegate_to)
        self.jar = bucket_helper.google_cloud_to_local(self.jar)
        hook = DataFlowHook(gcp_conn_id=self.gcp_conn_id,
                            delegate_to=self.delegate_to)

        # Task-specific options win over the DAG-level defaults.
        dataflow_options = copy.copy(self.dataflow_default_options)
        dataflow_options.update(self.options)

        hook.start_java_dataflow(self.task_id, dataflow_options, self.jar)
class DataFlowPythonOperator(BaseOperator):
    """
    Launch a Python Cloud DataFlow batch job from a local or GCS
    pipeline file.  dataflow_default_options and options are merged at
    execution time, and option names are converted from lowerCamelCase
    to snake_case before being handed to the Dataflow runner.
    """

    @apply_defaults
    def __init__(
            self,
            py_file,
            py_options=None,
            dataflow_default_options=None,
            options=None,
            gcp_conn_id='google_cloud_default',
            delegate_to=None,
            *args,
            **kwargs):
        """
        Create a new DataFlowPythonOperator. Note that both
        dataflow_default_options and options will be merged to specify pipeline
        execution parameter, and dataflow_default_options is expected to save
        high-level options, for instances, project and zone information, which
        apply to all dataflow operators in the DAG.

        For more detail on job submission have a look at the reference:

        https://cloud.google.com/dataflow/pipelines/specifying-exec-params

        :param py_file: Reference to the python dataflow pipeline file, e.g.,
            /some/local/file/path/to/your/python/pipeline/file.py.
        :type py_file: string
        :param py_options: Additional python options.
        :type py_options: list of strings, e.g., ["-m", "-v"].
        :param dataflow_default_options: Map of default job options.
        :type dataflow_default_options: dict
        :param options: Map of job specific options.
        :type options: dict
        :param gcp_conn_id: The connection ID to use connecting to Google Cloud
            Platform.
        :type gcp_conn_id: string
        :param delegate_to: The account to impersonate, if any.
            For this to work, the service account making the request must have
            domain-wide delegation enabled.
        :type delegate_to: string
        """
        super(DataFlowPythonOperator, self).__init__(*args, **kwargs)

        self.py_file = py_file
        self.py_options = py_options or []
        self.dataflow_default_options = dataflow_default_options or {}
        self.options = options or {}
        self.gcp_conn_id = gcp_conn_id
        self.delegate_to = delegate_to

    def execute(self, context):
        """Execute the python dataflow job."""
        bucket_helper = GoogleCloudBucketHelper(
            self.gcp_conn_id, self.delegate_to)
        self.py_file = bucket_helper.google_cloud_to_local(self.py_file)
        hook = DataFlowHook(gcp_conn_id=self.gcp_conn_id,
                            delegate_to=self.delegate_to)
        # Task-specific options win over the DAG-level defaults.
        dataflow_options = self.dataflow_default_options.copy()
        dataflow_options.update(self.options)

        def camel_to_snake(name):
            # lowerCamelCase -> snake_case, as the Dataflow runner expects.
            return re.sub(r'[A-Z]', lambda match: '_' + match.group(0).lower(), name)

        formatted_options = {camel_to_snake(key): dataflow_options[key]
                             for key in dataflow_options}
        hook.start_python_dataflow(
            self.task_id, formatted_options,
            self.py_file, self.py_options)
class GoogleCloudBucketHelper(object):
    """GoogleCloudStorageHook helper class to download GCS objects."""
    # len('gs://'), the scheme prefix stripped off GCS paths.
    GCS_PREFIX_LENGTH = 5

    def __init__(self,
                 gcp_conn_id='google_cloud_default',
                 delegate_to=None):
        self._gcs_hook = GoogleCloudStorageHook(gcp_conn_id, delegate_to)

    def google_cloud_to_local(self, file_name):
        """
        Checks whether the file specified by file_name is stored in Google Cloud
        Storage (GCS), if so, downloads the file and saves it locally. The full
        path of the saved file will be returned. Otherwise the local file_name
        will be returned immediately.

        :param file_name: The full path of input file.
        :type file_name: string
        :return: The full path of local file.
        :type: string
        """
        if not file_name.startswith('gs://'):
            return file_name

        # Extracts bucket_id and object_id by first removing 'gs://' prefix and
        # then split the remaining by path delimiter '/'.
        path_components = file_name[self.GCS_PREFIX_LENGTH:].split('/')
        # Bug fix: the old code compared the list itself to 2
        # (`path_components < 2`), which is always False on Python 2 and
        # a TypeError on Python 3; compare the component count instead.
        if len(path_components) < 2:
            raise Exception(
                'Invalid Google Cloud Storage (GCS) object path: {}.'
                .format(file_name))

        bucket_id = path_components[0]
        object_id = '/'.join(path_components[1:])
        local_file = '/tmp/dataflow{}-{}'.format(str(uuid.uuid1())[:8],
                                                 path_components[-1])
        file_size = self._gcs_hook.download(bucket_id, object_id, local_file)

        if file_size > 0:
            return local_file
        raise Exception(
            'Failed to download Google Cloud Storage GCS object: {}'
            .format(file_name))
| apache-2.0 | 4,525,378,211,886,974,500 | 37.680851 | 90 | 0.624092 | false |
promptworks/keystone | keystone/common/sql/migrate_repo/versions/064_drop_user_and_group_fk.py | 3 | 1640 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import sqlalchemy
from keystone.common.sql import migration_helpers
def list_constraints(migrate_engine):
    """Describe the user/group -> domain foreign key constraints."""
    metadata = sqlalchemy.MetaData()
    metadata.bind = migrate_engine

    # Reflect the three tables involved in the foreign keys.
    user = sqlalchemy.Table('user', metadata, autoload=True)
    group = sqlalchemy.Table('group', metadata, autoload=True)
    domain = sqlalchemy.Table('domain', metadata, autoload=True)

    # Both 'user' and 'group' reference domain.id via their domain_id column.
    return [{'table': table,
             'fk_column': 'domain_id',
             'ref_column': domain.c.id}
            for table in (user, group)]
def upgrade(migrate_engine):
    """Drop the user/group -> domain foreign keys (no-op on SQLite)."""
    # SQLite does not support constraints, and even querying them raises
    # an exception, so skip the migration entirely on that backend.
    if migrate_engine.name != 'sqlite':
        migration_helpers.remove_constraints(
            list_constraints(migrate_engine))
def downgrade(migrate_engine):
    """Re-create the user/group -> domain foreign keys (no-op on SQLite)."""
    # Mirror of upgrade(): constraints cannot be manipulated on SQLite.
    if migrate_engine.name != 'sqlite':
        migration_helpers.add_constraints(
            list_constraints(migrate_engine))
| apache-2.0 | -6,645,443,029,560,701,000 | 35.444444 | 75 | 0.685366 | false |
android-ia/platform_external_chromium_org | tools/perf/measurements/smoothness.py | 27 | 1633 | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from metrics import power
from measurements import smoothness_controller
from telemetry.page import page_test
class Smoothness(page_test.PageTest):
  """Page test that records rendering smoothness plus power metrics."""

  def __init__(self):
    super(Smoothness, self).__init__('RunSmoothness')
    # Created lazily once the browser / page become available.
    self._power_metric = None
    self._smoothness_controller = None

  @classmethod
  def CustomizeBrowserOptions(cls, options):
    # GPU benchmarking and touch events are required by the smoothness
    # measurements; the benchmark flag marks the run as a performance test.
    for arg in ('--enable-gpu-benchmarking',
                '--touch-events=enabled',
                '--running-performance-benchmark'):
      options.AppendExtraBrowserArgs(arg)
    power.PowerMetric.CustomizeBrowserOptions(options)

  def WillStartBrowser(self, platform):
    self._power_metric = power.PowerMetric(platform)

  def WillNavigateToPage(self, page, tab):
    self._power_metric.Start(page, tab)
    self._smoothness_controller = smoothness_controller.SmoothnessController()
    self._smoothness_controller.SetUp(page, tab)

  def WillRunActions(self, page, tab):
    self._smoothness_controller.Start(tab)

  def DidRunActions(self, page, tab):
    self._power_metric.Stop(page, tab)
    self._smoothness_controller.Stop(tab)

  def ValidateAndMeasurePage(self, page, tab, results):
    self._power_metric.AddResults(tab, results)
    self._smoothness_controller.AddResults(tab, results)

  def CleanUpAfterPage(self, page, tab):
    # A page can bail out early; stop/clean whatever was actually created.
    if self._power_metric:
      self._power_metric.Stop(page, tab)
    if self._smoothness_controller:
      self._smoothness_controller.CleanUp(tab)
| bsd-3-clause | 4,294,614,156,108,376,000 | 34.5 | 78 | 0.746479 | false |
phobson/statsmodels | statsmodels/genmod/tests/results/results_glm.py | 4 | 286016 | """
Results for test_glm.py.
Hard-coded from R or Stata. Note that some of the remaining discrepancy vs.
Stata may be because Stata uses ML by default unless you specifically ask for
IRLS.
"""
import numpy as np
from statsmodels.compat.python import asbytes
from . import glm_test_resids
import os
from statsmodels.api import add_constant, categorical
# Test Precisions
# Decimal-place tolerances used when comparing computed results against the
# hard-coded R/Stata reference values in this module (see module docstring).
DECIMAL_4 = 4
DECIMAL_3 = 3
DECIMAL_2 = 2
DECIMAL_1 = 1
DECIMAL_0 = 0
class Longley(object):
    """
    Longley used for TestGlmGaussian
    Results are from Stata and R.
    """
    def __init__(self):
        # All five residual columns of the reference output are identical,
        # so the single column is stored once and tiled to (16, 5) instead
        # of hand-duplicating the numbers (resulting values are unchanged).
        resid_column = np.array([
            267.34002976, -94.0139424, 46.28716776, -410.11462193,
            309.71459076, -249.31121533, -164.0489564, -13.18035687,
            14.3047726, 455.39409455, -17.26892711, -39.05504252,
            -155.5499736, -85.67130804, 341.93151396, -206.75782519])
        self.resids = np.tile(resid_column[:, None], (1, 5))
        # taken from R (Rpy bug); the original assigned this twice with the
        # same value -- the duplicate assignment was removed.
        self.null_deviance = 185008826
        self.params = np.array([1.50618723e+01, -3.58191793e-02,
            -2.02022980e+00, -1.03322687e+00, -5.11041057e-02,
            1.82915146e+03, -3.48225863e+06])
        self.bse = np.array([8.49149258e+01, 3.34910078e-02, 4.88399682e-01,
            2.14274163e-01, 2.26073200e-01, 4.55478499e+02, 8.90420384e+05])
        self.aic_R = 235.23486961695903  # R adds 2 for dof to AIC
        self.aic_Stata = 14.57717943930524  # stata divides by nobs
        self.deviance = 836424.0555058046  # from R
        self.scale = 92936.006167311629
        self.llf = -109.61743480847952
        self.bic_Stata = 836399.1760177979  # no bic in R?
        self.df_model = 6
        self.df_resid = 9
        # TODO: taken from Stata; not available in sm yet
        self.chi2 = 1981.711859508729
        # self.pearson_chi2 = 836424.1293162981 # from Stata (?)
        self.fittedvalues = np.array([
            60055.659970240202, 61216.013942398131, 60124.71283224225,
            61597.114621930756, 62911.285409240052, 63888.31121532945,
            65153.048956395127, 63774.180356866214, 66004.695227399934,
            67401.605905447621, 68186.268927114084, 66552.055042522494,
            68810.549973595422, 69649.67130804155, 68989.068486039061,
            70757.757825193927])
class GaussianLog(object):
    """
    Uses generated data. These results are from R and Stata.
    """
    def __init__(self):
        # self.resids = np.genfromtxt('./glm_gaussian_log_resid.csv', ',')
        # Each residual row follows the pattern [a, a, b, a, a]: columns
        # 0, 1, 3 and 4 are identical and only column 2 differs.  The two
        # distinct columns are stored once and the (100, 5) matrix is built
        # from them instead of hand-duplicating every row (values unchanged).
        resid_a = np.array([
            3.20800000e-04, 8.12100000e-04, -2.94800000e-04, 1.40190000e-03,
            -2.30910000e-03, 1.10380000e-03, -5.14000000e-06, -1.65500000e-04,
            -7.55400000e-04, -1.39800000e-04, -7.17000000e-04, -1.12200000e-04,
            3.22100000e-04, -3.78000000e-05, 5.54500000e-04, 3.38400000e-04,
            9.72000000e-05, -7.92900000e-04, 3.33000000e-04, -8.35300000e-04,
            -3.99700000e-04, 1.41300000e-04, -8.50700000e-04, 1.43000000e-06,
            -9.12000000e-05, 6.75500000e-04, 3.97900000e-04, 1.07000000e-05,
            -8.15200000e-04, -8.46400000e-04, 9.91200000e-04, -5.07400000e-04,
            1.08520000e-03, 9.56100000e-04, 1.87500000e-03, -1.93920000e-03,
            8.16000000e-04, 1.01520000e-03, 1.04150000e-03, -3.88200000e-04,
            9.95900000e-04, -6.82800000e-04, -8.11400000e-04, -1.79050000e-03,
            6.10000000e-04, 2.52600000e-04, -8.62500000e-04, -3.47300000e-04,
            -7.79000000e-05, 6.72000000e-04, -3.72100000e-04, -1.22900000e-04,
            -1.63470000e-03, 2.64400000e-04, 1.79230000e-03, -1.40500000e-04,
            -2.98500000e-04, -9.33100000e-04, 9.11200000e-04, -1.31840000e-03,
            -1.30200000e-04, 9.09300000e-04, -2.39500000e-04, 7.15300000e-04,
            5.45000000e-05, 2.85310000e-03, 4.63400000e-04, 2.80900000e-04,
            5.42000000e-05, -3.62300000e-04, -1.11900000e-03, 1.28900000e-03,
            -1.40820000e-03, -1.69300000e-04, -1.03620000e-03, 1.49150000e-03,
            -7.22000000e-05, 5.49000000e-04, -2.12320000e-03, 7.84000000e-06,
            1.15580000e-03, 4.83400000e-04, -5.26100000e-04, -1.75100000e-04,
            -1.84600000e-03, 2.07200000e-04, -8.54700000e-04, -9.20000000e-05,
            5.35700000e-04, -7.67300000e-04, -1.79710000e-03, 1.10910000e-03,
            -5.53800000e-04, 7.48000000e-04, 4.23000000e-04, -3.16400000e-04,
            -6.63200000e-04, 1.33540000e-03, -7.81200000e-04, 1.67880000e-03])
        resid_b = np.array([
            8.72100000e-04, 2.16350000e-03, -7.69700000e-04, 3.58560000e-03,
            -5.78490000e-03, 2.70820000e-03, -1.23000000e-05, -3.89200000e-04,
            -1.73870000e-03, -3.14800000e-04, -1.58000000e-03, -2.41900000e-04,
            6.79000000e-04, -7.79000000e-05, 1.11730000e-03, 6.66300000e-04,
            1.87000000e-04, -1.49070000e-03, 6.11500000e-04, -1.49790000e-03,
            -6.99800000e-04, 2.41500000e-04, -1.41920000e-03, 2.33000000e-06,
            -1.44900000e-04, 1.04650000e-03, 6.01100000e-04, 1.57000000e-05,
            -1.17060000e-03, -1.18460000e-03, 1.35180000e-03, -6.74200000e-04,
            1.40450000e-03, 1.20500000e-03, 2.30090000e-03, -2.31650000e-03,
            9.48700000e-04, 1.14860000e-03, 1.14640000e-03, -4.15600000e-04,
            1.03690000e-03, -6.91200000e-04, -7.98500000e-04, -1.71250000e-03,
            5.66900000e-04, 2.28100000e-04, -7.56400000e-04, -2.95800000e-04,
            -6.44000000e-05, 5.39400000e-04, -2.89900000e-04, -9.29000000e-05,
            -1.19900000e-03, 1.88100000e-04, 1.23650000e-03, -9.40000000e-05,
            -1.93600000e-04, -5.86400000e-04, 5.54900000e-04, -7.77900000e-04,
            -7.44000000e-05, 5.03200000e-04, -1.28300000e-04, 3.71000000e-04,
            2.73000000e-05, 1.38600000e-03, 2.17800000e-04, 1.27700000e-04,
            2.38000000e-05, -1.54000000e-04, -4.59800000e-04, 5.11900000e-04,
            -5.40400000e-04, -6.28000000e-05, -3.71000000e-04, 5.15800000e-04,
            -2.41000000e-05, 1.76900000e-04, -6.60400000e-04, 2.35000000e-06,
            3.34700000e-04, 1.35000000e-04, -1.41700000e-04, -4.55000000e-05,
            -4.62100000e-04, 5.00000000e-05, -1.98700000e-04, -2.06000000e-05,
            1.15600000e-04, -1.59400000e-04, -3.59500000e-04, 2.13500000e-04,
            -1.02600000e-04, 1.33400000e-04, 7.26000000e-05, -5.22000000e-05,
            -1.05200000e-04, 2.03700000e-04, -1.14600000e-04, 2.36600000e-04])
        self.resids = np.column_stack(
            (resid_a, resid_a, resid_b, resid_a, resid_a))
        self.null_deviance = 56.691617808182208
        self.params = np.array([9.99964386e-01, -1.99896965e-02,
            -1.00027232e-04])
        self.bse = np.array([1.42119293e-04, 1.20276468e-05, 1.87347682e-07])
        self.aic_R = -1103.8187213072656  # R adds 2 for dof for scale
        self.aic_Stata = -11.05818072104212  # stata divides by nobs for e(aic)
        self.deviance = 8.68876986288542e-05
        self.scale = 8.9574946938163984e-07  # from R but e(phi) in Stata
        self.llf = 555.9093606536328
        self.bic_Stata = -446.7014211525822
        self.df_model = 2
        self.df_resid = 97
        self.chi2 = 33207648.86501769  # from Stata not in sm
        self.fittedvalues = np.array([
            2.7181850213327747, 2.664122305869506, 2.6106125414084405,
            2.5576658143523567, 2.5052916730829535, 2.4534991313100165,
            2.4022966718815781, 2.3516922510411282, 2.3016933031175575,
            2.2523067456332542, 2.2035389848154616, 2.1553959214958001,
            2.107882957382607, 2.0610050016905817, 2.0147664781120667,
            1.969171332114154, 1.9242230385457144, 1.8799246095383746,
            1.8362786026854092, 1.7932871294825108, 1.7509518640143886,
            1.7092740518711942, 1.6682545192788105, 1.6278936824271399,
            1.5881915569806042, 1.5491477677552221, 1.5107615585467538,
            1.4730318020945796, 1.4359570101661721, 1.3995353437472129,
            1.3637646233226499, 1.3286423392342188, 1.2941656621002184,
            1.2603314532836074, 1.2271362753947765, 1.1945764028156565,
            1.162647832232141, 1.1313462931621328, 1.1006672584668622,
            1.0706059548334832, 1.0411573732173065, 1.0123162792324054,
            0.98407722347970683, 0.95643455180206194, 0.92938241545618494,
            0.90291478119174029, 0.87702544122826565, 0.85170802312101246,
            0.82695599950720078, 0.80276269772458597, 0.77912130929465073,
            0.75602489926313921, 0.73346641539106316, 0.71143869718971686,
            0.68993448479364294, 0.66894642766589496, 0.64846709313034534,
            0.62848897472617915, 0.60900450038011367, 0.5900060403922629,
            0.57148591523195513, 0.55343640314018494, 0.5358497475357491,
            0.51871816422248385, 0.50203384839536769, 0.48578898144361343,
            0.46997573754920047, 0.45458629007964013, 0.4396128177740814,
            0.42504751072218311, 0.41088257613548018, 0.39711024391126759,
            0.38372277198930843, 0.37071245150195081, 0.35807161171849949,
            0.34579262478494655, 0.33386791026040569, 0.32228993945183393,
            0.31105123954884056, 0.30014439756060574, 0.28956206405712448,
            0.27929695671718968, 0.26934186368570684, 0.25968964674310463,
            0.25033324428976694, 0.24126567414856051, 0.23248003618867552,
            0.22396951477412205, 0.21572738104035141, 0.20774699500257574,
            0.20002180749946474, 0.19254536197598673, 0.18531129610924435,
            0.17831334328122878, 0.17154533390247831, 0.16500119659068577,
            0.15867495920834204, 0.15256074976354628, 0.14665279717814039,
            0.14094543192735109])
class GaussianInverse(object):
    """
    This test uses generated data. Results are from R and Stata.
    """
    def __init__(self):
        # Each residual row follows the pattern [a, a, b, a, a]: columns
        # 0, 1, 3 and 4 are identical and only column 2 differs.  The two
        # distinct columns are stored once and the (100, 5) matrix is built
        # from them instead of hand-duplicating every row (values unchanged).
        resid_a = np.array([
            -5.15300000e-04, -2.12500000e-04, -1.71400000e-04, 1.94020000e-03,
            -6.81100000e-04, 1.21370000e-03, -1.51090000e-03, 3.21500000e-04,
            -3.18500000e-04, 3.75600000e-04, 4.82300000e-04, -1.41870000e-03,
            6.75000000e-05, 4.06300000e-04, -3.61500000e-04, -2.97400000e-04,
            -9.32700000e-04, 1.16270000e-03, 6.77900000e-04, -1.29330000e-03,
            2.24500000e-04, 1.05510000e-03, 2.50400000e-04, 4.08600000e-04,
            -1.67610000e-03, 7.47600000e-04, 2.08200000e-04, -8.00800000e-04,
            5.81200000e-04, 1.00980000e-03, 2.77400000e-04, -5.02800000e-04,
            2.69800000e-04, 2.01300000e-04, -1.19690000e-03, -6.94200000e-04,
            5.65500000e-04, 4.93100000e-04, 3.25000000e-04, -7.70200000e-04,
            2.58000000e-05, -1.52800000e-04, 4.52000000e-05, -6.83900000e-04,
            -7.77600000e-04, 1.03170000e-03, 1.20000000e-03, -7.71600000e-04,
            -3.37000000e-04, 1.19880000e-03, -1.54610000e-03, 9.11600000e-04,
            -4.70800000e-04, -1.21550000e-03, 1.09160000e-03, -2.72000000e-04,
            -7.84500000e-04, 1.53330000e-03, -1.84450000e-03, 1.68550000e-03,
            -3.06100000e-04, 1.00950000e-03, 5.22000000e-04, -2.18000000e-05,
            -7.80600000e-04, 6.81400000e-04, -1.43800000e-04, 7.76000000e-04,
            2.54900000e-04, 5.77500000e-04, 7.58100000e-04, -8.31000000e-04,
            -2.10340000e-03, -8.89900000e-04, 1.08570000e-03, -1.88600000e-04,
            9.10000000e-05, 1.07700000e-03, 9.04100000e-04, -2.20000000e-04,
            -1.64030000e-03, 2.20600000e-04, -2.78300000e-04, -4.93000000e-04,
            -1.85000000e-04, -7.64000000e-04, 7.79600000e-04, 2.88400000e-04,
            1.09370000e-03, 3.07000000e-04, -8.76000000e-04, -1.85300000e-04,
            3.24700000e-04, 4.59600000e-04, -2.73300000e-04, 1.32180000e-03,
            -1.32620000e-03, 9.62000000e-05, -6.04400000e-04, -6.66300000e-04])
        resid_b = np.array([
            5.14800000e-04, 2.03700000e-04, 1.57200000e-04, -1.69710000e-03,
            5.66900000e-04, -9.58800000e-04, 1.13070000e-03, -2.27400000e-04,
            2.12600000e-04, -2.36300000e-04, -2.85500000e-04, 7.89300000e-04,
            -3.52000000e-05, -1.99100000e-04, 1.66000000e-04, 1.28000000e-04,
            3.75800000e-04, -4.38500000e-04, -2.39200000e-04, 4.27000000e-04,
            -6.94000000e-05, -3.04900000e-04, -6.77000000e-05, -1.03400000e-04,
            3.96800000e-04, -1.65700000e-04, -4.32000000e-05, 1.55700000e-04,
            -1.05900000e-04, -1.72400000e-04, -4.44000000e-05, 7.55000000e-05,
            -3.80000000e-05, -2.67000000e-05, 1.48900000e-04, 8.12000000e-05,
            -6.22000000e-05, -5.10000000e-05, -3.17000000e-05, 7.07000000e-05,
            -2.23000000e-06, 1.25000000e-05, -3.48000000e-06, 4.97000000e-05,
            5.34000000e-05, -6.70000000e-05, -7.37000000e-05, 4.48000000e-05,
            1.85000000e-05, -6.25000000e-05, 7.64000000e-05, -4.27000000e-05,
            2.09000000e-05, 5.13000000e-05, -4.37000000e-05, 1.04000000e-05,
            2.84000000e-05, -5.28000000e-05, 6.05000000e-05, -5.26000000e-05,
            9.10000000e-06, -2.86000000e-05, -1.41000000e-05, 5.62000000e-07,
            1.92000000e-05, -1.60000000e-05, 3.23000000e-06, -1.66000000e-05,
            -5.22000000e-06, -1.13000000e-05, -1.42000000e-05, 1.49000000e-05,
            3.62000000e-05, 1.47000000e-05, -1.71000000e-05, 2.86000000e-06,
            -1.32000000e-06, -1.50000000e-05, -1.21000000e-05, 2.83000000e-06,
            2.02000000e-05, -2.62000000e-06, 3.17000000e-06, 5.40000000e-06,
            1.95000000e-06, 7.75000000e-06, -7.61000000e-06, -2.71000000e-06,
            -9.91000000e-06, -2.68000000e-06, 7.37000000e-06, 1.50000000e-06,
            -2.54000000e-06, -3.47000000e-06, 1.99000000e-06, -9.29000000e-06,
            9.00000000e-06, -6.31000000e-07, 3.83000000e-06, 4.08000000e-06])
        self.resids = np.column_stack(
            (resid_a, resid_a, resid_b, resid_a, resid_a))
        self.null_deviance = 6.8088354977561  # from R, Rpy bug
        self.params = np.array([1.00045997, 0.01991666, 0.00100126])
        self.bse = np.array([4.55214070e-04, 7.00529313e-05, 1.84478509e-06])
        self.aic_R = -1123.1528237643774
        self.aic_Stata = -11.25152876811373
        self.deviance = 7.1612915365488368e-05
        self.scale = 7.3827747608449547e-07
        self.llf = 565.57641188218872
        self.bic_Stata = -446.7014364279675
        self.df_model = 2
        self.df_resid = 97
        self.chi2 = 2704006.698904491
        self.fittedvalues = np.array([
            0.99954024, 0.97906956, 0.95758077, 0.93526008, 0.91228657,
            0.88882978, 0.8650479, 0.84108646, 0.81707757, 0.79313958,
            0.76937709, 0.74588129, 0.72273051, 0.69999099, 0.67771773,
            0.65595543, 0.63473944, 0.61409675, 0.59404691, 0.57460297,
            0.55577231, 0.53755742, 0.51995663, 0.50296478, 0.48657379,
            0.47077316, 0.4555505, 0.44089187, 0.42678213, 0.41320529,
            0.40014475, 0.38758348, 0.37550428, 0.36388987, 0.35272306,
            0.34198684, 0.33166446, 0.32173953, 0.31219604, 0.30301842,
            0.29419156, 0.28570085, 0.27753216, 0.26967189, 0.26210695,
            0.25482476, 0.24781324, 0.2410608, 0.23455636, 0.22828931,
            0.22224947, 0.21642715, 0.21081306, 0.20539835, 0.20017455,
            0.19513359, 0.19026777, 0.18556972, 0.18103243, 0.17664922,
            0.1724137, 0.16831977, 0.16436164, 0.16053377, 0.15683086,
            0.15324789, 0.14978003, 0.1464227, 0.14317153, 0.14002232,
            0.13697109, 0.13401403, 0.1311475, 0.12836802, 0.12567228,
            0.1230571, 0.12051944, 0.11805642, 0.11566526, 0.1133433,
            0.11108802, 0.10889699, 0.10676788, 0.10469847, 0.10268664,
            0.10073034, 0.09882763, 0.09697663, 0.09517555, 0.09342267,
            0.09171634, 0.09005498, 0.08843707, 0.08686116, 0.08532585,
            0.08382979, 0.0823717, 0.08095035, 0.07956453, 0.07821311])
class Star98(object):
    """
    Star98 class used with TestGlmBinomial
    """
    def __init__(self):
        # Hard-coded reference output; per the module docstring these values
        # come from R and Stata (attribute suffixes name the source).
        self.params = (-0.0168150366, 0.0099254766, -0.0187242148,
                -0.0142385609, 0.2544871730, 0.2406936644, 0.0804086739,
                -1.9521605027, -0.3340864748, -0.1690221685, 0.0049167021,
                -0.0035799644, -0.0140765648, -0.0040049918, -0.0039063958,
                0.0917143006, 0.0489898381, 0.0080407389, 0.0002220095,
                -0.0022492486, 2.9588779262)
        # Standard errors corresponding one-to-one with `params`.
        self.bse = (4.339467e-04, 6.013714e-04, 7.435499e-04, 4.338655e-04,
                2.994576e-02, 5.713824e-02, 1.392359e-02, 3.168109e-01,
                6.126411e-02, 3.270139e-02, 1.253877e-03, 2.254633e-04,
                1.904573e-03, 4.739838e-04, 9.623650e-04, 1.450923e-02,
                7.451666e-03, 1.499497e-03, 2.988794e-05, 3.489838e-04,
                1.546712e+00)
        self.null_deviance = 34345.3688931
        self.df_null = 302
        self.deviance = 4078.76541772
        self.df_resid = 282
        self.df_model = 20
        self.aic_R = 6039.22511799
        self.aic_Stata = 19.93143846737438
        self.bic_Stata = 2467.493504191302
        self.llf = -2998.61255899391 # from R
        self.llf_Stata = -2998.612927807218
        self.scale = 1.
        self.pearson_chi2 = 4051.921614
        # The (large) residual arrays live in the glm_test_resids helper.
        self.resids = glm_test_resids.star98_resids
        self.fittedvalues = np.array([ 0.5833118 , 0.75144661, 0.50058272,
            0.68534524, 0.32251021,
            0.68693601, 0.33299827, 0.65624766, 0.49851481, 0.506736,
            0.23954874, 0.86631452, 0.46432936, 0.44171873, 0.66797935,
            0.73988491, 0.51966014, 0.42442446, 0.5649369 , 0.59251634,
            0.34798337, 0.56415024, 0.49974355, 0.3565539 , 0.20752309,
            0.18269097, 0.44932642, 0.48025128, 0.59965277, 0.58848671,
            0.36264203, 0.33333196, 0.74253352, 0.5081886 , 0.53421878,
            0.56291445, 0.60205239, 0.29174423, 0.2954348 , 0.32220414,
            0.47977903, 0.23687535, 0.11776464, 0.1557423 , 0.27854799,
            0.22699533, 0.1819439 , 0.32554433, 0.22681989, 0.15785389,
            0.15268609, 0.61094772, 0.20743222, 0.51649059, 0.46502006,
            0.41031788, 0.59523288, 0.65733285, 0.27835336, 0.2371213 ,
            0.25137045, 0.23953942, 0.27854519, 0.39652413, 0.27023163,
            0.61411863, 0.2212025 , 0.42005842, 0.55940397, 0.35413774,
            0.45724563, 0.57399437, 0.2168918 , 0.58308738, 0.17181104,
            0.49873249, 0.22832683, 0.14846056, 0.5028073 , 0.24513863,
            0.48202096, 0.52823155, 0.5086262 , 0.46295993, 0.57869402,
            0.78363217, 0.21144435, 0.2298366 , 0.17954825, 0.32232586,
            0.8343015 , 0.56217006, 0.47367315, 0.52535649, 0.60350746,
            0.43210701, 0.44712008, 0.35858239, 0.2521347 , 0.19787004,
            0.63256553, 0.51386532, 0.64997027, 0.13402072, 0.81756174,
            0.74543642, 0.30825852, 0.23988707, 0.17273125, 0.27880599,
            0.17395893, 0.32052828, 0.80467697, 0.18726218, 0.23842081,
            0.19020381, 0.85835388, 0.58703615, 0.72415106, 0.64433695,
            0.68766653, 0.32923663, 0.16352185, 0.38868816, 0.44980444,
            0.74810044, 0.42973792, 0.53762581, 0.72714996, 0.61229484,
            0.30267667, 0.24713253, 0.65086008, 0.48957265, 0.54955545,
            0.5697156 , 0.36406211, 0.48906545, 0.45919413, 0.4930565 ,
            0.39785555, 0.5078719 , 0.30159626, 0.28524393, 0.34687707,
            0.22522042, 0.52947159, 0.29277287, 0.8585002 , 0.60800389,
            0.75830521, 0.35648175, 0.69508796, 0.45518355, 0.21567675,
            0.39682985, 0.49042948, 0.47615798, 0.60588234, 0.62910299,
            0.46005639, 0.71755165, 0.48852156, 0.47940661, 0.60128813,
            0.16589699, 0.68512861, 0.46305199, 0.68832227, 0.7006721 ,
            0.56564937, 0.51753941, 0.54261733, 0.56072214, 0.34545715,
            0.30226104, 0.3572956 , 0.40996287, 0.33517519, 0.36248407,
            0.33937041, 0.34140691, 0.2627528 , 0.29955161, 0.38581683,
            0.24840026, 0.15414272, 0.40415991, 0.53936252, 0.52111887,
            0.28060168, 0.45600958, 0.51110589, 0.43757523, 0.46891953,
            0.39425249, 0.5834369 , 0.55817308, 0.32051259, 0.43567448,
            0.34134195, 0.43016545, 0.4885413 , 0.28478325, 0.2650776 ,
            0.46784606, 0.46265983, 0.42655938, 0.18972234, 0.60448491,
            0.211896  , 0.37886032, 0.50727577, 0.39782309, 0.50427121,
            0.35882898, 0.39596807, 0.49160806, 0.35618002, 0.6819922 ,
            0.36871093, 0.43079679, 0.67985516, 0.41270595, 0.68952767,
            0.52587734, 0.32042126, 0.39120123, 0.56870985, 0.32962349,
            0.32168989, 0.54076251, 0.4592907 , 0.48480182, 0.4408386 ,
            0.431178  , 0.47078232, 0.55911605, 0.30331618, 0.50310393,
            0.65036038, 0.45078895, 0.62354291, 0.56435463, 0.50034281,
            0.52693538, 0.57217285, 0.49221472, 0.40707122, 0.44226533,
            0.3475959 , 0.54746396, 0.86385832, 0.48402233, 0.54313657,
            0.61586824, 0.27097185, 0.69717808, 0.52156974, 0.50401189,
            0.56724181, 0.6577178 , 0.42732047, 0.44808396, 0.65435634,
            0.54766225, 0.38160648, 0.49890847, 0.50879037, 0.5875452 ,
            0.45101593, 0.5709704 , 0.3175516 , 0.39813159, 0.28305688,
            0.40521062, 0.30120578, 0.26400428, 0.44205496, 0.40545798,
            0.39366599, 0.55288196, 0.14104184, 0.17550155, 0.1949095 ,
            0.40255144, 0.21016822, 0.09712017, 0.63151487, 0.25885514,
            0.57323748, 0.61836898, 0.43268601, 0.67008878, 0.75801989,
            0.50353406, 0.64222315, 0.29925757, 0.32592036, 0.39634977,
            0.39582747, 0.41037006, 0.34174944])
class Lbw(object):
    '''
    Expected GLM results (binomial family, canonical logit link) for the
    low birth weight (LBW) data.

    The LBW data can be found here
    http://www.stata-press.com/data/r9/rmain.html

    Reference numbers come from R and Stata; attributes named ``*_R`` /
    ``*_Stata`` record which package produced them.
    '''
    def __init__(self):
        # data set up for data not in datasets
        filename = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                                "stata_lbw_glm.csv")
        # Column 4 is quoted in the CSV; the converter strips the quotes.
        # Use a context manager so the file handle is closed deterministically
        # (the original passed a bare open() and leaked the handle until GC).
        with open(filename, 'rb') as fd:
            data = np.recfromcsv(
                fd, converters={4: lambda s: s.strip(asbytes("\""))})
        # `race` is categorical; expand it into dummy columns.
        data = categorical(data, col='race', drop=True)
        self.endog = data.low
        design = np.column_stack((data['age'], data['lwt'],
                                  data['race_black'], data['race_other'],
                                  data['smoke'], data['ptl'], data['ht'],
                                  data['ui']))
        self.exog = add_constant(design, prepend=False)
        # Results for Canonical Logit Link
        self.params = (-.02710031, -.01515082, 1.26264728,
                       .86207916, .92334482, .54183656, 1.83251780,
                       .75851348, .46122388)
        self.bse = (0.036449917, 0.006925765, 0.526405169,
                    0.439146744, 0.400820976, 0.346246857, 0.691623875,
                    0.459373871, 1.204574885)
        self.aic_R = 219.447991133
        self.aic_Stata = 1.161100482182551
        self.deviance = 201.4479911325021
        self.scale = 1
        self.llf = -100.7239955662511
        self.chi2 = 25.65329337867037    # from Stata, not used by sm
        self.null_deviance = 234.671996193219
        self.bic_Stata = -742.0664715782335
        self.df_resid = 180
        self.df_model = 8
        self.df_null = 188
        self.pearson_chi2 = 182.023342493558
        self.resids = glm_test_resids.lbw_resids
        # Fitted probabilities for each observation (one per data row).
        self.fittedvalues = np.array([0.31217507, 0.12793027, 0.32119762,
            0.48442686, 0.50853393,
            0.24517662, 0.12755193, 0.33226988, 0.22013309, 0.26268069,
            0.34729955, 0.18782188, 0.75404181, 0.54723527, 0.35016393,
            0.35016393, 0.45824406, 0.25336683, 0.43087357, 0.23284101,
            0.20146616, 0.24315597, 0.02725586, 0.22207692, 0.39800383,
            0.05584178, 0.28403447, 0.06931188, 0.35371946, 0.3896279,
            0.3896279, 0.47812002, 0.60043853, 0.07144772, 0.29995988,
            0.17910031, 0.22773411, 0.22691015, 0.06221253, 0.2384528,
            0.32633864, 0.05131047, 0.2954536, 0.07364416, 0.57241299,
            0.57241299, 0.08272435, 0.23298882, 0.12658158, 0.58967487,
            0.46989562, 0.22455631, 0.2348285, 0.29571887, 0.28212464,
            0.31499013, 0.68340511, 0.14090647, 0.31448425, 0.28082972,
            0.28082972, 0.24918728, 0.27018297, 0.08175784, 0.64808999,
            0.38252574, 0.25550797, 0.09113411, 0.40736693, 0.32644055,
            0.54367425, 0.29606968, 0.47028421, 0.39972155, 0.25079125,
            0.09678472, 0.08807264, 0.27467837, 0.5675742, 0.045619,
            0.10719293, 0.04826292, 0.23934092, 0.24179618, 0.23802197,
            0.49196179, 0.31379451, 0.10605469, 0.04047396, 0.11620849,
            0.09937016, 0.21822964, 0.29770265, 0.83912829, 0.25079125,
            0.08548557, 0.06550308, 0.2046457, 0.2046457, 0.08110349,
            0.13519643, 0.47862055, 0.38891913, 0.1383964, 0.26176764,
            0.31594589, 0.11418612, 0.06324112, 0.28468594, 0.21663702,
            0.03827107, 0.27237604, 0.20246694, 0.19042999, 0.15019447,
            0.18759474, 0.12308435, 0.19700616, 0.11564002, 0.36595033,
            0.07765727, 0.14119063, 0.13584627, 0.11012759, 0.10102472,
            0.10002166, 0.07439288, 0.27919958, 0.12491598, 0.06774594,
            0.72513764, 0.17714986, 0.67373352, 0.80679436, 0.52908941,
            0.15695938, 0.49722003, 0.41970014, 0.62375224, 0.53695622,
            0.25474238, 0.79135707, 0.2503871, 0.25352337, 0.33474211,
            0.19308929, 0.24658944, 0.25495092, 0.30867144, 0.41240259,
            0.59412526, 0.16811226, 0.48282791, 0.36566756, 0.09279325,
            0.75337353, 0.57128885, 0.52974123, 0.44548504, 0.77748843,
            0.3224082, 0.40054277, 0.29522468, 0.19673553, 0.73781774,
            0.57680312, 0.44545573, 0.30242355, 0.38720223, 0.16632904,
            0.30804092, 0.56385194, 0.60012179, 0.48324821, 0.24636345,
            0.26153216, 0.2348285, 0.29023669, 0.41011454, 0.36472083,
            0.65922069, 0.30476903, 0.09986775, 0.70658332, 0.30713075,
            0.36096386, 0.54962701, 0.71996086, 0.6633756])
class Scotvote(object):
    """
    Scotvot class is used with TestGlmGamma.

    Expected values for a Gamma GLM fit to the Scottish vote data.
    Reference numbers come from R and Stata; attributes named ``*_R`` /
    ``*_Stata`` record which package produced them.
    """
    def __init__(self):
        # Coefficient estimates and their standard errors.
        self.params = (4.961768e-05, 2.034423e-03, -7.181429e-05, 1.118520e-04,
            -1.467515e-07, -5.186831e-04, -2.42717498e-06, -1.776527e-02)
        self.bse = (1.621577e-05, 5.320802e-04, 2.711664e-05, 4.057691e-05,
            1.236569e-07, 2.402534e-04, 7.460253e-07, 1.147922e-02)
        # Deviance and degrees-of-freedom summary.
        self.null_deviance = 0.536072
        self.df_null = 31
        self.deviance = 0.087388516417
        self.df_resid = 24
        self.df_model = 7
        # Information criteria (R and Stata use different AIC scalings).
        self.aic_R = 182.947045954721
        self.aic_Stata = 10.72212
        self.bic_Stata = -83.09027
        self.llf = -163.5539382  # from Stata, same as ours with scale = 1
        # self.llf = -82.47352 # Very close to ours as is
        self.scale = 0.003584283
        self.pearson_chi2 = .0860228056
        # Residuals are stored in a companion module (project-local).
        self.resids = glm_test_resids.scotvote_resids
        # Fitted means, one per observation.
        self.fittedvalues = np.array([57.80431482, 53.2733447, 50.56347993,
            58.33003783,
            70.46562169, 56.88801284, 66.81878401, 66.03410393,
            57.92937473, 63.23216907, 53.9914785 , 61.28993391,
            64.81036393, 63.47546816, 60.69696114, 74.83508176,
            56.56991106, 72.01804172, 64.35676519, 52.02445881,
            64.24933079, 71.15070332, 45.73479688, 54.93318588,
            66.98031261, 52.02479973, 56.18413736, 58.12267471,
            67.37947398, 60.49162862, 73.82609217, 69.61515621])
class Cancer(object):
    '''
    Base class that loads the Stata cancer data used by the Gamma GLM tests.

    The Cancer data can be found here
    http://www.stata-press.com/data/r10/rmain.html

    Subclasses (``CancerLog``, ``CancerIdentity``) attach the expected
    results for specific link functions.
    '''
    def __init__(self):
        filename = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                                "stata_cancer_glm.csv")
        # Use a context manager so the file handle is closed deterministically
        # (the original passed a bare open() and leaked the handle until GC).
        with open(filename, 'rb') as fd:
            data = np.recfromcsv(fd)
        self.endog = data.studytime
        design = np.column_stack((data.age, data.drug))
        # `drug` is categorical; expand into dummies, then drop the first
        # dummy to avoid perfect collinearity with the constant.
        design = categorical(design, col=1, drop=True)
        design = np.delete(design, 1, axis=1)  # drop first dummy
        self.exog = add_constant(design, prepend=False)
class CancerLog(Cancer):
    """
    CancerLog is used with TestGlmGammaLog.

    Expected results for a Gamma GLM with a log link fit to the Stata
    cancer data loaded by the ``Cancer`` base class.
    """
    def __init__(self):
        super(CancerLog, self).__init__()
        # 48 observations x 5 residual columns.
        # NOTE(review): presumably one column per residual type (response,
        # deviance, working, pearson, anscombe order?) -- confirm against the
        # GLM results consumer before relying on the ordering.
        self.resids = np.array([[-8.52598100e-01, -1.45739100e+00,
            -3.92408100e+01,
            -1.41526900e+00, -5.78417200e+00],
            [-8.23683800e-01, -1.35040200e+00, -2.64957500e+01,
             -1.31777000e+00, -4.67162900e+00],
            [-7.30450400e-01, -1.07754600e+00, -4.02136400e+01,
             -1.06208800e+00, -5.41978500e+00],
            [-7.04471600e-01, -1.01441500e+00, -7.25951500e+01,
             -1.00172900e+00, -7.15130900e+00],
            [-5.28668000e-01, -6.68617300e-01, -3.80758100e+01,
             -6.65304600e-01, -4.48658700e+00],
            [-2.28658500e-01, -2.48859700e-01, -6.14913600e+00,
             -2.48707200e-01, -1.18577100e+00],
            [-1.93939400e-01, -2.08119900e-01, -7.46226500e+00,
             -2.08031700e-01, -1.20300800e+00],
            [-3.55635700e-01, -4.09525000e-01, -2.14132500e+01,
             -4.08815100e-01, -2.75958600e+00],
            [-5.73360000e-02, -5.84700000e-02, -4.12946200e+00,
             -5.84681000e-02, -4.86586900e-01],
            [3.09828000e-02, 3.06685000e-02, 1.86551100e+00,
             3.06682000e-02, 2.40413800e-01],
            [-2.11924300e-01, -2.29071300e-01, -2.18386100e+01,
             -2.28953000e-01, -2.15130900e+00],
            [-3.10989000e-01, -3.50739300e-01, -4.19249500e+01,
             -3.50300400e-01, -3.61084500e+00],
            [-9.22250000e-03, -9.25100000e-03, -1.13679700e+00,
             -9.25100000e-03, -1.02392100e-01],
            [2.39402500e-01, 2.22589700e-01, 1.88577300e+01,
             2.22493500e-01, 2.12475600e+00],
            [3.35166000e-02, 3.31493000e-02, 4.51842400e+00,
             3.31489000e-02, 3.89155400e-01],
            [8.49829400e-01, 6.85180200e-01, 3.57627500e+01,
             6.82689900e-01, 5.51291500e+00],
            [4.12934200e-01, 3.66785200e-01, 4.65392600e+01,
             3.66370400e-01, 4.38379500e+00],
            [4.64148400e-01, 4.07123200e-01, 6.25726500e+01,
             4.06561900e-01, 5.38915500e+00],
            [1.71104600e+00, 1.19474800e+00, 1.12676500e+02,
             1.18311900e+00, 1.38850500e+01],
            [1.26571800e+00, 9.46389000e-01, 1.30431000e+02,
             9.40244600e-01, 1.28486900e+01],
            [-3.48532600e-01, -3.99988300e-01, -2.95638100e+01,
             -3.99328600e-01, -3.20997700e+00],
            [-4.04340300e-01, -4.76960100e-01, -4.10254300e+01,
             -4.75818000e-01, -4.07286500e+00],
            [-4.92057900e-01, -6.08818300e-01, -9.34509600e+01,
             -6.06357200e-01, -6.78109700e+00],
            [-4.02876400e-01, -4.74878400e-01, -9.15226200e+01,
             -4.73751900e-01, -6.07225700e+00],
            [-5.15056700e-01, -6.46013300e-01, -2.19014600e+02,
             -6.43043500e-01, -1.06209700e+01],
            [-8.70423000e-02, -8.97043000e-02, -1.26361400e+01,
             -8.96975000e-02, -1.04875100e+00],
            [1.28362300e-01, 1.23247800e-01, 1.70383300e+01,
             1.23231000e-01, 1.47887800e+00],
            [-2.39271900e-01, -2.61562100e-01, -9.30283300e+01,
             -2.61384400e-01, -4.71795100e+00],
            [7.37246500e-01, 6.08186000e-01, 6.25359600e+01,
             6.06409700e-01, 6.79002300e+00],
            [-3.64110000e-02, -3.68626000e-02, -1.41565300e+01,
             -3.68621000e-02, -7.17951200e-01],
            [2.68833000e-01, 2.47933100e-01, 6.67934100e+01,
             2.47801000e-01, 4.23748400e+00],
            [5.96389600e-01, 5.07237700e-01, 1.13265500e+02,
             5.06180100e-01, 8.21890300e+00],
            [1.98218000e-02, 1.96923000e-02, 1.00820900e+01,
             1.96923000e-02, 4.47040700e-01],
            [7.74936000e-01, 6.34305300e-01, 2.51883900e+02,
             6.32303700e-01, 1.39711800e+01],
            [-7.63925100e-01, -1.16591700e+00, -4.93461700e+02,
             -1.14588000e+00, -1.94156600e+01],
            [-6.23771700e-01, -8.41174800e-01, -4.40679600e+02,
             -8.34266300e-01, -1.65796100e+01],
            [-1.63272900e-01, -1.73115100e-01, -6.73975900e+01,
             -1.73064800e-01, -3.31725800e+00],
            [-4.28562500e-01, -5.11932900e-01, -4.73787800e+02,
             -5.10507400e-01, -1.42494800e+01],
            [8.00693000e-02, 7.80269000e-02, 3.95353400e+01,
             7.80226000e-02, 1.77920500e+00],
            [-2.13674400e-01, -2.31127400e-01, -2.15987000e+02,
             -2.31005700e-01, -6.79344600e+00],
            [-1.63544000e-02, -1.64444000e-02, -1.05642100e+01,
             -1.64444000e-02, -4.15657600e-01],
            [2.04900500e-01, 1.92372100e-01, 1.10651300e+02,
             1.92309400e-01, 4.76156600e+00],
            [-1.94758900e-01, -2.09067700e-01, -2.35484100e+02,
             -2.08978200e-01, -6.77219400e+00],
            [3.16727400e-01, 2.88367800e-01, 1.87065600e+02,
             2.88162100e-01, 7.69732400e+00],
            [6.24234900e-01, 5.27632500e-01, 2.57678500e+02,
             5.26448400e-01, 1.26827400e+01],
            [8.30241100e-01, 6.72002100e-01, 2.86513700e+02,
             6.69644800e-01, 1.54232100e+01],
            [6.55140000e-03, 6.53710000e-03, 7.92130700e+00,
             6.53710000e-03, 2.27805800e-01],
            [3.41595200e-01, 3.08985000e-01, 2.88667600e+02,
             3.08733300e-01, 9.93012900e+00]])
        self.null_deviance = 27.92207137420696  # From R (bug in rpy)
        # Coefficient estimates and standard errors.
        self.params = np.array([-0.04477778, 0.57437126, 1.05210726,
            4.64604002])
        self.bse = np.array([0.0147328, 0.19694727, 0.19772507,
            0.83534671])
        self.aic_R = 331.89022395372069
        self.aic_Stata = 7.403608467857651
        self.deviance = 16.174635536991005
        self.scale = 0.31805268736385695
        # self.llf = -160.94511197686035 # From R
        self.llf = -173.6866032285836  # from Stata
        self.bic_Stata = -154.1582089453923  # from Stata
        self.df_model = 3
        self.df_resid = 44
        self.chi2 = 36.77821448266359  # from Stata, not in sm
        # Fitted means, one per observation.
        self.fittedvalues = np.array([6.78419193, 5.67167253, 7.41979002,
            10.15123371,
            8.48656317, 5.18582263, 6.20304079, 7.75958258,
            8.48656317, 7.75958258, 10.15123371, 11.61071755,
            11.10228357, 8.87520908, 11.61071755, 6.48711178,
            10.61611394, 11.61071755, 8.11493609, 10.15123371,
            9.21009116, 10.07296716, 13.78112366, 15.07225103,
            20.62079147, 12.04881666, 11.5211983 , 19.71780584,
            9.21009116, 19.71780584, 15.76249142, 13.78112366,
            22.55271436, 18.02872842, 25.41575239, 26.579678 ,
            20.31745227, 33.24937131, 22.22095589, 31.79337946,
            25.41575239, 23.23857437, 34.77204095, 24.30279515,
            20.31745227, 18.57700761, 34.77204095, 29.06987768])
class CancerIdentity(Cancer):
    """
    CancerIdentity is used with TestGlmGammaIdentity.

    Expected results for a Gamma GLM with an identity link fit to the
    Stata cancer data loaded by the ``Cancer`` base class.
    """
    def __init__(self):
        super(CancerIdentity, self).__init__()
        # 48 observations x 5 residual columns.
        # NOTE(review): presumably one column per residual type (response,
        # deviance, working, pearson, anscombe order?) -- confirm against the
        # GLM results consumer before relying on the ordering.
        self.resids = np.array([[-8.52598100e-01, -1.45739100e+00,
            -3.92408100e+01,
            -1.41526900e+00, -5.78417200e+00],
            [-8.23683800e-01, -1.35040200e+00, -2.64957500e+01,
             -1.31777000e+00, -4.67162900e+00],
            [-7.30450400e-01, -1.07754600e+00, -4.02136400e+01,
             -1.06208800e+00, -5.41978500e+00],
            [-7.04471600e-01, -1.01441500e+00, -7.25951500e+01,
             -1.00172900e+00, -7.15130900e+00],
            [-5.28668000e-01, -6.68617300e-01, -3.80758100e+01,
             -6.65304600e-01, -4.48658700e+00],
            [-2.28658500e-01, -2.48859700e-01, -6.14913600e+00,
             -2.48707200e-01, -1.18577100e+00],
            [-1.93939400e-01, -2.08119900e-01, -7.46226500e+00,
             -2.08031700e-01, -1.20300800e+00],
            [-3.55635700e-01, -4.09525000e-01, -2.14132500e+01,
             -4.08815100e-01, -2.75958600e+00],
            [-5.73360000e-02, -5.84700000e-02, -4.12946200e+00,
             -5.84681000e-02, -4.86586900e-01],
            [3.09828000e-02, 3.06685000e-02, 1.86551100e+00,
             3.06682000e-02, 2.40413800e-01],
            [-2.11924300e-01, -2.29071300e-01, -2.18386100e+01,
             -2.28953000e-01, -2.15130900e+00],
            [-3.10989000e-01, -3.50739300e-01, -4.19249500e+01,
             -3.50300400e-01, -3.61084500e+00],
            [-9.22250000e-03, -9.25100000e-03, -1.13679700e+00,
             -9.25100000e-03, -1.02392100e-01],
            [2.39402500e-01, 2.22589700e-01, 1.88577300e+01,
             2.22493500e-01, 2.12475600e+00],
            [3.35166000e-02, 3.31493000e-02, 4.51842400e+00,
             3.31489000e-02, 3.89155400e-01],
            [8.49829400e-01, 6.85180200e-01, 3.57627500e+01,
             6.82689900e-01, 5.51291500e+00],
            [4.12934200e-01, 3.66785200e-01, 4.65392600e+01,
             3.66370400e-01, 4.38379500e+00],
            [4.64148400e-01, 4.07123200e-01, 6.25726500e+01,
             4.06561900e-01, 5.38915500e+00],
            [1.71104600e+00, 1.19474800e+00, 1.12676500e+02,
             1.18311900e+00, 1.38850500e+01],
            [1.26571800e+00, 9.46389000e-01, 1.30431000e+02,
             9.40244600e-01, 1.28486900e+01],
            [-3.48532600e-01, -3.99988300e-01, -2.95638100e+01,
             -3.99328600e-01, -3.20997700e+00],
            [-4.04340300e-01, -4.76960100e-01, -4.10254300e+01,
             -4.75818000e-01, -4.07286500e+00],
            [-4.92057900e-01, -6.08818300e-01, -9.34509600e+01,
             -6.06357200e-01, -6.78109700e+00],
            [-4.02876400e-01, -4.74878400e-01, -9.15226200e+01,
             -4.73751900e-01, -6.07225700e+00],
            [-5.15056700e-01, -6.46013300e-01, -2.19014600e+02,
             -6.43043500e-01, -1.06209700e+01],
            [-8.70423000e-02, -8.97043000e-02, -1.26361400e+01,
             -8.96975000e-02, -1.04875100e+00],
            [1.28362300e-01, 1.23247800e-01, 1.70383300e+01,
             1.23231000e-01, 1.47887800e+00],
            [-2.39271900e-01, -2.61562100e-01, -9.30283300e+01,
             -2.61384400e-01, -4.71795100e+00],
            [7.37246500e-01, 6.08186000e-01, 6.25359600e+01,
             6.06409700e-01, 6.79002300e+00],
            [-3.64110000e-02, -3.68626000e-02, -1.41565300e+01,
             -3.68621000e-02, -7.17951200e-01],
            [2.68833000e-01, 2.47933100e-01, 6.67934100e+01,
             2.47801000e-01, 4.23748400e+00],
            [5.96389600e-01, 5.07237700e-01, 1.13265500e+02,
             5.06180100e-01, 8.21890300e+00],
            [1.98218000e-02, 1.96923000e-02, 1.00820900e+01,
             1.96923000e-02, 4.47040700e-01],
            [7.74936000e-01, 6.34305300e-01, 2.51883900e+02,
             6.32303700e-01, 1.39711800e+01],
            [-7.63925100e-01, -1.16591700e+00, -4.93461700e+02,
             -1.14588000e+00, -1.94156600e+01],
            [-6.23771700e-01, -8.41174800e-01, -4.40679600e+02,
             -8.34266300e-01, -1.65796100e+01],
            [-1.63272900e-01, -1.73115100e-01, -6.73975900e+01,
             -1.73064800e-01, -3.31725800e+00],
            [-4.28562500e-01, -5.11932900e-01, -4.73787800e+02,
             -5.10507400e-01, -1.42494800e+01],
            [8.00693000e-02, 7.80269000e-02, 3.95353400e+01,
             7.80226000e-02, 1.77920500e+00],
            [-2.13674400e-01, -2.31127400e-01, -2.15987000e+02,
             -2.31005700e-01, -6.79344600e+00],
            [-1.63544000e-02, -1.64444000e-02, -1.05642100e+01,
             -1.64444000e-02, -4.15657600e-01],
            [2.04900500e-01, 1.92372100e-01, 1.10651300e+02,
             1.92309400e-01, 4.76156600e+00],
            [-1.94758900e-01, -2.09067700e-01, -2.35484100e+02,
             -2.08978200e-01, -6.77219400e+00],
            [3.16727400e-01, 2.88367800e-01, 1.87065600e+02,
             2.88162100e-01, 7.69732400e+00],
            [6.24234900e-01, 5.27632500e-01, 2.57678500e+02,
             5.26448400e-01, 1.26827400e+01],
            [8.30241100e-01, 6.72002100e-01, 2.86513700e+02,
             6.69644800e-01, 1.54232100e+01],
            [6.55140000e-03, 6.53710000e-03, 7.92130700e+00,
             6.53710000e-03, 2.27805800e-01],
            [3.41595200e-01, 3.08985000e-01, 2.88667600e+02,
             3.08733300e-01, 9.93012900e+00]])
        # Coefficient estimates and standard errors.
        self.params = np.array([-0.5369833, 6.47296332, 16.20336802,
            38.96617431])
        self.bse = np.array([0.13341238, 2.1349966, 3.87411875, 8.19235553])
        self.aic_R = 328.39209118952965
        # TODO: the below will fail
        self.aic_Stata = 7.381090276021671
        self.deviance = 15.093762327607557
        self.scale = 0.29512089119443752
        self.null_deviance = 27.92207137420696  # from R, bug in RPy
        # NOTE: our scale is Stata's dispers_p (pearson?)
        # NOTE: if scale is analogous to Stata's dispersion, then this might
        # be where the discrepancies come from?
        # self.llf = -159.19604559476483 # From R
        self.llf = -173.1461666245201  # From Stata
        self.bic_Stata = -155.2390821535193
        self.df_model = 3
        self.df_resid = 44
        self.chi2 = 51.56632068622578
        # Fitted means, one per observation.
        self.fittedvalues = np.array([6.21019277, 4.06225956,
            7.28415938, 11.04304251,
            8.89510929, 2.98829295, 5.13622616, 7.82114268,
            8.89510929, 7.82114268, 11.04304251, 12.65399242,
            12.11700911, 9.43209259, 12.65399242, 5.67320947,
            11.58002581, 12.65399242, 8.35812599, 11.04304251,
            9.46125627, 10.53522287, 14.294106 , 15.36807261,
            19.12695574, 12.68315609, 12.14617279, 18.58997243,
            9.46125627, 18.58997243, 15.90505591, 14.294106 ,
            20.20092234, 17.51600582, 25.63546061, 26.17244391,
            22.95054409, 28.85736043, 24.0245107 , 28.32037713,
            25.63546061, 24.561494 , 29.39434374, 25.09847731,
            22.95054409, 21.87657748, 29.39434374, 27.24641052])
class Cpunish(object):
    '''
    The following are from the R script in models.datasets.cpunish
    Slightly different than published results, but should be correct
    Probably due to rounding in cleaning?

    Expected results for a Poisson GLM on the capital punishment data;
    reference numbers come from R and Stata.
    '''
    def __init__(self):
        # Coefficient estimates and their standard errors.
        self.params = (2.611017e-04, 7.781801e-02, -9.493111e-02, 2.969349e-01,
            2.301183e+00, -1.872207e+01, -6.801480e+00)
        self.bse = (5.187132e-05, 7.940193e-02, 2.291926e-02, 4.375164e-01,
            4.283826e-01, 4.283961e+00, 4.146850e+00)
        # Deviance and degrees-of-freedom summary.
        self.null_deviance = 136.57281747225
        self.df_null = 16
        self.deviance = 18.591641759528944
        self.df_resid = 10
        self.df_model = 6
        # Information criteria.
        self.aic_R = 77.8546573896503  # same as Stata
        self.aic_Stata = 4.579685683305706
        self.bic_Stata = -9.740492454486446
        self.chi2 = 128.8021169250578  # from Stata, not in sm
        self.llf = -31.92732869482515
        self.scale = 1
        self.pearson_chi2 = 24.75374835
        # Residuals are stored in a companion module (project-local).
        self.resids = glm_test_resids.cpunish_resids
        # Fitted means, one per observation (17 states).
        self.fittedvalues = np.array([35.2263655, 8.1965744, 1.3118966,
            3.6862982, 2.0823003, 1.0650316, 1.9260424, 2.4171405,
            1.8473219, 2.8643241, 3.1211989, 3.3382067, 2.5269969,
            0.8972542, 0.9793332, 0.5346209, 1.9790936])
class Cpunish_offset(Cpunish):
    '''
    Same model as Cpunish but with offset of 100. Many things do not change.

    Only the coefficient estimates and standard errors are overridden;
    the fit-statistic attributes are inherited unchanged from ``Cpunish``.
    '''
    def __init__(self):
        super(Cpunish_offset, self).__init__()
        # Overridden coefficient estimates and standard errors for the
        # offset model (note the reordered constant term).
        self.params = (-1.140665e+01, 2.611017e-04, 7.781801e-02,
                       -9.493111e-02, 2.969349e-01, 2.301183e+00,
                       -1.872207e+01)
        self.bse = (4.147e+00, 5.187e-05, 7.940e-02, 2.292e-02,
                    4.375e-01, 4.284e-01, 4.284e+00)
class InvGauss(object):
'''
Usef
Data was generated by Hardin and Hilbe using Stata.
Note only the first 5000 observations are used because
the models code currently uses np.eye.
'''
# np.random.seed(54321)
# x1 = np.abs(stats.norm.ppf((np.random.random(5000))))
# x2 = np.abs(stats.norm.ppf((np.random.random(5000))))
# X = np.column_stack((x1,x2))
# X = add_constant(X)
# params = np.array([.5, -.25, 1])
# eta = np.dot(X, params)
# mu = 1/np.sqrt(eta)
# sigma = .5
# This isn't correct. Errors need to be normally distributed
# But Y needs to be Inverse Gaussian, so we could build it up
# by throwing out data?
# Refs: Lai (2009) Generating inverse Gaussian random variates by
# approximation
# Atkinson (1982) The simulation of generalized inverse gaussian and
# hyperbolic random variables seems to be the canonical ref
# Y = np.dot(X,params) + np.random.wald(mu, sigma, 1000)
# model = GLM(Y, X, family=models.family.InverseGaussian(link=\
# models.family.links.identity))
def __init__(self):
# set up data #
filename = os.path.join(os.path.dirname(os.path.abspath(__file__)),
"inv_gaussian.csv")
data=np.genfromtxt(open(filename, 'rb'), delimiter=",", dtype=float)[1:]
self.endog = data[:5000,0]
self.exog = data[:5000,1:]
self.exog = add_constant(self.exog, prepend=False)
#class InvGaussDefault(InvGauss)
# def __init__(self):
# super(InvGaussDefault, self).__init__()
# Results
#NOTE: loglikelihood difference in R vs. Stata vs. Models
# is the same situation as gamma
self.params = (0.4519770, -0.2508288, 1.0359574)
self.bse = (0.03148291, 0.02237211, 0.03429943)
self.null_deviance = 1520.673165475461
self.df_null = 4999
self.deviance = 1423.943980407997
self.df_resid = 4997
self.df_model = 2
self.aic_R = 5059.41911646446
self.aic_Stata = 1.552280060977946
self.bic_Stata = -41136.47039418921
self.llf = -3877.700354 # Stata is same as ours with scale set to 1
# self.llf = -2525.70955823223 # from R, close to ours
self.scale = 0.2867266359127567
self.pearson_chi2 = 1432.771536
self.resids = glm_test_resids.invgauss_resids
self.fittedvalues = np.array([ 1.0404339 , 0.96831526,
0.81265833, 0.9958362 , 1.05433442,
1.09866137, 0.95548191, 1.38082105, 0.98942888, 0.96521958,
1.02684056, 0.91412576, 0.91492102, 0.92639676, 0.96763425,
0.80250852, 0.85281816, 0.90962261, 0.95550299, 0.86386815,
0.94760134, 0.94269533, 0.98960509, 0.84787252, 0.78949111,
0.76873582, 0.98933453, 0.95105574, 0.8489395 , 0.88962971,
0.84856357, 0.88567313, 0.84505405, 0.84626147, 0.77250421,
0.90175601, 1.15436378, 0.98375558, 0.83539542, 0.82845381,
0.90703971, 0.85546165, 0.96707286, 0.84127197, 0.82096543,
1.1311227 , 0.87617029, 0.91194419, 1.05125511, 0.95330314,
0.75556148, 0.82573228, 0.80424982, 0.83800144, 0.8203644 ,
0.84423807, 0.98348433, 0.93165089, 0.83968706, 0.79256287,
1.0302839 , 0.90982028, 0.99471562, 0.70931825, 0.85471721,
1.02668021, 1.11308301, 0.80497105, 1.02708486, 1.07671424,
0.821108 , 0.86373486, 0.99104964, 1.06840593, 0.94947784,
0.80982122, 0.95778065, 1.0254212 , 1.03480946, 0.83942363,
1.17194944, 0.91772559, 0.92368795, 1.10410916, 1.12558875,
1.11290791, 0.87816503, 1.04299294, 0.89631173, 1.02093004,
0.86331723, 1.13134858, 1.01807861, 0.98441692, 0.72567667,
1.42760495, 0.78987436, 0.72734482, 0.81750166, 0.86451854,
0.90564264, 0.81022323, 0.98720325, 0.98263709, 0.99364823,
0.7264445 , 0.81632452, 0.7627845 , 1.10726938, 0.79195664,
0.86836774, 1.01558149, 0.82673675, 0.99529548, 0.97155636,
0.980696 , 0.85460503, 1.00460782, 0.77395244, 0.81229831,
0.94078297, 1.05910564, 0.95921954, 0.97841172, 0.93093166,
0.93009865, 0.89888111, 1.18714408, 0.98964763, 1.03388898,
1.67554215, 0.82998876, 1.34100687, 0.86766346, 0.96392316,
0.91371033, 0.76589296, 0.92329051, 0.82560326, 0.96758148,
0.8412995 , 1.02550678, 0.74911108, 0.8751611 , 1.01389312,
0.87865556, 1.24095868, 0.90678261, 0.85973204, 1.05617845,
0.94163038, 0.88087351, 0.95699844, 0.86083491, 0.89669384,
0.78646825, 1.0014202 , 0.82399199, 1.05313139, 1.06458324,
0.88501766, 1.19043294, 0.8458026 , 1.00231535, 0.72464305,
0.94790753, 0.7829744 , 1.1953009 , 0.85574035, 0.95433052,
0.96341484, 0.91362908, 0.94097713, 0.87273804, 0.81126399,
0.72715262, 0.85526116, 0.76015834, 0.8403826 , 0.9831501 ,
1.17104665, 0.78862494, 1.01054909, 0.91511601, 1.0990797 ,
0.91352124, 1.13671162, 0.98793866, 1.0300545 , 1.04490115,
0.85778231, 0.94824343, 1.14510618, 0.81305136, 0.88085051,
0.94743792, 0.94875465, 0.96206997, 0.94493612, 0.93547218,
1.09212018, 0.86934651, 0.90532353, 1.07066001, 1.26197714,
0.93858662, 0.9685039 , 0.7946546 , 1.03052031, 0.75395899,
0.87527062, 0.82156476, 0.949774 , 1.01000235, 0.82613526,
1.0224591 , 0.91529149, 0.91608832, 1.09418385, 0.8228272 ,
1.06337472, 1.05533176, 0.93513063, 1.00055806, 0.95474743,
0.91329368, 0.88711836, 0.95584926, 0.9825458 , 0.74954073,
0.96964967, 0.88779583, 0.95321846, 0.95390055, 0.95369029,
0.94326714, 1.31881201, 0.71512263, 0.84526602, 0.92323824,
1.01993108, 0.85155992, 0.81416851, 0.98749128, 1.00034192,
0.98763473, 1.05974138, 1.05912658, 0.89772172, 0.97905626,
1.1534306 , 0.92304181, 1.16450278, 0.7142307 , 0.99846981,
0.79861247, 0.73939835, 0.93776385, 1.0072242 , 0.89159707,
1.05514263, 1.05254569, 0.81005146, 0.95179784, 1.00278795,
1.04910398, 0.88427798, 0.74394266, 0.92941178, 0.83622845,
0.84064958, 0.93426956, 1.03619314, 1.22439347, 0.73510451,
0.82997071, 0.90828036, 0.80866989, 1.34078212, 0.85079169,
0.88346039, 0.76871666, 0.96763454, 0.66936914, 0.94175741,
0.97127617, 1.00844382, 0.83449557, 0.88095564, 1.17711652,
1.0547188 , 1.04525593, 0.93817487, 0.77978294, 1.36143199,
1.16127997, 1.03792952, 1.03151637, 0.83837387, 0.94326066,
1.0054787 , 0.99656841, 1.05575689, 0.97641643, 0.85108163,
0.82631589, 0.77407305, 0.90566132, 0.91308164, 0.95560906,
1.04523011, 1.03773723, 0.97378685, 0.83999133, 1.06926871,
1.01073982, 0.9804959 , 1.06473061, 1.25315673, 0.969175 ,
0.63443508, 0.84574684, 1.06031239, 0.93834605, 1.01784925,
0.93488249, 0.80240225, 0.88757274, 0.9224097 , 0.99158962,
0.87412592, 0.76418199, 0.78044069, 1.03117412, 0.82042521,
1.10272129, 1.09673757, 0.89626935, 1.01678612, 0.84911824,
0.95821431, 0.99169558, 0.86853864, 0.92172772, 0.94046199,
0.89750517, 1.09599258, 0.92387291, 1.07770118, 0.98831383,
0.86352396, 0.83079533, 0.94431185, 1.12424626, 1.02553104,
0.8357513 , 0.97019669, 0.76816092, 1.34011343, 0.86489527,
0.82156358, 1.25529129, 0.86820218, 0.96970237, 0.85850546,
0.97429559, 0.84826078, 1.02498396, 0.72478517, 0.993497 ,
0.76918521, 0.91079198, 0.80988325, 0.75431095, 1.02918073,
0.88884197, 0.82625507, 0.78564563, 0.91505355, 0.88896863,
0.85882361, 0.81538316, 0.67656235, 0.8564822 , 0.82473022,
0.92928331, 0.98068415, 0.82605685, 1.0150412 , 1.00631678,
0.92405101, 0.88909552, 0.94873568, 0.87657342, 0.8280683 ,
0.77596382, 0.96598811, 0.78922426, 0.87637606, 0.98698735,
0.92207026, 0.71487846, 1.03845478, 0.70749745, 1.08603388,
0.92697779, 0.86470448, 0.70119494, 1.00596847, 0.91426549,
1.05318838, 0.79621712, 0.96169742, 0.88053405, 0.98963934,
0.94152997, 0.88413591, 0.75035344, 0.86007123, 0.83713514,
0.91234911, 0.79562744, 0.84099675, 1.0334279 , 1.00272243,
0.95359383, 0.84292969, 0.94234155, 0.90190899, 0.97302022,
1.1009829 , 1.0148975 , 0.99082987, 0.75916515, 0.9204784 ,
0.94477378, 1.01108683, 1.00038149, 0.9259798 , 1.19400436,
0.80191877, 0.79565851, 0.81865924, 0.79003506, 0.8995508 ,
0.73137983, 0.88336018, 0.7855268 , 1.04478073, 0.90857981,
1.16076951, 0.76096486, 0.90004113, 0.83819665, 0.95295365,
1.09911441, 0.78498197, 0.95094991, 0.94333419, 0.95131688,
0.82961049, 1.08001761, 1.06426458, 0.94291798, 1.04381938,
0.90380364, 0.74060138, 0.98701862, 0.72250236, 0.86125293,
0.76488061, 0.9858051 , 0.98099677, 0.96849209, 0.90053351,
0.88469597, 0.80688516, 1.06396217, 1.02446023, 0.911863 ,
0.98837746, 0.91102987, 0.92810392, 1.13526335, 1.00419541,
1.00866175, 0.74352261, 0.91051641, 0.81868428, 0.93538014,
0.87822651, 0.93278572, 1.0356074 , 1.25158731, 0.98372647,
0.81335741, 1.06441863, 0.80305786, 0.95201148, 0.90283451,
1.17319519, 0.8984894 , 0.88911288, 0.91474736, 0.94512294,
0.92956283, 0.86682085, 1.08937227, 0.94825713, 0.9787145 ,
1.16747163, 0.80863682, 0.98314119, 0.91052823, 0.80913225,
0.78503169, 0.78751737, 1.08932193, 0.86859845, 0.96847458,
0.93468839, 1.10769915, 1.1769249 , 0.84916138, 1.00556408,
0.84508585, 0.92617942, 0.93985886, 1.17303268, 0.81172495,
0.93482682, 1.04082486, 1.03209348, 0.97220394, 0.90274672,
0.93686291, 0.91116431, 1.14814563, 0.83279158, 0.95853283,
1.0261179 , 0.95779432, 0.86995883, 0.78164915, 0.89946906,
0.9194465 , 0.97919367, 0.92719039, 0.89063569, 0.80847805,
0.81192101, 0.75044535, 0.86819023, 1.03420014, 0.8899434 ,
0.94899544, 0.9860773 , 1.10047297, 1.00243849, 0.82153972,
1.14289945, 0.8604684 , 0.87187524, 1.00415032, 0.78460709,
0.86319884, 0.92818335, 1.08892111, 1.06841003, 1.00735918,
1.20775251, 0.72613554, 1.25768191, 1.08573511, 0.89671127,
0.91259535, 1.01414208, 0.87422903, 0.82720677, 0.9568079 ,
1.00450416, 0.91043845, 0.84095709, 1.08010574, 0.69848293,
0.90769214, 0.94713501, 1.14808251, 1.0605676 , 1.21734482,
0.78578521, 1.01516235, 0.94330326, 0.98363817, 0.99650084,
0.74280796, 0.96227123, 0.95741454, 1.00980406, 0.93468092,
1.10098591, 1.18175828, 0.8553791 , 0.81713219, 0.82912143,
0.87599518, 1.15006511, 1.03151163, 0.8751847 , 1.15701331,
0.73394166, 0.91426368, 0.96953458, 1.13901709, 0.83028721,
1.15742641, 0.9395442 , 0.98118552, 0.89585426, 0.74147117,
0.8902096 , 1.00212097, 0.97665858, 0.92624514, 0.98006601,
0.9507215 , 1.00889825, 1.2406772 , 0.88768719, 0.76587533,
1.0081044 , 0.89608494, 1.00083526, 0.85594415, 0.76425576,
1.0286636 , 1.13570272, 0.82020405, 0.81961271, 1.04586579,
1.26560245, 0.89721521, 1.19324037, 0.948205 , 0.79414261,
0.85157002, 0.95155101, 0.91969239, 0.87699126, 1.03452982,
0.97093572, 1.14355781, 0.85088592, 0.79032079, 0.84521733,
0.99547581, 0.87593455, 0.8776799 , 1.05531013, 0.94557017,
0.91538439, 0.79679863, 1.03398557, 0.88379021, 0.98850319,
1.05833423, 0.90055078, 0.92267584, 0.76273738, 0.98222632,
0.86392524, 0.78242646, 1.19417739, 0.89159895, 0.97565002,
0.85818308, 0.85334266, 1.85008011, 0.87199282, 0.77873231,
0.78036174, 0.96023918, 0.91574121, 0.89217979, 1.16421151,
1.29817786, 1.18683283, 0.96096225, 0.89964569, 1.00401442,
0.80758845, 0.89458758, 0.7994919 , 0.85889356, 0.73147252,
0.7777221 , 0.9148438 , 0.72388117, 0.91134001, 1.0892724 ,
1.01736424, 0.86503014, 0.77344917, 1.04515616, 1.06677211,
0.93421936, 0.8821777 , 0.91860774, 0.96381507, 0.70913689,
0.82354748, 1.12416046, 0.85989778, 0.90588737, 1.22832895,
0.65955579, 0.93828405, 0.88946418, 0.92152859, 0.83168025,
0.93346887, 0.96456078, 0.9039245 , 1.03598695, 0.78405559,
1.21739525, 0.79019383, 0.84034646, 1.00273203, 0.96356393,
0.948103 , 0.90279217, 1.0187839 , 0.91630508, 1.15965854,
0.84203423, 0.98803156, 0.91604459, 0.90986512, 0.93384826,
0.76687038, 0.96251902, 0.80648134, 0.77336547, 0.85720164,
0.9351947 , 0.88004728, 0.91083961, 1.06225829, 0.90230812,
0.72383932, 0.8343425 , 0.8850996 , 1.19037918, 0.93595522,
0.85061223, 0.84330949, 0.82397482, 0.92075047, 0.86129584,
0.99296756, 0.84912251, 0.8569699 , 0.75252201, 0.80591772,
1.03902954, 1.04379139, 0.87360195, 0.97452318, 0.93240609,
0.85406409, 1.11717394, 0.95758536, 0.82772817, 0.67947416,
0.85957788, 0.93731268, 0.90349227, 0.79464185, 0.99148637,
0.8461071 , 0.95399991, 1.04320664, 0.87290871, 0.96780849,
0.99467159, 0.96421545, 0.80174643, 0.86475812, 0.74421362,
0.85230296, 0.89891758, 0.77589592, 0.98331957, 0.87387233,
0.92023388, 1.03037742, 0.83796515, 1.0296667 , 0.85891747,
1.02239978, 0.90958406, 1.09731875, 0.8032638 , 0.84482057,
0.8233118 , 0.86184709, 0.93105929, 0.99443502, 0.77442109,
0.98367982, 0.95786272, 0.81183444, 1.0526009 , 0.86993018,
0.985886 , 0.92016756, 1.00847155, 1.2309469 , 0.97732206,
0.83074957, 0.87406987, 0.95268492, 0.94189139, 0.87056443,
1.0135018 , 0.93051004, 1.5170931 , 0.80948763, 0.83737473,
1.05461331, 0.97501633, 1.01449333, 0.79760056, 1.05756482,
0.97300884, 0.92674035, 0.8933763 , 0.91624084, 1.13127607,
0.88115305, 0.9351562 , 0.91430431, 1.11668229, 1.10000526,
0.88171963, 0.74914744, 0.94610698, 1.13841497, 0.90551414,
0.89773592, 1.01696097, 0.85096063, 0.80935471, 0.68458106,
1.2718979 , 0.93550219, 0.96071403, 0.75434294, 0.95112257,
1.16233368, 0.73664915, 1.02195777, 1.07487625, 0.8937445 ,
0.78006023, 0.89588994, 1.16354892, 1.02629448, 0.89208642,
1.02088244, 0.85385355, 0.88586061, 0.94571704, 0.89710576,
0.95191525, 0.99819848, 0.97117841, 1.13899808, 0.88414949,
0.90938883, 1.02937917, 0.92936684, 0.87323594, 0.8384819 ,
0.87766945, 1.05869911, 0.91028734, 0.969953 , 1.11036647,
0.94996802, 1.01305483, 1.03697568, 0.9750155 , 1.04537837,
0.9314676 , 0.86589798, 1.17446667, 1.02564533, 0.82088708,
0.96481845, 0.86148642, 0.79174298, 1.18029919, 0.82132544,
0.92193776, 1.03669516, 0.96637464, 0.83725933, 0.88776321,
1.08395861, 0.91255709, 0.96884738, 0.89840008, 0.91168146,
0.99652569, 0.95693101, 0.83144932, 0.99886503, 1.02819927,
0.95273533, 0.95959945, 1.08515986, 0.70269432, 0.79529303,
0.93355669, 0.92597539, 1.0745695 , 0.87949758, 0.86133964,
0.95653873, 1.09161425, 0.91402143, 1.13895454, 0.89384443,
1.16281703, 0.8427015 , 0.7657266 , 0.92724079, 0.95383649,
0.86820891, 0.78942366, 1.11752711, 0.97902686, 0.87425286,
0.83944794, 1.12576718, 0.9196059 , 0.89844835, 1.10874172,
1.00396783, 0.9072041 , 1.63580253, 0.98327489, 0.68564426,
1.01007087, 0.92746473, 1.01328833, 0.99584546, 0.86381679,
1.0082541 , 0.85414132, 0.87620981, 1.22461203, 1.03935516,
0.86457326, 0.95165828, 0.84762138, 0.83080254, 0.84715241,
0.80323344, 1.09282941, 1.00902453, 1.02834261, 1.09810743,
0.86560231, 1.31568763, 1.03754782, 0.81298745, 1.14500629,
0.87364384, 0.89928367, 0.96118471, 0.83321743, 0.90590461,
0.98739499, 0.79408399, 1.18513754, 1.05619307, 0.99920088,
1.04347259, 1.07689022, 1.24916765, 0.74246274, 0.90949597,
0.87077335, 0.81233276, 1.05403934, 0.98333063, 0.77689527,
0.93181907, 0.98853585, 0.80700332, 0.89570662, 0.97102475,
0.69178123, 0.72950409, 0.89661719, 0.84821737, 0.8724469 ,
0.96453177, 0.9690018 , 0.87132764, 0.91711564, 1.79521288,
0.75894855, 0.90733112, 0.86565687, 0.90433268, 0.83412618,
1.26779628, 1.06999114, 0.73181364, 0.90334838, 0.86634581,
0.76999285, 1.55403008, 0.74712547, 0.84702579, 0.72396203,
0.82292773, 0.73633208, 0.90524618, 0.9954355 , 0.85076517,
0.96097585, 1.21655611, 0.77658146, 0.81026686, 1.07540173,
0.94219623, 0.97472554, 0.72422803, 0.85055855, 0.85905477,
1.17391419, 0.87644114, 1.03573284, 1.16647944, 0.87810532,
0.89134419, 0.83531593, 0.93448128, 1.04967869, 1.00110843,
0.936784 , 1.00143426, 0.79714807, 0.82656251, 0.95057309,
0.93821813, 0.93469098, 0.99825205, 0.95384714, 1.07063008,
0.97603699, 0.816668 , 0.98286184, 0.86061483, 0.88166732,
0.93730982, 0.77633837, 0.87671549, 0.99192439, 0.86452825,
0.95880282, 0.7098419 , 1.12717149, 1.16707939, 0.84854333,
0.87486963, 0.9255293 , 1.06534197, 0.9888494 , 1.09931069,
1.21859221, 0.97489537, 0.82508579, 1.14868922, 0.98076133,
0.85524084, 0.69042079, 0.93012936, 0.96908499, 0.94284892,
0.80114327, 0.919846 , 0.95753354, 1.04536666, 0.77109284,
0.99942571, 0.79004323, 0.91820045, 0.97665489, 0.64689716,
0.89444405, 0.96106598, 0.74196857, 0.92905294, 0.70500318,
0.95074586, 0.98518665, 1.0794044 , 1.00364488, 0.96710486,
0.92429638, 0.94383006, 1.12554253, 0.95199191, 0.87380738,
0.72183594, 0.94453761, 0.98663804, 0.68247366, 1.02761427,
0.93255355, 0.85264705, 1.00341417, 1.07765999, 0.97396039,
0.90770805, 0.82750901, 0.73824542, 1.24491161, 0.83152629,
0.78656996, 0.99062838, 0.98276905, 0.98291014, 1.12795903,
0.98742704, 0.9579893 , 0.80451701, 0.87198344, 1.24746127,
0.95839155, 1.11708725, 0.97113877, 0.7721646 , 0.95781621,
0.67069168, 1.05509376, 0.96071852, 0.99768666, 0.83008521,
0.9156695 , 0.86314088, 1.23081412, 1.14723685, 0.8007289 ,
0.81590842, 1.31857558, 0.7753396 , 1.11091566, 1.03560198,
1.01837739, 0.94882818, 0.82551111, 0.93188019, 0.99532255,
0.93848495, 0.77764975, 0.85192319, 0.79913938, 0.99495229,
0.96122733, 1.13845155, 0.95846389, 0.8891543 , 0.97979531,
0.87167192, 0.88119611, 0.79655111, 0.9298217 , 0.96399321,
1.02005428, 1.06936503, 0.86948022, 1.02560548, 0.9149464 ,
0.83797207, 0.86175383, 0.92455994, 0.89218435, 0.81546463,
0.98488771, 0.92784833, 0.87895608, 0.93366386, 1.17487238,
0.79088952, 0.9237694 , 0.76389869, 0.931953 , 0.76272078,
1.00304977, 0.86612561, 0.87870143, 0.93808276, 1.12489343,
1.00668791, 0.88027101, 0.88845209, 0.88574216, 0.84284514,
0.96594357, 0.94363002, 0.78245367, 0.92941326, 0.99622557,
0.83812683, 0.77901691, 0.9588432 , 0.82057415, 0.95178868,
1.01904651, 0.97598844, 0.99369336, 1.12041918, 1.19432836,
0.91709572, 0.94645855, 0.93656587, 0.68754669, 0.80869784,
0.86704186, 0.83033797, 0.71892193, 0.97549489, 1.12150683,
0.76214802, 1.08564181, 0.84677802, 0.68080207, 1.03577057,
1.07937239, 0.6773357 , 1.0279076 , 0.89945816, 0.97765439,
0.91322633, 0.92490964, 0.92693575, 1.12297137, 0.81825246,
0.87598377, 1.11873032, 0.83472799, 1.21424495, 1.02318444,
1.01563195, 1.05663193, 0.82533918, 0.88766496, 0.95906474,
0.90738779, 0.93509534, 1.06658145, 1.00231797, 1.3131534 ,
0.88839464, 1.081006 , 0.866936 , 0.89030904, 0.91197562,
0.73449761, 0.95767806, 1.03407868, 0.79812826, 1.10555445,
0.85610722, 0.87420881, 1.04251375, 1.14286242, 1.00025972,
0.83742693, 1.11116502, 0.97424809, 0.92059325, 0.93958773,
0.80386755, 0.6881267 , 0.88620708, 1.01715536, 1.12403581,
0.91078992, 0.81101399, 1.17271429, 1.09980447, 0.86063042,
0.80805811, 0.87988444, 0.97398188, 0.91808966, 0.90676805,
0.80042891, 0.84060789, 0.9710147 , 1.00012669, 1.04805667,
0.66912164, 0.96111694, 0.86948596, 0.9056999 , 1.01489333,
1.27876763, 0.873881 , 0.98276702, 0.95553234, 0.82877996,
0.79697623, 0.77015376, 0.8234212 , 1.13394959, 0.96244655,
1.06516156, 0.82743856, 1.02931842, 0.78093489, 1.01322256,
1.00348929, 0.9408142 , 1.06495299, 0.8599522 , 0.81640723,
0.81505589, 1.02506487, 0.91148383, 1.11134309, 0.83992234,
0.82982074, 0.9721429 , 0.98897262, 1.01815004, 0.87838456,
0.80573592, 1.103707 , 0.97326218, 1.08921236, 1.2638062 ,
0.83142563, 1.16028769, 0.86701564, 1.15610014, 0.98303722,
0.87138463, 0.75281511, 1.07715535, 0.91526065, 1.08769832,
0.83598308, 1.03580956, 0.9390066 , 0.78544378, 1.03635836,
0.7974467 , 0.99273331, 0.89639711, 0.9250066 , 1.14323824,
0.9783478 , 1.15460639, 0.94265587, 1.09317654, 0.78585439,
0.99523323, 0.95104776, 0.85582572, 0.96100168, 0.9131529 ,
0.86496966, 0.72414589, 1.05142704, 0.85570039, 0.98217968,
0.99031168, 1.01867086, 0.96781667, 0.98581487, 1.00415938,
1.0339337 , 1.13987579, 1.14205543, 0.83393745, 0.96348647,
0.91895164, 0.77055293, 1.0053723 , 0.93168993, 1.00332386,
1.04195993, 1.11933891, 0.87439883, 0.87156457, 0.96050419,
0.72718399, 1.13546762, 0.89614816, 0.85081037, 0.8831463 ,
0.76370482, 0.99582951, 1.01844155, 1.08611311, 1.15832217,
1.17551069, 0.97057262, 0.95163548, 0.98310701, 0.65874788,
0.9655409 , 0.85675853, 1.34637286, 0.93779619, 1.0005791 ,
0.88104966, 1.14530829, 0.93687034, 1.01472112, 1.62464726,
0.84652357, 0.84639676, 0.87513324, 0.94837881, 0.85425129,
0.89820401, 0.94906277, 0.97796792, 0.98969445, 0.8036801 ,
1.03936478, 0.95898918, 0.82919938, 1.29609354, 0.97833841,
0.86862799, 0.88040491, 0.8741178 , 0.80617278, 0.95983882,
0.9752235 , 0.84292828, 0.9327284 , 0.93297136, 1.06255543,
0.88756716, 1.13601403, 0.72311518, 0.95250034, 0.95369843,
1.02562728, 0.74354691, 0.78463923, 0.88720818, 1.07763289,
0.94502062, 0.81170329, 0.96516347, 0.76884811, 0.84169312,
0.83752837, 1.1487847 , 1.04311868, 0.78128663, 0.74604211,
0.96488513, 1.1722513 , 0.91661948, 1.06642815, 0.92185781,
0.93289001, 0.65208625, 0.75734648, 0.99580571, 1.21871511,
0.96316283, 1.06093093, 0.7914337 , 0.90494572, 0.79235327,
0.90771769, 0.91355145, 0.98754767, 0.88938619, 0.89503537,
0.82764566, 0.77267065, 0.81520031, 0.90423926, 0.94289609,
0.88678376, 1.03209085, 0.81319963, 0.91600997, 0.81608666,
0.72429125, 0.95585073, 1.14039309, 1.00326452, 0.99629944,
0.95647901, 0.8927127 , 0.96558599, 0.86305195, 1.0366906 ,
0.90494731, 0.95148458, 1.11229696, 1.17059748, 0.74867876,
0.99621909, 0.94246499, 0.82403515, 0.92144961, 0.93209989,
0.9705427 , 0.97915309, 0.92431525, 0.7589944 , 0.75208652,
0.89375154, 0.78820016, 1.24061454, 1.08031776, 0.88364539,
0.86909794, 0.98635253, 0.97620372, 1.24278282, 1.01146474,
0.93726261, 0.94411536, 1.08344492, 0.75389972, 1.09979822,
0.84271329, 1.16616317, 0.88177625, 0.8451345 , 0.91355741,
0.99833789, 0.86172172, 0.87076203, 0.83743078, 0.99771528,
1.0469295 , 0.87952668, 1.04362453, 0.96350831, 0.95744466,
0.84284283, 0.8773066 , 0.85984544, 1.00589365, 0.88069101,
1.02331332, 1.06616241, 0.78475212, 1.02296979, 0.81480926,
1.09008244, 0.71435844, 0.79655626, 1.09824162, 0.87785428,
1.18020492, 0.99852432, 0.79028362, 0.80081103, 1.10940685,
1.08752313, 0.90673214, 0.84978348, 0.69466992, 0.77497046,
0.83074014, 0.87865947, 0.78890395, 0.7925195 , 0.99749611,
0.91430636, 0.87863864, 0.95392862, 0.91430684, 0.97358575,
0.87999755, 0.88234274, 0.71682337, 1.09723693, 0.71907671,
0.97487202, 0.71792963, 0.88374828, 0.73386811, 0.9315647 ,
1.05020628, 0.99128682, 0.71831173, 1.07119604, 1.02028122,
1.04696848, 0.93335813, 1.04275931, 0.72181913, 0.8837163 ,
0.90283411, 0.96642474, 0.89851984, 0.8397063 , 0.91185676,
1.00573193, 0.88430729, 0.7738957 , 1.07361285, 0.92617819,
0.64251751, 1.05229257, 0.73378537, 1.08270418, 0.99490809,
1.13634433, 1.11979997, 1.03383516, 1.00661234, 1.05778729,
1.05977357, 1.13779694, 0.91237075, 1.04866775, 0.9163203 ,
0.93152436, 0.83607634, 1.13426049, 1.26438419, 0.93515536,
0.92181847, 0.86558905, 1.01985742, 1.44095931, 0.92256398,
0.83369288, 0.93369164, 0.8243758 , 0.98278708, 0.80512458,
1.02092014, 0.73575074, 1.2214659 , 0.85391033, 0.97617313,
0.82054292, 1.04792993, 0.93961791, 1.01145014, 0.89301558,
0.93167504, 0.88221321, 1.23543354, 0.97023998, 1.00197517,
0.85394662, 0.89426495, 0.81344186, 1.08242456, 0.76253284,
1.00642867, 0.76685541, 1.01487961, 0.84028343, 0.87979545,
0.92796937, 0.99796437, 1.28844084, 1.02827514, 1.03663144,
0.83164521, 0.95644234, 0.77797914, 0.96748275, 1.09139879,
0.84329253, 0.9539873 , 0.80094065, 1.13771172, 0.91557533,
0.93370323, 0.79977904, 1.02721929, 1.16292026, 0.92976802,
0.85806865, 0.97824974, 1.02721582, 0.82773004, 0.9297126 ,
0.93769842, 1.14995068, 1.02895292, 0.90307101, 0.85918303,
1.14903979, 1.0344768 , 0.7502627 , 1.27452448, 1.12150928,
0.87274005, 1.09807041, 0.98634666, 1.03086907, 0.94743667,
0.91145542, 1.04395791, 0.83396016, 0.94783374, 0.96693806,
0.88864359, 0.93400675, 1.08563936, 0.78599906, 0.92142347,
1.15487344, 1.19946426, 0.92729226, 0.83333347, 0.90837637,
0.89191831, 1.0581614 , 0.85162688, 1.10081699, 0.98295351,
0.86684217, 1.00867408, 0.95966205, 0.73170785, 1.3207658 ,
0.87988622, 0.82869937, 0.9620586 , 0.71668579, 1.04105616,
0.71415591, 1.30198958, 0.81934393, 0.86731955, 0.99773712,
0.99943609, 0.87678188, 1.01650692, 0.73917494, 0.92077402,
0.98322263, 0.90623212, 0.88261034, 1.12798871, 0.84698889,
0.85312827, 0.91214965, 0.8778361 , 0.99621569, 0.94155734,
0.66441342, 0.85925635, 0.98064691, 0.97107172, 0.96438785,
0.95670408, 0.87601389, 0.9388234 , 0.91165254, 1.14769638,
0.99856344, 0.84391431, 0.94850194, 0.93754548, 0.86398937,
0.95090327, 1.07959765, 1.16684297, 0.82354834, 0.93165852,
0.91422292, 1.14872038, 0.87050113, 0.92322683, 1.04111597,
0.87780005, 0.94602618, 1.10071675, 0.88412438, 0.91286998,
0.9045216 , 0.91750005, 0.98647095, 1.10986959, 0.98912028,
1.01565645, 0.93891294, 0.97696431, 0.91186476, 0.77363533,
1.00075969, 0.89608139, 0.99828964, 0.87239569, 0.87540604,
0.76152791, 0.82501538, 0.91656546, 0.74389243, 1.07923575,
1.00241137, 1.05628365, 1.04407879, 0.90048788, 1.1134027 ,
0.89745966, 0.96534 , 0.71151925, 0.91798511, 0.7337992 ,
0.83636115, 0.75279928, 0.95570185, 0.89073922, 0.90307955,
0.8030445 , 0.84374939, 0.89769981, 0.99002578, 1.01849373,
0.92436541, 0.79675699, 1.03910383, 1.07487895, 0.8906169 ,
0.97729004, 0.97284392, 0.76338988, 0.82756432, 1.12289431,
0.9582901 , 0.97160038, 0.90141331, 0.83271234, 1.16065947,
0.90605662, 1.13389282, 0.8557889 , 0.77149889, 0.9462268 ,
0.95908887, 1.03399986, 0.92795031, 0.73529029, 0.93630494,
0.96730298, 1.05490026, 0.93313995, 0.96980639, 0.9177592 ,
0.95483326, 0.85262905, 0.95170479, 0.9601628 , 0.94878173,
0.87627934, 1.00561764, 0.83441231, 0.90890643, 0.97177858,
1.26394809, 0.80773622, 0.72205262, 0.87692143, 1.01842034,
0.98128171, 1.10776014, 0.94400422, 0.92697961, 0.79523284,
0.8609763 , 0.96303262, 1.17190075, 1.01259271, 1.04973619,
0.94837034, 0.86592734, 0.85908444, 1.14914962, 0.98113587,
1.03070712, 0.89916573, 0.90618114, 0.93223156, 0.96031901,
0.94162334, 0.98908438, 0.95170104, 0.95056422, 0.81782932,
0.81770133, 1.32039255, 1.28822384, 0.82916292, 1.01626284,
0.97537737, 0.83235746, 0.78645733, 0.77916206, 0.93591612,
0.8469273 , 0.74309279, 0.91331015, 1.11240033, 1.41018987,
0.95320314, 0.95807535, 0.89382722, 0.9259679 , 0.92570222,
0.84567759, 0.82332966, 0.98371126, 1.00248628, 0.72107053,
1.09687436, 0.78399705, 0.85224803, 0.92151262, 0.85618586,
0.88485527, 0.954487 , 0.86659146, 1.12800711, 0.93019359,
0.91388385, 0.95298992, 0.96834137, 0.90256791, 1.01222062,
0.84883116, 1.01234642, 0.91135106, 0.83362478, 0.94928359,
0.82247066, 0.7671973 , 0.85663382, 0.88838144, 0.92491567,
0.88698604, 0.87485584, 1.08494606, 0.96431031, 1.06243095,
1.14062212, 1.02081623, 0.72229471, 0.82390737, 0.86599633,
0.95284398, 0.87238315, 1.02818071, 0.98462575, 0.81992808,
1.01207538, 1.0081178 , 0.88458825, 1.01726135, 0.97708359,
0.79820777, 1.06081843, 0.97028599, 0.95203124, 1.00482088,
0.71764193, 0.88115767, 0.90628038, 0.97304174, 0.77015983,
1.06109546, 0.89575454, 0.94824633, 0.93822134, 0.98048549,
0.812265 , 0.95744328, 0.79087999, 1.0222571 , 0.89100453,
1.03590214, 0.92699983, 0.86840126, 0.99455198, 0.87912973,
0.93506231, 0.80706147, 0.89931563, 0.7861299 , 0.89253527,
0.90052785, 0.82420191, 0.97042004, 1.03249619, 0.92354267,
0.80482118, 0.9007601 , 0.80123508, 0.82285143, 0.88105118,
1.03519622, 0.8620259 , 0.96447485, 0.80399664, 1.00324939,
0.96317193, 0.83260244, 0.98561657, 0.88445103, 0.70777743,
0.81608832, 0.98073402, 1.1206105 , 0.69903403, 0.84353026,
0.9064964 , 0.97055276, 0.82747966, 0.85400205, 1.01205886,
0.85324973, 0.90899616, 0.92797575, 0.94646632, 0.89358892,
0.7981183 , 0.96559671, 0.88352248, 1.09804477, 0.79152196,
1.1054838 , 0.93272283, 0.96165854, 0.8899703 , 0.8792494 ,
0.74563326, 0.85371604, 0.87760912, 0.87184716, 0.92049887,
0.99459292, 0.93699011, 0.90492494, 1.12981885, 1.10621082,
0.91391466, 1.05207781, 1.13395097, 0.87022945, 0.93165871,
0.89083332, 0.99584874, 0.98626911, 1.13885184, 1.17350384,
0.93294232, 0.79602714, 0.93670114, 1.09726582, 1.05378961,
0.9457279 , 1.03257053, 1.11349021, 0.80111296, 0.96415105,
0.99447221, 0.75745769, 0.77537636, 0.83860967, 0.90122484,
0.78850128, 1.19877642, 0.91190085, 0.80851919, 0.79484738,
0.93093657, 0.87619908, 1.22781715, 0.89734952, 0.8678127 ,
0.76177975, 0.82089769, 0.89288915, 1.01603179, 0.95279916,
0.84037366, 0.99962719, 0.84298093, 0.77234882, 0.99876963,
1.01856707, 1.2133211 , 0.73822878, 0.83465671, 1.08879938,
0.8878534 , 1.24133317, 0.89264527, 0.83938655, 1.03853109,
0.9842176 , 0.94257497, 0.98282054, 0.90632313, 0.75810741,
1.02540204, 0.86648513, 0.98430307, 0.84561701, 1.13483974,
1.12446434, 1.00220923, 1.23248603, 0.98999724, 0.81980761,
0.91334393, 0.92831557, 1.16798373, 0.8888053 , 0.9319632 ,
0.89206108, 0.86764558, 0.69337981, 0.9021983 , 1.09931186,
1.15290804, 0.62304114, 1.1205393 , 1.27030677, 1.12718725,
0.93002501, 0.83367301, 0.96589068, 0.86578968, 0.79204086,
0.85124905, 0.89121046, 0.96406141, 0.99249204, 0.93363878,
1.11258502, 0.92020983, 1.16020824, 0.99075915, 0.73994574,
0.9335638 , 0.97410789, 1.00029038, 1.43611904, 0.93089581,
0.94758878, 0.84808364, 0.92192819, 1.0249259 , 0.69529827,
0.94629021, 0.7330735 , 1.07902207, 0.93022729, 0.77375973,
0.95019291, 0.92333668, 0.81483081, 0.78044978, 0.85101115,
0.88859716, 0.88720344, 0.89291167, 1.10372601, 0.91132273,
1.04156844, 0.94867703, 0.83546241, 0.84227545, 0.97043199,
0.73281541, 0.74512501, 0.9128489 , 0.99223543, 0.7319106 ,
0.93065507, 1.07907995, 0.86895295, 0.84344015, 0.89394039,
0.88802964, 1.00580322, 1.04286883, 0.82233574, 1.0279258 ,
0.97550628, 1.03867605, 1.10231813, 0.9642628 , 0.91684874,
1.11066089, 0.99439688, 0.88595489, 0.88725073, 0.78921585,
0.80397616, 0.71088468, 0.98316478, 0.72820659, 0.96964036,
1.03825415, 1.01438989, 1.02763769, 1.29949298, 1.06450406,
0.86198627, 0.85588074, 0.90445183, 1.01268187, 0.87927487,
0.9263951 , 0.93582126, 0.88738294, 1.20707424, 0.92887657,
0.97891062, 0.92893689, 0.84846424, 0.96287008, 0.99565057,
0.93483385, 1.21357183, 0.82369562, 0.65144728, 1.11249654,
0.7785981 , 0.88248898, 0.8953217 , 0.95884666, 0.77538093,
0.82272417, 0.91073072, 1.17185169, 0.99645708, 0.88693463,
0.90293325, 0.93368474, 0.87575633, 1.01924242, 0.80011545,
0.99762674, 0.75834671, 0.91952152, 0.86754419, 0.81073894,
0.8880299 , 0.74868718, 0.99979109, 0.90652154, 0.92463566,
0.93894041, 0.92370595, 0.88766357, 1.04614978, 1.77193759,
0.85480724, 0.85208602, 0.96154559, 0.95832935, 0.84210613,
0.9604567 , 0.88597666, 1.0010723 , 0.91890105, 1.10529207,
0.91123688, 0.88466788, 1.09759195, 0.8946647 , 0.78066485,
1.04376296, 1.02951755, 0.88455241, 0.99284282, 0.82423576,
0.80612213, 0.80915541, 0.9482253 , 0.8887192 , 0.86163309,
0.891385 , 0.84850622, 1.03353375, 1.09248204, 1.05337218,
0.85927317, 0.89167858, 1.04868715, 0.92933249, 1.1177299 ,
0.99846776, 0.82418972, 0.86041965, 0.88015748, 0.89785813,
0.85997945, 0.97102367, 0.86679181, 1.00848475, 0.9091588 ,
0.92565039, 0.84019067, 0.86978485, 1.21977681, 1.14920817,
1.05177219, 0.84202905, 0.85356083, 1.01379321, 0.93364219,
1.01999942, 0.85906744, 0.98178266, 0.87218886, 0.93983742,
0.79713053, 1.01123331, 0.86551625, 0.81983929, 0.86782985,
0.86735664, 1.43316935, 0.8490094 , 0.99909103, 0.85715326,
0.89452366, 1.08380518, 0.74686847, 1.62233058, 0.81046611,
0.83563461, 0.96925792, 0.82863186, 0.87147202, 0.92609558,
0.8879082 , 0.93933353, 0.90043906, 0.81677055, 0.78016427,
0.68871014, 0.83329967, 0.81570171, 0.89780443, 0.81337668,
1.00772749, 0.96220158, 0.90035459, 1.06031906, 0.85832752,
0.93636203, 0.96336629, 0.94686138, 0.98499419, 0.87223701,
0.96079992, 0.81302793, 0.99287479, 0.99369685, 1.21897038,
0.94547481, 0.80785132, 1.02033902, 0.93270741, 0.90386512,
1.05290969, 1.08873223, 0.81226537, 0.87185463, 0.96283379,
0.95065022, 1.07603824, 1.22279786, 0.83749284, 0.93504869,
0.93554565, 0.95255889, 0.96665227, 0.92370811, 0.76627742,
1.14267254, 0.98268052, 1.10017739, 0.79569048, 0.86494449,
1.17939799, 0.80655859, 0.76799971, 1.0018905 , 0.83051793,
1.37419036, 1.10424623, 0.93729691, 0.99655914, 0.94900303,
1.157402 , 0.93397459, 0.8133195 , 0.8592273 , 1.024661 ,
0.83708977, 1.06537435, 0.93561942, 1.00402051, 0.68981047,
0.92807172, 0.72192097, 1.232419 , 0.97080757, 0.90350598,
0.95122672, 1.04663207, 0.79080723, 0.8421381 , 1.01956925,
0.93307897, 0.88011784, 0.78674974, 0.97537097, 0.7582792 ,
0.85704507, 0.97683858, 0.7739793 , 0.96245444, 0.99506991,
0.76853035, 0.90875698, 0.97951121, 0.93350388, 1.16380858,
0.8154485 , 1.16902243, 0.98644779, 0.969998 , 0.73120517,
1.19059456, 0.85953661, 0.99193867, 0.88144929, 0.99254885,
1.02956121, 0.90689455, 0.89494433, 0.85625065, 0.86227273,
0.99830845, 0.97635222, 0.83420327, 1.02359646, 0.93694813,
0.88462353, 0.97040788, 1.02543309, 0.91904348, 1.2527365 ,
0.82235812, 0.92026753, 0.93935859, 0.88919482, 1.00405208,
1.06835782, 1.34738363, 0.97831176, 0.92053317, 1.09692339,
0.86156677, 1.02455351, 1.25572326, 0.89721167, 0.95787106,
0.85059479, 0.92044416, 0.99210399, 0.94334232, 0.76604642,
0.8239008 , 0.70790815, 1.06013034, 1.12729012, 0.88584074,
0.91995677, 0.82002708, 0.91612106, 0.86556894, 0.88014564,
0.95764757, 0.96559535, 0.97882426, 0.70725389, 0.9273384 ,
0.86511581, 0.85436928, 1.26804081, 1.02018914, 0.95359667,
0.89336753, 0.91851577, 0.78166458, 1.02673106, 1.01340992,
1.34916703, 0.77389899, 1.12009884, 0.94523179, 0.87991868,
0.82919239, 0.98198121, 0.83653977, 0.91748611, 1.0642761 ,
0.86964263, 0.86304793, 1.11500797, 0.7234409 , 1.00464282,
1.01835251, 0.73389264, 0.88471293, 0.85754755, 1.05383962,
0.73121546, 0.85445808, 0.768308 , 0.81396206, 1.01261272,
0.76696225, 1.01770784, 0.76742866, 0.98390583, 0.96277488,
0.87998292, 0.85264282, 1.12704234, 0.79612317, 0.92206712,
1.09846877, 0.99874997, 0.87707457, 1.03404785, 1.00726392,
0.91613763, 0.74242708, 0.80247702, 0.90702146, 0.81638055,
0.78507729, 1.00066404, 0.84687328, 0.76488847, 0.89697089,
0.82524207, 0.84940145, 1.022041 , 0.75856559, 1.15434195,
1.09781849, 0.93256477, 0.96021119, 1.00796782, 0.88193493,
0.87902107, 0.82245196, 1.04739362, 1.133521 , 0.82969043,
1.01007529, 1.07135903, 0.981338 , 0.86178089, 0.77930618,
0.82512349, 1.2017057 , 1.30452154, 1.12652148, 1.03670177,
0.90631643, 0.74222362, 0.84452965, 0.86366363, 0.79192948,
1.10288297, 0.9554774 , 1.00912465, 0.95545229, 0.93584303,
0.91604017, 0.91681165, 0.76792072, 1.66615421, 0.99044246,
1.05068209, 0.88197497, 0.91153792, 0.82702508, 0.95182748,
1.05320356, 0.8466656 , 1.01676717, 0.65881123, 1.02589358,
1.03902555, 1.00199915, 1.03022137, 0.93427176, 0.94600332,
0.94594696, 0.86465228, 0.91241272, 0.72232997, 0.93380167,
1.1960032 , 0.87463367, 0.78428202, 0.88088 , 0.97202961,
0.99425528, 0.89567214, 0.84908979, 0.81004889, 0.85484368,
0.68478631, 0.96563032, 0.78298607, 0.71894276, 0.88632131,
0.8885966 , 0.99235811, 0.84002222, 0.91265424, 0.91999157,
0.89786651, 1.18062511, 0.92378385, 0.82501238, 1.09009807,
0.96787582, 1.12456979, 0.86339677, 0.8786218 , 0.89865768,
1.02943564, 0.98886502, 0.97135566, 0.95914954, 1.05080931,
0.76554446, 0.80142172, 0.99661393, 1.14749469, 0.93695459,
0.95769957, 1.00811373, 1.00352699, 0.98747546, 0.99436785,
1.10256609, 0.84366101, 0.85931876, 0.90745126, 1.04928733,
0.84499693, 1.14018589, 1.2337188 , 0.90516077, 0.84991869,
0.72984467, 0.9729476 , 0.97483938, 0.88626286, 1.02838695,
0.89750089, 0.80324802, 1.40726294, 0.91149383, 0.86837826,
1.21798148, 0.96459285, 0.71897535, 0.76230781, 0.88042964,
0.8205186 , 1.0517869 , 0.74269565, 0.98278109, 1.1454159 ,
1.03806052, 0.75238659, 0.94224089, 0.94931526, 1.24018529,
0.99048689, 0.88108251, 0.81008694, 0.95443294, 0.99975781,
0.83336879, 0.74422074, 0.87934792, 0.81994499, 0.98684546,
0.82176924, 0.91652824, 0.77571479, 0.77039071, 0.9951089 ,
0.92896121, 0.96234268, 1.00295341, 1.01455466, 0.75014075,
0.95568202, 0.80995874, 1.24671334, 0.89480962, 0.81300194,
0.76967074, 0.92514927, 0.89610963, 0.97441759, 1.19354494,
0.87041262, 0.97344039, 0.88983828, 0.91614149, 0.85782814,
0.78403196, 0.96665254, 0.91000054, 0.78641804, 0.96920714,
0.89670528, 0.79247817, 1.04189638, 0.86777037, 1.18686087,
0.79506403, 0.92389297, 0.76211023, 0.93617759, 0.91879446,
0.8207635 , 0.78984486, 0.93005953, 0.78743101, 0.9814347 ,
0.94882561, 0.9577075 , 0.81121566, 1.01025446, 0.90587214,
0.94842798, 0.8811194 , 1.01942816, 0.94698308, 0.92603676,
0.86119014, 0.97543551, 0.84730649, 0.77552262, 0.97536054,
0.96944817, 0.8736804 , 0.86809673, 0.98134953, 1.16303105,
0.81534447, 1.35930512, 0.83221293, 0.94136243, 0.76926289,
1.05844282, 0.87783288, 0.78921971, 0.84360428, 0.78722128,
1.00022607, 0.96779519, 0.95891975, 0.91900001, 1.07307813,
1.03713093, 0.96257742, 0.90363152, 0.88729834, 0.91929215,
1.00508255, 0.80838454, 0.92165553, 0.94513005, 0.95429071,
0.80829571, 0.79531708, 1.01317347, 0.75337253, 0.85965134,
0.77014567, 0.77680991, 0.77158741, 0.88882588, 0.91466414,
0.82815897, 0.80251251, 1.04901425, 1.03386161, 1.3267075 ,
1.12457236, 0.8267327 , 0.89313417, 0.85992512, 0.93482733,
0.83456348, 0.87991138, 0.8110149 , 0.77913188, 0.89391799,
0.73646974, 0.87038816, 0.99533506, 0.90744083, 0.98175496,
1.17458551, 0.86718975, 0.93125366, 0.76131575, 0.90419708,
0.95122171, 0.97531776, 1.05955142, 0.94714906, 0.79360281,
1.02765349, 0.85192628, 0.84680852, 0.85470655, 0.94950982,
0.75868699, 0.89731933, 1.00736877, 1.05171121, 0.73336848,
0.97323586, 0.9848978 , 1.27418684, 0.83954394, 0.73979357,
1.06785996, 0.97832832, 0.7903268 , 0.76600605, 0.94906446,
0.81383465, 0.83620612, 1.00573379, 0.86359645, 0.9962139 ,
0.98779432, 1.13793814, 1.02764992, 0.9070168 , 0.81340349,
0.94807089, 0.90499083, 0.83805736, 0.99623054, 0.91875275,
0.95603557, 0.93156095, 0.83858677, 1.03667466, 1.01436655,
0.85551979, 0.76227045, 0.84743986, 0.88487423, 0.93800365,
0.8984666 , 0.92600404, 0.89230381, 1.34625848, 1.10026015,
0.9314026 , 0.82450724, 1.0299575 , 0.98494286, 1.07564492,
0.96565301, 0.89677015, 1.15236174, 0.85476951, 1.00169288,
0.90520725, 1.06235248, 1.04267637, 0.8311949 , 0.82017897,
0.81635968, 0.97246582, 0.84554172, 0.85409644, 1.18006461,
0.96488389, 0.69228637, 0.97812108, 0.91764623, 0.86250551,
0.91067775, 1.04692847, 0.94594707, 1.04351374, 0.9861303 ,
0.92192581, 0.835444 , 0.84362223, 1.13770705, 0.8075574 ,
1.02260109, 1.13786456, 0.80862839, 0.89291687, 0.90278047,
1.11613951, 1.29900454, 1.5622857 , 0.70999772, 0.99692653,
0.89109939, 0.77506441, 0.86054356, 0.99498141, 0.84222293,
0.95213508, 0.91438286, 0.89305591, 0.9716793 , 0.88609491,
1.00275797, 0.90086022, 0.75336995, 1.1572679 , 0.75952094,
0.89203313, 0.82115965, 0.81459913, 1.02943406, 0.67063452,
1.08707079, 0.92139483, 0.89855103, 0.89910955, 1.07169531,
0.93684641, 0.84893365, 1.08659966, 1.43385982, 0.94788914,
0.95277539, 0.94709274, 1.08412066, 0.90274516, 0.85147284,
0.89327944, 0.92176174, 0.83820774, 0.90981839, 0.82303984,
0.95189716, 0.95154905, 0.73628819, 1.18956148, 1.20224654,
0.97666968, 1.08057375, 0.90369444, 0.98589538, 0.81426873,
0.75127684, 0.93200745, 0.833666 , 0.79532088, 0.91965037,
0.99540522, 0.75449668, 0.85698312, 0.79328453, 0.94667443,
0.7637764 , 0.77203985, 0.73841377, 0.98587851, 1.34642268,
0.78002774, 1.04356217, 1.02266882, 1.08936378, 0.9794388 ,
1.07623423, 0.78069571, 1.12194495, 0.8072132 , 0.91672662,
1.36102062, 0.86933509, 1.15282756, 1.06219505, 0.80295502,
1.00999033, 0.69418333, 0.93678452, 1.13002256, 0.91465628,
0.73558316, 1.1302073 , 0.85856238, 0.89450543, 1.11812369,
0.75891878, 0.66859534, 0.97445338, 0.82210227, 0.76292085,
0.79289499, 1.04380135, 0.95586226, 0.87480096, 0.81244036,
0.86097575, 0.84111811, 0.85369732, 0.99160655, 0.90911501,
0.81315845, 0.74037745, 1.04369233, 1.03535223, 1.18886682,
0.87092491, 0.93562683, 0.92555142, 0.95268616, 0.9653025 ,
0.93447525, 0.9043932 , 1.25701034, 1.10354218, 0.96588129,
0.94717991, 0.97010307, 0.78264501, 0.80991731, 0.98540974,
0.83174886, 0.66966351, 1.01747376, 1.21553117, 0.80527296,
1.06556826, 1.00870321, 1.03316522, 0.88994006, 0.89092714,
0.94119254, 0.83930854, 1.01500087, 1.03581272, 0.97608081,
1.11919255, 1.16586474, 0.85064102, 1.06070274, 1.00679658,
0.75848826, 0.97969353, 0.94834777, 1.64970724, 0.82448941,
1.02236919, 0.95252025, 0.98638842, 0.89094895, 0.95522527,
0.91533774, 0.83716951, 0.92612154, 0.8662328 , 0.9675949 ,
0.96758398, 0.84309291, 0.95071171, 1.0165785 , 0.96628063,
1.00096151, 0.83175371, 0.79063043, 0.97371271, 0.76009001,
1.02409279, 0.97232166, 0.8480577 , 0.8982739 , 0.9959743 ,
0.96604729, 0.8681602 , 0.99850841, 0.96162481, 1.01259965,
0.98580061, 0.82751273, 0.90469122, 0.98254028, 0.78012425,
0.87023012, 0.96830515, 0.9415831 , 0.8591063 , 0.82961507,
0.89166083, 0.88509907, 0.95987837, 1.12356244, 0.71406404,
0.99047619, 0.93735587, 0.80540831, 1.0024624 , 0.95179491,
0.83602101, 0.90343297, 0.90510417, 0.96477126, 0.79995299,
0.93123762, 0.73763362, 1.0619498 , 0.80929865, 0.86110233,
0.84552556, 0.9943 , 0.97085623, 0.75751174, 0.9201456 ,
1.02268858, 0.9642899 , 0.79078558, 1.03160502, 0.85200219,
1.02246639, 1.08771483, 0.81997868, 0.82499763, 0.92767703,
1.06700018, 0.7882174 , 0.7789828 , 0.89096139, 0.73155973,
1.01717651, 0.91889525, 0.93256065, 0.84716063, 1.00965969,
0.74505112, 0.80104245, 0.76003901, 0.96662605, 0.96594583,
1.04571121, 0.97700878, 0.85461917, 0.9150222 , 0.89110471,
1.11183096, 0.98143747, 1.02346975, 0.9059266 , 1.00771483,
0.96336096, 0.93783898, 0.90545613, 1.10404183, 0.75297691,
0.92548654, 0.79889783, 0.88177552, 0.93896814, 0.87309811,
0.80691061, 0.89725699, 1.16586955, 0.98948281, 0.94524894,
0.86085608, 0.76716851, 0.85362573, 1.09936882, 0.9328761 ,
0.74819673, 0.94331186, 0.81077304, 0.88610499, 1.01452015,
0.91513953, 0.92846128, 0.93539081, 0.8946682 , 0.9270336 ,
0.96673629, 0.9897488 , 1.11891899, 0.87551585, 0.85854576,
1.13458763, 1.11450768, 0.79887951, 1.091154 , 1.04180374,
0.79252573, 0.90484245, 0.94221016, 0.95721137, 0.86776103,
0.97167404, 0.83404166, 0.94634038, 0.98907413, 0.92321459,
1.03547804, 0.79660212, 0.94870239, 0.70027204, 0.79841059,
0.92563393, 1.4385341 , 0.8331731 , 0.844816 , 0.97851389,
1.24048695, 0.83765698, 0.83600835, 1.13901283, 1.05994936,
0.84292427, 0.86759056, 0.9272156 , 0.77375499, 0.99972839,
0.95570976, 0.97879539, 0.95528351, 0.84555495, 0.95296134,
0.87469056, 0.78862024, 0.793795 , 0.8516853 , 0.92816818,
1.02492208, 0.8037345 , 0.95481283, 0.75138828, 0.72110948,
1.36815666, 0.9661646 , 0.81651816, 0.87764538, 0.97397297,
0.99845266, 0.77433798, 0.9266279 , 1.92493013, 1.07588789,
0.90412593, 1.03165475, 1.00826548, 0.75500744, 0.87198881,
0.86871262, 0.97854606, 0.80954477, 0.84130266, 0.89674826,
1.43926644, 0.74873088, 1.01894282, 0.93606154, 1.08241489,
0.76626357, 0.97434747, 0.82824599, 1.00267494, 0.97168761,
1.06433173, 1.22741978, 1.46998419, 0.9521923 , 0.98276685,
0.92422781, 1.14241216, 1.13339577, 1.05586816, 1.04923068,
0.83364505, 0.98007268, 0.94322393, 0.84310173, 1.03481955,
1.18281181, 0.79807678, 0.840274 , 1.00344058, 1.09442855,
0.88033836, 0.86189964, 1.1395012 , 1.18808865, 0.78667714,
1.09323293, 0.81511099, 0.95830848, 0.99637275, 0.9146258 ,
0.96358155, 0.79048719, 0.80395604, 1.00828722, 0.92872342,
0.98789363, 0.96720252, 0.80541021, 0.73697557, 0.86692999,
0.86795696, 1.1516694 , 0.95911714, 1.13981603, 1.02002866,
0.90808456, 0.94208296, 0.93691739, 0.87653118, 0.72824225,
0.78177906, 1.2139146 , 0.83405505, 0.91764545, 0.83318595,
0.77930256, 0.86499397, 0.95599882, 0.73850016, 0.9630604 ,
0.97913407, 1.1790714 , 0.94994057, 1.04379512, 0.80815459,
1.16560205, 0.97486893, 1.02780804, 1.10633754, 0.78679252,
0.94643528, 1.19999119, 0.98621069, 0.8899674 , 0.89235261,
0.8728921 , 0.77089094, 0.8492628 , 0.86905159, 0.90741875,
0.81065291, 0.91208596, 1.04616696, 1.24291958, 0.98628605,
0.99751975, 0.83249612, 0.96343385, 0.77862866, 0.72381238,
1.17384381, 1.06013687, 0.73460652, 1.09554763, 0.82015886,
0.90862905, 0.89037104, 0.7866143 , 0.8570287 , 0.75061334,
0.94950855, 0.8091383 , 1.04055212, 0.96679573, 0.78338675,
0.75968533, 1.00495071, 0.6491633 , 1.02802735, 1.00725883,
0.89333988, 0.87539291, 0.99374251, 1.10241119, 1.14935785,
0.9369769 , 0.84772646, 1.05024743, 0.97411124, 0.76972352,
0.92161017, 0.88689841, 0.78598549, 0.93400036, 1.14699647,
0.98636563, 0.93051079, 1.00131515, 0.82749213, 0.96665447,
0.84457933, 0.95172036, 0.86372572, 0.97034285, 0.99877807,
0.8724721 , 0.86281118, 0.96253742, 1.13485439, 1.03410559,
0.83113167, 1.02644607, 1.0669284 , 0.947969 , 1.13373538,
0.85495039, 1.15829218, 0.72662405, 0.81755747, 0.78381403,
0.84360371, 1.10945791, 0.80215303, 0.8861351 , 0.97484684,
1.02996282, 0.86219328, 0.95675062, 1.10753315, 0.92496918,
0.79323289, 0.76891191, 0.93106762, 0.94523682, 0.9534338 ,
0.8954424 , 0.81732651, 1.00443776, 0.96178195, 0.89727229,
0.88917552, 0.88660003, 0.941933 , 1.03900381, 0.75262915,
0.94265862, 0.84472046, 1.09834757, 0.81516259, 0.90865634,
0.9582531 , 0.99819053, 0.8815072 , 0.92425525, 0.79085083,
0.98173446, 0.95199169, 0.71653726, 1.11863725, 0.97855807,
0.87873181, 1.37925403, 0.8085008 , 1.40027689, 0.79367826,
0.82070449, 0.87039383, 0.95896081, 0.75617612, 1.3196712 ,
0.9335008 , 0.9461447 , 1.0838461 , 0.83347962, 0.69558254,
0.92358528, 0.99423247, 0.94884494, 0.75094955, 0.90429063,
1.13740548, 0.89354463, 1.13094104, 1.7373979 , 0.87808028,
0.72820621, 1.02995089, 0.80134468, 0.97511989, 0.93823103,
0.98097787, 0.73179813, 0.93764192, 1.04399599, 0.95644709,
0.80476939, 0.87463727, 0.83220517, 0.76978546, 0.97056432,
1.1693819 , 1.0368387 , 0.98606478, 1.03538075, 0.88253058,
0.91105775, 0.93745618, 0.80272442, 0.77045021, 0.8482449 ,
1.04505306, 0.90427753, 0.706451 , 1.02687396, 0.82931474,
1.24255717, 0.91343217, 0.8692726 , 0.98422894, 0.82142068,
0.86854354, 0.77715916, 0.94490329, 0.97686366, 1.05198512,
0.888989 , 1.09252847, 0.8034292 , 1.04727187, 0.87246831,
0.89474556, 1.06031526, 0.93056174, 0.7747956 , 0.87772054,
1.1183045 , 0.78938083, 0.82019511, 0.82553273, 1.04324276,
0.7676436 , 0.68914756, 0.88400598, 0.79611901, 0.77011016,
0.76727015, 0.84523666, 1.09972447, 1.03942974, 1.07322466,
1.01079248, 1.03469338, 0.90450148, 0.87367007, 0.88432601,
0.85312482, 0.7328442 , 1.12256832, 0.8837547 , 0.81023384,
0.87068285, 0.94466637, 1.13236695, 0.95958423, 0.8099625 ,
1.07509372, 1.03306035, 0.99385633, 1.06433672, 1.07385915,
0.92709455, 1.03502217, 0.88961476, 0.8307198 , 0.98819038,
1.09916368, 0.8919766 , 0.90349117, 0.97554616, 0.98376763,
0.89285893, 0.99941071, 1.16078972, 0.66336693, 1.16389515,
1.10395069, 1.20381952, 0.98928899, 1.17155389, 0.81707565,
0.82903836, 0.95892646, 0.8437454 , 0.79017432, 0.81562954,
0.65169124, 0.87950793, 0.9017879 , 0.82160564, 0.87079127,
0.88100146, 1.00783979, 0.84102603, 1.16817499, 0.97697533,
0.89115235, 0.77254376, 0.7679024 , 0.97093775, 1.13881665,
0.90348632, 1.14654277, 1.08625707, 0.98787902, 1.49057495,
0.99639001, 0.97623973, 0.74807856, 0.76656108, 0.79095998,
1.04583503, 0.95124469, 0.90228738, 1.03129265, 1.02663212,
0.67704952, 0.95335397, 1.01726294, 0.78765385, 0.91140255,
1.04097119, 0.71881619, 1.14572601, 0.79708798, 1.07104057,
0.95925248, 0.72556831, 0.92256392, 1.08702165, 0.95977251,
0.99670254, 0.95276505, 1.15268752, 0.68215678, 1.05573208,
0.89672437, 0.89396611, 1.01814905, 0.81969778, 0.74390457,
1.20909881, 0.82388701, 1.00574083, 1.01348114, 1.01492015,
0.94759788, 0.99758684, 1.19912008, 0.92749943, 1.16660441,
0.97646538, 0.8189475 , 0.97464158, 1.01050799, 0.94368665,
0.70995047, 0.94469581, 1.02534612, 1.3513094 , 0.88081968,
1.00576693, 0.9695495 , 1.0549135 , 1.29993316, 0.91050559,
0.95543198, 1.02161725, 0.76895773, 1.03685293, 0.88201449,
0.90345561, 1.02793048, 1.00267831, 0.84653161, 0.9217411 ,
0.94666576, 0.94946561, 0.77482488, 0.94358305, 0.89779666,
1.01462131, 1.05829923, 1.13217729, 1.12260175, 0.89810828,
0.96305689, 0.90466377, 0.8091617 , 0.93070824, 1.03997521,
1.04076373, 0.95858477, 0.94382748, 0.7585222 , 1.22890096,
0.97300529, 0.87424719, 0.90435141, 0.91894865, 0.97819677,
0.80300175, 1.03729016, 1.19305569, 0.81633791, 0.7930351 ,
0.8141721 , 0.86764479, 0.89207142, 0.89691482, 0.86243171,
0.91184679, 0.94284352, 1.01357831, 1.03806277, 0.92000143,
0.91018767, 0.90555137, 0.89089532, 1.3530331 , 0.96933587,
0.82350429, 0.71549154, 1.13399156, 0.87838533, 0.99177078,
0.93296992, 1.43078263, 0.90278792, 0.85789581, 0.93531789,
0.84948314, 0.95778101, 0.80962713, 0.88865859, 1.15297165,
0.85695093, 0.88601982, 0.96665296, 0.9320964 , 1.04193558,
1.006005 , 0.78939639, 0.79344784, 0.87012624, 0.8532022 ,
0.93351167, 0.91705323, 0.74384626, 0.84219843, 0.78265573,
1.07759963, 1.0236098 , 1.00202257, 1.18687122, 1.00869294,
0.8809502 , 0.76397598, 0.81845324, 0.97439912, 1.10466318,
1.10678275, 0.96692316, 0.84120323, 1.13151276, 0.72574077,
0.82457571, 0.8179266 , 1.01118196, 0.84303742, 0.86255339,
1.03927791, 0.82302701, 1.03586066, 0.75785864, 0.9186558 ,
0.97139449, 0.92424514, 1.00415659, 1.08544681, 0.80940032,
0.9073428 , 0.83621672, 1.04027879, 0.79447936, 0.94829305,
1.16176292, 1.11185195, 0.88652664, 0.98676451, 0.89310091,
0.72272527, 0.79963233, 0.94651986, 0.91540761, 1.0498236 ,
0.84938647, 1.15539602, 1.03118991, 0.86565049, 0.77764016,
0.77866522, 0.78008955, 0.89062575, 0.81285464, 0.92554114,
1.08747324, 0.84338687, 0.76746516, 0.99205474, 0.86649541,
0.97586166, 0.9721711 , 1.14895298, 1.04659345, 1.0605085 ,
1.06392238, 1.08286448, 0.93612266, 0.82545354, 0.84305431,
0.83650404, 1.11073704, 0.91760695, 0.83281572, 0.84244131,
1.05843708, 0.94695861, 0.95469608, 0.96038612, 0.81373042,
0.94943303, 1.00824522, 0.86416102, 0.87121008, 1.04208739,
0.81171276, 1.12798927, 0.99122576, 0.80626996, 1.07103151,
0.99809277, 1.08490135, 0.9441509 , 0.98766371, 1.33205139,
0.92145678, 0.88112784, 0.9297591 , 1.17549838, 0.8481953 ,
0.96359948, 0.98478935, 0.77028684, 0.86408555, 0.92863805,
0.94593549, 0.78705212, 1.1923026 , 0.9983487 , 0.99152533,
0.95313678, 1.01847515, 1.05728959, 0.88009142, 1.00351951,
1.00549552, 0.81671365, 0.90545602, 0.77895202, 0.82217088,
0.94838645, 0.85928327, 0.90729044, 0.92975916, 0.91946285,
0.80537364, 1.11885357, 0.84691232, 0.85356231, 0.85102988,
1.06499659, 1.0242127 , 0.91245632, 0.83131215, 0.72151085,
0.9295769 , 0.89549018, 0.87914839, 0.93541175, 0.97319188,
0.791944 , 1.08008186, 0.79549907, 0.90967683, 0.80506028,
1.1206821 , 0.91258859, 1.24855319, 0.96112955, 1.14305514,
0.79327927, 0.84209204, 0.94494251, 0.89573237, 1.0571304 ,
0.94504292, 0.84446547, 0.92060829, 0.82347072, 0.86280426,
0.85516098, 0.78649432, 0.89522516, 0.94529795, 0.90322825,
0.9616288 , 0.77439126, 1.0130917 , 0.84021262, 0.97337238,
0.93206526, 0.93809914, 0.87626441, 0.92706652, 0.86819358,
0.74060652, 0.84046045, 0.94130171, 0.92537388, 0.80485074,
0.81633347, 0.76401825, 0.81300784, 0.8052467 , 1.27234895,
0.92674704, 1.12106762, 0.91743016, 0.94694287, 0.87309918,
0.99163895, 0.83777703, 0.89713459, 0.88208343, 0.90205904,
0.9708827 , 0.94965009, 0.81446019, 0.89512677, 0.97025135,
1.02314481, 0.88399736, 1.01059963, 0.86193889, 0.94621507,
0.97334837, 0.90122433, 0.71015398, 1.17491792, 1.13869784,
1.03908735, 0.85480742, 0.98971408, 1.04147459, 0.85170846,
0.94861439, 0.7778831 , 0.73445723, 0.89587488, 0.88627975,
0.98253057, 0.86159356, 1.06559385, 0.90852704, 0.86562284,
0.92122779, 0.98233847, 0.94989946, 0.97171474, 0.92428639,
1.03712828, 0.88170861, 0.86802004, 0.79670394, 0.85606075,
1.09636421, 0.85048902, 0.99393971, 1.10510884, 0.80515088,
0.95559246, 0.96803475, 0.98115871, 0.94603995, 0.8654312 ,
0.90759845, 0.9010954 , 0.77979965, 0.83322032, 0.8485444 ,
0.89217626, 0.78817966, 1.03815705, 0.84076982, 0.93362471,
1.06173045, 0.82612852, 0.8336989 , 0.93943901, 0.91775212,
1.00501856, 1.04269442, 0.93195426, 0.78377288, 1.03372915,
0.8415154 , 1.02888978, 0.93202174, 0.78683383, 0.85106996,
0.9724203 , 0.93409182, 0.97876305, 1.17153649, 0.9434591 ,
0.81361398, 1.09554602, 1.48193137, 0.96349931, 0.93586569,
1.0210303 , 0.88980694, 0.88890459, 1.05330284, 1.09511186,
0.91202441, 0.78753378, 0.98074421, 1.04268892, 1.14265114,
0.86482628, 0.87233851, 1.18915875, 0.82556032, 0.87461473,
1.08396187, 0.69206719, 0.88113605, 0.96951674, 0.89248729,
0.909926 , 0.82966779, 0.8261611 , 0.9551228 , 0.79879533,
1.09416042, 1.01020839, 1.04133795, 1.09654304, 0.84060693,
1.02612223, 1.00177693, 0.90510435, 1.2091018 , 1.03290288,
0.80529305, 0.74332311, 1.04728164, 1.04647891, 0.83707027,
0.81648396, 1.07180239, 0.7926372 , 0.99855278, 1.16851397,
0.94566149, 0.75612408, 0.94975744, 0.92924923, 1.03215206,
0.82394984, 0.84142091, 0.88028348, 1.11036047, 0.82451341,
0.83694112, 0.84207459, 0.94095384, 1.00173733, 1.10241786,
0.86609134, 0.86859604, 1.1211537 , 0.84188088, 0.89023025,
0.99062899, 0.96828743, 0.80106184, 0.86745454, 0.99013196,
0.91838615, 0.86400837, 0.95679525, 0.78893711, 1.03753175,
0.97177648, 0.88685941, 0.9441012 , 0.69289996, 0.84219432,
1.01050959, 0.83578317, 0.79907595, 1.21281139, 0.91613925,
1.00202544, 0.95293036, 0.84583258, 0.84574886, 0.76470341,
1.23606485, 1.10063291, 0.93852084, 0.97201415, 0.68523403,
0.94560108, 0.81903039, 1.14332074, 0.80914367, 1.46398921,
0.85155227, 1.41106313, 0.85740937, 0.91107708, 0.9003576 ,
0.94132363, 0.85710825, 0.74805485, 1.2521402 , 0.95307547,
0.94274593, 0.86732331, 0.83850172, 0.96835288, 1.09443821,
0.68532627, 0.84736457, 1.06989165, 0.81424504, 1.02942437,
0.80255995, 0.89258275, 0.93560962, 1.04192911, 1.13498644,
1.24409985, 0.93295415, 1.08360355, 1.16468059, 0.81482388,
0.92387137, 1.07508578, 0.86564567, 1.0142773 , 0.86143907,
0.91214944, 0.9757589 , 0.90588817, 0.74168224, 0.91222552,
0.96119617, 0.95431519, 0.78080736, 1.0327991 , 1.05112022,
0.92761155, 1.0183631 , 0.73188757, 0.85617225, 0.93341155,
0.95106173, 0.9481304 , 0.92996766, 1.08092599, 0.96485228,
0.97964284, 0.94224551, 1.00654477, 1.01367565, 0.89785325,
0.80725703, 0.7495798 , 0.78240339, 1.04479122, 0.88200252,
1.0664992 , 1.05951775, 0.82508097, 0.81201381, 0.81860218,
1.07561763, 1.02830358, 0.87348993, 1.0081337 , 0.87470565,
1.45597242, 0.77540871, 0.8036279 , 0.80514427, 0.92688461,
0.88152328, 1.56288788, 0.87251203, 0.92808414, 1.03548911,
0.65226699, 0.81243827, 1.03103554, 1.11995602, 0.78956176,
0.96734427, 0.91600861, 0.8246106 , 1.09390498, 0.98187349,
0.8919928 , 0.98746862, 0.96298125, 0.93854424, 0.83060031,
0.74692856, 0.99757209, 0.78888849, 1.17517182, 1.06657933,
1.1244446 , 0.93608433, 0.88898472, 0.96823218, 0.87496056,
0.81776683, 0.98863687, 0.82962648, 1.02395766, 0.99622674,
1.07138771, 0.86669915, 0.98172208, 0.8787271 , 0.86125353,
0.79554881, 0.93382729, 1.00706175, 1.08386454, 0.69664542,
0.77316657, 0.79978147, 0.80764736, 0.9969375 , 0.83554928,
0.91017317, 0.95323454, 1.29872357, 1.08851275, 1.01673108,
0.79536208, 0.84878371, 0.95165619, 0.87733936, 0.86319684,
0.96758495, 0.87763237, 0.95094713, 1.00143077, 1.0596993 ,
1.27278299, 0.82281481, 0.89765404, 0.94538181, 0.88161857,
0.77679456, 0.84274277, 0.89864342, 0.98705162, 0.95456512,
0.92712401, 0.77427128, 1.03292269, 0.87034158, 1.24316113,
0.98278702, 1.17325118, 1.18863971, 0.88678137, 0.90389731,
1.01740421, 0.80228624, 0.97742223, 0.82741518, 0.8359407 ,
0.7177401 , 1.02297899, 0.81896048, 0.77127181, 0.83328601,
0.96939523, 0.94073198, 0.90356023, 1.12355064, 1.12811114,
0.92403138, 1.05423548, 0.70827734, 0.95891358, 0.89898027,
1.02318421, 0.93775375, 0.8245529 , 0.80604304, 0.77555283,
0.92112699, 0.85662169, 0.92725859, 0.93599147, 0.78971931,
0.8337306 , 0.93775212, 0.91025099, 0.75308822, 0.95391173,
0.96840576, 0.8394416 , 0.89087015, 0.73703219, 0.97812386,
0.8787356 , 0.93985266, 0.96406021, 0.88666152, 0.89242745,
0.97900374, 0.85697634, 0.8795755 , 0.78581812, 0.87138735,
0.74602994, 0.96158936, 0.84529806, 0.85333232, 1.06116542,
1.05929382, 1.09720986, 1.28959453, 0.91541148, 0.87657407,
1.06514793, 0.8668096 , 1.07325125, 0.85009534, 0.95542191,
0.86977409, 0.96249874, 0.97715908, 0.89360331, 0.98859647,
0.67560717, 0.90213348, 1.12051182, 0.99684949, 0.9863559 ,
1.32246221, 0.84632664, 0.89707447, 1.00486846, 0.90843649,
1.02399424, 0.97899017, 0.95693977, 0.8384806 , 0.93927435,
0.79153251, 1.08694094, 1.01785553, 0.99674552, 0.898566 ,
0.94116882, 0.95224977, 0.99859129, 0.81125029, 0.85985586,
1.14418875, 0.96306241, 1.31398561, 0.77961419, 1.01958366,
0.9575668 , 0.771084 , 1.04473363, 1.01569517, 1.04560744,
0.9648178 , 0.93466398, 1.09313672, 0.90349389, 1.00193114,
0.79991514, 0.91102351, 0.9795356 , 0.89285193, 1.04898573,
0.93031782, 0.95087069, 1.15644699, 0.91155375, 0.93005986,
0.70098757, 0.82751625, 0.85462106, 1.34969332, 0.93382692,
1.05558387, 1.25417819, 1.0546501 , 1.05217032, 0.86031346,
1.00864463, 0.73592482, 1.01899722, 1.00462831, 0.96882832,
0.81334751, 1.05102745, 0.82288113, 1.05798623, 0.77971966,
1.38584414, 1.0248193 , 0.78951056, 0.76171823, 0.78407227,
1.14808104, 0.97890501, 0.99870905, 0.96006489, 0.78442704,
0.99315422, 0.83653213, 0.95210661, 0.97233777, 0.78140495,
0.95996216, 0.76318841, 0.82333311, 0.87123204, 0.79531258,
0.82681452, 1.00492217, 0.93549261, 1.00240153, 1.02086339,
1.00424549, 0.87437775, 0.84675564, 0.98014462, 0.77262117,
1.02620976, 0.91162462, 1.0275041 , 1.1475431 , 0.78167746,
0.86273856, 0.84499552, 0.99712362, 0.9694771 , 0.94523806,
0.8450763 , 0.93068519, 1.29362523, 1.0249628 , 1.05522183,
1.13433408, 1.06981137, 0.85666419, 0.98203234, 0.75867592,
0.8844762 , 0.89708521, 0.75482121, 0.80137918, 0.90412883,
0.88815714, 1.11497471, 0.77441965, 0.93853353, 0.8962444 ,
0.83055142, 0.99776183, 0.92581583, 0.78783745, 0.90934299,
0.81136457, 0.99000726, 0.9669203 , 1.2890399 , 1.01923088,
1.11076459, 1.01331706, 1.02470946, 0.92950448, 1.10298478,
1.03723287, 1.09129035, 0.95138186, 0.85764624, 0.86606803,
0.8141785 , 1.0129293 , 0.93267714, 0.95663734, 1.01940702,
0.8072268 , 1.0707215 , 0.90482063, 1.01546955, 0.84018308,
0.95938216, 0.96454054, 0.93114659, 1.09705112, 0.88720628,
0.81067916, 0.82667413, 0.89494027, 0.9173495 , 0.73326273,
1.00209461, 0.9560545 , 1.09126364, 0.95709908, 0.81314274,
0.8274943 , 1.37605062, 0.99097917, 1.02221806, 0.90277482,
1.01611791, 0.79663017, 1.16686882, 1.19669266, 0.88366356,
0.77661102, 0.73467145, 1.15438391, 0.91439204, 0.78280849,
1.07238853, 1.03588797, 1.0438292 , 0.75935005, 0.76200114,
0.81603429, 0.74402367, 1.1171573 , 0.90227791, 0.94762351,
0.92462278, 0.8847803 , 1.1343863 , 0.8662186 , 1.00410699,
1.05008842, 0.94783969, 0.89555844, 0.98278045, 0.80396855,
1.00483139, 0.82540491, 0.83284354, 0.93132265, 0.91191039,
0.95753995, 1.18260689, 0.84124197, 0.87429189, 0.67617592,
0.89495946, 0.92898357, 1.10528183, 1.06994417, 0.82259834,
0.74746328, 0.99070832, 1.07386274, 0.84007203, 0.89720099,
0.9670094 , 1.02728082, 0.78001838, 0.97709347, 0.90602469,
1.49985196, 0.80256976, 1.05905677, 0.98298874, 0.94679703,
0.94305923, 0.98720786, 0.82091251, 0.91644161, 0.79576881,
0.98942172, 0.92974761, 0.99307545, 0.86959859, 0.88549807,
1.09246144, 0.87265047, 1.01449921, 0.74353851, 0.95029192,
0.94385304, 0.84779449, 1.00690543, 0.79727923, 0.92285822,
0.83164749, 1.06508941, 1.09757529, 0.9059649 , 0.9146043 ,
0.74474669, 0.71306438, 0.77989422, 0.84965464, 0.9424323 ,
0.82492634, 0.85076686, 1.01110574, 1.01445751, 0.87929754,
0.8773275 , 0.72314196, 0.92285502, 1.18173931, 0.86460799,
0.91795108, 1.16580482, 0.79880497, 0.72734786, 0.97579653,
0.76967834, 0.97543732, 1.04996964, 1.16439594, 1.08656546,
1.15644902, 0.98333436, 1.24374723, 0.95810117, 0.8488915 ,
1.06288523, 0.99055893, 0.75517736, 0.95856183, 0.85574796,
1.00426506, 1.25275675, 0.92735225, 0.83351314, 0.90216604,
0.87996386, 1.13312875, 1.00891523, 0.76513657, 0.85659621,
0.91142459, 1.05893495, 0.92253051, 0.87153684, 1.03190013,
0.92160845, 1.01768282, 0.80590054, 1.05172907, 0.92758177,
0.86902046, 0.93927127, 0.80389584, 0.96016014, 0.9720314 ,
0.93255573, 0.85792534, 0.97826842, 0.80506149, 0.97170364,
1.08397772, 1.01866333, 1.18898045, 1.02855427, 0.94848891,
0.94336541, 0.93119013, 0.92907817, 1.11806635, 0.88409637,
0.88809707, 1.06735612, 0.98447974, 0.88816438, 1.00099784,
0.92443453, 1.00325146, 0.86977836, 0.84621801, 0.92361073,
0.85573903, 0.77309241, 0.86717528, 1.19892035, 1.07497019,
1.02178857, 0.8718756 , 0.90646803, 0.92912096, 1.04538692,
0.95245707, 0.99698525, 0.94583199, 0.92537599, 0.86720487,
0.89927054, 0.86111792, 0.94401208, 1.01130191, 1.03759681,
0.8177749 , 1.07784373, 0.79823294, 1.00839713, 1.39409602,
0.87146241, 1.21218822, 0.84895926, 1.01742432, 0.8044077 ,
0.78632084, 1.07751744, 1.13147508, 0.90268302, 0.90024653,
0.92072578, 0.87763264, 1.00736787, 0.90978808, 0.90895492,
0.90766826, 0.98956566, 0.92075658, 0.77613105, 0.93815569,
0.95455546, 1.00607757, 0.82187828, 0.94197599, 0.867015 ,
0.90709762, 0.75604815, 0.91312261, 0.9286002 , 0.74623204,
0.87368702, 0.83879278, 0.92224793, 0.81676402, 0.90355168,
0.92762955, 0.91784037, 0.82273304, 0.75947806, 0.92687078,
0.87971276, 1.15037445, 0.86707445, 0.8611453 , 0.91921763,
1.07088129, 1.05150864, 1.02162325, 0.90305964, 0.99912687,
0.87693204, 0.6186911 , 0.95526533, 1.15975655, 1.00061222,
0.74608861, 0.954568 , 0.84965574, 0.79177899, 0.9741051 ,
1.0119514 , 0.79147502, 0.81367071, 0.87757421, 1.01270813,
0.86044808, 0.9689615 , 0.9577413 , 0.79480242, 0.76073002,
0.83131288, 0.96379259, 0.84679732, 0.82508685, 0.89977283,
0.86766439, 1.12231836, 0.93058445, 1.04584181, 0.88838751,
0.96615893, 0.98731619, 1.05517799, 1.02860493, 0.98881473,
0.85210319, 0.91497438, 0.9275787 , 0.97456134, 0.9011687 ,
0.69417417, 0.89661214, 0.79038577, 1.08118303, 1.0509366 ,
0.97813138, 0.85714945, 0.97330329, 0.83611871, 0.99772489,
0.83591193, 0.75592677, 0.85392601, 1.02734573, 0.72404609,
0.83534547, 0.91630472, 0.88463459, 1.12044562, 1.10991104,
0.96047701, 1.12342573, 0.72046647, 0.96852239, 0.89605698,
0.98310243, 0.92300659, 0.87794646, 0.83109321, 1.43297752,
0.80609029, 0.8692251 , 0.90254649, 0.81647796, 1.07521371,
1.03942973, 0.96156488, 1.25225334, 1.0265727 , 0.9518054 ,
0.87765718, 1.15552582, 0.79577766, 0.66849239, 0.87236017,
1.03437641, 0.98567811, 0.78463682, 1.09573491, 0.89858959,
0.94056747, 1.16075317, 1.06296054, 0.85844006, 0.95475376,
0.67038747, 0.7924646 , 0.94009167, 0.88282093, 0.97711174,
0.9209607 , 1.03230176, 0.99981312, 1.12345314, 1.11705968,
1.02453864, 0.91724212, 0.98337942, 0.89195196, 0.83800177,
0.95044243, 0.76543521, 0.8613025 , 0.83907753, 0.69333275,
0.84411739, 0.68621941, 0.9847701 , 1.13328481, 1.1432074 ,
0.97156328, 0.86464461, 0.74258211, 0.97319505, 1.11453917,
0.87344741, 0.91382664, 1.01635943, 1.38708812, 0.81377942,
1.3828856 , 0.74476285, 0.86657537, 1.1216954 , 0.91008346,
0.800862 , 0.98356936, 0.92409916, 1.13970543, 0.97547004,
0.99385865, 1.16476579, 0.78678084, 1.003947 , 0.81491463,
1.19724322, 0.9173622 , 0.93274116, 0.80047839, 0.86798029,
0.9433708 , 0.82376832, 1.01726905, 0.81914971, 0.73290844])
class Medpar1(object):
    '''
    Container for the medpar1 example data set.

    The medpar1 data can be found here.
    http://www.stata-press.com/data/hh2/medpar1

    Attributes
    ----------
    endog : ndarray
        Length of stay (``los`` column).
    exog : ndarray
        Design matrix: dummy-coded ``admitype`` (first level and first dummy
        dropped) stacked with ``codes``, plus an appended constant column.
    '''
    def __init__(self):
        filename = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                                "stata_medpar1_glm.csv")
        # Use a context manager so the file handle is closed deterministically
        # instead of leaking until garbage collection.  Column 1 is quoted in
        # the CSV, so strip the surrounding double quotes while parsing.
        with open(filename, 'rb') as fobj:
            data = np.recfromcsv(
                fobj, converters={1: lambda s: s.strip(asbytes("\""))})
        self.endog = data.los
        design = np.column_stack((data.admitype, data.codes))
        design = categorical(design, col=0, drop=True)
        design = np.delete(design, 1, axis=1)  # drop first dummy
        self.exog = add_constant(design, prepend=False)
class InvGaussLog(Medpar1):
"""
InvGaussLog is used with TestGlmInvgaussLog
"""
def __init__(self):
super(InvGaussLog, self).__init__()
filename = os.path.join(os.path.dirname(os.path.abspath(__file__)),
"medparlogresids.csv")
self.resids = np.genfromtxt(open(filename, 'rb'), delimiter=",")
self.null_deviance = 335.1539777981053 # from R, Rpy bug
self.params = np.array([ 0.09927544, -0.19161722, 1.05712336])
self.bse = np.array([ 0.00600728, 0.02632126, 0.04915765])
self.aic_R = 18545.836421595981
self.aic_Stata = 6.619000588187141
self.deviance = 304.27188306012789
self.scale = 0.10240599519220173
# self.llf = -9268.9182107979905 # from R
self.llf = -12162.72308108797 # from Stata, big rounding diff with R
self.bic_Stata = -29849.51723280784
self.chi2 = 398.5465213008323 # from Stata not in sm
self.df_model = 2
self.df_resid = 3673
self.fittedvalues = np.array([ 7.03292237, 7.03292237, 7.03292237,
7.03292237, 5.76642001,
7.03292237, 7.03292237, 6.36826384, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 5.76642001, 7.03292237,
5.22145448, 7.03292237, 5.22145448, 4.72799187, 4.72799187,
7.03292237, 7.03292237, 6.36826384, 7.03292237, 5.76642001,
7.03292237, 4.28116479, 7.03292237, 7.03292237, 7.03292237,
5.76642001, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 3.87656588, 7.03292237, 7.03292237, 4.28116479,
7.03292237, 7.03292237, 4.72799187, 7.03292237, 7.03292237,
7.03292237, 5.22145448, 6.36826384, 6.36826384, 4.28116479,
4.72799187, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
5.22145448, 7.03292237, 7.03292237, 6.36826384, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 5.76642001, 6.36826384,
6.36826384, 5.22145448, 7.03292237, 7.03292237, 7.03292237,
5.76642001, 7.03292237, 7.03292237, 3.87656588, 5.76642001,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
5.22145448, 5.22145448, 7.03292237, 6.36826384, 7.03292237,
7.03292237, 7.03292237, 4.72799187, 7.03292237, 6.36826384,
7.03292237, 6.36826384, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 5.22145448, 6.36826384, 5.22145448,
7.03292237, 7.03292237, 4.72799187, 5.76642001, 7.03292237,
4.72799187, 6.36826384, 3.87656588, 7.03292237, 7.03292237,
5.22145448, 5.22145448, 6.36826384, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 6.36826384, 4.28116479,
7.03292237, 6.36826384, 6.36826384, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 6.36826384, 7.03292237,
6.36826384, 7.03292237, 7.03292237, 6.36826384, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 5.76642001,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 5.76642001,
7.03292237, 7.03292237, 6.36826384, 3.87656588, 7.03292237,
7.03292237, 5.22145448, 7.03292237, 5.76642001, 4.28116479,
5.76642001, 6.36826384, 6.36826384, 7.03292237, 7.03292237,
5.76642001, 7.03292237, 7.03292237, 4.28116479, 7.03292237,
6.36826384, 7.03292237, 6.36826384, 7.03292237, 5.22145448,
7.03292237, 4.28116479, 4.72799187, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 6.36826384,
7.03292237, 4.28116479, 5.22145448, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 4.28116479, 7.03292237, 7.03292237,
7.03292237, 6.36826384, 5.76642001, 7.03292237, 7.03292237,
7.03292237, 4.72799187, 7.03292237, 6.36826384, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 6.36826384, 7.03292237,
6.36826384, 7.03292237, 6.36826384, 4.28116479, 5.76642001,
5.22145448, 6.36826384, 6.36826384, 7.03292237, 7.03292237,
7.03292237, 6.36826384, 7.03292237, 7.03292237, 6.36826384,
5.76642001, 7.03292237, 5.22145448, 5.76642001, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 6.36826384, 7.03292237,
4.28116479, 7.03292237, 5.22145448, 7.03292237, 6.36826384,
5.76642001, 4.28116479, 4.28116479, 7.03292237, 6.36826384,
7.03292237, 7.03292237, 7.03292237, 6.36826384, 4.28116479,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 5.22145448,
7.03292237, 7.03292237, 7.03292237, 5.22145448, 7.03292237,
5.76642001, 7.03292237, 4.72799187, 4.28116479, 6.36826384,
5.76642001, 7.03292237, 7.03292237, 7.03292237, 5.22145448,
7.03292237, 7.03292237, 7.03292237, 6.36826384, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 6.36826384, 7.03292237,
3.87656588, 4.72799187, 7.03292237, 7.03292237, 7.03292237,
4.72799187, 7.03292237, 7.03292237, 6.36826384, 7.03292237,
7.03292237, 6.36826384, 3.87656588, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 4.28116479, 7.03292237, 6.36826384,
7.03292237, 5.22145448, 5.22145448, 6.36826384, 7.03292237,
6.36826384, 6.36826384, 7.03292237, 4.28116479, 7.03292237,
7.03292237, 7.03292237, 5.22145448, 6.36826384, 7.03292237,
3.87656588, 6.36826384, 5.22145448, 5.76642001, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 4.28116479, 7.03292237,
5.22145448, 7.03292237, 6.36826384, 5.22145448, 4.72799187,
7.03292237, 7.03292237, 7.03292237, 4.72799187, 6.36826384,
7.03292237, 6.36826384, 5.76642001, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 5.22145448, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 3.87656588, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 4.72799187, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 6.36826384, 7.03292237, 7.03292237,
6.36826384, 7.03292237, 6.36826384, 7.03292237, 5.22145448,
6.36826384, 7.03292237, 6.36826384, 7.03292237, 6.36826384,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 3.87656588,
7.03292237, 7.03292237, 7.03292237, 6.36826384, 7.03292237,
7.03292237, 6.36826384, 7.03292237, 7.03292237, 6.36826384,
5.76642001, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
5.76642001, 7.03292237, 6.36826384, 7.03292237, 7.03292237,
7.03292237, 3.87656588, 7.03292237, 6.36826384, 6.36826384,
4.72799187, 5.76642001, 6.36826384, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 3.87656588, 5.22145448, 4.72799187,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 4.72799187, 6.36826384,
7.03292237, 7.03292237, 5.76642001, 7.03292237, 6.36826384,
7.03292237, 7.03292237, 7.03292237, 5.76642001, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 6.36826384,
7.03292237, 5.22145448, 5.76642001, 7.03292237, 5.76642001,
6.36826384, 5.76642001, 5.76642001, 7.03292237, 5.76642001,
7.03292237, 7.03292237, 7.03292237, 4.72799187, 6.36826384,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
6.36826384, 7.03292237, 6.36826384, 7.03292237, 4.72799187,
7.03292237, 7.03292237, 4.28116479, 6.36826384, 3.87656588,
7.03292237, 3.5102043 , 7.03292237, 7.03292237, 5.76642001,
5.22145448, 7.03292237, 5.76642001, 4.28116479, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 5.76642001, 4.72799187,
7.03292237, 6.36826384, 7.03292237, 5.22145448, 7.03292237,
4.72799187, 7.03292237, 7.03292237, 7.03292237, 5.22145448,
5.22145448, 4.72799187, 7.03292237, 6.36826384, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
4.28116479, 7.03292237, 7.03292237, 7.03292237, 5.76642001,
6.36826384, 7.03292237, 7.03292237, 5.76642001, 7.03292237,
7.03292237, 6.36826384, 4.72799187, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 4.72799187, 5.76642001, 7.03292237, 5.76642001,
6.36826384, 7.03292237, 7.03292237, 7.03292237, 4.72799187,
7.03292237, 7.03292237, 7.03292237, 5.76642001, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
6.36826384, 7.03292237, 7.03292237, 5.76642001, 6.36826384,
4.72799187, 7.03292237, 7.03292237, 7.03292237, 5.76642001,
7.03292237, 6.36826384, 5.22145448, 5.76642001, 4.72799187,
7.03292237, 7.03292237, 7.03292237, 6.36826384, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 5.22145448, 7.03292237, 7.03292237,
6.36826384, 7.03292237, 7.03292237, 5.76642001, 7.03292237,
7.03292237, 7.03292237, 6.36826384, 7.03292237, 5.22145448,
7.03292237, 7.03292237, 7.03292237, 5.76642001, 7.03292237,
6.36826384, 6.36826384, 7.03292237, 5.76642001, 7.03292237,
6.36826384, 7.03292237, 7.03292237, 7.03292237, 4.72799187,
5.22145448, 7.03292237, 3.87656588, 5.76642001, 6.36826384,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 6.36826384, 7.03292237, 7.03292237,
4.72799187, 7.03292237, 6.36826384, 7.03292237, 4.28116479,
7.03292237, 7.03292237, 5.76642001, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
4.72799187, 6.36826384, 3.87656588, 7.03292237, 7.03292237,
6.36826384, 4.72799187, 4.28116479, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 3.87656588, 7.03292237, 7.03292237, 7.03292237,
3.87656588, 7.03292237, 7.03292237, 6.36826384, 7.03292237,
7.03292237, 6.36826384, 7.03292237, 7.03292237, 3.87656588,
7.03292237, 4.72799187, 5.22145448, 5.22145448, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 6.36826384, 7.03292237,
7.03292237, 6.36826384, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 5.22145448, 5.22145448, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 4.72799187, 6.36826384, 5.76642001,
5.76642001, 6.36826384, 7.03292237, 7.03292237, 7.03292237,
6.36826384, 7.03292237, 7.03292237, 6.36826384, 7.03292237,
7.03292237, 4.72799187, 7.03292237, 5.76642001, 7.03292237,
7.03292237, 7.03292237, 4.72799187, 4.28116479, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 5.76642001,
7.03292237, 5.76642001, 7.03292237, 7.03292237, 7.03292237,
5.22145448, 7.03292237, 7.03292237, 7.03292237, 5.22145448,
6.36826384, 7.03292237, 7.03292237, 6.36826384, 6.36826384,
7.03292237, 7.03292237, 5.76642001, 7.03292237, 5.22145448,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 4.72799187, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 4.28116479,
7.03292237, 6.36826384, 7.03292237, 5.76642001, 4.28116479,
5.76642001, 7.03292237, 3.87656588, 7.03292237, 7.03292237,
7.03292237, 3.5102043 , 7.03292237, 7.03292237, 7.03292237,
7.03292237, 5.76642001, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 5.76642001, 5.76642001, 5.76642001, 7.03292237,
7.03292237, 6.36826384, 7.03292237, 6.36826384, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 5.76642001, 7.03292237, 4.28116479, 6.36826384,
5.76642001, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 6.36826384, 5.22145448,
7.03292237, 7.03292237, 7.03292237, 5.76642001, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 6.36826384, 7.03292237,
7.03292237, 5.76642001, 7.03292237, 7.03292237, 7.03292237,
6.36826384, 6.36826384, 7.03292237, 7.03292237, 6.36826384,
7.03292237, 7.03292237, 7.03292237, 6.36826384, 7.03292237,
7.03292237, 7.03292237, 5.22145448, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 3.5102043 , 7.03292237, 7.03292237,
7.03292237, 3.87656588, 6.36826384, 5.76642001, 7.03292237,
7.03292237, 6.36826384, 4.72799187, 7.03292237, 7.03292237,
5.76642001, 7.03292237, 3.87656588, 5.22145448, 6.36826384,
4.28116479, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 6.36826384,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 6.36826384,
7.03292237, 5.22145448, 6.36826384, 6.36826384, 7.03292237,
6.36826384, 7.03292237, 7.03292237, 7.03292237, 5.22145448,
7.03292237, 6.36826384, 7.03292237, 7.03292237, 4.72799187,
7.03292237, 7.03292237, 7.03292237, 6.36826384, 6.36826384,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
6.36826384, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 3.5102043 , 7.03292237, 5.22145448,
5.22145448, 7.03292237, 6.36826384, 7.03292237, 4.72799187,
7.03292237, 7.03292237, 7.03292237, 4.72799187, 7.03292237,
5.76642001, 7.03292237, 3.87656588, 7.03292237, 5.22145448,
3.87656588, 4.72799187, 6.36826384, 5.76642001, 7.03292237,
6.36826384, 7.03292237, 4.28116479, 5.76642001, 7.03292237,
7.03292237, 7.03292237, 5.22145448, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 4.28116479, 6.36826384, 7.03292237, 7.03292237,
7.03292237, 5.76642001, 6.36826384, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
3.5102043 , 4.72799187, 7.03292237, 4.28116479, 7.03292237,
4.72799187, 7.03292237, 5.22145448, 5.76642001, 5.76642001,
3.87656588, 5.76642001, 5.22145448, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 5.22145448, 6.36826384,
7.03292237, 7.03292237, 7.03292237, 5.22145448, 7.03292237,
7.03292237, 7.03292237, 5.22145448, 7.03292237, 7.03292237,
6.36826384, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 4.28116479,
4.72799187, 7.03292237, 7.03292237, 6.36826384, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 6.36826384, 7.03292237, 7.03292237, 6.36826384,
6.36826384, 5.76642001, 7.03292237, 5.76642001, 6.36826384,
7.03292237, 7.03292237, 7.03292237, 5.76642001, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
5.22145448, 7.03292237, 7.03292237, 5.76642001, 6.36826384,
5.76642001, 7.03292237, 7.03292237, 6.36826384, 7.03292237,
4.72799187, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 6.36826384, 5.76642001, 6.36826384, 4.72799187,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 4.72799187,
7.03292237, 6.36826384, 7.03292237, 5.22145448, 7.03292237,
7.03292237, 6.36826384, 7.03292237, 5.76642001, 6.36826384,
5.76642001, 7.03292237, 7.03292237, 7.03292237, 6.36826384,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
4.72799187, 7.03292237, 5.22145448, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 4.72799187, 6.36826384, 7.03292237,
7.03292237, 6.36826384, 7.03292237, 5.76642001, 5.22145448,
7.03292237, 7.03292237, 7.03292237, 5.22145448, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 4.28116479,
5.76642001, 7.03292237, 4.28116479, 7.03292237, 6.36826384,
7.03292237, 7.03292237, 4.28116479, 7.03292237, 7.03292237,
6.36826384, 3.87656588, 3.5102043 , 6.36826384, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 5.76642001,
7.03292237, 4.72799187, 5.76642001, 7.03292237, 7.03292237,
3.87656588, 7.03292237, 7.03292237, 7.03292237, 4.28116479,
7.03292237, 7.03292237, 7.03292237, 5.76642001, 5.76642001,
7.03292237, 6.36826384, 5.76642001, 7.03292237, 6.36826384,
5.76642001, 7.03292237, 5.76642001, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
4.28116479, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 4.72799187, 5.76642001, 6.36826384, 6.36826384,
7.03292237, 7.03292237, 7.03292237, 5.76642001, 4.28116479,
7.03292237, 5.76642001, 4.72799187, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 5.76642001,
6.36826384, 6.36826384, 7.03292237, 7.03292237, 6.36826384,
3.87656588, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
3.5102043 , 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 6.36826384, 7.03292237, 7.03292237,
7.03292237, 4.72799187, 7.03292237, 6.36826384, 4.72799187,
4.72799187, 7.03292237, 5.76642001, 7.03292237, 7.03292237,
7.03292237, 6.36826384, 4.28116479, 7.03292237, 7.03292237,
7.03292237, 5.76642001, 7.03292237, 7.03292237, 7.03292237,
4.72799187, 7.03292237, 7.03292237, 6.36826384, 5.22145448,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 5.22145448, 7.03292237, 7.03292237, 6.36826384,
7.03292237, 7.03292237, 5.22145448, 7.03292237, 6.36826384,
6.36826384, 7.03292237, 6.36826384, 7.03292237, 7.03292237,
7.03292237, 6.36826384, 6.36826384, 7.03292237, 4.72799187,
4.28116479, 4.72799187, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 4.28116479,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
6.36826384, 4.28116479, 4.28116479, 7.03292237, 5.22145448,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 6.36826384, 7.03292237, 7.03292237, 7.03292237,
5.76642001, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 4.72799187, 7.03292237,
3.87656588, 7.03292237, 4.72799187, 7.03292237, 7.03292237,
7.03292237, 5.22145448, 7.03292237, 4.28116479, 7.03292237,
7.03292237, 4.72799187, 5.22145448, 7.03292237, 7.03292237,
7.03292237, 6.36826384, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 6.36826384, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 5.76642001, 5.22145448,
7.03292237, 7.03292237, 3.87656588, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 4.72799187, 6.36826384,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
4.28116479, 7.03292237, 7.03292237, 7.03292237, 5.76642001,
7.03292237, 5.22145448, 4.72799187, 7.03292237, 7.03292237,
7.03292237, 6.36826384, 5.76642001, 7.03292237, 5.76642001,
7.03292237, 4.28116479, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 5.76642001, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 5.76642001,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 5.76642001, 7.03292237, 3.87656588,
6.36826384, 5.76642001, 7.03292237, 4.28116479, 7.03292237,
5.76642001, 5.22145448, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 6.36826384, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
5.76642001, 7.03292237, 7.03292237, 7.03292237, 3.5102043 ,
7.03292237, 7.03292237, 7.03292237, 6.36826384, 7.03292237,
7.03292237, 4.28116479, 4.72799187, 6.36826384, 7.03292237,
7.03292237, 4.28116479, 5.76642001, 7.03292237, 7.03292237,
7.03292237, 4.28116479, 7.03292237, 7.03292237, 5.22145448,
6.36826384, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 4.72799187,
7.03292237, 5.22145448, 6.36826384, 7.03292237, 6.36826384,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 5.22145448,
7.03292237, 7.03292237, 5.22145448, 7.03292237, 6.36826384,
7.03292237, 7.03292237, 5.76642001, 7.03292237, 7.03292237,
3.5102043 , 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 6.36826384, 7.03292237,
7.03292237, 5.76642001, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 6.36826384,
4.72799187, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
6.36826384, 7.03292237, 6.36826384, 4.72799187, 5.22145448,
5.76642001, 7.03292237, 6.36826384, 6.36826384, 7.03292237,
6.36826384, 7.03292237, 5.22145448, 4.72799187, 5.76642001,
6.36826384, 7.03292237, 7.03292237, 5.76642001, 5.22145448,
7.03292237, 6.36826384, 3.87656588, 6.36826384, 7.03292237,
5.76642001, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 3.5102043 , 7.03292237, 7.03292237, 7.03292237,
5.22145448, 7.03292237, 6.36826384, 7.03292237, 6.36826384,
7.03292237, 6.36826384, 5.22145448, 7.03292237, 7.03292237,
7.03292237, 6.36826384, 6.36826384, 7.03292237, 7.03292237,
6.36826384, 4.72799187, 7.03292237, 5.22145448, 7.03292237,
4.72799187, 7.03292237, 4.28116479, 7.03292237, 7.03292237,
6.36826384, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
4.28116479, 6.36826384, 7.03292237, 3.87656588, 7.03292237,
7.03292237, 7.03292237, 6.36826384, 7.03292237, 7.03292237,
7.03292237, 6.36826384, 7.03292237, 5.22145448, 7.03292237,
7.03292237, 5.76642001, 6.36826384, 7.03292237, 4.72799187,
7.03292237, 7.03292237, 5.22145448, 7.03292237, 3.5102043 ,
6.36826384, 6.36826384, 7.03292237, 6.36826384, 7.03292237,
5.22145448, 6.36826384, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 5.76642001, 4.28116479, 7.03292237, 7.03292237,
4.72799187, 4.72799187, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 5.76642001, 7.03292237, 5.76642001,
4.28116479, 7.03292237, 4.28116479, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 3.5102043 , 7.03292237, 5.22145448,
7.03292237, 6.36826384, 7.03292237, 6.36826384, 7.03292237,
4.72799187, 7.03292237, 7.03292237, 4.72799187, 3.5102043 ,
3.17846635, 3.87656588, 5.22145448, 6.36826384, 7.03292237,
4.28116479, 7.03292237, 7.03292237, 6.36826384, 7.03292237,
6.36826384, 7.03292237, 7.03292237, 5.76642001, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 4.72799187, 7.03292237,
7.03292237, 7.03292237, 5.76642001, 7.03292237, 3.5102043 ,
7.03292237, 7.03292237, 5.22145448, 6.36826384, 3.87656588,
4.72799187, 7.03292237, 7.03292237, 3.87656588, 7.03292237,
6.36826384, 7.03292237, 6.36826384, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 4.72799187, 7.03292237,
7.03292237, 6.36826384, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 5.76642001, 7.03292237, 4.28116479, 7.03292237,
7.03292237, 7.03292237, 4.72799187, 6.36826384, 6.36826384,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
6.36826384, 4.72799187, 6.36826384, 7.03292237, 7.03292237,
5.22145448, 7.03292237, 5.76642001, 7.03292237, 7.03292237,
7.03292237, 5.76642001, 7.03292237, 6.36826384, 6.36826384,
7.03292237, 7.03292237, 6.36826384, 7.03292237, 5.22145448,
7.03292237, 5.22145448, 5.22145448, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
4.72799187, 4.28116479, 7.03292237, 6.36826384, 7.03292237,
5.76642001, 7.03292237, 7.03292237, 7.03292237, 4.72799187,
7.03292237, 5.76642001, 7.03292237, 4.72799187, 7.03292237,
7.03292237, 4.72799187, 5.76642001, 6.36826384, 7.03292237,
4.28116479, 6.36826384, 7.03292237, 6.36826384, 5.76642001,
7.03292237, 4.28116479, 5.22145448, 4.72799187, 7.03292237,
7.03292237, 6.36826384, 5.22145448, 7.03292237, 5.76642001,
7.03292237, 7.03292237, 7.03292237, 6.36826384, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 4.28116479, 7.03292237,
6.36826384, 5.22145448, 5.76642001, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 6.36826384, 7.03292237, 7.03292237,
5.22145448, 7.03292237, 7.03292237, 6.36826384, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 4.28116479,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 6.36826384, 7.03292237, 7.03292237, 7.03292237,
6.36826384, 7.03292237, 4.72799187, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 5.22145448, 6.36826384, 7.03292237,
5.76642001, 5.76642001, 7.03292237, 7.03292237, 7.03292237,
4.28116479, 7.03292237, 5.76642001, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 6.36826384, 7.03292237,
7.03292237, 7.03292237, 3.87656588, 6.36826384, 6.36826384,
5.22145448, 7.03292237, 5.22145448, 7.03292237, 7.03292237,
7.03292237, 4.28116479, 7.03292237, 3.87656588, 7.03292237,
7.03292237, 5.22145448, 6.36826384, 4.72799187, 7.03292237,
7.03292237, 7.03292237, 6.36826384, 7.03292237, 5.76642001,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 5.22145448,
4.28116479, 7.03292237, 6.36826384, 7.03292237, 7.03292237,
5.76642001, 5.22145448, 5.76642001, 7.03292237, 4.28116479,
7.03292237, 7.03292237, 4.72799187, 6.36826384, 7.03292237,
4.72799187, 5.76642001, 7.03292237, 7.03292237, 6.36826384,
6.36826384, 5.76642001, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 4.72799187, 7.03292237, 6.36826384,
7.03292237, 4.72799187, 4.72799187, 5.76642001, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 4.72799187,
7.03292237, 7.03292237, 7.03292237, 6.36826384, 7.03292237,
5.76642001, 7.03292237, 4.72799187, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
6.36826384, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 6.36826384, 7.03292237,
3.5102043 , 6.36826384, 5.22145448, 7.03292237, 5.22145448,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
4.72799187, 7.03292237, 6.36826384, 7.03292237, 7.03292237,
7.03292237, 6.36826384, 7.03292237, 7.03292237, 7.03292237,
6.36826384, 4.72799187, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 6.36826384, 7.03292237, 5.76642001, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 5.22145448, 4.72799187,
7.03292237, 7.03292237, 7.03292237, 4.28116479, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 3.87656588, 7.03292237,
5.22145448, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
3.5102043 , 7.03292237, 6.36826384, 7.03292237, 7.03292237,
7.03292237, 4.72799187, 7.03292237, 7.03292237, 4.28116479,
6.36826384, 7.03292237, 5.22145448, 7.03292237, 7.03292237,
5.76642001, 7.03292237, 7.03292237, 7.03292237, 5.76642001,
4.72799187, 7.03292237, 4.72799187, 6.36826384, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 6.36826384, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 3.87656588, 5.22145448, 7.03292237, 7.03292237,
6.36826384, 4.28116479, 7.03292237, 5.76642001, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 3.87656588, 6.36826384, 7.03292237,
7.03292237, 5.76642001, 7.03292237, 5.22145448, 7.03292237,
5.76642001, 4.72799187, 7.03292237, 7.03292237, 6.36826384,
7.03292237, 7.03292237, 7.03292237, 6.36826384, 5.76642001,
5.22145448, 7.03292237, 5.76642001, 6.36826384, 4.28116479,
7.03292237, 4.72799187, 3.87656588, 5.22145448, 7.03292237,
6.36826384, 5.76642001, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 6.36826384, 5.76642001, 6.36826384, 7.03292237,
5.76642001, 7.03292237, 5.76642001, 5.22145448, 3.87656588,
5.76642001, 6.36826384, 7.03292237, 5.22145448, 6.36826384,
5.22145448, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
6.36826384, 7.03292237, 5.76642001, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 4.72799187, 5.76642001, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 4.72799187,
7.03292237, 7.03292237, 7.03292237, 6.36826384, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 5.22145448, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 3.5102043 ,
3.87656588, 7.03292237, 4.72799187, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 3.87656588,
5.22145448, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 5.76642001, 7.03292237, 7.03292237, 7.03292237,
4.28116479, 7.03292237, 4.72799187, 4.72799187, 7.03292237,
6.36826384, 5.76642001, 7.03292237, 4.28116479, 7.03292237,
7.03292237, 7.03292237, 5.76642001, 7.03292237, 7.03292237,
5.76642001, 5.22145448, 7.03292237, 4.72799187, 7.03292237,
4.28116479, 5.76642001, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 4.28116479, 7.03292237,
7.03292237, 7.03292237, 5.22145448, 5.22145448, 7.03292237,
7.03292237, 7.03292237, 5.76642001, 6.36826384, 7.03292237,
7.03292237, 5.22145448, 7.03292237, 7.03292237, 5.76642001,
5.22145448, 7.03292237, 7.03292237, 7.03292237, 3.87656588,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 5.76642001, 7.03292237, 6.36826384,
7.03292237, 7.03292237, 7.03292237, 6.36826384, 6.36826384,
7.03292237, 7.03292237, 7.03292237, 4.28116479, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 3.5102043 ,
7.03292237, 7.03292237, 7.03292237, 5.76642001, 4.28116479,
5.22145448, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 6.36826384, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 5.76642001, 6.36826384, 7.03292237,
5.22145448, 5.76642001, 5.76642001, 7.03292237, 7.03292237,
5.22145448, 7.03292237, 7.03292237, 5.22145448, 6.36826384,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 5.22145448,
6.36826384, 5.22145448, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 5.22145448, 7.03292237, 5.76642001, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 4.72799187, 7.03292237,
7.03292237, 7.03292237, 6.36826384, 4.72799187, 5.22145448,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
6.36826384, 7.03292237, 7.03292237, 5.76642001, 6.36826384,
7.03292237, 7.03292237, 7.03292237, 6.36826384, 7.03292237,
4.72799187, 3.87656588, 7.03292237, 7.03292237, 4.72799187,
7.03292237, 7.03292237, 6.36826384, 7.03292237, 5.22145448,
7.03292237, 7.03292237, 7.03292237, 3.87656588, 5.76642001,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 5.76642001,
5.22145448, 7.03292237, 6.36826384, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 5.76642001,
5.76642001, 7.03292237, 5.76642001, 3.87656588, 6.36826384,
7.03292237, 7.03292237, 7.03292237, 6.36826384, 5.76642001,
5.22145448, 7.03292237, 5.22145448, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 5.22145448, 4.72799187,
7.03292237, 6.36826384, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 5.22145448, 6.36826384,
7.03292237, 7.03292237, 3.17846635, 5.76642001, 7.03292237,
3.5102043 , 7.03292237, 7.03292237, 7.03292237, 3.87656588,
7.03292237, 6.36826384, 6.36826384, 7.03292237, 5.22145448,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 4.28116479, 6.36826384, 7.03292237, 6.36826384,
4.72799187, 7.03292237, 7.03292237, 5.22145448, 4.28116479,
7.03292237, 6.36826384, 7.03292237, 4.72799187, 5.76642001,
6.36826384, 5.22145448, 7.03292237, 7.03292237, 7.03292237,
6.36826384, 7.03292237, 7.03292237, 3.87656588, 7.03292237,
4.72799187, 7.03292237, 3.53462742, 4.76088805, 5.25778406,
4.31095206, 5.80654132, 5.80654132, 5.80654132, 5.80654132,
5.80654132, 5.25778406, 5.25778406, 5.80654132, 5.80654132,
3.90353806, 5.25778406, 4.31095206, 5.80654132, 5.25778406,
3.53462742, 2.89810483, 5.80654132, 5.25778406, 5.80654132,
2.89810483, 5.80654132, 5.25778406, 3.53462742, 4.76088805,
5.80654132, 3.20058132, 5.80654132, 5.80654132, 4.76088805,
5.80654132, 3.53462742, 3.53462742, 5.80654132, 5.80654132,
5.80654132, 4.76088805, 5.80654132, 4.76088805, 3.90353806,
5.80654132, 3.53462742, 5.80654132, 2.6242144 , 3.20058132,
5.80654132, 5.80654132, 3.90353806, 3.20058132, 5.80654132,
5.80654132, 5.80654132, 5.80654132, 5.80654132, 5.80654132,
2.89810483, 5.80654132, 5.80654132, 3.90353806, 3.53462742,
4.31095206, 5.80654132, 5.80654132, 4.76088805, 5.80654132,
3.53462742, 5.80654132, 4.76088805, 2.89810483, 5.25778406,
4.31095206, 5.80654132, 4.31095206, 5.80654132, 5.80654132,
4.76088805, 4.31095206, 5.80654132, 5.80654132, 5.80654132,
5.80654132, 5.80654132, 4.76088805, 5.80654132, 5.25778406,
5.25778406, 5.80654132, 5.80654132, 3.53462742, 5.80654132,
3.53462742, 5.80654132, 4.31095206, 5.80654132, 5.80654132,
5.25778406, 5.80654132, 3.20058132, 5.80654132, 5.80654132,
3.20058132, 3.90353806, 5.80654132, 5.80654132, 5.25778406,
3.53462742, 3.20058132, 5.80654132, 4.31095206, 5.80654132,
5.80654132, 5.80654132, 3.20058132, 5.80654132, 5.80654132,
5.80654132, 5.80654132, 4.31095206, 5.80654132, 3.90353806,
5.80654132, 4.31095206, 4.31095206, 5.80654132, 4.76088805,
3.90353806, 3.90353806, 4.76088805, 3.90353806, 5.80654132,
5.80654132, 5.80654132, 5.80654132, 5.80654132, 5.80654132,
5.80654132, 5.25778406, 3.53462742, 5.80654132, 3.53462742,
5.80654132, 5.80654132, 5.80654132, 2.89810483, 5.80654132,
5.80654132, 5.80654132, 5.80654132, 4.76088805, 4.76088805,
5.80654132, 2.89810483, 5.80654132, 4.76088805, 5.80654132,
5.80654132, 4.31095206, 3.20058132, 5.80654132, 4.76088805,
5.80654132, 2.89810483, 2.89810483, 5.25778406, 3.90353806,
5.80654132, 5.80654132, 5.25778406, 5.80654132, 5.80654132,
3.90353806, 5.80654132, 5.25778406, 4.76088805, 5.80654132,
2.89810483, 5.25778406, 5.80654132, 5.80654132, 4.31095206,
5.25778406, 5.80654132, 5.80654132, 5.80654132, 5.80654132,
2.89810483, 5.80654132, 3.53462742, 3.90353806, 5.25778406,
5.80654132, 3.20058132, 2.89810483, 5.80654132, 4.31095206,
5.80654132, 3.53462742, 5.25778406, 4.76088805, 5.80654132,
3.53462742, 3.90353806, 5.80654132, 3.20058132, 5.80654132,
5.80654132, 3.53462742, 5.25778406, 4.76088805, 4.76088805,
5.80654132, 5.80654132, 2.89810483, 3.20058132, 5.80654132,
5.80654132, 5.80654132, 5.80654132, 5.25778406, 5.25778406,
5.80654132, 5.80654132, 4.76088805, 5.80654132, 4.31095206,
5.25778406, 5.80654132, 4.31095206, 4.31095206, 5.80654132,
5.80654132, 3.53462742, 4.76088805, 3.53462742, 4.76088805,
4.31095206, 5.80654132, 3.90353806, 5.80654132, 4.76088805,
5.80654132, 5.80654132, 5.80654132, 4.31095206, 3.90353806,
5.80654132, 4.76088805, 4.76088805, 3.53462742, 5.80654132,
5.80654132, 5.25778406, 3.53462742, 3.20058132, 3.53462742,
3.90353806, 5.80654132, 4.31095206, 4.76088805, 5.80654132,
5.80654132, 5.80654132, 3.90353806, 4.76088805, 2.89810483,
5.80654132, 5.80654132, 5.80654132, 4.76088805, 5.25778406,
5.80654132, 5.80654132, 5.80654132, 5.80654132, 5.80654132,
5.80654132, 5.80654132, 3.90353806, 5.25778406, 4.76088805,
5.80654132, 4.76088805, 3.90353806, 5.80654132, 5.80654132,
4.76088805, 5.80654132, 5.25778406, 5.80654132, 2.89810483,
5.80654132, 5.25778406, 3.90353806, 3.90353806, 5.80654132,
5.25778406, 3.53462742, 5.80654132, 4.76088805, 5.25778406,
5.80654132, 3.90353806, 4.31095206, 5.80654132, 5.25778406,
3.90353806, 3.53462742, 5.25778406, 2.89810483, 5.80654132,
3.53462742, 4.76088805, 4.31095206, 5.80654132, 5.80654132,
5.80654132, 5.80654132, 5.80654132, 3.90353806, 5.80654132,
4.31095206, 5.80654132, 5.80654132, 5.25778406, 5.80654132,
5.80654132, 5.80654132, 5.80654132, 5.25778406, 5.25778406,
5.80654132, 5.25778406, 5.80654132, 5.80654132, 5.80654132,
5.80654132, 5.25778406, 4.31095206, 5.80654132, 5.25778406,
5.80654132, 5.25778406, 5.80654132, 5.80654132, 5.80654132,
5.80654132, 4.31095206, 5.25778406, 3.53462742, 2.89810483,
5.80654132, 5.80654132, 3.20058132, 5.80654132, 4.31095206,
5.80654132, 5.80654132, 5.80654132, 5.80654132, 3.90353806,
3.90353806, 3.90353806, 5.80654132, 5.80654132, 5.80654132,
5.80654132, 4.76088805, 3.20058132, 4.31095206, 5.80654132,
3.90353806, 5.80654132, 5.80654132, 5.80654132, 5.80654132,
5.80654132, 3.90353806, 5.80654132, 5.80654132, 5.80654132,
5.80654132, 3.90353806, 5.80654132, 3.90353806, 3.53462742,
5.80654132, 4.76088805, 5.80654132, 5.80654132, 5.80654132,
5.80654132, 5.80654132, 5.80654132, 5.80654132, 5.80654132,
4.76088805, 5.25778406, 5.80654132, 5.80654132, 5.80654132,
5.80654132, 5.80654132, 5.80654132, 5.80654132, 5.25778406,
3.53462742, 5.25778406, 5.80654132, 3.53462742, 5.80654132,
3.90353806, 5.80654132, 5.80654132, 5.80654132, 3.90353806,
3.20058132, 5.80654132, 5.80654132, 3.90353806, 5.80654132,
5.80654132, 5.80654132, 5.80654132, 5.80654132, 5.80654132,
5.80654132, 5.80654132, 5.80654132, 5.80654132, 5.80654132,
5.80654132, 5.80654132, 3.53462742, 5.80654132, 5.80654132,
5.80654132, 5.80654132, 3.53462742, 5.25778406, 3.90353806,
5.80654132, 4.76088805, 4.76088805, 3.90353806, 5.80654132,
5.80654132, 4.31095206, 2.89810483, 5.80654132, 5.80654132,
3.90353806, 5.80654132, 3.53462742, 3.90353806, 5.80654132,
5.80654132, 4.76088805, 5.80654132, 4.31095206, 5.25778406,
5.25778406, 3.20058132, 3.53462742, 5.80654132, 4.31095206,
5.80654132, 4.76088805, 3.90353806, 4.76088805, 4.76088805,
5.80654132, 5.80654132, 5.25778406, 3.90353806, 5.80654132,
5.80654132, 5.80654132, 5.80654132, 5.80654132, 5.80654132,
5.80654132, 3.53462742, 4.31095206, 3.90353806, 4.76088805,
4.31095206, 3.53462742, 3.90353806, 5.80654132, 5.80654132,
5.80654132, 5.80654132, 3.20058132, 5.80654132, 5.80654132,
5.80654132, 5.80654132, 5.80654132, 3.90353806, 4.76088805,
5.25778406, 3.53462742, 3.20058132, 5.80654132, 3.90353806,
5.80654132, 3.53462742, 5.80654132, 5.80654132, 3.90353806,
5.80654132, 3.90353806, 5.80654132, 5.80654132, 5.80654132,
5.80654132, 4.76088805, 3.90353806, 4.76088805, 5.25778406,
2.89810483, 5.80654132, 4.31095206, 5.80654132, 4.76088805,
5.80654132, 5.25778406, 5.80654132, 5.80654132, 5.80654132,
3.53462742, 2.89810483, 5.80654132, 5.80654132, 5.80654132,
3.90353806, 4.76088805, 5.80654132, 5.25778406, 4.76088805,
5.25778406, 5.80654132, 5.80654132, 5.25778406, 5.80654132,
5.80654132, 5.80654132, 2.89810483, 5.25778406, 5.80654132,
5.80654132, 4.76088805, 4.76088805, 5.25778406, 5.80654132,
5.80654132, 4.31095206, 3.20058132, 3.53462742, 5.80654132,
5.80654132, 5.80654132, 5.80654132, 5.80654132, 5.25778406,
5.80654132, 5.80654132, 3.90353806, 4.76088805, 5.80654132,
3.53462742, 5.80654132, 5.25778406, 2.89810483, 5.80654132,
5.25778406, 5.80654132, 5.80654132, 5.80654132, 5.25778406,
5.80654132, 5.80654132, 5.80654132, 5.80654132, 5.80654132,
5.80654132, 4.31095206, 5.80654132, 3.20058132, 5.80654132,
5.25778406, 4.76088805, 5.25778406, 5.80654132, 4.76088805,
5.80654132, 3.90353806, 4.31095206, 5.80654132, 5.80654132,
5.80654132, 5.80654132, 5.25778406, 5.80654132, 3.90353806,
4.76088805, 3.90353806, 5.80654132, 3.53462742, 5.80654132,
5.80654132, 5.80654132, 5.80654132, 3.53462742, 5.80654132,
4.76088805, 5.80654132, 5.80654132, 5.80654132, 5.80654132,
5.80654132, 5.80654132, 5.80654132, 5.80654132, 3.90353806,
2.6242144 , 5.80654132, 5.80654132, 5.80654132, 5.80654132,
4.76088805, 5.80654132, 3.53462742, 5.80654132, 5.80654132,
3.90353806, 5.80654132, 5.80654132, 5.80654132, 5.80654132,
5.80654132, 5.80654132, 3.20058132, 3.20058132, 5.80654132,
5.80654132, 5.80654132, 3.90353806, 5.80654132, 5.25778406,
4.31095206, 5.25778406, 4.31095206, 4.31095206, 4.76088805,
5.80654132, 4.76088805, 5.80654132, 3.53462742, 5.80654132,
5.80654132, 5.80654132, 5.80654132, 5.80654132, 3.20058132,
5.80654132, 3.90353806, 5.80654132, 4.76088805, 5.80654132,
3.90353806, 5.80654132, 5.80654132, 5.80654132, 5.80654132,
5.80654132, 5.25778406, 5.80654132, 4.31095206, 5.25778406,
4.31095206, 5.80654132, 3.90353806, 5.80654132, 3.53462742,
5.25778406, 5.80654132, 5.80654132, 4.31095206, 3.90353806,
3.53462742, 5.80654132, 5.80654132, 5.80654132, 4.31095206,
5.80654132, 5.80654132, 5.25778406, 4.76088805, 4.31095206,
3.20058132, 5.80654132, 3.53462742, 3.20058132, 5.80654132,
5.80654132, 3.20058132, 3.20058132, 5.80654132, 4.31095206,
4.31095206, 5.80654132, 5.80654132, 3.90353806, 3.90353806,
3.53462742, 5.80654132, 3.90353806, 3.53462742, 5.80654132,
3.90353806, 5.25778406, 5.80654132, 3.53462742, 5.80654132,
5.25778406, 5.80654132, 4.31095206, 3.90353806, 5.80654132,
5.80654132, 4.31095206, 5.25778406, 5.80654132, 5.80654132,
5.80654132, 5.80654132, 5.80654132, 5.80654132, 5.25778406,
3.20058132, 5.25778406, 2.89810483, 3.90353806, 5.80654132,
3.53462742, 5.80654132, 5.25778406, 5.80654132, 2.89810483,
5.80654132, 5.80654132, 5.80654132, 5.80654132, 3.20058132,
5.80654132, 5.25778406, 3.53462742, 4.31095206, 4.76088805,
3.90353806, 5.80654132, 5.80654132, 5.25778406, 3.90353806,
4.76088805, 4.31095206, 5.80654132, 5.80654132, 5.80654132,
5.80654132, 5.80654132, 3.90353806, 5.80654132, 5.80654132,
5.80654132, 5.80654132, 5.80654132, 5.80654132, 5.25778406,
3.53462742, 5.80654132, 5.80654132, 5.25778406, 5.80654132,
3.20058132, 5.80654132, 4.76088805, 5.80654132, 4.76088805,
5.80654132, 5.25778406, 5.80654132, 5.80654132, 5.80654132,
5.80654132, 5.25778406, 2.89810483, 5.80654132, 5.80654132,
2.89810483, 3.53462742, 5.80654132, 5.80654132, 2.89810483,
4.31095206, 3.53462742, 4.31095206, 5.80654132, 5.80654132,
5.80654132, 5.80654132, 5.80654132, 5.80654132, 4.31095206,
4.76088805, 5.25778406, 5.80654132, 5.80654132, 5.80654132,
5.80654132, 5.25778406, 3.90353806, 5.80654132, 5.25778406,
5.80654132, 2.89810483, 2.89810483, 5.80654132, 3.53462742,
5.80654132, 3.53462742, 5.80654132, 4.31095206, 2.89810483,
5.80654132, 5.80654132, 2.89810483, 4.76088805, 5.80654132,
5.80654132, 3.20058132, 5.80654132, 3.90353806, 5.80654132,
5.80654132, 3.20058132, 3.90353806, 4.76088805, 4.76088805,
5.80654132, 3.90353806, 4.31095206, 5.80654132, 4.31095206,
5.80654132, 3.20058132, 4.31095206, 4.76088805, 3.53462742,
5.80654132, 5.80654132, 3.53462742, 3.53462742, 3.53462742,
5.80654132, 5.80654132, 3.90353806, 3.90353806, 3.20058132,
5.80654132, 5.80654132, 2.89810483, 3.90353806, 5.80654132,
2.89810483, 3.53462742, 3.53462742, 4.31095206, 5.80654132,
3.53462742, 5.80654132, 5.80654132, 5.80654132, 5.80654132,
5.80654132, 5.80654132, 5.25778406, 5.80654132, 5.80654132,
5.80654132, 5.80654132, 5.80654132, 5.80654132, 5.80654132,
5.80654132, 4.76088805, 5.80654132, 5.80654132, 4.76088805,
5.80654132, 5.80654132, 4.76088805, 4.76088805, 5.80654132,
5.25778406, 4.31095206, 5.80654132, 4.76088805, 3.90353806,
4.31095206, 5.80654132, 2.89810483, 4.31095206, 5.25778406,
5.80654132, 5.80654132, 5.80654132, 5.80654132, 3.20058132,
5.25778406, 5.80654132, 4.76088805, 5.80654132, 4.31095206,
5.80654132, 5.80654132, 4.76088805, 4.31095206, 5.80654132,
5.80654132, 5.80654132, 5.80654132, 5.80654132, 4.31095206,
4.31095206, 3.20058132, 4.76088805, 5.80654132, 3.20058132,
3.20058132, 5.80654132, 3.90353806, 5.25778406, 3.20058132,
4.76088805, 3.20058132, 3.53462742, 4.76088805, 5.80654132,
5.80654132, 4.31095206, 4.76088805, 5.80654132, 4.31095206,
5.80654132, 4.76088805, 4.31095206, 2.89810483, 5.80654132,
5.80654132, 5.80654132, 4.76088805, 5.80654132, 5.80654132,
5.80654132, 5.80654132, 4.76088805, 5.25778406, 4.31095206,
5.80654132, 3.90353806, 3.53462742, 4.76088805, 5.80654132,
4.31095206, 5.80654132, 5.80654132, 3.20058132, 5.80654132,
5.25778406, 5.80654132, 5.80654132, 5.80654132, 3.53462742,
2.6242144 , 5.80654132, 5.80654132, 3.53462742, 5.25778406,
3.90353806, 5.80654132, 2.89810483, 5.80654132, 3.90353806,
5.80654132, 5.80654132, 3.90353806, 2.89810483, 5.80654132,
4.76088805, 4.31095206, 5.80654132, 5.25778406, 5.80654132,
5.80654132, 4.31095206, 5.80654132, 5.80654132, 5.80654132,
3.90353806, 4.76088805, 5.80654132, 4.76088805, 5.80654132,
4.76088805, 3.53462742, 3.90353806, 5.80654132, 5.80654132,
5.80654132, 5.25778406, 5.80654132, 5.80654132, 5.25778406,
5.80654132, 5.80654132, 5.80654132, 5.80654132, 5.80654132,
5.80654132, 5.80654132, 5.80654132, 5.80654132, 5.80654132,
3.53462742, 3.53462742, 3.90353806, 5.80654132, 4.31095206,
3.53462742, 5.80654132, 4.76088805, 4.76088805, 3.20058132,
3.90353806, 5.80654132, 5.25778406, 5.80654132, 5.80654132,
5.80654132, 5.80654132, 4.31095206, 5.25778406, 4.31095206,
5.80654132, 3.20058132, 5.80654132, 4.31095206, 4.31095206,
4.76088805, 5.80654132, 4.76088805, 4.31095206, 5.80654132,
5.25778406, 3.53462742, 3.53462742, 5.25778406, 5.80654132,
3.90353806, 5.25778406, 4.31095206, 4.31095206, 3.53462742,
5.80654132, 3.90353806, 5.80654132, 5.80654132, 4.76088805,
5.25778406, 3.20058132, 3.90353806, 5.80654132, 5.25778406,
5.80654132, 5.80654132, 5.25778406, 5.80654132, 4.31095206,
5.25778406, 4.76088805, 5.80654132, 5.80654132, 5.25778406,
3.53462742, 5.80654132, 5.80654132, 5.80654132, 5.25778406,
5.25778406, 5.80654132, 3.20058132, 5.80654132, 5.80654132,
3.53462742, 5.80654132, 5.80654132, 5.80654132, 4.31095206,
5.80654132, 4.76088805, 5.80654132, 5.80654132, 5.80654132,
3.90353806, 4.31095206, 5.25778406, 5.80654132, 3.53462742,
3.90353806, 5.25778406, 4.31095206, 5.80654132, 5.25778406,
5.25778406, 2.89810483, 5.80654132, 5.80654132, 5.80654132,
5.80654132, 5.80654132, 5.25778406, 5.80654132, 4.76088805,
5.80654132, 5.80654132, 5.80654132, 4.31095206, 5.80654132,
3.20058132, 3.90353806, 5.80654132, 5.80654132, 5.25778406,
5.80654132, 5.80654132, 5.80654132, 5.80654132, 5.80654132,
5.80654132, 5.80654132, 2.6242144 , 5.80654132, 3.90353806,
5.25778406, 4.76088805, 5.80654132, 5.80654132, 3.90353806,
5.80654132, 3.53462742, 2.89810483, 5.80654132, 3.53462742,
2.89810483, 4.76088805, 5.80654132, 5.80654132, 5.80654132,
4.31095206, 5.80654132, 4.76088805, 3.90353806, 2.89810483,
4.76088805, 5.80654132, 2.6242144 , 3.53462742, 4.31095206,
5.25778406, 5.25778406, 3.20058132, 4.31095206, 4.31095206,
3.20058132, 4.31095206, 5.25778406, 4.31095206, 5.25778406,
3.90353806, 4.31095206, 5.80654132, 5.80654132, 5.80654132,
5.80654132, 3.90353806, 5.80654132, 5.80654132, 5.80654132,
4.31095206, 5.80654132, 5.80654132, 5.80654132, 3.90353806,
5.25778406, 3.90353806, 4.31095206, 4.76088805, 3.90353806,
5.80654132, 5.80654132, 5.80654132, 2.89810483, 5.80654132,
5.80654132, 5.80654132, 5.80654132, 5.80654132, 5.80654132,
5.80654132, 3.90353806, 3.20058132, 5.25778406, 4.76088805,
5.25778406])
class InvGaussIdentity(Medpar1):
"""
Accuracy is different for R vs Stata ML vs Stata IRLS, we are close.
"""
def __init__(self):
super(InvGaussIdentity, self).__init__()
self.params = np.array([ 0.44538838, -1.05872706, 2.83947966])
self.bse = np.array([ 0.02586783, 0.13830023, 0.20834864])
filename = os.path.join(os.path.dirname(os.path.abspath(__file__)),
"igaussident_resids.csv")
self.resids = np.genfromtxt(open(filename, 'rb'), delimiter=",")
self.null_deviance = 335.1539777981053 # from R, Rpy bug
self.df_null = 3675
self.deviance = 305.33661191013988
self.df_resid = 3673
self.df_model = 2
self.aic_R = 18558.677276882016
self.aic_Stata = 6.619290231464371
self.bic_Stata = -29848.45250412075
self.llf_stata = -12163.25544543151
self.chi2 = 567.1229375785638 # in Stata not sm
# self.llf = -9275.3386384410078 # from R
self.llf = -12163.25545 # from Stata, big diff with R
self.scale = 0.10115387793455666
self.pearson_chi2 = 371.5346609292967 # deviance_p in Stata
self.fittedvalues = np.array([ 6.84797506, 6.84797506, 6.84797506,
6.84797506, 5.9571983 ,
6.84797506, 6.84797506, 6.40258668, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 5.9571983 , 6.84797506,
5.51180993, 6.84797506, 5.51180993, 5.06642155, 5.06642155,
6.84797506, 6.84797506, 6.40258668, 6.84797506, 5.9571983 ,
6.84797506, 4.62103317, 6.84797506, 6.84797506, 6.84797506,
5.9571983 , 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 4.17564479, 6.84797506, 6.84797506, 4.62103317,
6.84797506, 6.84797506, 5.06642155, 6.84797506, 6.84797506,
6.84797506, 5.51180993, 6.40258668, 6.40258668, 4.62103317,
5.06642155, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
5.51180993, 6.84797506, 6.84797506, 6.40258668, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 5.9571983 , 6.40258668,
6.40258668, 5.51180993, 6.84797506, 6.84797506, 6.84797506,
5.9571983 , 6.84797506, 6.84797506, 4.17564479, 5.9571983 ,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
5.51180993, 5.51180993, 6.84797506, 6.40258668, 6.84797506,
6.84797506, 6.84797506, 5.06642155, 6.84797506, 6.40258668,
6.84797506, 6.40258668, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 5.51180993, 6.40258668, 5.51180993,
6.84797506, 6.84797506, 5.06642155, 5.9571983 , 6.84797506,
5.06642155, 6.40258668, 4.17564479, 6.84797506, 6.84797506,
5.51180993, 5.51180993, 6.40258668, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.40258668, 4.62103317,
6.84797506, 6.40258668, 6.40258668, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.40258668, 6.84797506,
6.40258668, 6.84797506, 6.84797506, 6.40258668, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 5.9571983 ,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 5.9571983 ,
6.84797506, 6.84797506, 6.40258668, 4.17564479, 6.84797506,
6.84797506, 5.51180993, 6.84797506, 5.9571983 , 4.62103317,
5.9571983 , 6.40258668, 6.40258668, 6.84797506, 6.84797506,
5.9571983 , 6.84797506, 6.84797506, 4.62103317, 6.84797506,
6.40258668, 6.84797506, 6.40258668, 6.84797506, 5.51180993,
6.84797506, 4.62103317, 5.06642155, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.40258668,
6.84797506, 4.62103317, 5.51180993, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 4.62103317, 6.84797506, 6.84797506,
6.84797506, 6.40258668, 5.9571983 , 6.84797506, 6.84797506,
6.84797506, 5.06642155, 6.84797506, 6.40258668, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.40258668, 6.84797506,
6.40258668, 6.84797506, 6.40258668, 4.62103317, 5.9571983 ,
5.51180993, 6.40258668, 6.40258668, 6.84797506, 6.84797506,
6.84797506, 6.40258668, 6.84797506, 6.84797506, 6.40258668,
5.9571983 , 6.84797506, 5.51180993, 5.9571983 , 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.40258668, 6.84797506,
4.62103317, 6.84797506, 5.51180993, 6.84797506, 6.40258668,
5.9571983 , 4.62103317, 4.62103317, 6.84797506, 6.40258668,
6.84797506, 6.84797506, 6.84797506, 6.40258668, 4.62103317,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 5.51180993,
6.84797506, 6.84797506, 6.84797506, 5.51180993, 6.84797506,
5.9571983 , 6.84797506, 5.06642155, 4.62103317, 6.40258668,
5.9571983 , 6.84797506, 6.84797506, 6.84797506, 5.51180993,
6.84797506, 6.84797506, 6.84797506, 6.40258668, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.40258668, 6.84797506,
4.17564479, 5.06642155, 6.84797506, 6.84797506, 6.84797506,
5.06642155, 6.84797506, 6.84797506, 6.40258668, 6.84797506,
6.84797506, 6.40258668, 4.17564479, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 4.62103317, 6.84797506, 6.40258668,
6.84797506, 5.51180993, 5.51180993, 6.40258668, 6.84797506,
6.40258668, 6.40258668, 6.84797506, 4.62103317, 6.84797506,
6.84797506, 6.84797506, 5.51180993, 6.40258668, 6.84797506,
4.17564479, 6.40258668, 5.51180993, 5.9571983 , 6.84797506,
6.84797506, 6.84797506, 6.84797506, 4.62103317, 6.84797506,
5.51180993, 6.84797506, 6.40258668, 5.51180993, 5.06642155,
6.84797506, 6.84797506, 6.84797506, 5.06642155, 6.40258668,
6.84797506, 6.40258668, 5.9571983 , 6.84797506, 6.84797506,
6.84797506, 6.84797506, 5.51180993, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 4.17564479, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 5.06642155, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.40258668, 6.84797506, 6.84797506,
6.40258668, 6.84797506, 6.40258668, 6.84797506, 5.51180993,
6.40258668, 6.84797506, 6.40258668, 6.84797506, 6.40258668,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 4.17564479,
6.84797506, 6.84797506, 6.84797506, 6.40258668, 6.84797506,
6.84797506, 6.40258668, 6.84797506, 6.84797506, 6.40258668,
5.9571983 , 6.84797506, 6.84797506, 6.84797506, 6.84797506,
5.9571983 , 6.84797506, 6.40258668, 6.84797506, 6.84797506,
6.84797506, 4.17564479, 6.84797506, 6.40258668, 6.40258668,
5.06642155, 5.9571983 , 6.40258668, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 4.17564479, 5.51180993, 5.06642155,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 5.06642155, 6.40258668,
6.84797506, 6.84797506, 5.9571983 , 6.84797506, 6.40258668,
6.84797506, 6.84797506, 6.84797506, 5.9571983 , 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.40258668,
6.84797506, 5.51180993, 5.9571983 , 6.84797506, 5.9571983 ,
6.40258668, 5.9571983 , 5.9571983 , 6.84797506, 5.9571983 ,
6.84797506, 6.84797506, 6.84797506, 5.06642155, 6.40258668,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.40258668, 6.84797506, 6.40258668, 6.84797506, 5.06642155,
6.84797506, 6.84797506, 4.62103317, 6.40258668, 4.17564479,
6.84797506, 3.73025641, 6.84797506, 6.84797506, 5.9571983 ,
5.51180993, 6.84797506, 5.9571983 , 4.62103317, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 5.9571983 , 5.06642155,
6.84797506, 6.40258668, 6.84797506, 5.51180993, 6.84797506,
5.06642155, 6.84797506, 6.84797506, 6.84797506, 5.51180993,
5.51180993, 5.06642155, 6.84797506, 6.40258668, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
4.62103317, 6.84797506, 6.84797506, 6.84797506, 5.9571983 ,
6.40258668, 6.84797506, 6.84797506, 5.9571983 , 6.84797506,
6.84797506, 6.40258668, 5.06642155, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 5.06642155, 5.9571983 , 6.84797506, 5.9571983 ,
6.40258668, 6.84797506, 6.84797506, 6.84797506, 5.06642155,
6.84797506, 6.84797506, 6.84797506, 5.9571983 , 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.40258668, 6.84797506, 6.84797506, 5.9571983 , 6.40258668,
5.06642155, 6.84797506, 6.84797506, 6.84797506, 5.9571983 ,
6.84797506, 6.40258668, 5.51180993, 5.9571983 , 5.06642155,
6.84797506, 6.84797506, 6.84797506, 6.40258668, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 5.51180993, 6.84797506, 6.84797506,
6.40258668, 6.84797506, 6.84797506, 5.9571983 , 6.84797506,
6.84797506, 6.84797506, 6.40258668, 6.84797506, 5.51180993,
6.84797506, 6.84797506, 6.84797506, 5.9571983 , 6.84797506,
6.40258668, 6.40258668, 6.84797506, 5.9571983 , 6.84797506,
6.40258668, 6.84797506, 6.84797506, 6.84797506, 5.06642155,
5.51180993, 6.84797506, 4.17564479, 5.9571983 , 6.40258668,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.40258668, 6.84797506, 6.84797506,
5.06642155, 6.84797506, 6.40258668, 6.84797506, 4.62103317,
6.84797506, 6.84797506, 5.9571983 , 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
5.06642155, 6.40258668, 4.17564479, 6.84797506, 6.84797506,
6.40258668, 5.06642155, 4.62103317, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 4.17564479, 6.84797506, 6.84797506, 6.84797506,
4.17564479, 6.84797506, 6.84797506, 6.40258668, 6.84797506,
6.84797506, 6.40258668, 6.84797506, 6.84797506, 4.17564479,
6.84797506, 5.06642155, 5.51180993, 5.51180993, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.40258668, 6.84797506,
6.84797506, 6.40258668, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 5.51180993, 5.51180993, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 5.06642155, 6.40258668, 5.9571983 ,
5.9571983 , 6.40258668, 6.84797506, 6.84797506, 6.84797506,
6.40258668, 6.84797506, 6.84797506, 6.40258668, 6.84797506,
6.84797506, 5.06642155, 6.84797506, 5.9571983 , 6.84797506,
6.84797506, 6.84797506, 5.06642155, 4.62103317, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 5.9571983 ,
6.84797506, 5.9571983 , 6.84797506, 6.84797506, 6.84797506,
5.51180993, 6.84797506, 6.84797506, 6.84797506, 5.51180993,
6.40258668, 6.84797506, 6.84797506, 6.40258668, 6.40258668,
6.84797506, 6.84797506, 5.9571983 , 6.84797506, 5.51180993,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 5.06642155, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 4.62103317,
6.84797506, 6.40258668, 6.84797506, 5.9571983 , 4.62103317,
5.9571983 , 6.84797506, 4.17564479, 6.84797506, 6.84797506,
6.84797506, 3.73025641, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 5.9571983 , 6.84797506, 6.84797506, 6.84797506,
6.84797506, 5.9571983 , 5.9571983 , 5.9571983 , 6.84797506,
6.84797506, 6.40258668, 6.84797506, 6.40258668, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 5.9571983 , 6.84797506, 4.62103317, 6.40258668,
5.9571983 , 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.40258668, 5.51180993,
6.84797506, 6.84797506, 6.84797506, 5.9571983 , 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.40258668, 6.84797506,
6.84797506, 5.9571983 , 6.84797506, 6.84797506, 6.84797506,
6.40258668, 6.40258668, 6.84797506, 6.84797506, 6.40258668,
6.84797506, 6.84797506, 6.84797506, 6.40258668, 6.84797506,
6.84797506, 6.84797506, 5.51180993, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 3.73025641, 6.84797506, 6.84797506,
6.84797506, 4.17564479, 6.40258668, 5.9571983 , 6.84797506,
6.84797506, 6.40258668, 5.06642155, 6.84797506, 6.84797506,
5.9571983 , 6.84797506, 4.17564479, 5.51180993, 6.40258668,
4.62103317, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.40258668,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.40258668,
6.84797506, 5.51180993, 6.40258668, 6.40258668, 6.84797506,
6.40258668, 6.84797506, 6.84797506, 6.84797506, 5.51180993,
6.84797506, 6.40258668, 6.84797506, 6.84797506, 5.06642155,
6.84797506, 6.84797506, 6.84797506, 6.40258668, 6.40258668,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.40258668, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 3.73025641, 6.84797506, 5.51180993,
5.51180993, 6.84797506, 6.40258668, 6.84797506, 5.06642155,
6.84797506, 6.84797506, 6.84797506, 5.06642155, 6.84797506,
5.9571983 , 6.84797506, 4.17564479, 6.84797506, 5.51180993,
4.17564479, 5.06642155, 6.40258668, 5.9571983 , 6.84797506,
6.40258668, 6.84797506, 4.62103317, 5.9571983 , 6.84797506,
6.84797506, 6.84797506, 5.51180993, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 4.62103317, 6.40258668, 6.84797506, 6.84797506,
6.84797506, 5.9571983 , 6.40258668, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
3.73025641, 5.06642155, 6.84797506, 4.62103317, 6.84797506,
5.06642155, 6.84797506, 5.51180993, 5.9571983 , 5.9571983 ,
4.17564479, 5.9571983 , 5.51180993, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 5.51180993, 6.40258668,
6.84797506, 6.84797506, 6.84797506, 5.51180993, 6.84797506,
6.84797506, 6.84797506, 5.51180993, 6.84797506, 6.84797506,
6.40258668, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 4.62103317,
5.06642155, 6.84797506, 6.84797506, 6.40258668, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.40258668, 6.84797506, 6.84797506, 6.40258668,
6.40258668, 5.9571983 , 6.84797506, 5.9571983 , 6.40258668,
6.84797506, 6.84797506, 6.84797506, 5.9571983 , 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
5.51180993, 6.84797506, 6.84797506, 5.9571983 , 6.40258668,
5.9571983 , 6.84797506, 6.84797506, 6.40258668, 6.84797506,
5.06642155, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.40258668, 5.9571983 , 6.40258668, 5.06642155,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 5.06642155,
6.84797506, 6.40258668, 6.84797506, 5.51180993, 6.84797506,
6.84797506, 6.40258668, 6.84797506, 5.9571983 , 6.40258668,
5.9571983 , 6.84797506, 6.84797506, 6.84797506, 6.40258668,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
5.06642155, 6.84797506, 5.51180993, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 5.06642155, 6.40258668, 6.84797506,
6.84797506, 6.40258668, 6.84797506, 5.9571983 , 5.51180993,
6.84797506, 6.84797506, 6.84797506, 5.51180993, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 4.62103317,
5.9571983 , 6.84797506, 4.62103317, 6.84797506, 6.40258668,
6.84797506, 6.84797506, 4.62103317, 6.84797506, 6.84797506,
6.40258668, 4.17564479, 3.73025641, 6.40258668, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 5.9571983 ,
6.84797506, 5.06642155, 5.9571983 , 6.84797506, 6.84797506,
4.17564479, 6.84797506, 6.84797506, 6.84797506, 4.62103317,
6.84797506, 6.84797506, 6.84797506, 5.9571983 , 5.9571983 ,
6.84797506, 6.40258668, 5.9571983 , 6.84797506, 6.40258668,
5.9571983 , 6.84797506, 5.9571983 , 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
4.62103317, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 5.06642155, 5.9571983 , 6.40258668, 6.40258668,
6.84797506, 6.84797506, 6.84797506, 5.9571983 , 4.62103317,
6.84797506, 5.9571983 , 5.06642155, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 5.9571983 ,
6.40258668, 6.40258668, 6.84797506, 6.84797506, 6.40258668,
4.17564479, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
3.73025641, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.40258668, 6.84797506, 6.84797506,
6.84797506, 5.06642155, 6.84797506, 6.40258668, 5.06642155,
5.06642155, 6.84797506, 5.9571983 , 6.84797506, 6.84797506,
6.84797506, 6.40258668, 4.62103317, 6.84797506, 6.84797506,
6.84797506, 5.9571983 , 6.84797506, 6.84797506, 6.84797506,
5.06642155, 6.84797506, 6.84797506, 6.40258668, 5.51180993,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 5.51180993, 6.84797506, 6.84797506, 6.40258668,
6.84797506, 6.84797506, 5.51180993, 6.84797506, 6.40258668,
6.40258668, 6.84797506, 6.40258668, 6.84797506, 6.84797506,
6.84797506, 6.40258668, 6.40258668, 6.84797506, 5.06642155,
4.62103317, 5.06642155, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 4.62103317,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.40258668, 4.62103317, 4.62103317, 6.84797506, 5.51180993,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.40258668, 6.84797506, 6.84797506, 6.84797506,
5.9571983 , 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 5.06642155, 6.84797506,
4.17564479, 6.84797506, 5.06642155, 6.84797506, 6.84797506,
6.84797506, 5.51180993, 6.84797506, 4.62103317, 6.84797506,
6.84797506, 5.06642155, 5.51180993, 6.84797506, 6.84797506,
6.84797506, 6.40258668, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.40258668, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 5.9571983 , 5.51180993,
6.84797506, 6.84797506, 4.17564479, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 5.06642155, 6.40258668,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
4.62103317, 6.84797506, 6.84797506, 6.84797506, 5.9571983 ,
6.84797506, 5.51180993, 5.06642155, 6.84797506, 6.84797506,
6.84797506, 6.40258668, 5.9571983 , 6.84797506, 5.9571983 ,
6.84797506, 4.62103317, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 5.9571983 , 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 5.9571983 ,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 5.9571983 , 6.84797506, 4.17564479,
6.40258668, 5.9571983 , 6.84797506, 4.62103317, 6.84797506,
5.9571983 , 5.51180993, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.40258668, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
5.9571983 , 6.84797506, 6.84797506, 6.84797506, 3.73025641,
6.84797506, 6.84797506, 6.84797506, 6.40258668, 6.84797506,
6.84797506, 4.62103317, 5.06642155, 6.40258668, 6.84797506,
6.84797506, 4.62103317, 5.9571983 , 6.84797506, 6.84797506,
6.84797506, 4.62103317, 6.84797506, 6.84797506, 5.51180993,
6.40258668, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 5.06642155,
6.84797506, 5.51180993, 6.40258668, 6.84797506, 6.40258668,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 5.51180993,
6.84797506, 6.84797506, 5.51180993, 6.84797506, 6.40258668,
6.84797506, 6.84797506, 5.9571983 , 6.84797506, 6.84797506,
3.73025641, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.40258668, 6.84797506,
6.84797506, 5.9571983 , 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.40258668,
5.06642155, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.40258668, 6.84797506, 6.40258668, 5.06642155, 5.51180993,
5.9571983 , 6.84797506, 6.40258668, 6.40258668, 6.84797506,
6.40258668, 6.84797506, 5.51180993, 5.06642155, 5.9571983 ,
6.40258668, 6.84797506, 6.84797506, 5.9571983 , 5.51180993,
6.84797506, 6.40258668, 4.17564479, 6.40258668, 6.84797506,
5.9571983 , 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 3.73025641, 6.84797506, 6.84797506, 6.84797506,
5.51180993, 6.84797506, 6.40258668, 6.84797506, 6.40258668,
6.84797506, 6.40258668, 5.51180993, 6.84797506, 6.84797506,
6.84797506, 6.40258668, 6.40258668, 6.84797506, 6.84797506,
6.40258668, 5.06642155, 6.84797506, 5.51180993, 6.84797506,
5.06642155, 6.84797506, 4.62103317, 6.84797506, 6.84797506,
6.40258668, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
4.62103317, 6.40258668, 6.84797506, 4.17564479, 6.84797506,
6.84797506, 6.84797506, 6.40258668, 6.84797506, 6.84797506,
6.84797506, 6.40258668, 6.84797506, 5.51180993, 6.84797506,
6.84797506, 5.9571983 , 6.40258668, 6.84797506, 5.06642155,
6.84797506, 6.84797506, 5.51180993, 6.84797506, 3.73025641,
6.40258668, 6.40258668, 6.84797506, 6.40258668, 6.84797506,
5.51180993, 6.40258668, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 5.9571983 , 4.62103317, 6.84797506, 6.84797506,
5.06642155, 5.06642155, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 5.9571983 , 6.84797506, 5.9571983 ,
4.62103317, 6.84797506, 4.62103317, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 3.73025641, 6.84797506, 5.51180993,
6.84797506, 6.40258668, 6.84797506, 6.40258668, 6.84797506,
5.06642155, 6.84797506, 6.84797506, 5.06642155, 3.73025641,
3.28486804, 4.17564479, 5.51180993, 6.40258668, 6.84797506,
4.62103317, 6.84797506, 6.84797506, 6.40258668, 6.84797506,
6.40258668, 6.84797506, 6.84797506, 5.9571983 , 6.84797506,
6.84797506, 6.84797506, 6.84797506, 5.06642155, 6.84797506,
6.84797506, 6.84797506, 5.9571983 , 6.84797506, 3.73025641,
6.84797506, 6.84797506, 5.51180993, 6.40258668, 4.17564479,
5.06642155, 6.84797506, 6.84797506, 4.17564479, 6.84797506,
6.40258668, 6.84797506, 6.40258668, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 5.06642155, 6.84797506,
6.84797506, 6.40258668, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 5.9571983 , 6.84797506, 4.62103317, 6.84797506,
6.84797506, 6.84797506, 5.06642155, 6.40258668, 6.40258668,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.40258668, 5.06642155, 6.40258668, 6.84797506, 6.84797506,
5.51180993, 6.84797506, 5.9571983 , 6.84797506, 6.84797506,
6.84797506, 5.9571983 , 6.84797506, 6.40258668, 6.40258668,
6.84797506, 6.84797506, 6.40258668, 6.84797506, 5.51180993,
6.84797506, 5.51180993, 5.51180993, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
5.06642155, 4.62103317, 6.84797506, 6.40258668, 6.84797506,
5.9571983 , 6.84797506, 6.84797506, 6.84797506, 5.06642155,
6.84797506, 5.9571983 , 6.84797506, 5.06642155, 6.84797506,
6.84797506, 5.06642155, 5.9571983 , 6.40258668, 6.84797506,
4.62103317, 6.40258668, 6.84797506, 6.40258668, 5.9571983 ,
6.84797506, 4.62103317, 5.51180993, 5.06642155, 6.84797506,
6.84797506, 6.40258668, 5.51180993, 6.84797506, 5.9571983 ,
6.84797506, 6.84797506, 6.84797506, 6.40258668, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 4.62103317, 6.84797506,
6.40258668, 5.51180993, 5.9571983 , 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.40258668, 6.84797506, 6.84797506,
5.51180993, 6.84797506, 6.84797506, 6.40258668, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 4.62103317,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.40258668, 6.84797506, 6.84797506, 6.84797506,
6.40258668, 6.84797506, 5.06642155, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 5.51180993, 6.40258668, 6.84797506,
5.9571983 , 5.9571983 , 6.84797506, 6.84797506, 6.84797506,
4.62103317, 6.84797506, 5.9571983 , 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.40258668, 6.84797506,
6.84797506, 6.84797506, 4.17564479, 6.40258668, 6.40258668,
5.51180993, 6.84797506, 5.51180993, 6.84797506, 6.84797506,
6.84797506, 4.62103317, 6.84797506, 4.17564479, 6.84797506,
6.84797506, 5.51180993, 6.40258668, 5.06642155, 6.84797506,
6.84797506, 6.84797506, 6.40258668, 6.84797506, 5.9571983 ,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 5.51180993,
4.62103317, 6.84797506, 6.40258668, 6.84797506, 6.84797506,
5.9571983 , 5.51180993, 5.9571983 , 6.84797506, 4.62103317,
6.84797506, 6.84797506, 5.06642155, 6.40258668, 6.84797506,
5.06642155, 5.9571983 , 6.84797506, 6.84797506, 6.40258668,
6.40258668, 5.9571983 , 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 5.06642155, 6.84797506, 6.40258668,
6.84797506, 5.06642155, 5.06642155, 5.9571983 , 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 5.06642155,
6.84797506, 6.84797506, 6.84797506, 6.40258668, 6.84797506,
5.9571983 , 6.84797506, 5.06642155, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.40258668, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.40258668, 6.84797506,
3.73025641, 6.40258668, 5.51180993, 6.84797506, 5.51180993,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
5.06642155, 6.84797506, 6.40258668, 6.84797506, 6.84797506,
6.84797506, 6.40258668, 6.84797506, 6.84797506, 6.84797506,
6.40258668, 5.06642155, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.40258668, 6.84797506, 5.9571983 , 6.84797506,
6.84797506, 6.84797506, 6.84797506, 5.51180993, 5.06642155,
6.84797506, 6.84797506, 6.84797506, 4.62103317, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 4.17564479, 6.84797506,
5.51180993, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
3.73025641, 6.84797506, 6.40258668, 6.84797506, 6.84797506,
6.84797506, 5.06642155, 6.84797506, 6.84797506, 4.62103317,
6.40258668, 6.84797506, 5.51180993, 6.84797506, 6.84797506,
5.9571983 , 6.84797506, 6.84797506, 6.84797506, 5.9571983 ,
5.06642155, 6.84797506, 5.06642155, 6.40258668, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.40258668, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 4.17564479, 5.51180993, 6.84797506, 6.84797506,
6.40258668, 4.62103317, 6.84797506, 5.9571983 , 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 4.17564479, 6.40258668, 6.84797506,
6.84797506, 5.9571983 , 6.84797506, 5.51180993, 6.84797506,
5.9571983 , 5.06642155, 6.84797506, 6.84797506, 6.40258668,
6.84797506, 6.84797506, 6.84797506, 6.40258668, 5.9571983 ,
5.51180993, 6.84797506, 5.9571983 , 6.40258668, 4.62103317,
6.84797506, 5.06642155, 4.17564479, 5.51180993, 6.84797506,
6.40258668, 5.9571983 , 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.40258668, 5.9571983 , 6.40258668, 6.84797506,
5.9571983 , 6.84797506, 5.9571983 , 5.51180993, 4.17564479,
5.9571983 , 6.40258668, 6.84797506, 5.51180993, 6.40258668,
5.51180993, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.40258668, 6.84797506, 5.9571983 , 6.84797506, 6.84797506,
6.84797506, 6.84797506, 5.06642155, 5.9571983 , 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 5.06642155,
6.84797506, 6.84797506, 6.84797506, 6.40258668, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 5.51180993, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 3.73025641,
4.17564479, 6.84797506, 5.06642155, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 4.17564479,
5.51180993, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 5.9571983 , 6.84797506, 6.84797506, 6.84797506,
4.62103317, 6.84797506, 5.06642155, 5.06642155, 6.84797506,
6.40258668, 5.9571983 , 6.84797506, 4.62103317, 6.84797506,
6.84797506, 6.84797506, 5.9571983 , 6.84797506, 6.84797506,
5.9571983 , 5.51180993, 6.84797506, 5.06642155, 6.84797506,
4.62103317, 5.9571983 , 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 4.62103317, 6.84797506,
6.84797506, 6.84797506, 5.51180993, 5.51180993, 6.84797506,
6.84797506, 6.84797506, 5.9571983 , 6.40258668, 6.84797506,
6.84797506, 5.51180993, 6.84797506, 6.84797506, 5.9571983 ,
5.51180993, 6.84797506, 6.84797506, 6.84797506, 4.17564479,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 5.9571983 , 6.84797506, 6.40258668,
6.84797506, 6.84797506, 6.84797506, 6.40258668, 6.40258668,
6.84797506, 6.84797506, 6.84797506, 4.62103317, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 3.73025641,
6.84797506, 6.84797506, 6.84797506, 5.9571983 , 4.62103317,
5.51180993, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.40258668, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 5.9571983 , 6.40258668, 6.84797506,
5.51180993, 5.9571983 , 5.9571983 , 6.84797506, 6.84797506,
5.51180993, 6.84797506, 6.84797506, 5.51180993, 6.40258668,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 5.51180993,
6.40258668, 5.51180993, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 5.51180993, 6.84797506, 5.9571983 , 6.84797506,
6.84797506, 6.84797506, 6.84797506, 5.06642155, 6.84797506,
6.84797506, 6.84797506, 6.40258668, 5.06642155, 5.51180993,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.40258668, 6.84797506, 6.84797506, 5.9571983 , 6.40258668,
6.84797506, 6.84797506, 6.84797506, 6.40258668, 6.84797506,
5.06642155, 4.17564479, 6.84797506, 6.84797506, 5.06642155,
6.84797506, 6.84797506, 6.40258668, 6.84797506, 5.51180993,
6.84797506, 6.84797506, 6.84797506, 4.17564479, 5.9571983 ,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 5.9571983 ,
5.51180993, 6.84797506, 6.40258668, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 5.9571983 ,
5.9571983 , 6.84797506, 5.9571983 , 4.17564479, 6.40258668,
6.84797506, 6.84797506, 6.84797506, 6.40258668, 5.9571983 ,
5.51180993, 6.84797506, 5.51180993, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 5.51180993, 5.06642155,
6.84797506, 6.40258668, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 5.51180993, 6.40258668,
6.84797506, 6.84797506, 3.28486804, 5.9571983 , 6.84797506,
3.73025641, 6.84797506, 6.84797506, 6.84797506, 4.17564479,
6.84797506, 6.40258668, 6.40258668, 6.84797506, 5.51180993,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 4.62103317, 6.40258668, 6.84797506, 6.40258668,
5.06642155, 6.84797506, 6.84797506, 5.51180993, 4.62103317,
6.84797506, 6.40258668, 6.84797506, 5.06642155, 5.9571983 ,
6.40258668, 5.51180993, 6.84797506, 6.84797506, 6.84797506,
6.40258668, 6.84797506, 6.84797506, 4.17564479, 6.84797506,
5.06642155, 6.84797506, 3.56230611, 4.89847125, 5.34385962,
4.45308287, 5.789248 , 5.789248 , 5.789248 , 5.789248 ,
5.789248 , 5.34385962, 5.34385962, 5.789248 , 5.789248 ,
4.00769449, 5.34385962, 4.45308287, 5.789248 , 5.34385962,
3.56230611, 2.67152936, 5.789248 , 5.34385962, 5.789248 ,
2.67152936, 5.789248 , 5.34385962, 3.56230611, 4.89847125,
5.789248 , 3.11691773, 5.789248 , 5.789248 , 4.89847125,
5.789248 , 3.56230611, 3.56230611, 5.789248 , 5.789248 ,
5.789248 , 4.89847125, 5.789248 , 4.89847125, 4.00769449,
5.789248 , 3.56230611, 5.789248 , 2.22614098, 3.11691773,
5.789248 , 5.789248 , 4.00769449, 3.11691773, 5.789248 ,
5.789248 , 5.789248 , 5.789248 , 5.789248 , 5.789248 ,
2.67152936, 5.789248 , 5.789248 , 4.00769449, 3.56230611,
4.45308287, 5.789248 , 5.789248 , 4.89847125, 5.789248 ,
3.56230611, 5.789248 , 4.89847125, 2.67152936, 5.34385962,
4.45308287, 5.789248 , 4.45308287, 5.789248 , 5.789248 ,
4.89847125, 4.45308287, 5.789248 , 5.789248 , 5.789248 ,
5.789248 , 5.789248 , 4.89847125, 5.789248 , 5.34385962,
5.34385962, 5.789248 , 5.789248 , 3.56230611, 5.789248 ,
3.56230611, 5.789248 , 4.45308287, 5.789248 , 5.789248 ,
5.34385962, 5.789248 , 3.11691773, 5.789248 , 5.789248 ,
3.11691773, 4.00769449, 5.789248 , 5.789248 , 5.34385962,
3.56230611, 3.11691773, 5.789248 , 4.45308287, 5.789248 ,
5.789248 , 5.789248 , 3.11691773, 5.789248 , 5.789248 ,
5.789248 , 5.789248 , 4.45308287, 5.789248 , 4.00769449,
5.789248 , 4.45308287, 4.45308287, 5.789248 , 4.89847125,
4.00769449, 4.00769449, 4.89847125, 4.00769449, 5.789248 ,
5.789248 , 5.789248 , 5.789248 , 5.789248 , 5.789248 ,
5.789248 , 5.34385962, 3.56230611, 5.789248 , 3.56230611,
5.789248 , 5.789248 , 5.789248 , 2.67152936, 5.789248 ,
5.789248 , 5.789248 , 5.789248 , 4.89847125, 4.89847125,
5.789248 , 2.67152936, 5.789248 , 4.89847125, 5.789248 ,
5.789248 , 4.45308287, 3.11691773, 5.789248 , 4.89847125,
5.789248 , 2.67152936, 2.67152936, 5.34385962, 4.00769449,
5.789248 , 5.789248 , 5.34385962, 5.789248 , 5.789248 ,
4.00769449, 5.789248 , 5.34385962, 4.89847125, 5.789248 ,
2.67152936, 5.34385962, 5.789248 , 5.789248 , 4.45308287,
5.34385962, 5.789248 , 5.789248 , 5.789248 , 5.789248 ,
2.67152936, 5.789248 , 3.56230611, 4.00769449, 5.34385962,
5.789248 , 3.11691773, 2.67152936, 5.789248 , 4.45308287,
5.789248 , 3.56230611, 5.34385962, 4.89847125, 5.789248 ,
3.56230611, 4.00769449, 5.789248 , 3.11691773, 5.789248 ,
5.789248 , 3.56230611, 5.34385962, 4.89847125, 4.89847125,
5.789248 , 5.789248 , 2.67152936, 3.11691773, 5.789248 ,
5.789248 , 5.789248 , 5.789248 , 5.34385962, 5.34385962,
5.789248 , 5.789248 , 4.89847125, 5.789248 , 4.45308287,
5.34385962, 5.789248 , 4.45308287, 4.45308287, 5.789248 ,
5.789248 , 3.56230611, 4.89847125, 3.56230611, 4.89847125,
4.45308287, 5.789248 , 4.00769449, 5.789248 , 4.89847125,
5.789248 , 5.789248 , 5.789248 , 4.45308287, 4.00769449,
5.789248 , 4.89847125, 4.89847125, 3.56230611, 5.789248 ,
5.789248 , 5.34385962, 3.56230611, 3.11691773, 3.56230611,
4.00769449, 5.789248 , 4.45308287, 4.89847125, 5.789248 ,
5.789248 , 5.789248 , 4.00769449, 4.89847125, 2.67152936,
5.789248 , 5.789248 , 5.789248 , 4.89847125, 5.34385962,
5.789248 , 5.789248 , 5.789248 , 5.789248 , 5.789248 ,
5.789248 , 5.789248 , 4.00769449, 5.34385962, 4.89847125,
5.789248 , 4.89847125, 4.00769449, 5.789248 , 5.789248 ,
4.89847125, 5.789248 , 5.34385962, 5.789248 , 2.67152936,
5.789248 , 5.34385962, 4.00769449, 4.00769449, 5.789248 ,
5.34385962, 3.56230611, 5.789248 , 4.89847125, 5.34385962,
5.789248 , 4.00769449, 4.45308287, 5.789248 , 5.34385962,
4.00769449, 3.56230611, 5.34385962, 2.67152936, 5.789248 ,
3.56230611, 4.89847125, 4.45308287, 5.789248 , 5.789248 ,
5.789248 , 5.789248 , 5.789248 , 4.00769449, 5.789248 ,
4.45308287, 5.789248 , 5.789248 , 5.34385962, 5.789248 ,
5.789248 , 5.789248 , 5.789248 , 5.34385962, 5.34385962,
5.789248 , 5.34385962, 5.789248 , 5.789248 , 5.789248 ,
5.789248 , 5.34385962, 4.45308287, 5.789248 , 5.34385962,
5.789248 , 5.34385962, 5.789248 , 5.789248 , 5.789248 ,
5.789248 , 4.45308287, 5.34385962, 3.56230611, 2.67152936,
5.789248 , 5.789248 , 3.11691773, 5.789248 , 4.45308287,
5.789248 , 5.789248 , 5.789248 , 5.789248 , 4.00769449,
4.00769449, 4.00769449, 5.789248 , 5.789248 , 5.789248 ,
5.789248 , 4.89847125, 3.11691773, 4.45308287, 5.789248 ,
4.00769449, 5.789248 , 5.789248 , 5.789248 , 5.789248 ,
5.789248 , 4.00769449, 5.789248 , 5.789248 , 5.789248 ,
5.789248 , 4.00769449, 5.789248 , 4.00769449, 3.56230611,
5.789248 , 4.89847125, 5.789248 , 5.789248 , 5.789248 ,
5.789248 , 5.789248 , 5.789248 , 5.789248 , 5.789248 ,
4.89847125, 5.34385962, 5.789248 , 5.789248 , 5.789248 ,
5.789248 , 5.789248 , 5.789248 , 5.789248 , 5.34385962,
3.56230611, 5.34385962, 5.789248 , 3.56230611, 5.789248 ,
4.00769449, 5.789248 , 5.789248 , 5.789248 , 4.00769449,
3.11691773, 5.789248 , 5.789248 , 4.00769449, 5.789248 ,
5.789248 , 5.789248 , 5.789248 , 5.789248 , 5.789248 ,
5.789248 , 5.789248 , 5.789248 , 5.789248 , 5.789248 ,
5.789248 , 5.789248 , 3.56230611, 5.789248 , 5.789248 ,
5.789248 , 5.789248 , 3.56230611, 5.34385962, 4.00769449,
5.789248 , 4.89847125, 4.89847125, 4.00769449, 5.789248 ,
5.789248 , 4.45308287, 2.67152936, 5.789248 , 5.789248 ,
4.00769449, 5.789248 , 3.56230611, 4.00769449, 5.789248 ,
5.789248 , 4.89847125, 5.789248 , 4.45308287, 5.34385962,
5.34385962, 3.11691773, 3.56230611, 5.789248 , 4.45308287,
5.789248 , 4.89847125, 4.00769449, 4.89847125, 4.89847125,
5.789248 , 5.789248 , 5.34385962, 4.00769449, 5.789248 ,
5.789248 , 5.789248 , 5.789248 , 5.789248 , 5.789248 ,
5.789248 , 3.56230611, 4.45308287, 4.00769449, 4.89847125,
4.45308287, 3.56230611, 4.00769449, 5.789248 , 5.789248 ,
5.789248 , 5.789248 , 3.11691773, 5.789248 , 5.789248 ,
5.789248 , 5.789248 , 5.789248 , 4.00769449, 4.89847125,
5.34385962, 3.56230611, 3.11691773, 5.789248 , 4.00769449,
5.789248 , 3.56230611, 5.789248 , 5.789248 , 4.00769449,
5.789248 , 4.00769449, 5.789248 , 5.789248 , 5.789248 ,
5.789248 , 4.89847125, 4.00769449, 4.89847125, 5.34385962,
2.67152936, 5.789248 , 4.45308287, 5.789248 , 4.89847125,
5.789248 , 5.34385962, 5.789248 , 5.789248 , 5.789248 ,
3.56230611, 2.67152936, 5.789248 , 5.789248 , 5.789248 ,
4.00769449, 4.89847125, 5.789248 , 5.34385962, 4.89847125,
5.34385962, 5.789248 , 5.789248 , 5.34385962, 5.789248 ,
5.789248 , 5.789248 , 2.67152936, 5.34385962, 5.789248 ,
5.789248 , 4.89847125, 4.89847125, 5.34385962, 5.789248 ,
5.789248 , 4.45308287, 3.11691773, 3.56230611, 5.789248 ,
5.789248 , 5.789248 , 5.789248 , 5.789248 , 5.34385962,
5.789248 , 5.789248 , 4.00769449, 4.89847125, 5.789248 ,
3.56230611, 5.789248 , 5.34385962, 2.67152936, 5.789248 ,
5.34385962, 5.789248 , 5.789248 , 5.789248 , 5.34385962,
5.789248 , 5.789248 , 5.789248 , 5.789248 , 5.789248 ,
5.789248 , 4.45308287, 5.789248 , 3.11691773, 5.789248 ,
5.34385962, 4.89847125, 5.34385962, 5.789248 , 4.89847125,
5.789248 , 4.00769449, 4.45308287, 5.789248 , 5.789248 ,
5.789248 , 5.789248 , 5.34385962, 5.789248 , 4.00769449,
4.89847125, 4.00769449, 5.789248 , 3.56230611, 5.789248 ,
5.789248 , 5.789248 , 5.789248 , 3.56230611, 5.789248 ,
4.89847125, 5.789248 , 5.789248 , 5.789248 , 5.789248 ,
5.789248 , 5.789248 , 5.789248 , 5.789248 , 4.00769449,
2.22614098, 5.789248 , 5.789248 , 5.789248 , 5.789248 ,
4.89847125, 5.789248 , 3.56230611, 5.789248 , 5.789248 ,
4.00769449, 5.789248 , 5.789248 , 5.789248 , 5.789248 ,
5.789248 , 5.789248 , 3.11691773, 3.11691773, 5.789248 ,
5.789248 , 5.789248 , 4.00769449, 5.789248 , 5.34385962,
4.45308287, 5.34385962, 4.45308287, 4.45308287, 4.89847125,
5.789248 , 4.89847125, 5.789248 , 3.56230611, 5.789248 ,
5.789248 , 5.789248 , 5.789248 , 5.789248 , 3.11691773,
5.789248 , 4.00769449, 5.789248 , 4.89847125, 5.789248 ,
4.00769449, 5.789248 , 5.789248 , 5.789248 , 5.789248 ,
5.789248 , 5.34385962, 5.789248 , 4.45308287, 5.34385962,
4.45308287, 5.789248 , 4.00769449, 5.789248 , 3.56230611,
5.34385962, 5.789248 , 5.789248 , 4.45308287, 4.00769449,
3.56230611, 5.789248 , 5.789248 , 5.789248 , 4.45308287,
5.789248 , 5.789248 , 5.34385962, 4.89847125, 4.45308287,
3.11691773, 5.789248 , 3.56230611, 3.11691773, 5.789248 ,
5.789248 , 3.11691773, 3.11691773, 5.789248 , 4.45308287,
4.45308287, 5.789248 , 5.789248 , 4.00769449, 4.00769449,
3.56230611, 5.789248 , 4.00769449, 3.56230611, 5.789248 ,
4.00769449, 5.34385962, 5.789248 , 3.56230611, 5.789248 ,
5.34385962, 5.789248 , 4.45308287, 4.00769449, 5.789248 ,
5.789248 , 4.45308287, 5.34385962, 5.789248 , 5.789248 ,
5.789248 , 5.789248 , 5.789248 , 5.789248 , 5.34385962,
3.11691773, 5.34385962, 2.67152936, 4.00769449, 5.789248 ,
3.56230611, 5.789248 , 5.34385962, 5.789248 , 2.67152936,
5.789248 , 5.789248 , 5.789248 , 5.789248 , 3.11691773,
5.789248 , 5.34385962, 3.56230611, 4.45308287, 4.89847125,
4.00769449, 5.789248 , 5.789248 , 5.34385962, 4.00769449,
4.89847125, 4.45308287, 5.789248 , 5.789248 , 5.789248 ,
5.789248 , 5.789248 , 4.00769449, 5.789248 , 5.789248 ,
5.789248 , 5.789248 , 5.789248 , 5.789248 , 5.34385962,
3.56230611, 5.789248 , 5.789248 , 5.34385962, 5.789248 ,
3.11691773, 5.789248 , 4.89847125, 5.789248 , 4.89847125,
5.789248 , 5.34385962, 5.789248 , 5.789248 , 5.789248 ,
5.789248 , 5.34385962, 2.67152936, 5.789248 , 5.789248 ,
2.67152936, 3.56230611, 5.789248 , 5.789248 , 2.67152936,
4.45308287, 3.56230611, 4.45308287, 5.789248 , 5.789248 ,
5.789248 , 5.789248 , 5.789248 , 5.789248 , 4.45308287,
4.89847125, 5.34385962, 5.789248 , 5.789248 , 5.789248 ,
5.789248 , 5.34385962, 4.00769449, 5.789248 , 5.34385962,
5.789248 , 2.67152936, 2.67152936, 5.789248 , 3.56230611,
5.789248 , 3.56230611, 5.789248 , 4.45308287, 2.67152936,
5.789248 , 5.789248 , 2.67152936, 4.89847125, 5.789248 ,
5.789248 , 3.11691773, 5.789248 , 4.00769449, 5.789248 ,
5.789248 , 3.11691773, 4.00769449, 4.89847125, 4.89847125,
5.789248 , 4.00769449, 4.45308287, 5.789248 , 4.45308287,
5.789248 , 3.11691773, 4.45308287, 4.89847125, 3.56230611,
5.789248 , 5.789248 , 3.56230611, 3.56230611, 3.56230611,
5.789248 , 5.789248 , 4.00769449, 4.00769449, 3.11691773,
5.789248 , 5.789248 , 2.67152936, 4.00769449, 5.789248 ,
2.67152936, 3.56230611, 3.56230611, 4.45308287, 5.789248 ,
3.56230611, 5.789248 , 5.789248 , 5.789248 , 5.789248 ,
5.789248 , 5.789248 , 5.34385962, 5.789248 , 5.789248 ,
5.789248 , 5.789248 , 5.789248 , 5.789248 , 5.789248 ,
5.789248 , 4.89847125, 5.789248 , 5.789248 , 4.89847125,
5.789248 , 5.789248 , 4.89847125, 4.89847125, 5.789248 ,
5.34385962, 4.45308287, 5.789248 , 4.89847125, 4.00769449,
4.45308287, 5.789248 , 2.67152936, 4.45308287, 5.34385962,
5.789248 , 5.789248 , 5.789248 , 5.789248 , 3.11691773,
5.34385962, 5.789248 , 4.89847125, 5.789248 , 4.45308287,
5.789248 , 5.789248 , 4.89847125, 4.45308287, 5.789248 ,
5.789248 , 5.789248 , 5.789248 , 5.789248 , 4.45308287,
4.45308287, 3.11691773, 4.89847125, 5.789248 , 3.11691773,
3.11691773, 5.789248 , 4.00769449, 5.34385962, 3.11691773,
4.89847125, 3.11691773, 3.56230611, 4.89847125, 5.789248 ,
5.789248 , 4.45308287, 4.89847125, 5.789248 , 4.45308287,
5.789248 , 4.89847125, 4.45308287, 2.67152936, 5.789248 ,
5.789248 , 5.789248 , 4.89847125, 5.789248 , 5.789248 ,
5.789248 , 5.789248 , 4.89847125, 5.34385962, 4.45308287,
5.789248 , 4.00769449, 3.56230611, 4.89847125, 5.789248 ,
4.45308287, 5.789248 , 5.789248 , 3.11691773, 5.789248 ,
5.34385962, 5.789248 , 5.789248 , 5.789248 , 3.56230611,
2.22614098, 5.789248 , 5.789248 , 3.56230611, 5.34385962,
4.00769449, 5.789248 , 2.67152936, 5.789248 , 4.00769449,
5.789248 , 5.789248 , 4.00769449, 2.67152936, 5.789248 ,
4.89847125, 4.45308287, 5.789248 , 5.34385962, 5.789248 ,
5.789248 , 4.45308287, 5.789248 , 5.789248 , 5.789248 ,
4.00769449, 4.89847125, 5.789248 , 4.89847125, 5.789248 ,
4.89847125, 3.56230611, 4.00769449, 5.789248 , 5.789248 ,
5.789248 , 5.34385962, 5.789248 , 5.789248 , 5.34385962,
5.789248 , 5.789248 , 5.789248 , 5.789248 , 5.789248 ,
5.789248 , 5.789248 , 5.789248 , 5.789248 , 5.789248 ,
3.56230611, 3.56230611, 4.00769449, 5.789248 , 4.45308287,
3.56230611, 5.789248 , 4.89847125, 4.89847125, 3.11691773,
4.00769449, 5.789248 , 5.34385962, 5.789248 , 5.789248 ,
5.789248 , 5.789248 , 4.45308287, 5.34385962, 4.45308287,
5.789248 , 3.11691773, 5.789248 , 4.45308287, 4.45308287,
4.89847125, 5.789248 , 4.89847125, 4.45308287, 5.789248 ,
5.34385962, 3.56230611, 3.56230611, 5.34385962, 5.789248 ,
4.00769449, 5.34385962, 4.45308287, 4.45308287, 3.56230611,
5.789248 , 4.00769449, 5.789248 , 5.789248 , 4.89847125,
5.34385962, 3.11691773, 4.00769449, 5.789248 , 5.34385962,
5.789248 , 5.789248 , 5.34385962, 5.789248 , 4.45308287,
5.34385962, 4.89847125, 5.789248 , 5.789248 , 5.34385962,
3.56230611, 5.789248 , 5.789248 , 5.789248 , 5.34385962,
5.34385962, 5.789248 , 3.11691773, 5.789248 , 5.789248 ,
3.56230611, 5.789248 , 5.789248 , 5.789248 , 4.45308287,
5.789248 , 4.89847125, 5.789248 , 5.789248 , 5.789248 ,
4.00769449, 4.45308287, 5.34385962, 5.789248 , 3.56230611,
4.00769449, 5.34385962, 4.45308287, 5.789248 , 5.34385962,
5.34385962, 2.67152936, 5.789248 , 5.789248 , 5.789248 ,
5.789248 , 5.789248 , 5.34385962, 5.789248 , 4.89847125,
5.789248 , 5.789248 , 5.789248 , 4.45308287, 5.789248 ,
3.11691773, 4.00769449, 5.789248 , 5.789248 , 5.34385962,
5.789248 , 5.789248 , 5.789248 , 5.789248 , 5.789248 ,
5.789248 , 5.789248 , 2.22614098, 5.789248 , 4.00769449,
5.34385962, 4.89847125, 5.789248 , 5.789248 , 4.00769449,
5.789248 , 3.56230611, 2.67152936, 5.789248 , 3.56230611,
2.67152936, 4.89847125, 5.789248 , 5.789248 , 5.789248 ,
4.45308287, 5.789248 , 4.89847125, 4.00769449, 2.67152936,
4.89847125, 5.789248 , 2.22614098, 3.56230611, 4.45308287,
5.34385962, 5.34385962, 3.11691773, 4.45308287, 4.45308287,
3.11691773, 4.45308287, 5.34385962, 4.45308287, 5.34385962,
4.00769449, 4.45308287, 5.789248 , 5.789248 , 5.789248 ,
5.789248 , 4.00769449, 5.789248 , 5.789248 , 5.789248 ,
4.45308287, 5.789248 , 5.789248 , 5.789248 , 4.00769449,
5.34385962, 4.00769449, 4.45308287, 4.89847125, 4.00769449,
5.789248 , 5.789248 , 5.789248 , 2.67152936, 5.789248 ,
5.789248 , 5.789248 , 5.789248 , 5.789248 , 5.789248 ,
5.789248 , 4.00769449, 3.11691773, 5.34385962, 4.89847125,
5.34385962])
class Committee(object):
    """Reference GLM results for the committee data.

    Pure data container used by the test suite: every attribute was
    produced by an external fit (R and Stata, per the inline notes) and
    is compared against statsmodels output.  Do not edit the literals.
    """
    def __init__(self):
        # 20x5 residual matrix copied verbatim from the external fit
        # (one row per observation; column meanings defined by the
        # consuming tests -- TODO confirm ordering against the test file).
        self.resids = np.array([[ -5.04950800e-01, -6.29721800e-01,
            -8.35499100e+01,
            -1.30628500e+00, -6.62028600e+00],
            [ -2.34152200e-01, -2.55423500e-01, -2.16830700e+02,
            -7.58866000e-01, -7.18370200e+00],
            [ 1.02423700e+00, 7.98775800e-01, 4.83736300e+02,
            2.50351500e+00, 2.25135300e+01],
            [ -2.85061700e-01, -3.17796600e-01, -7.04115100e+04,
            -2.37991800e+00, -1.41745600e+02],
            [ 2.09902500e-01, 1.96787700e-01, 2.24751400e+03,
            9.51945500e-01, 2.17724200e+01],
            [ -4.03483500e-01, -4.75741500e-01, -1.95633600e+04,
            -2.63502600e+00, -8.89461400e+01],
            [ -1.64413400e-01, -1.74401100e-01, -1.73310300e+04,
            -1.16235500e+00, -5.34213500e+01],
            [ -4.29607700e-01, -5.13466700e-01, -5.30037000e+03,
            -2.24496200e+00, -4.78260300e+01],
            [ 3.23713000e-01, 2.94184600e-01, 4.11079400e+03,
            1.48684400e+00, 3.65598400e+01],
            [ 1.50367200e-01, 1.43429400e-01, 7.28532100e+03,
            8.85542900e-01, 3.31355000e+01],
            [ 4.21288600e-01, 3.73428000e-01, 1.37315700e+03,
            1.52133200e+00, 2.41570200e+01],
            [ 4.50658700e-01, 3.96586700e-01, 1.70146900e+03,
            1.66177900e+00, 2.78032600e+01],
            [ 2.43537500e-01, 2.26174000e-01, 3.18402300e+03,
            1.13656200e+00, 2.79073400e+01],
            [ 1.05182900e+00, 8.16205400e-01, 6.00135200e+03,
            3.89079700e+00, 7.97131300e+01],
            [ -5.54450300e-01, -7.12749000e-01, -2.09485200e+03,
            -2.45496500e+00, -3.42189900e+01],
            [ -6.05750600e-01, -8.06411100e-01, -2.74738200e+02,
            -1.90774400e+00, -1.30510500e+01],
            [ -3.41215700e-01, -3.90244600e-01, -6.31138000e+02,
            -1.27022900e+00, -1.47600100e+01],
            [ 2.21898500e-01, 2.07328700e-01, 6.91135800e+02,
            8.16876400e-01, 1.24392900e+01],
            [ 2.45592500e-01, 2.26639200e-01, 1.99250600e-01,
            2.57948300e-01, 2.74723700e-01],
            [ -7.58952600e-01, -1.15300800e+00, -2.56739000e+02,
            -2.40716600e+00, -1.41474200e+01]])
        self.null_deviance = 27.81104693643434 # from R, Rpy bug
        # Fitted coefficients (7 terms) and their standard errors.
        self.params = np.array([-0.0268147 , 1.25103364, 2.91070663,
            -0.34799563, 0.00659808, -0.31303026, -6.44847076])
        self.bse = np.array([ 1.99956263e-02, 4.76820254e-01,
            6.48362654e-01, 4.17956107e-01, 1.41512690e-03, 1.07770186e-01,
            1.99557656e+00])
        # R and Stata normalise the AIC differently, hence two values.
        self.aic_R = 216.66573352377935
        self.aic_Stata = 10.83328660860436
        self.deviance = 5.615520158267981
        self.scale = 0.38528595746569905
        self.llf = -101.33286676188968 # from R
        self.llf_Stata = -101.3328660860436 # same as R
        self.bic_Stata = -33.32900074962649
        self.chi2 = 5.008550263545408
        # 20 observations: df_model (6) + df_resid (13) + 1.
        self.df_model = 6
        self.df_resid = 13
        self.fittedvalues = np.array([12.62019383, 30.18289514, 21.48377849,
            496.74068604,
            103.23024673, 219.94693494, 324.4301163 , 110.82526477,
            112.44244488, 219.86056381, 56.84399998, 61.19840382,
            114.09290269, 75.29071944, 61.21994387, 21.05130889,
            42.75939828, 55.56133536, 0.72532053, 18.14664665])
class Wfs(object):
    """Reference results for TestGlmPoissonOffset.

    Pure data container; values were generated externally in Stata and R
    (see the per-attribute comments) and are compared against statsmodels
    output by the test suite.
    """
    def __init__(self):
        # Residual vectors live in a companion results module.
        self.resids = glm_test_resids.wfs_resids
        self.null_deviance = 3731.85161919 # from R
        # Fitted coefficients and their standard errors (11 terms).
        self.params = [.9969348, 1.3693953, 1.6137574, 1.7849111, 1.9764051,
            .11241858, .15166023, .02297282, -.10127377, -.31014953,
            -.11709716]
        self.bse = [.0527437, .0510688, .0511949, .0512138, .0500341,
            .0324963, .0283292, .0226563, .0309871, .0552107, .0549118]
        self.aic_R = 522.14215776 # R adds 2 for dof to AIC
        self.aic_Stata = 7.459173652869477 # stata divides by nobs
        # self.deviance = 70.6652992116034 # from Stata
        self.deviance = 70.665301270867 # from R
        self.scale = 1.0
        self.llf = -250.0710778504317 # from Stata, ours with scale=1
        self.bic_Stata = -179.9959200693088 # no bic in R?
        self.df_model = 10
        self.df_resid = 59
        self.chi2 = 2699.138063147485 #TODO: taken from Stata not available
        # in sm yet
        # Fitted values for all 70 observations, copied verbatim.
        self.fittedvalues = [7.11599,19.11356,33.76075,33.26743,11.94399,
            27.49849,35.07923,37.22563,64.18037,108.0408,100.0948,35.67896,
            24.10508,73.99577,52.2802,38.88975,35.06507,102.1198,107.251,
            41.53885,196.3685,335.8434,205.3413,43.20131,41.98048,96.65113,
            63.2286,30.78585,70.46306,172.2402,102.5898,43.06099,358.273,
            549.8983,183.958,26.87062,62.53445,141.687,52.47494,13.10253,
            114.9587,214.803,90.33611,18.32685,592.5995,457.4376,140.9273,
            3.812064,111.3119,97.62744,57.48056,19.43552,130.4872,
            151.7268,69.67963,13.04879,721.728,429.2136,128.2132,9.04735,
            301.7067,177.3487,46.40818,4.707507,330.4211,330.7497,84.38604,
            1456.757,451.005,67.51025]
class CpunishTweediePower15(object):
    """Reference results for a Tweedie GLM (var.power=1.5, identity link)
    fit to the cpunish data in R.  Pure data container; do not edit the
    literals.  Generated with:

    # From R
    setwd('c:/workspace')
    data <- read.csv('cpunish.csv', sep=",")
    library(statmod)
    library(tweedie)
    summary(glm(EXECUTIONS ~ INCOME + SOUTH - 1,
        family=tweedie(var.power=1.5, link.power=1),
        data=data))
    """
    def __init__(self):
        # The four residual types returned by R's resid(), one value per
        # observation (17 observations), copied verbatim.
        resid_resp = [28.90498242, 0.5714367394, 4.3135711827, -3.7417822942,
            -4.9544111888, 0.4666602184, 0.0747051827, -6.114236142,
            -1.0048540116, -6.9747602544, -0.7626907093,
            -0.5688093336, -6.9845579527, -1.1594503855,
            -0.6365453438, -0.3994222036, -0.732355528]
        resid_dev = [3.83881147757395, 0.113622743768915, 2.01981988071128,
            -0.938107751845672, -1.29607304923555, 0.316205676540778,
            0.045273675744568, -1.69968893354602, -0.699080227540624,
            -2.1707839733642, -0.568738719015137, -0.451266938413727,
            -2.17218106358745, -0.774613533242944, -0.493831656345955,
            -0.336453094366771, -0.551210030548659]
        resid_pear = [6.02294407053171, 0.115516970886608, 2.9148208139849,
            -0.806210703943481, -1.04601155367613, 0.338668788938945,
            0.045708693925888, -1.27176471794657, -0.5964031365026,
            -1.46974255264233, -0.498557360800493,
            -0.405777068096011, -1.47045242302365, -0.65086941662954,
            -0.439928270112046, -0.310433407220704,
            -0.485001313250992]
        resid_work = [28.9049727916181, 0.571427719513967, 4.31357425907762,
            -3.74179256698823, -4.9544210736226, 0.466663015515745,
            0.0747086948013966, -6.114245735344, -1.00485035431368,
            -6.97477010217068, -0.76268749374494, -0.568806471745149,
            -6.98456778258272, -1.15944644619981, -0.636542358439925,
            -0.399419650775458, -0.732352367853816]
        self.resid_response = resid_resp
        self.resid_deviance = resid_dev
        self.resid_pearson = resid_pear
        self.resid_working = resid_work
        # Attributes kept commented out were not available / applicable
        # for this family in the external fit.
        # self.null_deviance = 3731.85161919 # N/A
        self.params = [0.0000471043, 6.4721324886]
        self.bse = [0.0000246888, 3.5288126173]
        # self.aic_R = 522.14215776 # R adds 2 for dof to AIC
        # self.aic_Stata = 7.459173652869477 # stata divides by nobs
        # self.deviance = 70.6652992116034 # from Stata
        self.deviance = 36.087307138233 # from R
        # self.scale = 1.0
        # self.llf = -250.0710778504317 # from Stata, ours with scale=1
        # self.bic_Stata = -179.9959200693088 # no bic in R?
        self.df_model = 1
        self.df_resid = 15
        # self.chi2 = 2699.138063147485 #TODO: taken from Stata not available
        # in sm yet
        self.fittedvalues = [8.09501758000751, 8.42856326056927,
            1.68642881732415, 7.74178229423817,
            7.95441118875248, 1.53333978161934,
            1.92529481734232, 8.11423614202829,
            2.00485401159015, 7.97476025442155,
            1.76269070926448, 1.56880933358418,
            7.98455795270665, 2.15945038549266,
            1.63654534384372, 1.39942220361664,
            1.73235552803559]
class CpunishTweediePower2(object):
    """Reference results for a Tweedie GLM (var.power=2, identity link)
    fit to the cpunish data in R.  Pure data container; do not edit the
    literals.  Generated with:

    # From R
    setwd('c:/workspace')
    data <- read.csv('cpunish.csv', sep=",")
    library(statmod)
    library(tweedie)
    summary(glm(EXECUTIONS ~ INCOME + SOUTH - 1,
        family=tweedie(var.power=2, link.power=1),
        data=data))
    """
    def __init__(self):
        # The four residual types returned by R's resid(), one value per
        # observation (17 observations), copied verbatim.  With an
        # identity link the working residuals equal the response ones.
        resid_resp = [28.9397568116168, 0.605199215492085, 4.30845487128123,
            -3.7059362524505, -4.91921022348665, 0.46200835064931,
            0.068864196242604, -6.07952005594693, -1.01093636580438,
            -6.9396210244365, -0.768038385056284, -0.573568809339664,
            -6.94944844711606, -1.16600175635393, -0.641510318056987,
            -0.403667790321936, -0.737611172529194]
        resid_dev = [2.03295746713119, 0.0704291140028282, 1.60058476017728,
            -0.591230836989137, -0.836067997150736, 0.274690511542166,
            0.0352446721149477, -1.13465831620614, -0.625909330466303,
            -1.5477830210949, -0.520517540529698, -0.421531194473357,
            -1.54848147513823, -0.684927882583903, -0.45784673829438,
            -0.320960880764019, -0.505992145923248]
        resid_pear = [3.59043221590711, 0.0720921473930558, 2.54705286789752,
            -0.480919661289957, -0.621174344999372,
            0.300397177607798, 0.0356599448410699,
            -0.752460543924524, -0.502719222246499,
            -0.874049404005278, -0.434401419984914,
            -0.364501892726482, -0.874205109115113,
            -0.538319857282425, -0.390804925805356,
            -0.287580717535275, -0.424497254731367]
        resid_work = [28.9397568116168, 0.605199215492085, 4.30845487128123,
            -3.7059362524505, -4.91921022348665, 0.46200835064931,
            0.068864196242604, -6.07952005594693, -1.01093636580438,
            -6.9396210244365, -0.768038385056284, -0.573568809339664,
            -6.94944844711606, -1.16600175635393, -0.641510318056987,
            -0.403667790321936, -0.737611172529194]
        self.resid_response = resid_resp
        self.resid_deviance = resid_dev
        self.resid_pearson = resid_pear
        self.resid_working = resid_work
        # Attributes kept commented out were not available / applicable
        # for this family in the external fit.
        # self.null_deviance = 3731.85161919 # N/A
        self.params = [4.72472244209477e-05, 6.43243456540827]
        self.bse = [1.86839521185429e-05, 3.83231672422612]
        # self.aic_R = 522.14215776 # R adds 2 for dof to AIC
        # self.aic_Stata = 7.459173652869477 # stata divides by nobs
        # self.deviance = 70.6652992116034 # from Stata
        self.deviance = 15.7840685407599 # from R
        # self.scale = 1.0
        # self.llf = -250.0710778504317 # from Stata, ours with scale=1
        # self.bic_Stata = -179.9959200693088 # no bic in R?
        self.df_model = 1
        self.df_resid = 15
        # self.chi2 = 2699.138063147485 #TODO: taken from Stata not available
        # in sm yet
        self.fittedvalues = [8.06024318838318, 8.39480078450791,
            1.69154512871877, 7.7059362524505,
            7.91921022348665, 1.53799164935069,
            1.9311358037574, 8.07952005594693,
            2.01093636580438,7.9396210244365,
            1.76803838505628, 1.57356880933966,
            7.94944844711606, 2.16600175635393,
            1.64151031805699, 1.40366779032194,
            1.73761117252919]
class CpunishTweedieLog1(object):
    """Reference results for a Tweedie GLM (var.power=1, log link) fit to
    the cpunish data in R.  Pure data container; do not edit the
    literals.  Generated with:

    # From R
    setwd('c:/workspace')
    data <- read.csv('cpunish.csv', sep=",")
    library(statmod)
    library(tweedie)
    summary(glm(EXECUTIONS ~ INCOME + SOUTH - 1,
        family=tweedie(var.power=1, link.power=0),
        data=data))
    """
    def __init__(self):
        # The four residual types returned by R's resid(), one value per
        # observation (17 observations), copied verbatim.
        resid_resp = [28.7231009386298, -0.307318358456484, 4.19015460156576,
            -3.30975297068573, -4.87746969906705, 0.285041779927669,
            0.0315071085472043, -6.33304532673002, -1.02436294926752,
            -6.9340610414309, -0.859055122126197, -0.736490247380883,
            -6.96145354225969, -1.13750232106315, -0.778363801217565,
            -0.636042191521576, -0.839322392162821]
        resid_dev = [7.30513948467594, -0.101296157943519, 2.44987904003561,
            -1.34021826264378, -1.99062116973315, 0.212014827300475,
            0.0223969676885324, -2.63775728156667, -0.798884085657077,
            -3.11862021596631, -0.691356293575324, -0.607658243497501,
            -3.12628915913493, -0.869326536299756, -0.636663290048755,
            -0.536212950673418, -0.67812263418512]
        resid_pear = [9.98383729954486, -0.100734032611758, 3.11465040934513,
            -1.22417704160631, -1.73780566805242, 0.217661565866984,
            0.0224564769560215, -2.19386916576256,
            -0.719962160947025, -2.46172701579962,
            -0.630049829146329, -0.558895774299477,
            -2.4671965358931, -0.778034748813176,
            -0.583676657782738, -0.497265896656757,
            -0.61887064145702]
        resid_work = [3.47027319357873, -0.0330190014589175, 2.31520029566659,
            -0.452785885372436, -0.619167053050639,
            0.166209168591668, 0.0160057009522403,
            -0.759991705123147, -0.506017436072008,
            -0.873961141113221, -0.46209233491888,
            -0.424125760851072, -0.874394795536774,
            -0.532164250702372, -0.437685360377137,
            -0.388768819543728, -0.456321521305397]
        self.resid_response = resid_resp
        self.resid_deviance = resid_dev
        self.resid_working = resid_work
        self.resid_pearson = resid_pear
        # Attributes kept commented out were not available / applicable
        # for this family in the external fit.
        # self.null_deviance = 3731.85161919 # N/A
        self.params = [1.65700638623525e-05, 1.54257997850499]
        self.bse = [1.81044999017907e-05, 0.725739640176733]
        # self.aic_R = 522.14215776 # R adds 2 for dof to AIC
        # self.aic_Stata = 7.459173652869477 # stata divides by nobs
        # self.deviance = 70.6652992116034 # from Stata
        self.deviance = 95.0325613464258 # from R
        # self.scale = 1.0
        # self.llf = -250.0710778504317 # from Stata, ours with scale=1
        # self.bic_Stata = -179.9959200693088 # no bic in R?
        self.df_model = 1
        self.df_resid = 15
        # self.chi2 = 2699.138063147485 #TODO: taken from Stata not available
        # in sm yet
        self.fittedvalues = [8.27689906137016, 9.30731835845648,
            1.80984539843424, 7.30975297068573,
            7.87746969906705, 1.71495822007233,
            1.9684928914528, 8.33304532673002,
            2.02436294926752, 7.9340610414309,
            1.8590551221262, 1.73649024738088,
            7.96145354225969, 2.13750232106315,
            1.77836380121756, 1.63604219152158,
            1.83932239216282]
class FairTweedieLog15(object):
    """Reference results for a Tweedie GLM (var.power=1.5, log link) fit
    to the fair (affairs) data in R.  Pure data container; do not edit
    the literals.  Generated with:

    # From R
    setwd('c:/workspace')
    data <- read.csv('fair.csv', sep=",")
    library(statmod)
    library(tweedie)
    model <- glm(affairs ~ rate_marriage + age + yrs_married -1, data=data,
        family=tweedie(var.power=1.5, link.power = 0))
    r <- resid(model, type='response')
    paste(as.character(r[1:17]), collapse=",")
    r <- resid(model, type='deviance')
    paste(as.character(r[1:17]), collapse=",")
    r <- resid(model, type='pearson')
    paste(as.character(r[1:17]), collapse=",")
    r <- resid(model, type='working')
    paste(as.character(r[1:17]), collapse=",")
    paste(as.character(model$coefficients[1:17]), collapse=",")
    s <- summary(model)
    paste(as.character(sqrt(diag(s$cov.scaled))), collapse=",")
    s$deviance
    paste(as.character(model$fitted.values[1:17]), collapse=",")
    """
    def __init__(self):
        # Only the first 18 residuals / fitted values of the full sample
        # were exported from R (see the paste() calls in the docstring).
        resid_resp = [-0.997868449815039, 2.69283106662728, 0.677397439981157,
            0.220024942629269, 4.30244966465517, 4.12917275616972,
            0.669303122309246, 1.64321562230925, 3.73361710426128,
            0.271937359562684, 1.70030700747884, 1.55430573164611,
            -0.263723852468304, 1.51263973164611, 2.75223392654071,
            0.310487741565721, 1.28077676333896 , -0.722602160018842]
        resid_dev = [-1.40274708439925, 2.48476334070913, 0.722690630291423,
            0.333179337353702, 4.00781035212304, 3.33344591331998,
            1.51543361886727, 2.82502498800952, 2.2795411865605,
            0.245239170945663, 0.993721205729013, 1.74920359743562,
            -0.363141475997386, 1.71412357710318, 2.57445879456298,
            0.279858474280908, 1.22953362433333, -1.84397406923697]
        resid_pear = [-0.923380371255914, 4.28706294677515, 0.864309147553743,
            0.366063826152319, 9.17690493704408, 6.57783985712941,
            2.39340023647571, 5.87607098775551, 3.55791152198837,
            0.260052421285998, 1.21439278430259, 2.66470328868695,
            -0.327698246542009, 2.59327105694137, 4.53096038849505,
            0.299198418236691, 1.6399313081981, -0.921987034618483]
        resid_work = [-0.899807800767353, 5.00583784559752, 0.937441759049674,
            0.433762277766879, 11.8128959278604, 7.6822784352496,
            3.65998654763585, 8.98568506862295, 3.50120010377224,
            0.256207345500911, 1.08551656668241, 3.18923357641756,
            -0.352302468597673, 3.10374035363038, 5.35005901385941,
            0.29552727652976, 1.78077778644209, -1]
        self.resid_response = resid_resp
        self.resid_deviance = resid_dev
        self.resid_working = resid_work
        self.resid_pearson = resid_pear
        # Attributes kept commented out were not available / applicable
        # for this family in the external fit.
        # self.null_deviance = 3731.85161919 # N/A
        self.params = [-0.389168171340452, 0.0670222370664611,
            -0.0970852004566712]
        self.bse = [0.0323435784513691, 0.0063805300018014,
            0.00893580175352525]
        # self.aic_R = 522.14215776 # R adds 2 for dof to AIC
        # self.aic_Stata = 7.459173652869477 # stata divides by nobs
        # self.deviance = 70.6652992116034 # from Stata
        self.deviance = 20741.82 # from R
        # self.scale = 1.0
        # self.llf = -250.0710778504317 # from Stata, ours with scale=1
        # self.bic_Stata = -179.9959200693088 # no bic in R?
        self.df_model = 2
        self.df_resid = 6363
        # self.chi2 = 2699.138063147485 #TODO: taken from Stata not available
        # in sm yet
        self.fittedvalues = [1.10897954981504, 0.537938133372725,
            0.722602160018842, 0.507247757370731,
            0.364216335344828, 0.537493243830281,
            0.182870377690754, 0.182870377690754,
            1.06638209573872, 1.06139564043732,
            1.56635749252116, 0.487360268353893,
            0.748572252468304, 0.487360268353893,
            0.514430573459285, 1.05062295843428,
            0.71922323666104, 0.722602160018842]
| bsd-3-clause | 4,131,202,423,416,573,400 | 68.522606 | 101 | 0.553892 | false |
lebabouin/CouchPotatoServer-develop | couchpotato/core/notifications/core/main.py | 6 | 7961 | from couchpotato import get_session
from couchpotato.api import addApiView, addNonBlockApiView
from couchpotato.core.event import addEvent, fireEvent
from couchpotato.core.helpers.encoding import toUnicode
from couchpotato.core.helpers.variable import tryInt, splitString
from couchpotato.core.logger import CPLog
from couchpotato.core.notifications.base import Notification
from couchpotato.core.settings.model import Notification as Notif
from couchpotato.environment import Env
from operator import itemgetter
from sqlalchemy.sql.expression import or_
import threading
import time
import traceback
import uuid
log = CPLog(__name__)
class CoreNotifier(Notification):
m_lock = None
listen_to = [
'renamer.after', 'movie.snatched',
'updater.available', 'updater.updated',
'core.message', 'core.message.important',
]
    def __init__(self):
        """Wire up notification events, API endpoints and recurring jobs."""
        super(CoreNotifier, self).__init__()
        # Internal events other plugins fire to create / push notifications.
        addEvent('notify', self.notify)
        addEvent('notify.frontend', self.frontend)
        addApiView('notification.markread', self.markAsRead, docs = {
            'desc': 'Mark notifications as read',
            'params': {
                'ids': {'desc': 'Notification id you want to mark as read. All if ids is empty.', 'type': 'int (comma separated)'},
            },
        })
        addApiView('notification.list', self.listView, docs = {
            'desc': 'Get list of notifications',
            'params': {
                'limit_offset': {'desc': 'Limit and offset the notification list. Examples: "50" or "50,30"'},
            },
            'return': {'type': 'object', 'example': """{
    'success': True,
    'empty': bool, any notification returned or not,
    'notifications': array, notifications found,
}"""}
        })
        # Long-poll listener endpoint: the non-blocking variant registers /
        # unregisters callbacks, the blocking one serves a plain request.
        addNonBlockApiView('notification.listener', (self.addListener, self.removeListener))
        addApiView('notification.listener', self.listener)
        # Periodic maintenance: remote message check and in-memory cleanup.
        fireEvent('schedule.interval', 'core.check_messages', self.checkMessages, hours = 12, single = True)
        fireEvent('schedule.interval', 'core.clean_messages', self.cleanMessages, seconds = 15, single = True)
        addEvent('app.load', self.clean)
        addEvent('app.load', self.checkMessages)
        # In-memory frontend message queue and registered long-poll
        # listeners, both guarded by m_lock.
        self.messages = []
        self.listeners = []
        self.m_lock = threading.Lock()
def clean(self):
db = get_session()
db.query(Notif).filter(Notif.added <= (int(time.time()) - 2419200)).delete()
db.commit()
def markAsRead(self, ids = None, **kwargs):
ids = splitString(ids) if ids else None
db = get_session()
if ids:
q = db.query(Notif).filter(or_(*[Notif.id == tryInt(s) for s in ids]))
else:
q = db.query(Notif).filter_by(read = False)
q.update({Notif.read: True})
db.commit()
return {
'success': True
}
def listView(self, limit_offset = None, **kwargs):
db = get_session()
q = db.query(Notif)
if limit_offset:
splt = splitString(limit_offset)
limit = splt[0]
offset = 0 if len(splt) is 1 else splt[1]
q = q.limit(limit).offset(offset)
else:
q = q.limit(200)
results = q.all()
notifications = []
for n in results:
ndict = n.to_dict()
ndict['type'] = 'notification'
notifications.append(ndict)
return {
'success': True,
'empty': len(notifications) == 0,
'notifications': notifications
}
def checkMessages(self):
prop_name = 'messages.last_check'
last_check = tryInt(Env.prop(prop_name, default = 0))
messages = fireEvent('cp.messages', last_check = last_check, single = True) or []
for message in messages:
if message.get('time') > last_check:
message['sticky'] = True # Always sticky core messages
message_type = 'core.message.important' if message.get('important') else 'core.message'
fireEvent(message_type, message = message.get('message'), data = message)
if last_check < message.get('time'):
last_check = message.get('time')
Env.prop(prop_name, value = last_check)
    def notify(self, message = '', data = None, listener = None):
        """Persist a notification in the database and push it to the
        frontend.  Returns True once stored."""
        if not data: data = {}
        db = get_session()
        # Tag the row with the event that produced it.
        data['notification_type'] = listener if listener else 'unknown'
        n = Notif(
            message = toUnicode(message),
            data = data
        )
        db.add(n)
        db.commit()
        # NOTE(review): ndict is built but never used below -- frontend()
        # is called with the raw `data` instead.  Looks like dead code;
        # confirm whether to_dict() is kept only for its post-commit
        # attribute-refresh side effect before removing.
        ndict = n.to_dict()
        ndict['type'] = 'notification'
        ndict['time'] = time.time()
        self.frontend(type = listener, data = data)
        return True
    def frontend(self, type = 'notification', data = None, message = None):
        """Queue a message for the frontend and deliver it to every waiting
        long-poll listener. Serialized with self.m_lock."""
        if not data: data = {}
        log.debug('Notifying frontend')
        self.m_lock.acquire()
        notification = {
            'message_id': str(uuid.uuid4()),
            'time': time.time(),
            'type': type,
            'data': data,
            'message': message,
        }
        self.messages.append(notification)
        # Each listener callback is consumed (popped) exactly once; the list
        # is cleared afterwards so stale callbacks don't linger.
        while len(self.listeners) > 0 and not self.shuttingDown():
            try:
                listener, last_id = self.listeners.pop()
                listener({
                    'success': True,
                    'result': [notification],
                })
            except:
                log.debug('Failed sending to listener: %s', traceback.format_exc())
        self.listeners = []
        self.m_lock.release()
        log.debug('Done notifying frontend')
    def addListener(self, callback, last_id = None):
        """Register a long-poll callback.

        If messages newer than `last_id` are already queued, reply to the
        callback immediately instead of registering it.
        """
        if last_id:
            messages = self.getMessages(last_id)
            if len(messages) > 0:
                return callback({
                    'success': True,
                    'result': messages,
                })
        self.m_lock.acquire()
        self.listeners.append((callback, last_id))
        self.m_lock.release()
def removeListener(self, callback):
self.m_lock.acquire()
new_listeners = []
for list_tuple in self.listeners:
try:
listener, last_id = list_tuple
if listener != callback:
new_listeners.append(list_tuple)
except:
log.debug('Failed removing listener: %s', traceback.format_exc())
self.listeners = new_listeners
self.m_lock.release()
def cleanMessages(self):
if len(self.messages) == 0:
return
log.debug('Cleaning messages')
self.m_lock.acquire()
time_ago = (time.time() - 15)
self.messages[:] = [m for m in self.messages if (m['time'] > time_ago)]
self.m_lock.release()
log.debug('Done cleaning messages')
def getMessages(self, last_id):
log.debug('Getting messages with id: %s', last_id)
self.m_lock.acquire()
recent = []
try:
index = map(itemgetter('message_id'), self.messages).index(last_id)
recent = self.messages[index + 1:]
except:
pass
self.m_lock.release()
log.debug('Returning for %s %s messages', (last_id, len(recent)))
return recent
    def listener(self, init = False, **kwargs):
        """Long-poll payload: on init, return unread notifications plus any
        added within the last 3 days (259200 s); otherwise an empty result."""
        messages = []
        # Get unread
        if init:
            db = get_session()
            notifications = db.query(Notif) \
                .filter(or_(Notif.read == False, Notif.added > (time.time() - 259200))) \
                .all()
            for n in notifications:
                ndict = n.to_dict()
                ndict['type'] = 'notification'
                messages.append(ndict)
        return {
            'success': True,
            'result': messages,
        }
| gpl-3.0 | 8,934,798,980,605,338,000 | 28.161172 | 131 | 0.551941 | false |
richard-ma/search-engine-script-framework | modules/SEmojeek.py | 1 | 1142 | # -*- coding: utf-8 -*-
# bing search engine
import requests
import re
import sys
import time
from urllib import quote
from searchEngine import SearchEngine
class SEmojeek(SearchEngine):
    """Search-engine driver for mojeek.co.uk, built on the SearchEngine base."""
    def __init__(self):
        super(SEmojeek, self).__init__()
        self.engineName = "mojeek-co-uk"
        # self.url = "http://cn.bing.com/search?q=%s&first=%d" # search engine URL: %s for keyword, %d for page
        self.url = "https://www.mojeek.co.uk/search?q=%s&s=%d"
        self.url_reg = r'<li><h2><a href="(.*?)" class="ob"' # must be set in each subclass
        #self.headers = {} # HTTP request headers; the parent-class default can be used
        #self.cookies = {} # HTTP cookies; the parent-class default can be used
        #self.timeout = 1 # request timeout; beyond this the server is treated as unresponsive and the connection is dropped; parent-class default can be used
        #self.page_count = 1 # download-link counter; the parent-class default can be used
        #self.count_per_page = 10 # links per result page; the parent-class default can be used
    def preProcess(self, keyword):
        """Replace spaces with '+' so the keyword can be embedded in the URL."""
        return keyword.replace(' ', '+')
# Quick manual smoke test: search for "baidu" and fetch 2 pages of results.
if __name__ == '__main__':
    se = SEmojeek()
    se.search('baidu', 2)
| mit | -1,881,975,293,416,817,200 | 27.606061 | 100 | 0.601695 | false |
gwAdvNet2015/adv-net-samples | sdn/pox/pox/lib/pxpcap/__init__.py | 42 | 11880 | # Copyright 2011,2013 James McCauley
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Locate the compiled pcap bindings: first try the platform-specific build
# (e.g. pox.lib.pxpcap.linux.pxpcap), then fall back to a generic top-level
# pxpcap module. "enabled" records whether either import succeeded.
enabled = False
try:
  import platform
  import importlib
  _module = 'pox.lib.pxpcap.%s.pxpcap' % (platform.system().lower(),)
  pcapc = importlib.import_module(_module)
  enabled = True
except:
  # Try generic...
  try:
    import pxpcap as pcapc
    enabled = True
  except:
    # We can at least import the rest
    pass
from pox.lib.addresses import IPAddr, EthAddr, IPAddr6
import parser
from threading import Thread, Lock
import pox.lib.packet as pkt
import copy
# pcap's filter compiling function isn't threadsafe, so we use this
# module-wide lock to serialize all filter compilation (see Filter below).
_compile_lock = Lock()
class PCap (object):
  """Wrapper around a libpcap capture handle for one network device.

  Captured packets are delivered on a background thread to self.callback
  (by default the _handle_rx method). Device addresses and capture
  statistics are exposed as attributes.
  """
  use_select = False # Falls back to non-select
  @staticmethod
  def get_devices ():
    """Return {device_name: {'desc': ..., 'addrs': {family: {...}}}} for
    every capture device reported by the pcap bindings, with addresses
    converted to IPAddr/IPAddr6/EthAddr objects."""
    def ip (addr):
      if addr is None: return None
      return IPAddr(addr, networkOrder=True)
    def ip6 (addr):
      if addr is None: return None
      return IPAddr6.from_raw(addr)
    def link (addr):
      if addr is None: return None
      if len(addr) != 6: return None
      return EthAddr(addr)
    devs = pcapc.findalldevs()
    out = {}
    for d in devs:
      addrs = {}
      n = {'desc':d[1],'addrs':addrs}
      out[d[0]] = n
      for a in d[2]:
        if a[0] == 'AF_INET':
          na = {}
          addrs[a[0]] = na
          na['addr'] = ip(a[1])
          na['netmask'] = ip(a[2])
          na['broadaddr'] = ip(a[3])
          na['dstaddr'] = ip(a[4])
        elif a[0] == 'AF_INET6':
          na = {}
          addrs[a[0]] = na
          na['addr'] = ip6(a[1])
          na['netmask'] = ip6(a[2])
          na['broadaddr'] = ip6(a[3])
          na['dstaddr'] = ip6(a[4])
        elif a[0] == 'AF_LINK':
          na = {}
          addrs[a[0]] = na
          na['addr'] = link(a[1])
          na['netmask'] = link(a[2])
          na['broadaddr'] = link(a[3])
          na['dstaddr'] = link(a[4])
        elif a[0] == 'AF_PACKET':
          addrs[a[0]] = {'addr':link(a[1])}
        elif a[0] == 'ethernet':
          addrs[a[0]] = {'addr':link(a[1])}
    return out
  @staticmethod
  def get_device_names ():
    """Return just the device names from the pcap device list."""
    return [d[0] for d in pcapc.findalldevs()]
  def __init__ (self, device = None, promiscuous = True, period = 10,
                start = True, callback = None, filter = None,
                use_bytearray = False, **kw):
    """
    Initialize this instance
    use_bytearray: specifies capturing to bytearray buffers instead of bytes
    """
    if filter is not None:
      # Filter can't be applied until the device is open; stash it.
      self.deferred_filter = (filter,)
    else:
      self.deferred_filter = None
    self.packets_received = 0
    self.packets_dropped = 0
    self._thread = None
    self.pcap = None
    self.promiscuous = promiscuous
    self.device = None
    self.use_bytearray = use_bytearray
    self.period = period
    self.netmask = IPAddr("0.0.0.0")
    self._quitting = False
    self.addresses = {}
    if callback is None:
      self.callback = self.__class__._handle_rx
    else:
      self.callback = callback
    # Extra keyword args become attributes (must not clash with existing ones).
    for k,v in kw.items():
      assert not hasattr(self, k)
      setattr(self, k, v)
    if device is not None:
      self.open(device)
    if self.pcap is not None:
      if start:
        self.start()
  def _handle_rx (self, data, sec, usec, length):
    """Default packet callback: does nothing (override or pass callback=)."""
    pass
  def open (self, device, promiscuous = None, period = None,
            incoming = True, outgoing = False):
    """Open a live capture on `device` and apply any deferred filter.
    `period` is passed to pcap open_live (libpcap read timeout)."""
    assert self.device is None
    self.addresses = self.get_devices()[device]['addrs']
    if 'AF_INET' in self.addresses:
      self.netmask = self.addresses['AF_INET'].get('netmask')
      if self.netmask is None: self.netmask = IPAddr("0.0.0.0")
    #print "NM:",self.netmask
    #print self.addresses['AF_LINK']['addr']
    self.device = device
    if period is not None:
      self.period = period
    if promiscuous is not None:
      self.promiscuous = promiscuous
    self.pcap = pcapc.open_live(device, 65535,
                                1 if self.promiscuous else 0, self.period)
    pcapc.setdirection(self.pcap, incoming, outgoing)
    self.packets_received = 0
    self.packets_dropped = 0
    if self.deferred_filter is not None:
      self.set_filter(*self.deferred_filter)
      self.deferred_filter = None
  def set_direction (self, incoming, outgoing):
    """Choose whether to capture incoming and/or outgoing packets."""
    pcapc.setdirection(self._pcap, incoming, outgoing)
  def set_nonblocking (self, nonblocking = True):
    pcapc.setnonblock(self._pcap, 1 if nonblocking else 0)
  def set_blocking (self, blocking = True):
    self.set_nonblocking(nonblocking = not blocking)
  @property
  def blocking (self):
    return False if pcapc.getnonblock(self._pcap) else True
  @blocking.setter
  def blocking (self, value):
    self.set_blocking(value)
  def next_packet (self, allow_threads = True):
    """
    Get next packet
    Returns tuple with:
      data, timestamp_seconds, timestamp_useconds, total length, and
      the pcap_next_ex return value -- 1 is success
    """
    return pcapc.next_ex(self._pcap, bool(self.use_bytearray), allow_threads)
  def _select_thread_func (self):
    """Capture loop that waits on select(); falls back to the plain
    dispatch loop when select/fileno support is unavailable."""
    try:
      import select
      fd = [self.fileno()]
    except:
      # Fall back
      self._thread_func()
      return
    self.blocking = False
    while not self._quitting:
      rr,ww,xx = select.select(fd, [], fd, 2)
      if xx:
        # Apparently we're done here.
        break
      if rr:
        r = self.next_packet(allow_threads = False)
        if r[-1] == 0: continue
        if r[-1] == 1:
          self.callback(self, r[0], r[1], r[2], r[3])
        else:
          break
    self._quitting = False
    self._thread = None
  def _thread_func (self):
    """Capture loop: dispatch packets in batches of up to 100 and refresh
    received/dropped statistics after each batch."""
    while not self._quitting:
      pcapc.dispatch(self.pcap,100,self.callback,self,bool(self.use_bytearray),True)
      self.packets_received,self.packets_dropped = pcapc.stats(self.pcap)
    self._quitting = False
    self._thread = None
  def _handle_GoingDownEvent (self, event):
    # POX is shutting down -- release the capture handle.
    self.close()
  def start (self):
    """Start the background capture thread (select-based if use_select)."""
    assert self._thread is None
    from pox.core import core
    core.addListeners(self, weak=True)
    if self.use_select:
      self._thread = Thread(target=self._select_thread_func)
    else:
      self._thread = Thread(target=self._thread_func)
    #self._thread.daemon = True
    self._thread.start()
  def stop (self):
    """Ask the capture thread to quit and wait for it to finish."""
    t = self._thread
    if t is not None:
      self._quitting = True
      pcapc.breakloop(self.pcap)
      t.join()
  def close (self):
    """Stop capturing and release the pcap handle (idempotent)."""
    if self.pcap is None: return
    self.stop()
    pcapc.close(self.pcap)
    self.pcap = None
  def __del__ (self):
    self.close()
  @property
  def _pcap (self):
    # Like self.pcap, but raises if the device was never opened.
    if self.pcap is None:
      raise RuntimeError("PCap object not open")
    return self.pcap
  def inject (self, data):
    """Transmit a raw packet (or a packet object that packs to bytes)."""
    if isinstance(data, pkt.ethernet):
      data = data.pack()
    if not isinstance(data, (bytes,bytearray)):
      data = bytes(data) # Give it a try...
    return pcapc.inject(self.pcap, data)
  def set_filter (self, filter, optimize = True):
    """Apply a BPF filter (string or Filter); deferred if not yet open."""
    if self.pcap is None:
      self.deferred_filter = (filter, optimize)
      return
    if isinstance(filter, str):
      filter = Filter(filter, optimize, self.netmask.toSignedN(),
                      pcap_obj=self)
    elif isinstance(filter, Filter):
      pass
    else:
      raise RuntimeError("Filter must be string or Filter object")
    pcapc.setfilter(self.pcap, filter._pprogram)
  def fileno (self):
    """Return a selectable file descriptor for the capture, if available."""
    if self.pcap is None:
      raise RuntimeError("PCap object not open")
    r = pcapc.get_selectable_fd(self.pcap)
    if r == -1:
      raise RuntimeError("Selectable FD not available")
    return r
  def __str__ (self):
    return "PCap(device=%s)" % (self.device)
class Filter (object):
  """A compiled BPF filter program (result of pcap compile)."""
  def __init__ (self, filter, optimize = True, netmask = None,
                pcap_obj = None, link_type = 1, snaplen = 65535):
    self._pprogram = None
    if netmask is None:
      netmask = 0
    elif isinstance(netmask, IPAddr):
      netmask = netmask.toSignedN()
    delpc = False
    if pcap_obj is None:
      # No capture handle supplied -- compile against a temporary dead
      # handle, which is closed again below.
      delpc = True
      pcap_obj = pcapc.open_dead(link_type, snaplen)
    if isinstance(pcap_obj, PCap):
      pcap_obj = pcap_obj.pcap
    # Filter compilation isn't threadsafe; serialize via the module lock.
    with _compile_lock:
      self._pprogram = pcapc.compile(pcap_obj, filter,
                                     1 if optimize else 0, netmask)
    if delpc:
      pcapc.close(pcap_obj)
  def __del__ (self):
    if self._pprogram:
      pcapc.freecode(self._pprogram)
# Build a reverse map from numeric datalink-type values to their DLT_*
# constant names, scraped from whatever DLT_ constants the pcap bindings
# expose (best effort -- silently empty if the bindings are missing).
try:
  _link_type_names = {}
  for k,v in copy.copy(pcapc.__dict__).iteritems():
    if k.startswith("DLT_"):
      _link_type_names[v] = k
except:
  pass
def get_link_type_name (dlt):
  """Map a numeric datalink type to its DLT_* constant name, falling back
  to a readable placeholder for unknown values."""
  try:
    return _link_type_names[dlt]
  except KeyError:
    return "<Unknown " + str(dlt) + ">"
def test (interface = "en1"):
  """
  Interactive sniffing demo: captures ICMP on `interface`, prints packet
  and statistics info, then drops into a Python shell with ping helpers.
  """
  global drop,total,bytes_got,bytes_real,bytes_diff
  drop = 0
  total = 0
  bytes_got = 0
  bytes_real = 0
  bytes_diff = 0
  def cb (obj, data, sec, usec, length):
    # Per-packet callback: track byte/drop counters and print IPv4 info.
    global drop,total,bytes_got,bytes_real,bytes_diff
    #print ">>>",data
    t,d = pcapc.stats(obj.pcap)
    bytes_got += len(data)
    bytes_real += length
    nbd = bytes_real - bytes_got
    if nbd != bytes_diff:
      bytes_diff = nbd
      print "lost bytes:",nbd
    if t > total:
      total = t + 500
      print t,"total"
    if d > drop:
      drop = d
      print d, "dropped"
    p = pkt.ethernet(data)
    ip = p.find('ipv4')
    if ip:
      print ip.srcip,"\t",ip.dstip, p
  print "\n".join(["%i. %s" % x for x in
                   enumerate(PCap.get_device_names())])
  # "#N" selects the Nth device from the detected device list.
  if interface.startswith("#"):
    interface = int(interface[1:])
    interface = PCap.get_device_names()[interface]
  print "Interface:",interface
  p = PCap(interface, callback = cb,
           filter = "icmp")
           #[icmptype] != icmp-echoreply")
           #filter = "ip host 74.125.224.148")
  p.set_direction(True, True)
  def ping (eth='00:18:02:6e:ce:55', ip='192.168.0.1'):
    # Build and inject a hand-crafted ICMP echo request.
    e = pkt.ethernet()
    e.src = p.addresses['ethernet']['addr'] or '02:00:00:11:22:33'
    e.dst = EthAddr(eth)
    e.type = e.IP_TYPE
    ipp = pkt.ipv4()
    ipp.protocol = ipp.ICMP_PROTOCOL
    ipp.srcip = p.addresses['AF_INET']['addr']
    ipp.dstip = IPAddr(ip)
    icmp = pkt.icmp()
    icmp.type = pkt.ICMP.TYPE_ECHO_REQUEST
    icmp.payload = "PingPing" * 6
    ipp.payload = icmp
    e.payload = ipp
    p.inject(e)
  def broadcast ():
    ping('ff:ff:ff:ff:ff:ff','255.255.255.255')
  import code
  code.interact(local=locals())
def no_select ():
  """Make new PCap instances default to the non-select capture loop."""
  PCap.use_select = False
def do_select ():
  """Make new PCap instances default to the select()-based capture loop."""
  PCap.use_select = True
def interfaces (verbose = False):
  """
  Show interfaces
  """
  # Print a numbered device list (or the full address dump when verbose),
  # then shut POX down -- this component is a one-shot command.
  if not verbose:
    print "\n".join(["%i. %s" % x for x in
                     enumerate(PCap.get_device_names())])
  else:
    import pprint
    # NOTE(review): pprint.pprint() already prints and returns None, so the
    # outer print emits an extra "None" line -- confirm before changing.
    print pprint.pprint(PCap.get_devices())
  from pox.core import core
  core.quit()
def launch (interface, no_incoming=False, no_outgoing=False):
  """
  pxshark -- prints packets
  """
  def cb (obj, data, sec, usec, length):
    # Dump each captured frame as a parsed Ethernet packet.
    p = pkt.ethernet(data)
    print p.dump()
  # "#N" selects the Nth device from the detected device list.
  if interface.startswith("#"):
    interface = int(interface[1:])
    interface = PCap.get_device_names()[interface]
  p = PCap(interface, callback = cb, start=False)
  p.set_direction(not no_incoming, not no_outgoing)
  #p.use_select = False
  p.start()
| mit | 4,501,091,839,821,451,300 | 25.756757 | 84 | 0.603704 | false |
kshedstrom/pyroms | pyroms/pyroms/remapping/sta2z.py | 1 | 2284 | # encoding: utf-8
import numpy as np
import _interp
import pdb
def sta2z(var, grd, grdz, Cpos='rho', srange=None, \
          spval=1e37, mode='linear'):
    """
    varz = sta2z(var, grd, grdz)

    Interpolate the 2D station variable `var` from the stations grid `grd`
    to the z vertical grid `grdz`.

    optional switch:
      - Cpos='rho' or 'w'          vertical grid position
      - srange=(start, stop)       station sub-range to interpolate
      - spval=1e37                 special value used for masking
      - mode='linear' or 'spline'  interpolation type
    """
    import warnings
    var = var.copy()
    assert len(var.shape) == 2, 'var must be 2D'
    if mode == 'linear':
        imode = 0
    elif mode == 'spline':
        imode = 1
    else:
        # Bugfix: the original raised here, which made the advertised
        # "defaulting to linear" unreachable; warn and fall back instead.
        imode = 0
        warnings.warn('%s not supported, defaulting to linear' % mode)
    # Bugfix: compare strings with == rather than identity ("is").
    if Cpos == 'rho':
        z = grd.vgrid.z_r[0,:]
        depth = grdz.vgrid.z
    elif Cpos == 'w':
        z = grd.vgrid.z_w[0,:]
        depth = grdz.vgrid.z
    else:
        # Same message/exception type as before, but with py2/py3
        # compatible call syntax.
        raise Warning('%s unknown position. Cpos must be rho or w.' % Cpos)
    var = var.T
    Nm, Sm = var.shape
    nlev = grdz.vgrid.N
    # putting in a fake i dimension
    var = np.dstack(var).T
    z = np.dstack(z).T
    depth = np.dstack(depth).T
    mask = np.ones((Sm, 1))
    # copy surface value to high in the sky
    var = np.concatenate((var, var[-2:-1,:,:]), 0)
    z = np.concatenate((z, 100*np.ones((1,z.shape[1], z.shape[2]))), 0)
    if srange is None:
        srange = (0, Sm)
    else:
        assert var.shape[1] == srange[1]-srange[0], \
               'var shape and srange must agree'
    # Interpolate level by level onto the target depths.
    varz = np.zeros((nlev, srange[1]-srange[0], 1))
    for k in range(nlev):
        varz[k,:] = _interp.xhslice(var, \
                          z[:, srange[0]:srange[1], :], \
                          depth[k, srange[0]:srange[1], :], \
                          mask[srange[0]:srange[1], :], \
                          imode, spval)
    # Re-mask points where interpolation returned (nearly) the special value.
    idx = np.where(abs((varz-spval)/spval)<=1e-5)
    varz[idx] = spval
    return varz
| bsd-3-clause | 9,219,616,154,942,931,000 | 27.197531 | 75 | 0.532837 | false |
caesar2164/edx-platform | lms/djangoapps/verify_student/views.py | 9 | 61095 | """
Views for the verification flow
"""
import datetime
import decimal
import json
import logging
import urllib
from pytz import UTC
from ipware.ip import get_ip
from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.core.mail import send_mail
from django.core.urlresolvers import reverse
from django.db import transaction
from django.http import HttpResponse, HttpResponseBadRequest, Http404
from django.contrib.auth.models import User
from django.shortcuts import redirect
from django.utils import timezone
from django.utils.decorators import method_decorator
from django.utils.translation import ugettext as _, ugettext_lazy
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.http import require_POST
from django.views.generic.base import View
import analytics
from eventtracking import tracker
from opaque_keys import InvalidKeyError
from opaque_keys.edx.keys import CourseKey, UsageKey
from commerce.utils import EcommerceService
from course_modes.models import CourseMode
from courseware.url_helpers import get_redirect_url
from edx_rest_api_client.exceptions import SlumberBaseException
from edxmako.shortcuts import render_to_response, render_to_string
from openedx.core.djangoapps.embargo import api as embargo_api
from openedx.core.djangoapps.commerce.utils import ecommerce_api_client
from openedx.core.djangoapps.user_api.accounts import NAME_MIN_LENGTH
from openedx.core.djangoapps.user_api.accounts.api import update_account_settings
from openedx.core.djangoapps.user_api.errors import UserNotFound, AccountValidationError
from openedx.core.djangoapps.credit.api import set_credit_requirement_status
from openedx.core.djangoapps.site_configuration import helpers as configuration_helpers
from openedx.core.lib.log_utils import audit_log
from student.models import CourseEnrollment
from shoppingcart.models import Order, CertificateItem
from shoppingcart.processors import (
get_signed_purchase_params, get_purchase_endpoint
)
from lms.djangoapps.verify_student.ssencrypt import has_valid_signature
from lms.djangoapps.verify_student.models import (
VerificationDeadline,
SoftwareSecurePhotoVerification,
VerificationCheckpoint,
VerificationStatus,
IcrvStatusEmailsConfiguration,
)
from lms.djangoapps.verify_student.image import decode_image_data, InvalidImageData
from util.json_request import JsonResponse
from util.date_utils import get_default_time_display
from util.db import outer_atomic
from xmodule.modulestore.django import modulestore
from django.contrib.staticfiles.storage import staticfiles_storage
log = logging.getLogger(__name__)
class PayAndVerifyView(View):
    """
    View for the "verify and pay" flow.
    This view is somewhat complicated, because the user
    can enter it from a number of different places:
    * From the "choose your track" page.
    * After completing payment.
    * From the dashboard in order to complete verification.
    * From the dashboard in order to upgrade to a verified track.
    The page will display different steps and requirements
    depending on:
    * Whether the user has submitted a photo verification recently.
    * Whether the user has paid for the course.
    * How the user reached the page (mostly affects messaging)
    We are also super-paranoid about how users reach this page.
    If they somehow aren't enrolled, or the course doesn't exist,
    or they've unenrolled, or they've already paid/verified,
    ... then we try to redirect them to the page with the
    most appropriate messaging (including the dashboard).
    Note that this page does NOT handle re-verification
    (photo verification that was denied or had an error);
    that is handled by the "reverify" view.
    """
    # Step definitions
    #
    # These represent the numbered steps a user sees in
    # the verify / payment flow.
    #
    # Steps can either be:
    # - displayed or hidden
    # - complete or incomplete
    #
    # For example, when a user enters the verification/payment
    # flow for the first time, the user will see steps
    # for both payment and verification. As the user
    # completes these steps (for example, submitting a photo)
    # the steps will be marked "complete".
    #
    # If a user has already verified for another course,
    # then the verification steps will be hidden,
    # since the user has already completed them.
    #
    # If a user re-enters the flow from another application
    # (for example, after completing payment through
    # a third-party payment processor), then the user
    # will resume the flow at an intermediate step.
    #
    INTRO_STEP = 'intro-step'
    MAKE_PAYMENT_STEP = 'make-payment-step'
    PAYMENT_CONFIRMATION_STEP = 'payment-confirmation-step'
    FACE_PHOTO_STEP = 'face-photo-step'
    ID_PHOTO_STEP = 'id-photo-step'
    REVIEW_PHOTOS_STEP = 'review-photos-step'
    ENROLLMENT_CONFIRMATION_STEP = 'enrollment-confirmation-step'
    # Canonical ordering of every step in the flow.
    ALL_STEPS = [
        INTRO_STEP,
        MAKE_PAYMENT_STEP,
        PAYMENT_CONFIRMATION_STEP,
        FACE_PHOTO_STEP,
        ID_PHOTO_STEP,
        REVIEW_PHOTOS_STEP,
        ENROLLMENT_CONFIRMATION_STEP
    ]
    PAYMENT_STEPS = [
        MAKE_PAYMENT_STEP,
        PAYMENT_CONFIRMATION_STEP
    ]
    VERIFICATION_STEPS = [
        FACE_PHOTO_STEP,
        ID_PHOTO_STEP,
        REVIEW_PHOTOS_STEP,
        ENROLLMENT_CONFIRMATION_STEP
    ]
    # These steps can be skipped using the ?skip-first-step GET param
    SKIP_STEPS = [
        INTRO_STEP,
    ]
    # Human-readable step titles shown in the UI (lazily translated).
    STEP_TITLES = {
        INTRO_STEP: ugettext_lazy("Intro"),
        MAKE_PAYMENT_STEP: ugettext_lazy("Make payment"),
        PAYMENT_CONFIRMATION_STEP: ugettext_lazy("Payment confirmation"),
        FACE_PHOTO_STEP: ugettext_lazy("Take photo"),
        ID_PHOTO_STEP: ugettext_lazy("Take a photo of your ID"),
        REVIEW_PHOTOS_STEP: ugettext_lazy("Review your info"),
        ENROLLMENT_CONFIRMATION_STEP: ugettext_lazy("Enrollment confirmation"),
    }
    # Messages
    #
    # Depending on how the user entered reached the page,
    # we will display different text messaging.
    # For example, we show users who are upgrading
    # slightly different copy than users who are verifying
    # for the first time.
    #
    FIRST_TIME_VERIFY_MSG = 'first-time-verify'
    VERIFY_NOW_MSG = 'verify-now'
    VERIFY_LATER_MSG = 'verify-later'
    UPGRADE_MSG = 'upgrade'
    PAYMENT_CONFIRMATION_MSG = 'payment-confirmation'
    # Requirements
    #
    # These explain to the user what he or she
    # will need to successfully pay and/or verify.
    #
    # These are determined by the steps displayed
    # to the user; for example, if the user does not
    # need to complete the verification steps,
    # then the photo ID and webcam requirements are hidden.
    #
    ACCOUNT_ACTIVATION_REQ = "account-activation-required"
    PHOTO_ID_REQ = "photo-id-required"
    WEBCAM_REQ = "webcam-required"
    # Maps a step to the requirements it implies when displayed.
    STEP_REQUIREMENTS = {
        ID_PHOTO_STEP: [PHOTO_ID_REQ, WEBCAM_REQ],
        FACE_PHOTO_STEP: [WEBCAM_REQ],
    }
    # Deadline types passed to _response_if_deadline_passed().
    VERIFICATION_DEADLINE = "verification"
    UPGRADE_DEADLINE = "upgrade"
    @method_decorator(login_required)
    def get(
        self, request, course_id,
        always_show_payment=False,
        current_step=None,
        message=FIRST_TIME_VERIFY_MSG
    ):
        """
        Render the payment and verification flow.
        Arguments:
            request (HttpRequest): The request object.
            course_id (unicode): The ID of the course the user is trying
                to enroll in.
        Keyword Arguments:
            always_show_payment (bool): If True, show the payment steps
                even if the user has already paid. This is useful
                for users returning to the flow after paying.
            current_step (string): The current step in the flow.
            message (string): The messaging to display.
        Returns:
            HttpResponse
        Raises:
            Http404: The course does not exist or does not
                have a verified mode.
        """
        # Parse the course key
        # The URL regex should guarantee that the key format is valid.
        course_key = CourseKey.from_string(course_id)
        course = modulestore().get_course(course_key)
        # Verify that the course exists
        if course is None:
            log.warn(u"Could not find course with ID %s.", course_id)
            raise Http404
        # Check whether the user has access to this course
        # based on country access rules.
        redirect_url = embargo_api.redirect_if_blocked(
            course_key,
            user=request.user,
            ip_address=get_ip(request),
            url=request.path
        )
        if redirect_url:
            return redirect(redirect_url)
        # If the verification deadline has passed
        # then show the user a message that he/she can't verify.
        #
        # We're making the assumptions (enforced in Django admin) that:
        #
        # 1) Only verified modes have verification deadlines.
        #
        # 2) If set, verification deadlines are always AFTER upgrade deadlines, because why would you
        #    let someone upgrade into a verified track if they can't complete verification?
        #
        verification_deadline = VerificationDeadline.deadline_for_course(course.id)
        response = self._response_if_deadline_passed(course, self.VERIFICATION_DEADLINE, verification_deadline)
        if response is not None:
            log.info(u"Verification deadline for '%s' has passed.", course.id)
            return response
        # Retrieve the relevant course mode for the payment/verification flow.
        #
        # WARNING: this is technical debt! A much better way to do this would be to
        # separate out the payment flow and use the product SKU to figure out what
        # the user is trying to purchase.
        #
        # Nonetheless, for the time being we continue to make the really ugly assumption
        # that at some point there was a paid course mode we can query for the price.
        relevant_course_mode = self._get_paid_mode(course_key)
        # If we can find a relevant course mode, then log that we're entering the flow
        # Otherwise, this course does not support payment/verification, so respond with a 404.
        if relevant_course_mode is not None:
            if CourseMode.is_verified_mode(relevant_course_mode):
                log.info(
                    u"Entering payment and verification flow for user '%s', course '%s', with current step '%s'.",
                    request.user.id, course_id, current_step
                )
            else:
                log.info(
                    u"Entering payment flow for user '%s', course '%s', with current step '%s'",
                    request.user.id, course_id, current_step
                )
        else:
            # Otherwise, there has never been a verified/paid mode,
            # so return a page not found response.
            log.warn(
                u"No paid/verified course mode found for course '%s' for verification/payment flow request",
                course_id
            )
            raise Http404
        # If the user is trying to *pay* and the upgrade deadline has passed,
        # then they shouldn't be able to enter the flow.
        #
        # NOTE: This should match the availability dates used by the E-Commerce service
        # to determine whether a user can purchase a product. The idea is that if the service
        # won't fulfill the order, we shouldn't even let the user get into the payment flow.
        #
        user_is_trying_to_pay = message in [self.FIRST_TIME_VERIFY_MSG, self.UPGRADE_MSG]
        if user_is_trying_to_pay:
            upgrade_deadline = relevant_course_mode.expiration_datetime
            response = self._response_if_deadline_passed(course, self.UPGRADE_DEADLINE, upgrade_deadline)
            if response is not None:
                log.info(u"Upgrade deadline for '%s' has passed.", course.id)
                return response
        # Check whether the user has verified, paid, and enrolled.
        # A user is considered "paid" if he or she has an enrollment
        # with a paid course mode (such as "verified").
        # For this reason, every paid user is enrolled, but not
        # every enrolled user is paid.
        # If the course mode is not verified(i.e only paid) then already_verified is always True
        already_verified = (
            self._check_already_verified(request.user)
            if CourseMode.is_verified_mode(relevant_course_mode)
            else True
        )
        already_paid, is_enrolled = self._check_enrollment(request.user, course_key)
        # Redirect the user to a more appropriate page if the
        # messaging won't make sense based on the user's
        # enrollment / payment / verification status.
        # Bulk purchase (?purchase_workflow=bulk) uses the mode's bulk SKU
        # when one is configured.
        sku_to_use = relevant_course_mode.sku
        purchase_workflow = request.GET.get('purchase_workflow', 'single')
        if purchase_workflow == 'bulk' and relevant_course_mode.bulk_sku:
            sku_to_use = relevant_course_mode.bulk_sku
        redirect_response = self._redirect_if_necessary(
            message,
            already_verified,
            already_paid,
            is_enrolled,
            course_key,
            user_is_trying_to_pay,
            request.user,
            sku_to_use
        )
        if redirect_response is not None:
            return redirect_response
        display_steps = self._display_steps(
            always_show_payment,
            already_verified,
            already_paid,
            relevant_course_mode
        )
        requirements = self._requirements(display_steps, request.user.is_active)
        if current_step is None:
            current_step = display_steps[0]['name']
        # Allow the caller to skip the first page
        # This is useful if we want the user to be able to
        # use the "back" button to return to the previous step.
        # This parameter should only work for known skip-able steps
        if request.GET.get('skip-first-step') and current_step in self.SKIP_STEPS:
            display_step_names = [step['name'] for step in display_steps]
            current_step_idx = display_step_names.index(current_step)
            if (current_step_idx + 1) < len(display_steps):
                current_step = display_steps[current_step_idx + 1]['name']
        # Only link to courseware once the course has actually started.
        courseware_url = ""
        if not course.start or course.start < datetime.datetime.today().replace(tzinfo=UTC):
            courseware_url = reverse(
                'course_root',
                kwargs={'course_id': unicode(course_key)}
            )
        full_name = (
            request.user.profile.name
            if request.user.profile.name
            else ""
        )
        # If the user set a contribution amount on another page,
        # use that amount to pre-fill the price selection form.
        contribution_amount = request.session.get(
            'donation_for_course', {}
        ).get(unicode(course_key), '')
        # Remember whether the user is upgrading
        # so we can fire an analytics event upon payment.
        request.session['attempting_upgrade'] = (message == self.UPGRADE_MSG)
        # Determine the photo verification status
        verification_good_until = self._verification_valid_until(request.user)
        # get available payment processors
        if relevant_course_mode.sku:
            # transaction will be conducted via ecommerce service
            processors = ecommerce_api_client(request.user).payment.processors.get()
        else:
            # transaction will be conducted using legacy shopping cart
            processors = [settings.CC_PROCESSOR_NAME]
        # Render the top-level page
        context = {
            'contribution_amount': contribution_amount,
            'course': course,
            'course_key': unicode(course_key),
            'checkpoint_location': request.GET.get('checkpoint'),
            'course_mode': relevant_course_mode,
            'courseware_url': courseware_url,
            'current_step': current_step,
            'disable_courseware_js': True,
            'display_steps': display_steps,
            'is_active': json.dumps(request.user.is_active),
            'user_email': request.user.email,
            'message_key': message,
            'platform_name': configuration_helpers.get_value('PLATFORM_NAME', settings.PLATFORM_NAME),
            'processors': processors,
            'requirements': requirements,
            'user_full_name': full_name,
            'verification_deadline': verification_deadline or "",
            'already_verified': already_verified,
            'verification_good_until': verification_good_until,
            'capture_sound': staticfiles_storage.url("audio/camera_capture.wav"),
            'nav_hidden': True,
            'is_ab_testing': 'begin-flow' in request.path,
        }
        return render_to_response("verify_student/pay_and_verify.html", context)
    def _redirect_if_necessary(
        self, message, already_verified, already_paid, is_enrolled, course_key, # pylint: disable=bad-continuation
        user_is_trying_to_pay, user, sku # pylint: disable=bad-continuation
    ):
        """Redirect the user to a more appropriate page if necessary.
        In some cases, a user may visit this page with
        verification / enrollment / payment state that
        we don't anticipate. For example, a user may unenroll
        from the course after paying for it, then visit the
        "verify now" page to complete verification.
        When this happens, we try to redirect the user to
        the most appropriate page.
        Arguments:
            message (string): The messaging of the page. Should be a key
                in `MESSAGES`.
            already_verified (bool): Whether the user has submitted
                a verification request recently.
            already_paid (bool): Whether the user is enrolled in a paid
                course mode.
            is_enrolled (bool): Whether the user has an active enrollment
                in the course.
            course_key (CourseKey): The key for the course.
        Returns:
            HttpResponse or None
        """
        url = None
        course_kwargs = {'course_id': unicode(course_key)}
        if already_verified and already_paid:
            # If they've already paid and verified, there's nothing else to do,
            # so redirect them to the dashboard.
            if message != self.PAYMENT_CONFIRMATION_MSG:
                url = reverse('dashboard')
        elif message in [self.VERIFY_NOW_MSG, self.VERIFY_LATER_MSG, self.PAYMENT_CONFIRMATION_MSG]:
            if is_enrolled:
                # If the user is already enrolled but hasn't yet paid,
                # then the "upgrade" messaging is more appropriate.
                if not already_paid:
                    url = reverse('verify_student_upgrade_and_verify', kwargs=course_kwargs)
            else:
                # If the user is NOT enrolled, then send him/her
                # to the first time verification page.
                url = reverse('verify_student_start_flow', kwargs=course_kwargs)
        elif message == self.UPGRADE_MSG:
            if is_enrolled:
                if already_paid:
                    # If the student has paid, but not verified, redirect to the verification flow.
                    url = reverse('verify_student_verify_now', kwargs=course_kwargs)
            else:
                url = reverse('verify_student_start_flow', kwargs=course_kwargs)
        # The e-commerce checkout overrides any URL chosen above when the
        # user is actively trying to pay with an activated account.
        if user_is_trying_to_pay and user.is_active and not already_paid:
            # If the user is trying to pay, has activated their account, and the ecommerce service
            # is enabled redirect him to the ecommerce checkout page.
            ecommerce_service = EcommerceService()
            if ecommerce_service.is_enabled(user):
                url = ecommerce_service.checkout_page_url(sku)
        # Redirect if necessary, otherwise implicitly return None
        if url is not None:
            return redirect(url)
def _get_paid_mode(self, course_key):
    """
    Retrieve the paid course mode for a course.

    The returned course mode may or may not be expired; unexpired
    modes are preferred to expired ones.

    Arguments:
        course_key (CourseKey): The location of the course.

    Returns:
        CourseMode tuple, or None if the course has no paid, non-credit mode.
    """
    # Fetch expired and unexpired modes with a single query.
    all_modes, unexpired_modes = CourseMode.all_and_unexpired_modes_for_courses([course_key])

    def _first_paid_mode(modes):
        # First mode with a positive price that is not a credit mode.
        for candidate in modes:
            if candidate.min_price > 0 and not CourseMode.is_credit_mode(candidate):
                return candidate
        return None

    # Prefer an unexpired paid mode; fall back to an expired one.
    paid_mode = _first_paid_mode(unexpired_modes[course_key])
    if paid_mode is None:
        paid_mode = _first_paid_mode(all_modes[course_key])
    # None tells the view to respond with a 404.
    return paid_mode
def _display_steps(self, always_show_payment, already_verified, already_paid, course_mode):
    """Determine which steps to display to the user.

    Starts from all steps and hides the ones the user has
    already completed.

    Arguments:
        always_show_payment (bool): If True, display the payment steps
            even if the user has already paid.
        already_verified (bool): Whether the user has submitted
            a verification request recently.
        already_paid (bool): Whether the user is enrolled in a paid
            course mode.
        course_mode (CourseMode): The mode the user is purchasing/verifying for.

    Returns:
        list of dicts with keys 'name' and 'title'.
    """
    steps_to_hide = set()

    # Verification steps are unnecessary if already verified, or if the
    # mode doesn't require verification at all.
    if already_verified or not CourseMode.is_verified_mode(course_mode):
        steps_to_hide.update(self.VERIFICATION_STEPS)

    if already_paid and not always_show_payment:
        steps_to_hide.update(self.PAYMENT_STEPS)
    else:
        # The "make payment" step doubles as an intro step,
        # so if we're showing the payment step, hide the intro step.
        steps_to_hide.add(self.INTRO_STEP)

    visible_steps = []
    for step in self.ALL_STEPS:
        if step in steps_to_hide:
            continue
        visible_steps.append({
            'name': step,
            'title': unicode(self.STEP_TITLES[step]),
        })
    return visible_steps
def _requirements(self, display_steps, is_active):
    """Determine which requirements to show the user.

    For example, if the user needs to submit a photo
    verification, tell the user that she will need
    a photo ID and a webcam.

    Arguments:
        display_steps (list): The steps to display to the user.
        is_active (bool): If False, adds a requirement to activate the user account.

    Returns:
        dict: Keys are requirement names, values are booleans
            indicating whether to show the requirement.
    """
    requirements = {
        self.ACCOUNT_ACTIVATION_REQ: not is_active,
        self.PHOTO_ID_REQ: False,
        self.WEBCAM_REQ: False,
    }

    # Enable each requirement implied by a step that is actually shown.
    shown_step_names = set(step['name'] for step in display_steps)
    for step_name, step_requirements in self.STEP_REQUIREMENTS.iteritems():
        if step_name not in shown_step_names:
            continue
        for requirement in step_requirements:
            requirements[requirement] = True

    return requirements
def _verification_valid_until(self, user, date_format="%m/%d/%Y"):
    """
    Check whether the user has a valid or pending verification.

    Arguments:
        user: The user whose verifications to inspect.
        date_format: optional strftime format for the returned date.

    Returns:
        Expiration date of the latest valid/pending photo verification,
        formatted as a string, or '' if the user has none.
    """
    photo_verifications = SoftwareSecurePhotoVerification.verification_valid_or_pending(user)
    if not photo_verifications:
        return ''
    # Latest verification comes first in the queryset.
    return photo_verifications[0].expiration_datetime.strftime(date_format)
def _check_already_verified(self, user):
    """Check whether the user has a valid or pending verification.

    Note that this includes cases in which the user's verification
    has not been accepted (either because it hasn't been processed,
    or there was an error).

    This should return True if the user has done their part:
    submitted photos within the expiration period.

    Arguments:
        user (User): The user to check.

    Returns:
        bool
    """
    return SoftwareSecurePhotoVerification.user_has_valid_or_pending(user)
def _check_enrollment(self, user, course_key):
    """Check whether the user has an active enrollment and has paid.

    If a user is enrolled in a paid course mode, we assume
    that the user has paid.

    Arguments:
        user (User): The user to check.
        course_key (CourseKey): The key of the course to check.

    Returns:
        Tuple `(has_paid, is_active)` indicating whether the user
        has paid and whether the user has an active account.
    """
    enrollment_mode, is_active = CourseEnrollment.enrollment_mode_for_user(user, course_key)

    has_paid = False
    if is_active and enrollment_mode is not None:
        # Include expired modes so a past purchase still counts as paid.
        modes_by_slug = CourseMode.modes_for_course_dict(course_key, include_expired=True)
        enrolled_mode = modes_by_slug.get(enrollment_mode)
        has_paid = (enrolled_mode and enrolled_mode.min_price > 0)

    return (has_paid, bool(is_active))
def _response_if_deadline_passed(self, course, deadline_name, deadline_datetime):
    """
    Respond with some error messaging if the deadline has passed.

    Arguments:
        course (Course): The course the user is trying to enroll in.
        deadline_name (str): One of the deadline constants.
        deadline_datetime (datetime): The deadline.

    Returns:
        HttpResponse if the deadline passed, otherwise None.
    """
    # Unknown deadline names are logged and treated as "not passed".
    if deadline_name not in [self.VERIFICATION_DEADLINE, self.UPGRADE_DEADLINE]:
        log.error("Invalid deadline name %s. Skipping check for whether the deadline passed.", deadline_name)
        return None

    # No deadline configured means nothing can have passed.
    if deadline_datetime is None:
        return None
    if deadline_datetime >= datetime.datetime.now(UTC):
        return None

    context = {
        'course': course,
        'deadline_name': deadline_name,
        'deadline': deadline_datetime
    }
    return render_to_response("verify_student/missed_deadline.html", context)
def checkout_with_ecommerce_service(user, course_key, course_mode, processor):
    """ Create a new basket and trigger immediate checkout, using the E-Commerce API. """
    course_id = unicode(course_key)
    basket_payload = {
        'products': [{'sku': course_mode.sku}],
        'checkout': True,
        'payment_processor_name': processor
    }
    try:
        api_client = ecommerce_api_client(user)
        # Create the order via the E-Commerce API and pass the payment
        # parameters straight through from its response.
        basket = api_client.baskets.post(basket_payload)
        return basket.get('payment_data')
    except SlumberBaseException:
        log_params = {'username': user.username, 'mode': course_mode.slug, 'course_id': course_id}
        log.exception('Failed to create order for %(username)s %(mode)s mode of %(course_id)s', log_params)
        raise
    finally:
        # Audit every checkout attempt, successful or not.
        audit_log(
            'checkout_requested',
            course_id=course_id,
            mode=course_mode.slug,
            processor_name=processor,
            user_id=user.id
        )
def checkout_with_shoppingcart(request, user, course_key, course_mode, amount):
    """ Create an order and trigger checkout using shoppingcart."""
    # Start from a clean cart containing only this certificate item.
    cart = Order.get_cart_for_user(user)
    cart.clear()
    CertificateItem.add_to_order(cart, course_key, amount, course_mode.slug)

    # Change the order's status so that we don't accidentally modify it later.
    # We need to do this to ensure that the parameters we send to the payment system
    # match what we store in the database.
    # (Ordinarily we would do this client-side when the user submits the form, but since
    # the JavaScript on this page does that immediately, we make the change here instead.
    # This avoids a second AJAX call and some additional complication of the JavaScript.)
    # If a user later re-enters the verification / payment flow, she will create a new order.
    cart.start_purchase()

    callback_url = request.build_absolute_uri(
        reverse("shoppingcart.views.postpay_callback")
    )
    signed_params = get_signed_purchase_params(
        cart,
        callback_url=callback_url,
        extra_data=[unicode(course_key), course_mode.slug]
    )
    return {
        'payment_processor_name': settings.CC_PROCESSOR_NAME,
        'payment_page_url': get_purchase_endpoint(),
        'payment_form_data': signed_params,
    }
@require_POST
@login_required
def create_order(request):
    """
    This endpoint is named 'create_order' for backward compatibility, but its
    actual use is to add a single product to the user's cart and request
    immediate checkout.

    POST Parameters:
        course_id (str): Identifier of the course to purchase (required).
        contribution (str): Optional user-selected contribution amount;
            falls back to the session's 'donation_for_course' entry, then 0.
        sku (str): Optional SKU selecting a specific course mode.
        processor (str): Optional payment processor name.

    Returns:
        HttpResponse: JSON-encoded payment data on success,
            HttpResponseBadRequest on validation failure.
    """
    course_id = request.POST['course_id']
    course_id = CourseKey.from_string(course_id)
    donation_for_course = request.session.get('donation_for_course', {})
    contribution = request.POST.get("contribution", donation_for_course.get(unicode(course_id), 0))
    try:
        # Truncate the user-supplied amount to two decimal places.
        amount = decimal.Decimal(contribution).quantize(decimal.Decimal('.01'), rounding=decimal.ROUND_DOWN)
    except decimal.InvalidOperation:
        return HttpResponseBadRequest(_("Selected price is not valid number."))

    current_mode = None
    sku = request.POST.get('sku', None)
    if sku:
        try:
            current_mode = CourseMode.objects.get(sku=sku)
        except CourseMode.DoesNotExist:
            # Fall through to the paid-modes lookup below.
            log.exception(u'Failed to find CourseMode with SKU [%s].', sku)

    if not current_mode:
        # Check if there are more than 1 paid(mode with min_price>0 e.g verified/professional/no-id-professional) modes
        # for course exist then choose the first one
        paid_modes = CourseMode.paid_modes_for_course(course_id)
        if paid_modes:
            if len(paid_modes) > 1:
                log.warn(u"Multiple paid course modes found for course '%s' for create order request", course_id)
            current_mode = paid_modes[0]

    # Make sure this course has a paid mode
    if not current_mode:
        log.warn(u"Create order requested for course '%s' without a paid mode.", course_id)
        return HttpResponseBadRequest(_("This course doesn't support paid certificates"))

    if CourseMode.is_professional_mode(current_mode):
        # Professional modes have a fixed price; ignore any user-chosen amount.
        amount = current_mode.min_price

    if amount < current_mode.min_price:
        return HttpResponseBadRequest(_("No selected price or selected price is below minimum."))

    if current_mode.sku:
        # if request.POST doesn't contain 'processor' then the service's default payment processor will be used.
        payment_data = checkout_with_ecommerce_service(
            request.user,
            course_id,
            current_mode,
            request.POST.get('processor')
        )
    else:
        payment_data = checkout_with_shoppingcart(request, request.user, course_id, current_mode, amount)

    if 'processor' not in request.POST:
        # (XCOM-214) To be removed after release.
        # the absence of this key in the POST payload indicates that the request was initiated from
        # a stale js client, which expects a response containing only the 'payment_form_data' part of
        # the payment data result.
        payment_data = payment_data['payment_form_data']
    return HttpResponse(json.dumps(payment_data), content_type="application/json")
class SubmitPhotosView(View):
    """
    End-point for submitting photos for verification.
    """

    @method_decorator(transaction.non_atomic_requests)
    def dispatch(self, *args, **kwargs):  # pylint: disable=missing-docstring
        return super(SubmitPhotosView, self).dispatch(*args, **kwargs)

    @method_decorator(login_required)
    @method_decorator(outer_atomic(read_committed=True))
    def post(self, request):
        """
        Submit photos for verification.

        This end-point is used for the following cases:
        * Initial verification through the pay-and-verify flow.
        * Initial verification initiated from a checkpoint within a course.
        * Re-verification initiated from a checkpoint within a course.

        POST Parameters:
            face_image (str): base64-encoded image data of the user's face.
            photo_id_image (str): base64-encoded image data of the user's photo ID.
            full_name (str): The user's full name, if the user is requesting a name change as well.
            course_key (str): Identifier for the course, if initiated from a checkpoint.
            checkpoint (str): Location of the checkpoint in the course.

        Returns:
            JsonResponse on success; an error HttpResponse otherwise.
        """
        # If the user already has an initial verification attempt, we can re-use the photo ID
        # the user submitted with the initial attempt. This is useful for the in-course reverification
        # case in which users submit only the face photo and have it matched against their ID photos
        # submitted with the initial verification.
        initial_verification = SoftwareSecurePhotoVerification.get_initial_verification(request.user)

        # Validate the POST parameters
        params, response = self._validate_parameters(request, bool(initial_verification))
        if response is not None:
            return response

        # If necessary, update the user's full name
        if "full_name" in params:
            response = self._update_full_name(request.user, params["full_name"])
            if response is not None:
                return response

        # Retrieve the image data
        # Validation ensures that we'll have a face image, but we may not have
        # a photo ID image if this is a reverification.
        face_image, photo_id_image, response = self._decode_image_data(
            params["face_image"], params.get("photo_id_image")
        )

        # If we have a photo_id we do not want use the initial verification image.
        if photo_id_image is not None:
            initial_verification = None

        if response is not None:
            return response

        # Submit the attempt
        attempt = self._submit_attempt(request.user, face_image, photo_id_image, initial_verification)

        # If this attempt was submitted at a checkpoint, then associate
        # the attempt with the checkpoint.
        submitted_at_checkpoint = "checkpoint" in params and "course_key" in params
        if submitted_at_checkpoint:
            checkpoint = self._associate_attempt_with_checkpoint(
                request.user, attempt,
                params["course_key"],
                params["checkpoint"]
            )

        # If the submission came from an in-course checkpoint
        if initial_verification is not None and submitted_at_checkpoint:
            self._fire_event(request.user, "edx.bi.reverify.submitted", {
                "category": "verification",
                "label": unicode(params["course_key"]),
                "checkpoint": checkpoint.checkpoint_name,
            })

            # Send a URL that the client can redirect to in order
            # to return to the checkpoint in the courseware.
            redirect_url = get_redirect_url(params["course_key"], params["checkpoint"])
            return JsonResponse({"url": redirect_url})

        # Otherwise, the submission came from an initial verification flow.
        else:
            self._fire_event(request.user, "edx.bi.verify.submitted", {"category": "verification"})
            self._send_confirmation_email(request.user)
            redirect_url = None
            return JsonResponse({})

    def _validate_parameters(self, request, has_initial_verification):
        """
        Check that the POST parameters are valid.

        Arguments:
            request (HttpRequest): The request object.
            has_initial_verification (bool): Whether the user has an initial verification attempt.

        Returns:
            Tuple `(params, response)`: `response` is an HttpResponse on
            validation failure (and `params` is None); otherwise `params`
            is the validated parameter dict and `response` is None.
        """
        # Pull out the parameters we care about.
        params = {
            param_name: request.POST[param_name]
            for param_name in [
                "face_image",
                "photo_id_image",
                "course_key",
                "checkpoint",
                "full_name"
            ]
            if param_name in request.POST
        }

        # If the user already has an initial verification attempt, then we don't
        # require the user to submit a photo ID image, since we can re-use the photo ID
        # image from the initial attempt.
        # If we don't have an initial verification OR a photo ID image, something has gone
        # terribly wrong in the JavaScript. Log this as an error so we can track it down.
        if "photo_id_image" not in params and not has_initial_verification:
            log.error(
                (
                    "User %s does not have an initial verification attempt "
                    "and no photo ID image data was provided. "
                    "This most likely means that the JavaScript client is not "
                    "correctly constructing the request to submit photos."
                ), request.user.id
            )
            return None, HttpResponseBadRequest(
                _("Photo ID image is required if the user does not have an initial verification attempt.")
            )

        # The face image is always required.
        if "face_image" not in params:
            msg = _("Missing required parameter face_image")
            return None, HttpResponseBadRequest(msg)

        # If provided, parse the course key and checkpoint location
        if "course_key" in params:
            try:
                params["course_key"] = CourseKey.from_string(params["course_key"])
            except InvalidKeyError:
                return None, HttpResponseBadRequest(_("Invalid course key"))
        if "checkpoint" in params:
            try:
                # Re-scope the usage key to the (already parsed) course key.
                params["checkpoint"] = UsageKey.from_string(params["checkpoint"]).replace(
                    course_key=params["course_key"]
                )
            except InvalidKeyError:
                return None, HttpResponseBadRequest(_("Invalid checkpoint location"))

        return params, None

    def _update_full_name(self, user, full_name):
        """
        Update the user's full name.

        Arguments:
            user (User): The user to update.
            full_name (unicode): The user's updated full name.

        Returns:
            HttpResponse on failure, None on success.
        """
        try:
            update_account_settings(user, {"name": full_name})
        except UserNotFound:
            return HttpResponseBadRequest(_("No profile found for user"))
        except AccountValidationError:
            msg = _(
                "Name must be at least {min_length} characters long."
            ).format(min_length=NAME_MIN_LENGTH)
            return HttpResponseBadRequest(msg)

    def _decode_image_data(self, face_data, photo_id_data=None):
        """
        Decode image data sent with the request.

        Arguments:
            face_data (str): base64-encoded face image data.

        Keyword Arguments:
            photo_id_data (str): base64-encoded photo ID image data.

        Returns:
            tuple of (str, str, HttpResponse): decoded face image, decoded
            photo ID image (or None), and an error response (or None).
        """
        try:
            # Decode face image data (used for both an initial and re-verification)
            face_image = decode_image_data(face_data)

            # Decode the photo ID image data if it's provided
            photo_id_image = (
                decode_image_data(photo_id_data)
                if photo_id_data is not None else None
            )
            return face_image, photo_id_image, None
        except InvalidImageData:
            msg = _("Image data is not valid.")
            return None, None, HttpResponseBadRequest(msg)

    def _submit_attempt(self, user, face_image, photo_id_image=None, initial_verification=None):
        """
        Submit a verification attempt.

        Arguments:
            user (User): The user making the attempt.
            face_image (str): Decoded face image data.

        Keyword Arguments:
            photo_id_image (str or None): Decoded photo ID image data.
            initial_verification (SoftwareSecurePhotoVerification): The initial verification attempt.

        Returns:
            SoftwareSecurePhotoVerification: The submitted attempt.
        """
        attempt = SoftwareSecurePhotoVerification(user=user)

        # We will always have face image data, so upload the face image
        attempt.upload_face_image(face_image)

        # If an ID photo wasn't submitted, re-use the ID photo from the initial attempt.
        # Earlier validation rules ensure that at least one of these is available.
        if photo_id_image is not None:
            attempt.upload_photo_id_image(photo_id_image)
        elif initial_verification is None:
            # Earlier validation should ensure that we never get here.
            log.error(
                "Neither a photo ID image or initial verification attempt provided. "
                "Parameter validation in the view should prevent this from happening!"
            )

        # Submit the attempt
        attempt.mark_ready()
        attempt.submit(copy_id_photo_from=initial_verification)

        return attempt

    def _associate_attempt_with_checkpoint(self, user, attempt, course_key, usage_id):
        """
        Associate the verification attempt with a checkpoint within a course.

        Arguments:
            user (User): The user making the attempt.
            attempt (SoftwareSecurePhotoVerification): The verification attempt.
            course_key (CourseKey): The identifier for the course.
            usage_id (UsageKey): The location of the checkpoint within the course.

        Returns:
            VerificationCheckpoint
        """
        checkpoint = VerificationCheckpoint.get_or_create_verification_checkpoint(course_key, usage_id)
        checkpoint.add_verification_attempt(attempt)
        VerificationStatus.add_verification_status(checkpoint, user, "submitted")
        return checkpoint

    def _send_confirmation_email(self, user):
        """
        Send an email confirming that the user submitted photos
        for initial verification.
        """
        context = {
            'full_name': user.profile.name,
            'platform_name': configuration_helpers.get_value("PLATFORM_NAME", settings.PLATFORM_NAME)
        }

        subject = _("Verification photos received")
        message = render_to_string('emails/photo_submission_confirmation.txt', context)
        from_address = configuration_helpers.get_value('email_from_address', settings.DEFAULT_FROM_EMAIL)
        to_address = user.email

        try:
            send_mail(subject, message, from_address, [to_address], fail_silently=False)
        except:  # pylint: disable=bare-except
            # We catch all exceptions and log them.
            # It would be much, much worse to roll back the transaction due to an uncaught
            # exception than to skip sending the notification email.
            log.exception("Could not send notification email for initial verification for user %s", user.id)

    def _fire_event(self, user, event_name, parameters):
        """
        Fire an analytics event.

        Arguments:
            user (User): The user who submitted photos.
            event_name (str): Name of the analytics event.
            parameters (dict): Event parameters.

        Returns: None
        """
        if settings.LMS_SEGMENT_KEY:
            tracking_context = tracker.get_tracker().resolve_context()
            context = {
                'ip': tracking_context.get('ip'),
                'Google Analytics': {
                    'clientId': tracking_context.get('client_id')
                }
            }
            analytics.track(user.id, event_name, parameters, context=context)
def _compose_message_reverification_email(
        course_key, user_id, related_assessment_location, status, request
):  # pylint: disable=invalid-name
    """
    Compose subject and message for photo reverification email.

    Args:
        course_key (CourseKey): CourseKey object
        user_id (str): User Id
        related_assessment_location (str): Location of reverification XBlock
        status (str): Approval status
        request (HttpRequest): Used to build absolute URIs for the email links

    Returns:
        Tuple of (subject, message) strings, or None (implicitly) if any
        error occurred while composing. Callers MUST handle the None case.
    """
    try:
        usage_key = UsageKey.from_string(related_assessment_location)
        reverification_block = modulestore().get_item(usage_key)

        course = modulestore().get_course(course_key)
        redirect_url = get_redirect_url(course_key, usage_key.replace(course_key=course_key))

        subject = "Re-verification Status"
        context = {
            "status": status,
            "course_name": course.display_name_with_default_escaped,
            "assessment": reverification_block.related_assessment
        }

        # Allowed attempts is 1 if not set on verification block
        allowed_attempts = reverification_block.attempts + 1
        used_attempts = VerificationStatus.get_user_attempts(user_id, course_key, related_assessment_location)
        left_attempts = allowed_attempts - used_attempts
        is_attempt_allowed = left_attempts > 0

        # Verification stays open until the block's due date (if any).
        verification_open = True
        if reverification_block.due:
            verification_open = timezone.now() <= reverification_block.due

        context["left_attempts"] = left_attempts
        context["is_attempt_allowed"] = is_attempt_allowed
        context["verification_open"] = verification_open
        context["due_date"] = get_default_time_display(reverification_block.due)
        context['platform_name'] = configuration_helpers.get_value('PLATFORM_NAME', settings.PLATFORM_NAME)
        context["used_attempts"] = used_attempts
        context["allowed_attempts"] = allowed_attempts
        context["support_link"] = configuration_helpers.get_value('email_from_address', settings.CONTACT_EMAIL)

        re_verification_link = reverse(
            'verify_student_incourse_reverify',
            args=(
                unicode(course_key),
                related_assessment_location
            )
        )
        context["course_link"] = request.build_absolute_uri(redirect_url)
        context["reverify_link"] = request.build_absolute_uri(re_verification_link)

        message = render_to_string('emails/reverification_processed.txt', context)
        log.info(
            "Sending email to User_Id=%s. Attempts left for this user are %s. "
            "Allowed attempts %s. "
            "Due Date %s",
            str(user_id), left_attempts, allowed_attempts, str(reverification_block.due)
        )
        return subject, message
    # Catch all exception to avoid raising back to view
    except:  # pylint: disable=bare-except
        # NOTE: falls through and implicitly returns None here.
        log.exception("The email for re-verification sending failed for user_id %s", user_id)
def _send_email(user_id, subject, message):
    """ Send email to given user

    Args:
        user_id (str): User Id
        subject (str): Subject lines of emails
        message (str): Email message body

    Returns:
        None
    """
    # Site configuration may override the platform-wide sender address.
    sender_address = configuration_helpers.get_value(
        'email_from_address',
        settings.DEFAULT_FROM_EMAIL
    )
    recipient = User.objects.get(id=user_id)
    recipient.email_user(subject, message, sender_address)
def _set_user_requirement_status(attempt, namespace, status, reason=None):
    """Sets the status of a credit requirement for the user,
    based on a verification checkpoint.

    Failures are logged and swallowed (best-effort).
    """
    try:
        checkpoint = VerificationCheckpoint.objects.get(photo_verification=attempt)
    except VerificationCheckpoint.DoesNotExist:
        log.error("Unable to find checkpoint for user with id %d", attempt.user.id)
        return

    try:
        set_credit_requirement_status(
            attempt.user,
            checkpoint.course_id,
            namespace,
            checkpoint.checkpoint_location,
            status=status,
            reason=reason,
        )
    except Exception:  # pylint: disable=broad-except
        # Catch exception if unable to add credit requirement
        # status for user
        log.error("Unable to add Credit requirement status for user with id %d", attempt.user.id)
@require_POST
@csrf_exempt  # SS does its own message signing, and their API won't have a cookie value
def results_callback(request):
    """
    Software Secure will call this callback to tell us whether a user is
    verified to be who they said they are.

    Returns:
        HttpResponse "OK!" on success; HttpResponseBadRequest on
        malformed or unrecognized payloads.
    """
    body = request.body

    try:
        body_dict = json.loads(body)
    except ValueError:
        log.exception("Invalid JSON received from Software Secure:\n\n{}\n".format(body))
        return HttpResponseBadRequest("Invalid JSON. Received:\n\n{}".format(body))

    if not isinstance(body_dict, dict):
        log.error("Reply from Software Secure is not a dict:\n\n{}\n".format(body))
        return HttpResponseBadRequest("JSON should be dict. Received:\n\n{}".format(body))

    headers = {
        "Authorization": request.META.get("HTTP_AUTHORIZATION", ""),
        "Date": request.META.get("HTTP_DATE", "")
    }

    # NOTE: the signature check result is currently ignored (see below).
    has_valid_signature(
        "POST",
        headers,
        body_dict,
        settings.VERIFY_STUDENT["SOFTWARE_SECURE"]["API_ACCESS_KEY"],
        settings.VERIFY_STUDENT["SOFTWARE_SECURE"]["API_SECRET_KEY"]
    )

    _response, access_key_and_sig = headers["Authorization"].split(" ")
    access_key = access_key_and_sig.split(":")[0]

    # This is what we should be doing...
    #if not sig_valid:
    #    return HttpResponseBadRequest("Signature is invalid")

    # This is what we're doing until we can figure out why we disagree on sigs
    if access_key != settings.VERIFY_STUDENT["SOFTWARE_SECURE"]["API_ACCESS_KEY"]:
        return HttpResponseBadRequest("Access key invalid")

    receipt_id = body_dict.get("EdX-ID")
    result = body_dict.get("Result")
    reason = body_dict.get("Reason", "")
    error_code = body_dict.get("MessageType", "")

    try:
        attempt = SoftwareSecurePhotoVerification.objects.get(receipt_id=receipt_id)
    except SoftwareSecurePhotoVerification.DoesNotExist:
        log.error("Software Secure posted back for receipt_id %s, but not found", receipt_id)
        return HttpResponseBadRequest("edX ID {} not found".format(receipt_id))

    # Map the Software Secure result onto our attempt state and the
    # corresponding credit-requirement status.
    if result == "PASS":
        log.debug("Approving verification for %s", receipt_id)
        attempt.approve()
        status = "approved"
        _set_user_requirement_status(attempt, 'reverification', 'satisfied')
    elif result == "FAIL":
        log.debug("Denying verification for %s", receipt_id)
        attempt.deny(json.dumps(reason), error_code=error_code)
        status = "denied"
        _set_user_requirement_status(
            attempt, 'reverification', 'failed', json.dumps(reason)
        )
    elif result == "SYSTEM FAIL":
        log.debug("System failure for %s -- resetting to must_retry", receipt_id)
        attempt.system_error(json.dumps(reason), error_code=error_code)
        status = "error"
        log.error("Software Secure callback attempt for %s failed: %s", receipt_id, reason)
    else:
        log.error("Software Secure returned unknown result %s", result)
        return HttpResponseBadRequest(
            "Result {} not understood. Known results: PASS, FAIL, SYSTEM FAIL".format(result)
        )

    checkpoints = VerificationCheckpoint.objects.filter(photo_verification=attempt).all()
    VerificationStatus.add_status_from_checkpoints(checkpoints=checkpoints, user=attempt.user, status=status)

    # Trigger ICRV email only if ICRV status emails config is enabled
    icrv_status_emails = IcrvStatusEmailsConfiguration.current()
    if icrv_status_emails.enabled and checkpoints:
        user_id = attempt.user.id
        course_key = checkpoints[0].course_id
        related_assessment_location = checkpoints[0].checkpoint_location

        # BUG FIX: _compose_message_reverification_email returns None when it
        # fails internally (it swallows its own exceptions). Unpacking None
        # here used to raise TypeError, turning an already-processed callback
        # into a 500 response to Software Secure. Guard before unpacking.
        email_response = _compose_message_reverification_email(
            course_key, user_id, related_assessment_location, status, request
        )
        if email_response is not None:
            subject, message = email_response
            _send_email(user_id, subject, message)

    return HttpResponse("OK!")
class ReverifyView(View):
    """
    Reverification occurs when a user's initial verification is denied
    or expires. When this happens, users can re-submit photos through
    the re-verification flow.

    Unlike in-course reverification, this flow requires users to submit
    *both* face and ID photos. In contrast, during in-course reverification,
    students submit only face photos, which are matched against the ID photo
    the user submitted during initial verification.
    """

    @method_decorator(login_required)
    def get(self, request):
        """
        Render the reverification flow.

        Most of the work is done client-side by composing the same
        Backbone views used in the initial verification flow.
        """
        status, _ = SoftwareSecurePhotoVerification.user_status(request.user)

        expiration_datetime = SoftwareSecurePhotoVerification.get_expiration_datetime(request.user)
        can_reverify = False
        if expiration_datetime:
            if SoftwareSecurePhotoVerification.is_verification_expiring_soon(expiration_datetime):
                # The user has an active verification, but the verification
                # is set to expire within "EXPIRING_SOON_WINDOW" days (default is 4 weeks).
                # In this case user can resubmit photos for reverification.
                can_reverify = True

        # If the user has no initial verification or if the verification
        # process is still ongoing 'pending' or expired then allow the user to
        # submit the photo verification.
        # A photo verification is marked as 'pending' if its status is either
        # 'submitted' or 'must_retry'.
        if status in ["none", "must_reverify", "expired", "pending"] or can_reverify:
            context = {
                "user_full_name": request.user.profile.name,
                "platform_name": configuration_helpers.get_value('PLATFORM_NAME', settings.PLATFORM_NAME),
                "capture_sound": staticfiles_storage.url("audio/camera_capture.wav"),
            }
            return render_to_response("verify_student/reverify.html", context)
        else:
            # Reverification not currently allowed; explain why.
            context = {
                "status": status
            }
            return render_to_response("verify_student/reverify_not_allowed.html", context)
class InCourseReverifyView(View):
"""
The in-course reverification view.
In-course reverification occurs while a student is taking a course.
At points in the course, students are prompted to submit face photos,
which are matched against the ID photos the user submitted during their
initial verification.
Students are prompted to enter this flow from an "In Course Reverification"
XBlock (courseware component) that course authors add to the course.
See https://github.com/edx/edx-reverification-block for more details.
"""
@method_decorator(login_required)
def get(self, request, course_id, usage_id):
"""Display the view for face photo submission.
Args:
request(HttpRequest): HttpRequest object
course_id(str): A string of course id
usage_id(str): Location of Reverification XBlock in courseware
Returns:
HttpResponse
"""
user = request.user
course_key = CourseKey.from_string(course_id)
course = modulestore().get_course(course_key)
if course is None:
log.error(u"Could not find course '%s' for in-course reverification.", course_key)
raise Http404
try:
checkpoint = VerificationCheckpoint.objects.get(course_id=course_key, checkpoint_location=usage_id)
except VerificationCheckpoint.DoesNotExist:
log.error(
u"No verification checkpoint exists for the "
u"course '%s' and checkpoint location '%s'.",
course_key, usage_id
)
raise Http404
initial_verification = SoftwareSecurePhotoVerification.get_initial_verification(user)
if not initial_verification:
return self._redirect_to_initial_verification(user, course_key, usage_id)
# emit the reverification event
self._track_reverification_events('edx.bi.reverify.started', user.id, course_id, checkpoint.checkpoint_name)
context = {
'course_key': unicode(course_key),
'course_name': course.display_name_with_default_escaped,
'checkpoint_name': checkpoint.checkpoint_name,
'platform_name': configuration_helpers.get_value('PLATFORM_NAME', settings.PLATFORM_NAME),
'usage_id': usage_id,
'capture_sound': staticfiles_storage.url("audio/camera_capture.wav"),
}
return render_to_response("verify_student/incourse_reverify.html", context)
def _track_reverification_events(self, event_name, user_id, course_id, checkpoint):
"""Track re-verification events for a user against a reverification
checkpoint of a course.
Arguments:
event_name (str): Name of event being tracked
user_id (str): The ID of the user
course_id (unicode): ID associated with the course
checkpoint (str): Checkpoint name
Returns:
None
"""
log.info(
u"In-course reverification: event %s occurred for user '%s' in course '%s' at checkpoint '%s'",
event_name, user_id, course_id, checkpoint
)
if settings.LMS_SEGMENT_KEY:
tracking_context = tracker.get_tracker().resolve_context()
analytics.track(
user_id,
event_name,
{
'category': "verification",
'label': unicode(course_id),
'checkpoint': checkpoint
},
context={
'ip': tracking_context.get('ip'),
'Google Analytics': {
'clientId': tracking_context.get('client_id')
}
}
)
def _redirect_to_initial_verification(self, user, course_key, checkpoint):
    """Redirect because the user does not have an initial verification.

    We will redirect the user to the initial verification flow,
    passing the identifier for this checkpoint. When the user
    submits a verification attempt, it will count for *both*
    the initial and checkpoint verification.

    Arguments:
        user (User): The user who made the request.
        course_key (CourseKey): The identifier for the course for which
            the user is attempting to re-verify.
        checkpoint (string): Location of the checkpoint in the courseware.

    Returns:
        HttpResponse
    """
    log.info(
        u"User %s does not have an initial verification, so "
        u"he/she will be redirected to the \"verify later\" flow "
        u"for the course %s.",
        user.id, course_key
    )
    # Carry the checkpoint location through as a query parameter so one
    # verification attempt satisfies both the initial and checkpoint checks.
    base_url = reverse('verify_student_verify_now', kwargs={'course_id': unicode(course_key)})
    query_string = urllib.urlencode({"checkpoint": checkpoint})
    return redirect(u"{base}?{params}".format(base=base_url, params=query_string))
| agpl-3.0 | 5,973,419,883,898,226,000 | 39.009823 | 119 | 0.633538 | false |
wooga/airflow | tests/providers/papermill/operators/test_papermill.py | 5 | 1718 | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import unittest
from unittest.mock import patch
from airflow.providers.papermill.operators.papermill import PapermillOperator
class TestPapermillOperator(unittest.TestCase):
    """Unit tests for PapermillOperator."""

    @patch('airflow.providers.papermill.operators.papermill.pm')
    def test_execute(self, mock_papermill):
        """Executing the operator should delegate to papermill unchanged."""
        input_path = "/tmp/does_not_exist"
        output_path = "/tmp/will_not_exist"
        notebook_params = {"msg": "hello_world",
                           "train": 1}

        operator = PapermillOperator(
            input_nb=input_path,
            output_nb=output_path,
            parameters=notebook_params,
            task_id="papermill_operator_test",
            dag=None,
        )

        operator.pre_execute(context={})  # make sure to have the inlets
        operator.execute(context={})

        mock_papermill.execute_notebook.assert_called_once_with(
            input_path,
            output_path,
            parameters=notebook_params,
            progress_bar=False,
            report_mode=True,
        )
| apache-2.0 | -3,563,179,610,061,426,700 | 35.553191 | 77 | 0.681607 | false |
sarvex/tensorflow | tensorflow/python/keras/optimizer_v2/rmsprop.py | 6 | 12979 | # Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""RMSprop optimizer implementation."""
# pylint: disable=g-classes-have-attributes
import numpy as np
from tensorflow.python.framework import ops
from tensorflow.python.keras import backend_config
from tensorflow.python.keras.optimizer_v2 import optimizer_v2
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import state_ops
from tensorflow.python.training import gen_training_ops
from tensorflow.python.util.tf_export import keras_export
@keras_export("keras.optimizers.RMSprop")
class RMSprop(optimizer_v2.OptimizerV2):
  r"""Optimizer that implements the RMSprop algorithm.

  The gist of RMSprop is to:

  - Maintain a moving (discounted) average of the square of gradients
  - Divide the gradient by the root of this average

  This implementation of RMSprop uses plain momentum, not Nesterov momentum.

  The centered version additionally maintains a moving average of the
  gradients, and uses that average to estimate the variance.

  Args:
    learning_rate: A `Tensor`, floating point value, or a schedule that is a
      `tf.keras.optimizers.schedules.LearningRateSchedule`, or a callable
      that takes no arguments and returns the actual value to use. The
      learning rate. Defaults to 0.001.
    rho: Discounting factor for the history/coming gradient. Defaults to 0.9.
    momentum: A scalar or a scalar `Tensor`. Defaults to 0.0.
    epsilon: A small constant for numerical stability. This epsilon is
      "epsilon hat" in the Kingma and Ba paper (in the formula just before
      Section 2.1), not the epsilon in Algorithm 1 of the paper. Defaults to
      1e-7.
    centered: Boolean. If `True`, gradients are normalized by the estimated
      variance of the gradient; if False, by the uncentered second moment.
      Setting this to `True` may help with training, but is slightly more
      expensive in terms of computation and memory. Defaults to `False`.
    name: Optional name prefix for the operations created when applying
      gradients. Defaults to `"RMSprop"`.
    **kwargs: Keyword arguments. Allowed to be one of
      `"clipnorm"` or `"clipvalue"`.
      `"clipnorm"` (float) clips gradients by norm; `"clipvalue"` (float) clips
      gradients by value.

  Note that in the dense implementation of this algorithm, variables and their
  corresponding accumulators (momentum, gradient moving average, square
  gradient moving average) will be updated even if the gradient is zero
  (i.e. accumulators will decay, momentum will be applied). The sparse
  implementation (used when the gradient is an `IndexedSlices` object,
  typically because of `tf.gather` or an embedding lookup in the forward pass)
  will not update variable slices or their accumulators unless those slices
  were used in the forward pass (nor is there an "eventual" correction to
  account for these omitted updates). This leads to more efficient updates for
  large embedding lookup tables (where most of the slices are not accessed in
  a particular graph execution), but differs from the published algorithm.

  Usage:

  >>> opt = tf.keras.optimizers.RMSprop(learning_rate=0.1)
  >>> var1 = tf.Variable(10.0)
  >>> loss = lambda: (var1 ** 2) / 2.0    # d(loss) / d(var1) = var1
  >>> step_count = opt.minimize(loss, [var1]).numpy()
  >>> var1.numpy()
  9.683772

  Reference:
    - [Hinton, 2012](
      http://www.cs.toronto.edu/~tijmen/csc321/slides/lecture_slides_lec6.pdf)
  """

  _HAS_AGGREGATE_GRAD = True

  def __init__(self,
               learning_rate=0.001,
               rho=0.9,
               momentum=0.0,
               epsilon=1e-7,
               centered=False,
               name="RMSprop",
               **kwargs):
    """Construct a new RMSprop optimizer.

    Args:
      learning_rate: A `Tensor`, floating point value, or a schedule that is a
        `tf.keras.optimizers.schedules.LearningRateSchedule`, or a callable
        that takes no arguments and returns the actual value to use. The
        learning rate. Defaults to 0.001.
      rho: Discounting factor for the history/coming gradient. Defaults to 0.9.
      momentum: A scalar or a scalar `Tensor`. Defaults to 0.0.
      epsilon: A small constant for numerical stability. This epsilon is
        "epsilon hat" in the Kingma and Ba paper (in the formula just before
        Section 2.1), not the epsilon in Algorithm 1 of the paper. Defaults to
        1e-7.
      centered: Boolean. If `True`, gradients are normalized by the estimated
        variance of the gradient; if False, by the uncentered second moment.
        Setting this to `True` may help with training, but is slightly more
        expensive in terms of computation and memory. Defaults to `False`.
      name: Optional name prefix for the operations created when applying
        gradients. Defaults to "RMSprop".
      **kwargs: keyword arguments. Allowed to be {`clipnorm`, `clipvalue`, `lr`,
        `decay`}. `clipnorm` is clip gradients by norm; `clipvalue` is clip
        gradients by value, `decay` is included for backward compatibility to
        allow time inverse decay of learning rate. `lr` is included for backward
        compatibility, recommended to use `learning_rate` instead.

    @compatibility(eager)
    When eager execution is enabled, `learning_rate`, `decay`, `momentum`, and
    `epsilon` can each be a callable that takes no arguments and returns the
    actual value to use. This can be useful for changing these values across
    different invocations of optimizer functions.
    @end_compatibility
    """
    super(RMSprop, self).__init__(name, **kwargs)
    # Legacy `lr` kwarg takes precedence over `learning_rate` if supplied.
    self._set_hyper("learning_rate", kwargs.get("lr", learning_rate))
    self._set_hyper("decay", self._initial_decay)
    self._set_hyper("rho", rho)

    self._momentum = False
    if isinstance(momentum, ops.Tensor) or callable(momentum) or momentum > 0:
      self._momentum = True
    # Only plain numeric momentum can be range-checked eagerly here.
    if isinstance(momentum, (int, float)) and (momentum < 0 or momentum > 1):
      raise ValueError("`momentum` must be between [0, 1].")
    self._set_hyper("momentum", momentum)

    self.epsilon = epsilon or backend_config.epsilon()
    self.centered = centered

  def _create_slots(self, var_list):
    # "rms": moving average of squared gradients, one slot per variable.
    for var in var_list:
      self.add_slot(var, "rms")
    if self._momentum:
      # "momentum": velocity accumulator, only when momentum is enabled.
      for var in var_list:
        self.add_slot(var, "momentum")
    if self.centered:
      # "mg": moving average of (un-squared) gradients for the centered variant.
      for var in var_list:
        self.add_slot(var, "mg")

  def _prepare_local(self, var_device, var_dtype, apply_state):
    """Cache per-(device, dtype) constants reused by every apply op."""
    super(RMSprop, self)._prepare_local(var_device, var_dtype, apply_state)

    rho = array_ops.identity(self._get_hyper("rho", var_dtype))
    apply_state[(var_device, var_dtype)].update(
        dict(
            neg_lr_t=-apply_state[(var_device, var_dtype)]["lr_t"],
            epsilon=ops.convert_to_tensor_v2_with_dispatch(
                self.epsilon, var_dtype),
            rho=rho,
            momentum=array_ops.identity(self._get_hyper("momentum", var_dtype)),
            one_minus_rho=1. - rho))

  def _resource_apply_dense(self, grad, var, apply_state=None):
    """Apply a dense gradient update to `var`."""
    var_device, var_dtype = var.device, var.dtype.base_dtype
    coefficients = ((apply_state or {}).get((var_device, var_dtype))
                    or self._fallback_apply_state(var_device, var_dtype))

    rms = self.get_slot(var, "rms")
    if self._momentum:
      # Momentum variants are handled by fused C++ kernels.
      mom = self.get_slot(var, "momentum")
      if self.centered:
        mg = self.get_slot(var, "mg")
        return gen_training_ops.ResourceApplyCenteredRMSProp(
            var=var.handle,
            mg=mg.handle,
            ms=rms.handle,
            mom=mom.handle,
            lr=coefficients["lr_t"],
            rho=coefficients["rho"],
            momentum=coefficients["momentum"],
            epsilon=coefficients["epsilon"],
            grad=grad,
            use_locking=self._use_locking)
      else:
        return gen_training_ops.ResourceApplyRMSProp(
            var=var.handle,
            ms=rms.handle,
            mom=mom.handle,
            lr=coefficients["lr_t"],
            rho=coefficients["rho"],
            momentum=coefficients["momentum"],
            epsilon=coefficients["epsilon"],
            grad=grad,
            use_locking=self._use_locking)
    else:
      # No momentum: compose the update manually.
      # rms_t = rho * rms + (1 - rho) * grad^2
      rms_t = (coefficients["rho"] * rms +
               coefficients["one_minus_rho"] * math_ops.square(grad))
      rms_t = state_ops.assign(rms, rms_t, use_locking=self._use_locking)
      denom_t = rms_t
      if self.centered:
        # Centered variant subtracts the squared mean gradient from the
        # denominator (estimates the variance rather than the second moment).
        mg = self.get_slot(var, "mg")
        mg_t = coefficients["rho"] * mg + coefficients["one_minus_rho"] * grad
        mg_t = state_ops.assign(mg, mg_t, use_locking=self._use_locking)
        denom_t = rms_t - math_ops.square(mg_t)
      var_t = var - coefficients["lr_t"] * grad / (
          math_ops.sqrt(denom_t) + coefficients["epsilon"])
      return state_ops.assign(var, var_t, use_locking=self._use_locking).op

  def _resource_apply_sparse(self, grad, var, indices, apply_state=None):
    """Apply a sparse gradient update; only touched slices are updated."""
    var_device, var_dtype = var.device, var.dtype.base_dtype
    coefficients = ((apply_state or {}).get((var_device, var_dtype))
                    or self._fallback_apply_state(var_device, var_dtype))

    rms = self.get_slot(var, "rms")
    if self._momentum:
      # Momentum variants are handled by fused sparse C++ kernels.
      mom = self.get_slot(var, "momentum")
      if self.centered:
        mg = self.get_slot(var, "mg")
        return gen_training_ops.ResourceSparseApplyCenteredRMSProp(
            var=var.handle,
            mg=mg.handle,
            ms=rms.handle,
            mom=mom.handle,
            lr=coefficients["lr_t"],
            rho=coefficients["rho"],
            momentum=coefficients["momentum"],
            epsilon=coefficients["epsilon"],
            grad=grad,
            indices=indices,
            use_locking=self._use_locking)
      else:
        return gen_training_ops.ResourceSparseApplyRMSProp(
            var=var.handle,
            ms=rms.handle,
            mom=mom.handle,
            lr=coefficients["lr_t"],
            rho=coefficients["rho"],
            momentum=coefficients["momentum"],
            epsilon=coefficients["epsilon"],
            grad=grad,
            indices=indices,
            use_locking=self._use_locking)
    else:
      # Manual sparse update: decay the whole accumulator, then scatter-add
      # the new squared-gradient contribution into the touched rows only.
      rms_scaled_g_values = (grad * grad) * coefficients["one_minus_rho"]
      rms_t = state_ops.assign(rms, rms * coefficients["rho"],
                               use_locking=self._use_locking)
      with ops.control_dependencies([rms_t]):
        rms_t = self._resource_scatter_add(rms, indices, rms_scaled_g_values)
        rms_slice = array_ops.gather(rms_t, indices)
      denom_slice = rms_slice
      if self.centered:
        mg = self.get_slot(var, "mg")
        mg_scaled_g_values = grad * coefficients["one_minus_rho"]
        mg_t = state_ops.assign(mg, mg * coefficients["rho"],
                                use_locking=self._use_locking)
        with ops.control_dependencies([mg_t]):
          mg_t = self._resource_scatter_add(mg, indices, mg_scaled_g_values)
          mg_slice = array_ops.gather(mg_t, indices)
          denom_slice = rms_slice - math_ops.square(mg_slice)
      var_update = self._resource_scatter_add(
          var, indices, coefficients["neg_lr_t"] * grad / (
              math_ops.sqrt(denom_slice) + coefficients["epsilon"]))
      if self.centered:
        return control_flow_ops.group(*[var_update, rms_t, mg_t])
      return control_flow_ops.group(*[var_update, rms_t])

  def set_weights(self, weights):
    params = self.weights
    # Override set_weights for backward compatibility of Keras V1 optimizer
    # since it does not include iteration at head of the weight list. Set
    # iteration to 0.
    if len(params) == len(weights) + 1:
      weights = [np.array(0)] + weights
    super(RMSprop, self).set_weights(weights)

  def get_config(self):
    """Return the serializable configuration of this optimizer."""
    config = super(RMSprop, self).get_config()
    config.update({
        "learning_rate": self._serialize_hyperparameter("learning_rate"),
        "decay": self._initial_decay,
        "rho": self._serialize_hyperparameter("rho"),
        "momentum": self._serialize_hyperparameter("momentum"),
        "epsilon": self.epsilon,
        "centered": self.centered,
    })
    return config
# Backwards-compatible alias for the older capitalization of the class name.
RMSProp = RMSprop
| apache-2.0 | -828,736,742,712,696,600 | 42.408027 | 80 | 0.648509 | false |
adrienverge/yamllint | tests/test_linter.py | 1 | 2051 | # -*- coding: utf-8 -*-
# Copyright (C) 2016 Adrien Vergé
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import io
import unittest
from yamllint.config import YamlLintConfig
from yamllint import linter
class LinterTestCase(unittest.TestCase):
    """Smoke tests for the public linter.run() entry point."""

    def fake_config(self):
        return YamlLintConfig('extends: default')

    def test_run_on_string(self):
        linter.run('test: document', self.fake_config())

    def test_run_on_bytes(self):
        linter.run(b'test: document', self.fake_config())

    def test_run_on_unicode(self):
        linter.run(u'test: document', self.fake_config())

    def test_run_on_stream(self):
        linter.run(io.StringIO(u'hello'), self.fake_config())

    def test_run_on_int(self):
        with self.assertRaises(TypeError):
            linter.run(42, self.fake_config())

    def test_run_on_list(self):
        with self.assertRaises(TypeError):
            linter.run(['h', 'e', 'l', 'l', 'o'], self.fake_config())

    def test_run_on_non_ascii_chars(self):
        # Latin-1-representable text: accepted as unicode and in two encodings.
        latin_doc = (u'- hétérogénéité\n'
                     u'# 19.99 €\n')
        linter.run(latin_doc, self.fake_config())
        linter.run(latin_doc.encode('utf-8'), self.fake_config())
        linter.run(latin_doc.encode('iso-8859-15'), self.fake_config())

        # Non-Latin scripts: accepted as unicode and as UTF-8 bytes.
        wide_doc = (u'- お早う御座います。\n'
                    u'# الأَبْجَدِيَّة العَرَبِيَّة\n')
        linter.run(wide_doc, self.fake_config())
        linter.run(wide_doc.encode('utf-8'), self.fake_config())
| gpl-3.0 | 5,439,690,419,335,600,000 | 34.070175 | 72 | 0.650825 | false |
hasteur/edit_request_aging | families/krefeldwiki_family.py | 10 | 1630 | # -*- coding: utf-8 -*-
import family
# The city wiki of Krefeld, Germany, Europe.
class Family(family.Family):
    """Family file for the city wiki of Krefeld, Germany, Europe."""

    def __init__(self):
        family.Family.__init__(self)
        self.name = 'krefeldwiki'
        self.langs = {
            'de': 'krefeldwiki.de',
        }

        # Wiki-specific namespaces, keyed by MediaWiki namespace number.
        custom_namespaces = {
            4: {'_default': u'Krefeld Wiki'},
            5: {'_default': u'Krefeld Wiki Diskussion'},
            6: {'_default': u'Bild'},
            7: {'_default': u'Bild Diskussion'},
            106: {'_default': [u'Formular', 'Form']},
            107: {'_default': [u'Formular Diskussion', 'Form talk']},
            110: {'_default': [u'Relation', 'Relation']},
            111: {'_default': [u'Relation Diskussion', 'Relation talk']},
            112: {'_default': [u'Attribut', 'Attribut']},
            113: {'_default': [u'Attribut Diskussion', 'Attribut talk']},
            114: {'_default': [u'Datentyp', 'Data type']},
            115: {'_default': [u'Datentyp Diskussion', 'Data type talk']},
        }
        for number, names in custom_namespaces.items():
            self.namespaces[number] = names

    def version(self, code):
        return "1.12alpha"

    def scriptpath(self, code):
        return '/w'

    def path(self, code):
        return '%s/index.php5' % self.scriptpath(code)
| gpl-3.0 | 4,674,809,516,235,590,000 | 21.638889 | 67 | 0.477914 | false |
MiniSEC/GRR_clone | tools/config_updater.py | 1 | 17442 | #!/usr/bin/env python
"""Util for modifying the GRR server configuration."""
import argparse
import ConfigParser
import getpass
import os
# importing readline enables the raw_input calls to have history etc.
import readline # pylint: disable=unused-import
import sys
# pylint: disable=unused-import,g-bad-import-order
from grr.lib import server_plugins
# pylint: enable=g-bad-import-order,unused-import
from grr.lib import aff4
from grr.lib import config_lib
from grr.lib import data_store
from grr.lib import flags
# pylint: disable=g-import-not-at-top,no-name-in-module
try:
# FIXME(dbilby): Temporary hack until key_utils is deprecated.
from grr.lib import key_utils
except ImportError:
pass
from grr.lib import maintenance_utils
from grr.lib import rdfvalue
from grr.lib import startup
from grr.lib import utils
from grr.lib.aff4_objects import user_managers
# pylint: enable=g-import-not-at-top,no-name-in-module
# Command line definition: one top-level parser plus one subparser per action.
parser = flags.PARSER
parser.description = ("Set configuration parameters for the GRR Server."
                      "\nThis script has numerous subcommands to perform "
                      "various actions. When you are first setting up, you "
                      "probably only care about 'initialize'.")

# Generic arguments.
parser.add_argument(
    "--share_dir", default="/usr/share/grr",
    help="Path to the directory containing grr data.")

subparsers = parser.add_subparsers(
    title="subcommands", dest="subparser_name", description="valid subcommands")

# Subparsers.
parser_memory = subparsers.add_parser(
    "load_memory_drivers", help="Load memory drivers from disk to database.")

parser_generate_keys = subparsers.add_parser(
    "generate_keys", help="Generate crypto keys in the configuration.")

parser_repack_clients = subparsers.add_parser(
    "repack_clients",
    help="Repack the clients binaries with the current configuration.")

parser_initialize = subparsers.add_parser(
    "initialize",
    help="Interactively run all the required steps to setup a new GRR install.")

# Parent parser used in other user based parsers.
parser_user_args = argparse.ArgumentParser(add_help=False)

# User arguments.
parser_user_args.add_argument(
    "--username", required=True,
    help="Username to create.")
parser_user_args.add_argument(
    "--noadmin", default=False, action="store_true",
    help="Don't create the user as an administrator.")
parser_user_args.add_argument(
    "--password", default=None,
    help="Password to set for the user. If None, user will be prompted.")
parser_user_args.add_argument(
    "--label", default=[], action="append",
    help="Labels to add to the user object. These are used to control access.")

parser_add_user = subparsers.add_parser(
    "add_user", parents=[parser_user_args],
    help="Add a user to the system.")

parser_update_user = subparsers.add_parser(
    "update_user", parents=[parser_user_args],
    help="Update user settings.")

# Generate Keys Arguments
parser_generate_keys.add_argument(
    "--overwrite", default=False, action="store_true",
    help="Required to overwrite existing keys.")

# Repack arguments.
# NOTE(review): with action="store_false" and default=True, passing --upload
# actually *disables* uploading; the flag name is misleading — confirm intent.
parser_repack_clients.add_argument(
    "--upload", default=True, action="store_false",
    help="Upload the client binaries to the datastore.")

# Parent parser used in other upload based parsers.
parser_upload_args = argparse.ArgumentParser(add_help=False)
parser_upload_signed_args = argparse.ArgumentParser(add_help=False)

# Upload arguments.
parser_upload_args.add_argument(
    "--file", help="The file to upload", required=True)

parser_upload_args.add_argument(
    "--dest_path", required=False, default=None,
    help="The destination path to upload the file to, specified in aff4: form,"
    "e.g. aff4:/config/test.raw")

parser_upload_signed_args.add_argument(
    "--platform", required=True, choices=maintenance_utils.SUPPORTED_PLATFORMS,
    default="windows",
    help="The platform the file will be used on. This determines which signing"
    " keys to use, and the path on the server the file will be uploaded to.")

# Upload parsers.
parser_upload_raw = subparsers.add_parser(
    "upload_raw", parents=[parser_upload_args],
    help="Upload a raw file to an aff4 path.")

parser_upload_python = subparsers.add_parser(
    "upload_python", parents=[parser_upload_args, parser_upload_signed_args],
    help="Sign and upload a 'python hack' which can be used to execute code on "
    "a client.")

parser_upload_exe = subparsers.add_parser(
    "upload_exe", parents=[parser_upload_args, parser_upload_signed_args],
    help="Sign and upload an executable which can be used to execute code on "
    "a client.")

parser_upload_memory_driver = subparsers.add_parser(
    "upload_memory_driver",
    parents=[parser_upload_args, parser_upload_signed_args],
    help="Sign and upload a memory driver for a specific platform.")
def LoadMemoryDrivers(grr_dir):
"""Load memory drivers from disk to database."""
f_path = os.path.join(grr_dir, config_lib.CONFIG.Get(
"MemoryDriver.driver_file", context=["Platform:Darwin", "Arch:amd64"]))
print "Signing and uploading %s" % f_path
up_path = maintenance_utils.UploadSignedDriverBlob(
open(f_path).read(), platform="Darwin", file_name="pmem")
print "uploaded %s" % up_path
f_path = os.path.join(grr_dir, config_lib.CONFIG.Get(
"MemoryDriver.driver_file", context=["Platform:Windows", "Arch:i386"]))
print "Signing and uploading %s" % f_path
up_path = maintenance_utils.UploadSignedDriverBlob(
open(f_path).read(), platform="Windows", file_name="winpmem.i386.sys")
print "uploaded %s" % up_path
f_path = os.path.join(grr_dir, config_lib.CONFIG.Get(
"MemoryDriver.driver_file", context=["Platform:Windows", "Arch:amd64"]))
print "Signing and uploading %s" % f_path
up_path = maintenance_utils.UploadSignedDriverBlob(
open(f_path).read(), platform="Windows", file_name="winpmem.amd64.sys")
print "uploaded %s" % up_path
def ImportConfig(filename, config):
  """Reads an old config file and imports keys and user accounts.

  Args:
    filename: Path of the previous installation's config file.
    config: The new config object that imported values are written into.

  Returns:
    The number of options (including user accounts) that were imported.
  """
  # Whole sections vs. individual entries that are worth carrying over.
  sections_to_import = ["PrivateKeys"]
  entries_to_import = ["Client.driver_signing_public_key",
                       "Client.executable_signing_public_key",
                       "CA.certificate",
                       "Frontend.certificate"]
  options_imported = 0
  old_config = config_lib.CONFIG.MakeNewConfig()
  old_config.Initialize(filename)
  # Created lazily, only if the old config actually contains user entries.
  user_manager = None

  for entry in old_config.raw_data.keys():
    try:
      section = entry.split(".")[0]
      if section in sections_to_import or entry in entries_to_import:
        config.Set(entry, old_config.Get(entry))
        print "Imported %s." % entry
        options_imported += 1

      elif section == "Users":
        if user_manager is None:
          user_manager = user_managers.ConfigBasedUserManager()
        # Entry format: "Users.<name>" with value "<hash>:<label1,label2>".
        user = entry.split(".", 1)[1]
        hash_str, labels = old_config.Get(entry).split(":")
        user_manager.SetRaw(user, hash_str, labels.split(","))
        print "Imported user %s." % user
        options_imported += 1

    # Best-effort import: report a failed entry and keep going.
    except Exception as e:  # pylint: disable=broad-except
      print "Exception during import of %s: %s" % (entry, e)
  return options_imported
def GenerateDjangoKey(config):
  """Update a config with a random django key.

  Only generates a new key when none is set yet or the placeholder value
  "CHANGE_ME" is still in place; an existing key is left untouched.
  """
  try:
    secret_key = config["AdminUI.django_secret_key"]
  # NOTE(review): catching ConfigParser.NoOptionError assumes the config
  # object's __getitem__ raises that exact type for missing options — confirm.
  except ConfigParser.NoOptionError:
    secret_key = "CHANGE_ME"  # This is the config file default.

  if not secret_key or secret_key.strip().upper() == "CHANGE_ME":
    key = utils.GeneratePassphrase(length=100)
    config.Set("AdminUI.django_secret_key", key)
  else:
    print "Not updating django_secret_key as it is already set."
def GenerateKeys(config):
  """Generate the keys we need for a GRR server.

  Creates the executable/driver signing key pairs, the CA, the frontend
  server certificate, and the Django secret key, writing them all into
  `config`.

  Raises:
    RuntimeError: If keys already exist and --overwrite was not passed.
  """
  # The open source key_utils exposes MakeCACert; other builds do not.
  if not hasattr(key_utils, "MakeCACert"):
    parser.error("Generate keys can only run with open source key_utils.")
  if (config.Get("PrivateKeys.server_key", default=None) and
      not flags.FLAGS.overwrite):
    raise RuntimeError("Config %s already has keys, use --overwrite to "
                       "override." % config.parser)

  print "Generating executable signing key"
  priv_key, pub_key = key_utils.GenerateRSAKey()
  config.Set("PrivateKeys.executable_signing_private_key", priv_key)
  config.Set("Client.executable_signing_public_key", pub_key)

  print "Generating driver signing key"
  priv_key, pub_key = key_utils.GenerateRSAKey()
  config.Set("PrivateKeys.driver_signing_private_key", priv_key)
  config.Set("Client.driver_signing_public_key", pub_key)

  print "Generating CA keys"
  ca_cert, ca_pk, _ = key_utils.MakeCACert()
  cipher = None  # Private keys are stored unencrypted in the config.
  config.Set("CA.certificate", ca_cert.as_pem())
  config.Set("PrivateKeys.ca_key", ca_pk.as_pem(cipher))

  print "Generating Server keys"
  server_cert, server_key = key_utils.MakeCASignedCert("grr", ca_pk, bits=2048)
  config.Set("Frontend.certificate", server_cert.as_pem())
  config.Set("PrivateKeys.server_key", server_key.as_pem(cipher))

  print "Generating Django Secret key (used for xsrf protection etc)"
  GenerateDjangoKey(config)
def ConfigureBaseOptions(config):
  """Configure the basic options required to run the server.

  Interactively prompts for the public hostname, client-facing URL, admin
  UI URL and contact email addresses, then writes them into `config`.
  """
  print "We are now going to configure the server using a bunch of questions.\n"

  print """\nFor GRR to work each client has to be able to communicate with the
server. To do this we normally need a public dns name or IP address to
communicate with. In the standard configuration this will be used to host both
the client facing server and the admin user interface.\n"""
  print "Guessing public hostname of your server..."
  try:
    hostname = maintenance_utils.GuessPublicHostname()
    print "Using %s as public hostname" % hostname
  except OSError:
    # Fall back to asking the operator when auto-detection fails.
    print "Sorry, we couldn't guess your public hostname"
    hostname = raw_input(
        "Please enter it manually e.g. grr.example.com: ").strip()

  print """\n\nServer URL
The Server URL specifies the URL that the clients will connect to
communicate with the server. This needs to be publically accessible. By default
this will be port 8080 with the URL ending in /control.
"""
  def_location = "http://%s:8080/control" % hostname
  location = raw_input("Server URL [%s]: " % def_location) or def_location
  config.Set("Client.location", location)

  print """\nUI URL:
The UI URL specifies where the Administrative Web Interface can be found.
"""
  def_url = "http://%s:8000" % hostname
  ui_url = raw_input("AdminUI URL [%s]: " % def_url) or def_url
  config.Set("AdminUI.url", ui_url)

  print """\nMonitoring email address
Address where monitoring events get sent, e.g. crashed clients, broken server
etc.
"""
  def_email = "[email protected]"
  email = raw_input("Monitoring email [%s]: " % def_email) or def_email
  # NOTE(review): this answer is written to Monitoring.emergency_access_email
  # and then immediately overwritten by the emergency prompt below — the
  # monitoring address is effectively discarded. Probably a wrong config key.
  config.Set("Monitoring.emergency_access_email", email)

  print """\nEmergency email address
Address where high priority events such as an emergency ACL bypass are sent.
"""
  def_email = "[email protected]"
  # NOTE(review): the prompt default shown and used is `email` (the previous
  # answer), not def_email, so def_email here is dead — confirm intent.
  emergency_email = raw_input("Emergency email [%s]: " % email) or email
  config.Set("Monitoring.emergency_access_email", emergency_email)

  config.Write()
  print ("Configuration parameters set. You can edit these in %s" %
         config.parser)
def Initialize(config=None):
  """Initialize or update a GRR configuration.

  Walks the operator through importing an old config, key generation, base
  options, admin user creation, memory driver upload and client repacking.

  NOTE(review): the default config=None would crash on the first line
  (config.parser); callers apparently always pass a config — confirm.
  """
  print "Checking write access on config %s" % config.parser
  if not os.access(config.parser.filename, os.W_OK):
    raise IOError("Config not writeable (need sudo?)")

  print "\nStep 0: Importing Configuration from previous installation."
  options_imported = 0
  prev_config_file = config.Get("ConfigUpdater.old_config", default=None)
  if prev_config_file and os.access(prev_config_file, os.R_OK):
    print "Found config file %s." % prev_config_file
    if raw_input("Do you want to import this configuration?"
                 " [yN]: ").upper() == "Y":
      options_imported = ImportConfig(prev_config_file, config)
  else:
    print "No old config file found."

  print "\nStep 1: Key Generation"
  if config.Get("PrivateKeys.server_key", default=None):
    if options_imported > 0:
      print ("Since you have imported keys from another installation in the "
             "last step,\nyou probably do not want to generate new keys now.")
    # Only regenerate over existing keys after explicit confirmation.
    if ((raw_input("You already have keys in your config, do you want to"
                   " overwrite them? [yN]: ").upper() or "N") == "Y"):
      flags.FLAGS.overwrite = True
      GenerateKeys(config)
  else:
    GenerateKeys(config)

  print "\nStep 2: Setting Basic Configuration Parameters"
  ConfigureBaseOptions(config)

  # Now load our modified config.
  startup.ConfigInit()

  print "\nStep 3: Adding Admin User"
  password = getpass.getpass(prompt="Please enter password for user 'admin': ")
  data_store.DB.security_manager.user_manager.UpdateUser(
      "admin", password=password, admin=True)
  print "User admin added."

  print "\nStep 4: Uploading Memory Drivers to the Database"
  LoadMemoryDrivers(flags.FLAGS.share_dir)

  print "\nStep 5: Repackaging clients with new configuration."
  # We need to update the config to point to the installed templates now.
  config.Set("ClientBuilder.executables_path", os.path.join(
      flags.FLAGS.share_dir, "executables"))
  maintenance_utils.RepackAllBinaries(upload=True)

  print "\nInitialization complete, writing configuration."
  config.Write()
  print "Please restart the service for it to take effect.\n\n"
def UploadRaw(file_path, aff4_path):
  """Upload a file to the datastore.

  Args:
    file_path: Path of the local file to upload.
    aff4_path: AFF4 directory urn under which the file is stored; the
        file's base name is appended to it.

  Returns:
    The string urn of the created AFF4 object.
  """
  full_path = rdfvalue.RDFURN(aff4_path).Add(os.path.basename(file_path))
  fd = aff4.FACTORY.Create(full_path, "AFF4Image", mode="w")
  # Read in binary mode and close the source handle deterministically
  # (the original leaked it via a bare open(...).read()).
  # NOTE: uploads are capped at 30 MB; anything beyond is silently truncated.
  with open(file_path, "rb") as src:
    fd.Write(src.read(1024*1024*30))
  fd.Close()
  return str(fd.urn)
def main(unused_argv):
  """Main.

  Dispatches to the action selected by the subcommand on the command line.
  """
  config_lib.CONFIG.AddContext("Commandline Context")
  config_lib.CONFIG.AddContext("ConfigUpdater Context")
  startup.Init()

  print "Using configuration %s" % config_lib.CONFIG.parser

  if flags.FLAGS.subparser_name == "load_memory_drivers":
    LoadMemoryDrivers(flags.FLAGS.share_dir)

  elif flags.FLAGS.subparser_name == "generate_keys":
    try:
      GenerateKeys(config_lib.CONFIG)
    except RuntimeError, e:
      # GenerateKeys will raise if keys exist and --overwrite is not set.
      print "ERROR: %s" % e
      sys.exit(1)
    config_lib.CONFIG.Write()

  elif flags.FLAGS.subparser_name == "repack_clients":
    maintenance_utils.RepackAllBinaries(upload=flags.FLAGS.upload)

  # NOTE(review): this `if` (rather than `elif`) restarts the dispatch chain;
  # harmless since subparser_name matches at most one branch, but confirm it
  # is intentional and not a typo.
  if flags.FLAGS.subparser_name == "add_user":
    if flags.FLAGS.password:
      password = flags.FLAGS.password
    else:
      password = getpass.getpass(prompt="Please enter password for user %s: " %
                                 flags.FLAGS.username)
    admin = not flags.FLAGS.noadmin
    data_store.DB.security_manager.user_manager.AddUser(
        flags.FLAGS.username, password=password, admin=admin,
        labels=flags.FLAGS.label)

  elif flags.FLAGS.subparser_name == "update_user":
    admin = not flags.FLAGS.noadmin
    data_store.DB.security_manager.user_manager.UpdateUser(
        flags.FLAGS.username, password=flags.FLAGS.password, admin=admin,
        labels=flags.FLAGS.label)

  elif flags.FLAGS.subparser_name == "initialize":
    Initialize(config_lib.CONFIG)

  elif flags.FLAGS.subparser_name == "upload_python":
    # NOTE(review): open(...).read() leaks the handle and silently caps the
    # upload at 30 MB; same pattern in the upload_exe/upload_memory_driver
    # branches below.
    content = open(flags.FLAGS.file).read(1024*1024*30)
    if flags.FLAGS.dest_path:
      uploaded = maintenance_utils.UploadSignedConfigBlob(
          content, platform=flags.FLAGS.platform,
          aff4_path=flags.FLAGS.dest_path)
    else:
      uploaded = maintenance_utils.UploadSignedConfigBlob(
          content, file_name=os.path.basename(flags.FLAGS.file),
          platform=flags.FLAGS.platform,
          aff4_path="/config/python_hacks/{file_name}")

    print "Uploaded to %s" % uploaded

  elif flags.FLAGS.subparser_name == "upload_exe":
    content = open(flags.FLAGS.file).read(1024*1024*30)
    context = ["Platform:%s" % flags.FLAGS.platform.title(),
               "Client"]

    if flags.FLAGS.dest_path:
      dest_path = rdfvalue.RDFURN(flags.FLAGS.dest_path)
    else:
      dest_path = config_lib.CONFIG.Get(
          "Executables.aff4_path", context=context).Add(
              os.path.basename(flags.FLAGS.file))

    # Now upload to the destination.
    uploaded = maintenance_utils.UploadSignedConfigBlob(
        content, aff4_path=dest_path, client_context=context)

    print "Uploaded to %s" % dest_path

  elif flags.FLAGS.subparser_name == "upload_memory_driver":
    content = open(flags.FLAGS.file).read(1024*1024*30)
    if flags.FLAGS.dest_path:
      uploaded = maintenance_utils.UploadSignedDriverBlob(
          content, platform=flags.FLAGS.platform,
          aff4_path=flags.FLAGS.dest_path)
    else:
      uploaded = maintenance_utils.UploadSignedDriverBlob(
          content, platform=flags.FLAGS.platform,
          file_name=os.path.basename(flags.FLAGS.file))

    print "Uploaded to %s" % uploaded

  elif flags.FLAGS.subparser_name == "upload_raw":
    if not flags.FLAGS.dest_path:
      flags.FLAGS.dest_path = aff4.ROOT_URN.Add("config").Add("raw")
    uploaded = UploadRaw(flags.FLAGS.file, flags.FLAGS.dest_path)
    print "Uploaded to %s" % uploaded
def ConsoleMain():
"""Helper function for calling with setup tools entry points."""
flags.StartMain(main)
if __name__ == "__main__":
ConsoleMain()
| apache-2.0 | -6,693,416,173,666,864,000 | 36.189765 | 80 | 0.698486 | false |
HBEE/odoo-addons | sale_order_mail_product_attachment/sale.py | 3 | 1871 | # -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in module root
# directory
##############################################################################
from openerp import netsvc
from openerp.osv import fields, osv, orm
from openerp.tools.translate import _
class sale_order_line(osv.osv):
_inherit = "sale.order.line"
def product_id_change(self, cr, uid, ids, pricelist, product, qty=0,
uom=False, qty_uos=0, uos=False, name='', partner_id=False,
lang=False, update_tax=True, date_order=False, packaging=False, fiscal_position=False, flag=False, context=None):
res = super(sale_order_line, self).product_id_change(cr, uid, ids, pricelist, product, qty,
uom, qty_uos, uos, name, partner_id,
lang, update_tax, date_order, packaging, fiscal_position, flag, context)
partner_obj = self.pool.get('res.partner')
lang = partner_obj.browse(cr, uid, partner_id).lang
context_partner = {'lang': lang, 'partner_id': partner_id}
product_obj = self.pool.get('product.product')
attachment_obj = self.pool['ir.attachment']
if product:
product_obj = product_obj.browse(cr, uid, product, context=context_partner)
if not flag:
attachment_ids = attachment_obj.search(cr, uid, [('res_model','=','product.product'),('res_id','=',product)], context=context)
if attachment_ids:
attachemnt_desc = ', '.join([at.name for at in attachment_obj.browse(cr, uid, attachment_ids, context=context)])
res['value']['name'] += '\n' + ('Ver adjuntos: ') + attachemnt_desc
return res
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | -1,962,823,167,135,322,400 | 54.029412 | 142 | 0.572421 | false |
sjsucohort6/openstack | python/venv/lib/python2.7/site-packages/openstack/tests/unit/cluster/v1/test_proxy.py | 3 | 1790 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from openstack.cluster.v1 import _proxy
from openstack.cluster.v1 import cluster
from openstack.tests.unit import test_proxy_base
class TestClusterProxy(test_proxy_base.TestProxyBase):
def setUp(self):
super(TestClusterProxy, self).setUp()
self.proxy = _proxy.Proxy(self.session)
def test_cluster_create(self):
self.verify_create(self.proxy.create_cluster, cluster.Cluster)
def test_cluster_delete(self):
self.verify_delete(self.proxy.delete_cluster, cluster.Cluster, False)
def test_cluster_delete_ignore(self):
self.verify_delete(self.proxy.delete_cluster, cluster.Cluster, True)
def test_cluster_find(self):
self.verify_find('openstack.cluster.v1.cluster.Cluster.find',
self.proxy.find_cluster)
def test_cluster_get(self):
self.verify_get(self.proxy.get_cluster, cluster.Cluster)
def test_clusters(self):
self.verify_list(self.proxy.clusters, cluster.Cluster,
paginated=True,
method_kwargs={'limit': 2},
expected_kwargs={'limit': 2})
def test_cluster_update(self):
self.verify_update(self.proxy.update_cluster, cluster.Cluster)
| mit | -6,791,646,774,791,227,000 | 37.913043 | 77 | 0.691061 | false |
kennedyshead/home-assistant | tests/components/homekit/test_type_remote.py | 5 | 4403 | """Test different accessory types: Remotes."""
from homeassistant.components.homekit.const import (
ATTR_KEY_NAME,
ATTR_VALUE,
EVENT_HOMEKIT_TV_REMOTE_KEY_PRESSED,
KEY_ARROW_RIGHT,
)
from homeassistant.components.homekit.type_remotes import ActivityRemote
from homeassistant.components.remote import (
ATTR_ACTIVITY,
ATTR_ACTIVITY_LIST,
ATTR_CURRENT_ACTIVITY,
DOMAIN,
SUPPORT_ACTIVITY,
)
from homeassistant.const import (
ATTR_ENTITY_ID,
ATTR_SUPPORTED_FEATURES,
STATE_OFF,
STATE_ON,
STATE_STANDBY,
)
from tests.common import async_mock_service
async def test_activity_remote(hass, hk_driver, events, caplog):
"""Test if remote accessory and HA are updated accordingly."""
entity_id = "remote.harmony"
hass.states.async_set(
entity_id,
None,
{
ATTR_SUPPORTED_FEATURES: SUPPORT_ACTIVITY,
ATTR_CURRENT_ACTIVITY: "Apple TV",
ATTR_ACTIVITY_LIST: ["TV", "Apple TV"],
},
)
await hass.async_block_till_done()
acc = ActivityRemote(hass, hk_driver, "ActivityRemote", entity_id, 2, None)
await acc.run()
await hass.async_block_till_done()
assert acc.aid == 2
assert acc.category == 31 # Television
assert acc.char_active.value == 0
assert acc.char_remote_key.value == 0
assert acc.char_input_source.value == 1
hass.states.async_set(
entity_id,
STATE_ON,
{
ATTR_SUPPORTED_FEATURES: SUPPORT_ACTIVITY,
ATTR_CURRENT_ACTIVITY: "Apple TV",
ATTR_ACTIVITY_LIST: ["TV", "Apple TV"],
},
)
await hass.async_block_till_done()
assert acc.char_active.value == 1
hass.states.async_set(entity_id, STATE_OFF)
await hass.async_block_till_done()
assert acc.char_active.value == 0
hass.states.async_set(entity_id, STATE_ON)
await hass.async_block_till_done()
assert acc.char_active.value == 1
hass.states.async_set(entity_id, STATE_STANDBY)
await hass.async_block_till_done()
assert acc.char_active.value == 0
hass.states.async_set(
entity_id,
STATE_ON,
{
ATTR_SUPPORTED_FEATURES: SUPPORT_ACTIVITY,
ATTR_CURRENT_ACTIVITY: "TV",
ATTR_ACTIVITY_LIST: ["TV", "Apple TV"],
},
)
await hass.async_block_till_done()
assert acc.char_input_source.value == 0
hass.states.async_set(
entity_id,
STATE_ON,
{
ATTR_SUPPORTED_FEATURES: SUPPORT_ACTIVITY,
ATTR_CURRENT_ACTIVITY: "Apple TV",
ATTR_ACTIVITY_LIST: ["TV", "Apple TV"],
},
)
await hass.async_block_till_done()
assert acc.char_input_source.value == 1
# Set from HomeKit
call_turn_on = async_mock_service(hass, DOMAIN, "turn_on")
call_turn_off = async_mock_service(hass, DOMAIN, "turn_off")
acc.char_active.client_update_value(1)
await hass.async_block_till_done()
assert call_turn_on
assert call_turn_on[0].data[ATTR_ENTITY_ID] == entity_id
assert len(events) == 1
assert events[-1].data[ATTR_VALUE] is None
acc.char_active.client_update_value(0)
await hass.async_block_till_done()
assert call_turn_off
assert call_turn_off[0].data[ATTR_ENTITY_ID] == entity_id
assert len(events) == 2
assert events[-1].data[ATTR_VALUE] is None
acc.char_input_source.client_update_value(1)
await hass.async_block_till_done()
assert call_turn_on
assert call_turn_on[1].data[ATTR_ENTITY_ID] == entity_id
assert call_turn_on[1].data[ATTR_ACTIVITY] == "Apple TV"
assert len(events) == 3
assert events[-1].data[ATTR_VALUE] is None
acc.char_input_source.client_update_value(0)
await hass.async_block_till_done()
assert call_turn_on
assert call_turn_on[2].data[ATTR_ENTITY_ID] == entity_id
assert call_turn_on[2].data[ATTR_ACTIVITY] == "TV"
assert len(events) == 4
assert events[-1].data[ATTR_VALUE] is None
events = []
def listener(event):
events.append(event)
hass.bus.async_listen(EVENT_HOMEKIT_TV_REMOTE_KEY_PRESSED, listener)
acc.char_remote_key.client_update_value(20)
await hass.async_block_till_done()
acc.char_remote_key.client_update_value(7)
await hass.async_block_till_done()
assert len(events) == 1
assert events[0].data[ATTR_KEY_NAME] == KEY_ARROW_RIGHT
| apache-2.0 | -3,007,216,356,524,634,600 | 28.75 | 79 | 0.639564 | false |
AkademieOlympia/sympy | sympy/functions/special/tests/test_elliptic_integrals.py | 97 | 5587 | from sympy import (S, Symbol, pi, I, oo, zoo, sin, sqrt, tan, gamma,
atanh, hyper, meijerg, O)
from sympy.functions.special.elliptic_integrals import (elliptic_k as K,
elliptic_f as F, elliptic_e as E, elliptic_pi as P)
from sympy.utilities.randtest import (test_derivative_numerically as td,
random_complex_number as randcplx,
verify_numerically as tn)
from sympy.abc import z, m, n
i = Symbol('i', integer=True)
j = Symbol('k', integer=True, positive=True)
def test_K():
assert K(0) == pi/2
assert K(S(1)/2) == 8*pi**(S(3)/2)/gamma(-S(1)/4)**2
assert K(1) == zoo
assert K(-1) == gamma(S(1)/4)**2/(4*sqrt(2*pi))
assert K(oo) == 0
assert K(-oo) == 0
assert K(I*oo) == 0
assert K(-I*oo) == 0
assert K(zoo) == 0
assert K(z).diff(z) == (E(z) - (1 - z)*K(z))/(2*z*(1 - z))
assert td(K(z), z)
zi = Symbol('z', real=False)
assert K(zi).conjugate() == K(zi.conjugate())
zr = Symbol('z', real=True, negative=True)
assert K(zr).conjugate() == K(zr)
assert K(z).rewrite(hyper) == \
(pi/2)*hyper((S.Half, S.Half), (S.One,), z)
assert tn(K(z), (pi/2)*hyper((S.Half, S.Half), (S.One,), z))
assert K(z).rewrite(meijerg) == \
meijerg(((S.Half, S.Half), []), ((S.Zero,), (S.Zero,)), -z)/2
assert tn(K(z), meijerg(((S.Half, S.Half), []), ((S.Zero,), (S.Zero,)), -z)/2)
assert K(z).series(z) == pi/2 + pi*z/8 + 9*pi*z**2/128 + \
25*pi*z**3/512 + 1225*pi*z**4/32768 + 3969*pi*z**5/131072 + O(z**6)
def test_F():
assert F(z, 0) == z
assert F(0, m) == 0
assert F(pi*i/2, m) == i*K(m)
assert F(z, oo) == 0
assert F(z, -oo) == 0
assert F(-z, m) == -F(z, m)
assert F(z, m).diff(z) == 1/sqrt(1 - m*sin(z)**2)
assert F(z, m).diff(m) == E(z, m)/(2*m*(1 - m)) - F(z, m)/(2*m) - \
sin(2*z)/(4*(1 - m)*sqrt(1 - m*sin(z)**2))
r = randcplx()
assert td(F(z, r), z)
assert td(F(r, m), m)
mi = Symbol('m', real=False)
assert F(z, mi).conjugate() == F(z.conjugate(), mi.conjugate())
mr = Symbol('m', real=True, negative=True)
assert F(z, mr).conjugate() == F(z.conjugate(), mr)
assert F(z, m).series(z) == \
z + z**5*(3*m**2/40 - m/30) + m*z**3/6 + O(z**6)
def test_E():
assert E(z, 0) == z
assert E(0, m) == 0
assert E(i*pi/2, m) == i*E(m)
assert E(z, oo) == zoo
assert E(z, -oo) == zoo
assert E(0) == pi/2
assert E(1) == 1
assert E(oo) == I*oo
assert E(-oo) == oo
assert E(zoo) == zoo
assert E(-z, m) == -E(z, m)
assert E(z, m).diff(z) == sqrt(1 - m*sin(z)**2)
assert E(z, m).diff(m) == (E(z, m) - F(z, m))/(2*m)
assert E(z).diff(z) == (E(z) - K(z))/(2*z)
r = randcplx()
assert td(E(r, m), m)
assert td(E(z, r), z)
assert td(E(z), z)
mi = Symbol('m', real=False)
assert E(z, mi).conjugate() == E(z.conjugate(), mi.conjugate())
assert E(mi).conjugate() == E(mi.conjugate())
mr = Symbol('m', real=True, negative=True)
assert E(z, mr).conjugate() == E(z.conjugate(), mr)
assert E(mr).conjugate() == E(mr)
assert E(z).rewrite(hyper) == (pi/2)*hyper((-S.Half, S.Half), (S.One,), z)
assert tn(E(z), (pi/2)*hyper((-S.Half, S.Half), (S.One,), z))
assert E(z).rewrite(meijerg) == \
-meijerg(((S.Half, S(3)/2), []), ((S.Zero,), (S.Zero,)), -z)/4
assert tn(E(z), -meijerg(((S.Half, S(3)/2), []), ((S.Zero,), (S.Zero,)), -z)/4)
assert E(z, m).series(z) == \
z + z**5*(-m**2/40 + m/30) - m*z**3/6 + O(z**6)
assert E(z).series(z) == pi/2 - pi*z/8 - 3*pi*z**2/128 - \
5*pi*z**3/512 - 175*pi*z**4/32768 - 441*pi*z**5/131072 + O(z**6)
def test_P():
assert P(0, z, m) == F(z, m)
assert P(1, z, m) == F(z, m) + \
(sqrt(1 - m*sin(z)**2)*tan(z) - E(z, m))/(1 - m)
assert P(n, i*pi/2, m) == i*P(n, m)
assert P(n, z, 0) == atanh(sqrt(n - 1)*tan(z))/sqrt(n - 1)
assert P(n, z, n) == F(z, n) - P(1, z, n) + tan(z)/sqrt(1 - n*sin(z)**2)
assert P(oo, z, m) == 0
assert P(-oo, z, m) == 0
assert P(n, z, oo) == 0
assert P(n, z, -oo) == 0
assert P(0, m) == K(m)
assert P(1, m) == zoo
assert P(n, 0) == pi/(2*sqrt(1 - n))
assert P(2, 1) == -oo
assert P(-1, 1) == oo
assert P(n, n) == E(n)/(1 - n)
assert P(n, -z, m) == -P(n, z, m)
ni, mi = Symbol('n', real=False), Symbol('m', real=False)
assert P(ni, z, mi).conjugate() == \
P(ni.conjugate(), z.conjugate(), mi.conjugate())
nr, mr = Symbol('n', real=True, negative=True), \
Symbol('m', real=True, negative=True)
assert P(nr, z, mr).conjugate() == P(nr, z.conjugate(), mr)
assert P(n, m).conjugate() == P(n.conjugate(), m.conjugate())
assert P(n, z, m).diff(n) == (E(z, m) + (m - n)*F(z, m)/n +
(n**2 - m)*P(n, z, m)/n - n*sqrt(1 -
m*sin(z)**2)*sin(2*z)/(2*(1 - n*sin(z)**2)))/(2*(m - n)*(n - 1))
assert P(n, z, m).diff(z) == 1/(sqrt(1 - m*sin(z)**2)*(1 - n*sin(z)**2))
assert P(n, z, m).diff(m) == (E(z, m)/(m - 1) + P(n, z, m) -
m*sin(2*z)/(2*(m - 1)*sqrt(1 - m*sin(z)**2)))/(2*(n - m))
assert P(n, m).diff(n) == (E(m) + (m - n)*K(m)/n +
(n**2 - m)*P(n, m)/n)/(2*(m - n)*(n - 1))
assert P(n, m).diff(m) == (E(m)/(m - 1) + P(n, m))/(2*(n - m))
rx, ry = randcplx(), randcplx()
assert td(P(n, rx, ry), n)
assert td(P(rx, z, ry), z)
assert td(P(rx, ry, m), m)
assert P(n, z, m).series(z) == z + z**3*(m/6 + n/3) + \
z**5*(3*m**2/40 + m*n/10 - m/30 + n**2/5 - n/15) + O(z**6)
| bsd-3-clause | -7,884,247,484,898,591,000 | 35.756579 | 83 | 0.476642 | false |
ghchinoy/tensorflow | tensorflow/python/kernel_tests/reverse_sequence_op_test.py | 22 | 6952 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorflow.ops.reverse_sequence_op."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from six.moves import xrange # pylint: disable=redefined-builtin
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gradient_checker
from tensorflow.python.platform import test
class ReverseSequenceTest(test.TestCase):
def _testReverseSequence(self,
x,
batch_axis,
seq_axis,
seq_lengths,
truth,
use_gpu=False,
expected_err_re=None):
with self.cached_session(use_gpu=use_gpu):
ans = array_ops.reverse_sequence(
x, batch_axis=batch_axis, seq_axis=seq_axis, seq_lengths=seq_lengths)
if expected_err_re is None:
tf_ans = self.evaluate(ans)
self.assertAllClose(tf_ans, truth, atol=1e-10)
self.assertShapeEqual(truth, ans)
else:
with self.assertRaisesOpError(expected_err_re):
self.evaluate(ans)
def _testBothReverseSequence(self,
x,
batch_axis,
seq_axis,
seq_lengths,
truth,
expected_err_re=None):
self._testReverseSequence(x, batch_axis, seq_axis, seq_lengths, truth, True,
expected_err_re)
self._testReverseSequence(x, batch_axis, seq_axis, seq_lengths, truth,
False, expected_err_re)
def _testBasic(self, dtype, len_dtype=np.int64):
x = np.asarray(
[[[1, 2, 3, 4], [5, 6, 7, 8]], [[9, 10, 11, 12], [13, 14, 15, 16]],
[[17, 18, 19, 20], [21, 22, 23, 24]]],
dtype=dtype)
x = x.reshape(3, 2, 4, 1, 1)
x = x.transpose([2, 1, 0, 3, 4]) # permute axes 0 <=> 2
# reverse dim 2 up to (0:3, none, 0:4) along dim=0
seq_lengths = np.asarray([3, 0, 4], dtype=len_dtype)
truth_orig = np.asarray(
[
[[3, 2, 1, 4], [7, 6, 5, 8]], # reverse 0:3
[[9, 10, 11, 12], [13, 14, 15, 16]], # reverse none
[[20, 19, 18, 17], [24, 23, 22, 21]]
], # reverse 0:4 (all)
dtype=dtype)
truth_orig = truth_orig.reshape(3, 2, 4, 1, 1)
truth = truth_orig.transpose([2, 1, 0, 3, 4]) # permute axes 0 <=> 2
seq_axis = 0 # permute seq_axis and batch_axis (originally 2 and 0, resp.)
batch_axis = 2
self._testBothReverseSequence(x, batch_axis, seq_axis, seq_lengths, truth)
def testSeqLengthInt32(self):
self._testBasic(np.float32, np.int32)
def testFloatBasic(self):
self._testBasic(np.float32)
def testDoubleBasic(self):
self._testBasic(np.float64)
def testInt32Basic(self):
self._testBasic(np.int32)
def testInt64Basic(self):
self._testBasic(np.int64)
def testComplex64Basic(self):
self._testBasic(np.complex64)
def testComplex128Basic(self):
self._testBasic(np.complex128)
@test_util.run_deprecated_v1
def testFloatReverseSequenceGrad(self):
x = np.asarray(
[[[1, 2, 3, 4], [5, 6, 7, 8]], [[9, 10, 11, 12], [13, 14, 15, 16]],
[[17, 18, 19, 20], [21, 22, 23, 24]]],
dtype=np.float)
x = x.reshape(3, 2, 4, 1, 1)
x = x.transpose([2, 1, 0, 3, 4]) # transpose axes 0 <=> 2
# reverse dim 0 up to (0:3, none, 0:4) along dim=2
seq_axis = 0
batch_axis = 2
seq_lengths = np.asarray([3, 0, 4], dtype=np.int64)
with self.cached_session():
input_t = constant_op.constant(x, shape=x.shape)
seq_lengths_t = constant_op.constant(seq_lengths, shape=seq_lengths.shape)
reverse_sequence_out = array_ops.reverse_sequence(
input_t,
batch_axis=batch_axis,
seq_axis=seq_axis,
seq_lengths=seq_lengths_t)
err = gradient_checker.compute_gradient_error(
input_t, x.shape, reverse_sequence_out, x.shape, x_init_value=x)
print("ReverseSequence gradient error = %g" % err)
self.assertLess(err, 1e-8)
@test_util.run_deprecated_v1
def testShapeFunctionEdgeCases(self):
t = array_ops.reverse_sequence(
array_ops.placeholder(
dtypes.float32, shape=None),
seq_lengths=array_ops.placeholder(
dtypes.int64, shape=(32,)),
batch_axis=0,
seq_axis=1)
self.assertIs(t.get_shape().ndims, None)
# Batch size mismatched between input and seq_lengths.
with self.assertRaises(ValueError):
array_ops.reverse_sequence(
array_ops.placeholder(
dtypes.float32, shape=(32, 2, 3)),
seq_lengths=array_ops.placeholder(
dtypes.int64, shape=(33,)),
seq_axis=3)
# seq_axis out of bounds.
with self.assertRaisesRegexp(ValueError, "seq_dim must be < input rank"):
array_ops.reverse_sequence(
array_ops.placeholder(
dtypes.float32, shape=(32, 2, 3)),
seq_lengths=array_ops.placeholder(
dtypes.int64, shape=(32,)),
seq_axis=3)
# batch_axis out of bounds.
with self.assertRaisesRegexp(ValueError, "batch_dim must be < input rank"):
array_ops.reverse_sequence(
array_ops.placeholder(
dtypes.float32, shape=(32, 2, 3)),
seq_lengths=array_ops.placeholder(
dtypes.int64, shape=(32,)),
seq_axis=0,
batch_axis=3)
with self.cached_session():
inputs = array_ops.placeholder(dtypes.float32, shape=(32, 2, 3))
seq_lengths = array_ops.placeholder(dtypes.int64, shape=(32,))
output = array_ops.reverse_sequence(
inputs, seq_lengths=seq_lengths,
seq_axis=0) # batch_axis default is 0
with self.assertRaisesOpError("batch_dim == seq_dim"):
output.eval(feed_dict={
inputs: np.random.rand(32, 2, 3),
seq_lengths: xrange(32)
})
if __name__ == "__main__":
test.main()
| apache-2.0 | 4,702,862,580,759,777,000 | 35.397906 | 80 | 0.586306 | false |
sebrandon1/nova | nova/tests/unit/virt/hyperv/test_driver.py | 3 | 20257 | # Copyright 2015 Cloudbase Solutions SRL
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Unit tests for the Hyper-V Driver.
"""
import platform
import sys
import mock
from os_win import exceptions as os_win_exc
from nova import exception
from nova import safe_utils
from nova.tests.unit import fake_instance
from nova.tests.unit.virt.hyperv import test_base
from nova.virt import driver as base_driver
from nova.virt.hyperv import driver
class HyperVDriverTestCase(test_base.HyperVBaseTestCase):
FAKE_WIN_2008R2_VERSION = '6.0.0'
@mock.patch.object(driver.HyperVDriver, '_check_minimum_windows_version')
def setUp(self, mock_check_minimum_windows_version):
super(HyperVDriverTestCase, self).setUp()
self.context = 'context'
self.driver = driver.HyperVDriver(mock.sentinel.virtapi)
self.driver._hostops = mock.MagicMock()
self.driver._volumeops = mock.MagicMock()
self.driver._vmops = mock.MagicMock()
self.driver._snapshotops = mock.MagicMock()
self.driver._livemigrationops = mock.MagicMock()
self.driver._migrationops = mock.MagicMock()
self.driver._rdpconsoleops = mock.MagicMock()
self.driver._serialconsoleops = mock.MagicMock()
self.driver._imagecache = mock.MagicMock()
@mock.patch.object(driver.utilsfactory, 'get_hostutils')
def test_check_minimum_windows_version(self, mock_get_hostutils):
mock_hostutils = mock_get_hostutils.return_value
mock_hostutils.check_min_windows_version.return_value = False
self.assertRaises(exception.HypervisorTooOld,
self.driver._check_minimum_windows_version)
    def test_public_api_signatures(self):
        """Verify HyperVDriver matches the ComputeDriver public API.

        Every callable attribute on HyperVDriver is temporarily replaced by
        its unwrapped form before the signature comparison runs.
        """
        # NOTE(claudiub): wrapped functions do not keep the same signature in
        # Python 2.7, which causes this test to fail. Instead, we should
        # compare the public API signatures of the unwrapped methods.
        for attr in driver.HyperVDriver.__dict__:
            class_member = getattr(driver.HyperVDriver, attr)
            if callable(class_member):
                # Patch in the unwrapped function; addCleanup restores the
                # original attribute once this test finishes.
                mocked_method = mock.patch.object(
                    driver.HyperVDriver, attr,
                    safe_utils.get_wrapped_function(class_member))
                mocked_method.start()
                self.addCleanup(mocked_method.stop)

        self.assertPublicAPISignatures(base_driver.ComputeDriver,
                                       driver.HyperVDriver)
def test_converted_exception(self):
self.driver._vmops.get_info.side_effect = (
os_win_exc.OSWinException)
self.assertRaises(exception.NovaException,
self.driver.get_info, mock.sentinel.instance)
self.driver._vmops.get_info.side_effect = os_win_exc.HyperVException
self.assertRaises(exception.NovaException,
self.driver.get_info, mock.sentinel.instance)
self.driver._vmops.get_info.side_effect = (
os_win_exc.HyperVVMNotFoundException(vm_name='foofoo'))
self.assertRaises(exception.InstanceNotFound,
self.driver.get_info, mock.sentinel.instance)
    def test_assert_original_traceback_maintained(self):
        """The converted exception keeps the original traceback frames."""
        def bar(self):
            # Local variable used below to identify the originating frame.
            foo = "foofoo"
            raise os_win_exc.HyperVVMNotFoundException(vm_name=foo)

        self.driver._vmops.get_info.side_effect = bar

        try:
            self.driver.get_info(mock.sentinel.instance)
            self.fail("Test expected exception, but it was not raised.")
        except exception.InstanceNotFound:
            # exception has been raised as expected.
            _, _, trace = sys.exc_info()
            while trace.tb_next:
                # iterate until the original exception source, bar.
                trace = trace.tb_next

            # original frame will contain the 'foo' variable.
            self.assertEqual('foofoo', trace.tb_frame.f_locals['foo'])
@mock.patch.object(driver.eventhandler, 'InstanceEventHandler')
def test_init_host(self, mock_InstanceEventHandler):
self.driver.init_host(mock.sentinel.host)
mock_start_console_handlers = (
self.driver._serialconsoleops.start_console_handlers)
mock_start_console_handlers.assert_called_once_with()
mock_InstanceEventHandler.assert_called_once_with(
state_change_callback=self.driver.emit_event)
fake_event_handler = mock_InstanceEventHandler.return_value
fake_event_handler.start_listener.assert_called_once_with()
def test_list_instance_uuids(self):
self.driver.list_instance_uuids()
self.driver._vmops.list_instance_uuids.assert_called_once_with()
def test_list_instances(self):
self.driver.list_instances()
self.driver._vmops.list_instances.assert_called_once_with()
def test_estimate_instance_overhead(self):
self.driver.estimate_instance_overhead(mock.sentinel.instance)
self.driver._vmops.estimate_instance_overhead.assert_called_once_with(
mock.sentinel.instance)
def test_spawn(self):
self.driver.spawn(
mock.sentinel.context, mock.sentinel.instance,
mock.sentinel.image_meta, mock.sentinel.injected_files,
mock.sentinel.admin_password, mock.sentinel.network_info,
mock.sentinel.block_device_info)
self.driver._vmops.spawn.assert_called_once_with(
mock.sentinel.context, mock.sentinel.instance,
mock.sentinel.image_meta, mock.sentinel.injected_files,
mock.sentinel.admin_password, mock.sentinel.network_info,
mock.sentinel.block_device_info)
def test_reboot(self):
self.driver.reboot(
mock.sentinel.context, mock.sentinel.instance,
mock.sentinel.network_info, mock.sentinel.reboot_type,
mock.sentinel.block_device_info, mock.sentinel.bad_vol_callback)
self.driver._vmops.reboot.assert_called_once_with(
mock.sentinel.instance, mock.sentinel.network_info,
mock.sentinel.reboot_type)
def test_destroy(self):
self.driver.destroy(
mock.sentinel.context, mock.sentinel.instance,
mock.sentinel.network_info, mock.sentinel.block_device_info,
mock.sentinel.destroy_disks, mock.sentinel.migrate_data)
self.driver._vmops.destroy.assert_called_once_with(
mock.sentinel.instance, mock.sentinel.network_info,
mock.sentinel.block_device_info, mock.sentinel.destroy_disks)
def test_get_info(self):
self.driver.get_info(mock.sentinel.instance)
self.driver._vmops.get_info.assert_called_once_with(
mock.sentinel.instance)
def test_attach_volume(self):
mock_instance = fake_instance.fake_instance_obj(self.context)
self.driver.attach_volume(
mock.sentinel.context, mock.sentinel.connection_info,
mock_instance, mock.sentinel.mountpoint, mock.sentinel.disk_bus,
mock.sentinel.device_type, mock.sentinel.encryption)
self.driver._volumeops.attach_volume.assert_called_once_with(
mock.sentinel.connection_info,
mock_instance.name)
def test_detach_volume(self):
mock_instance = fake_instance.fake_instance_obj(self.context)
self.driver.detach_volume(
mock.sentinel.connection_info, mock_instance,
mock.sentinel.mountpoint, mock.sentinel.encryption)
self.driver._volumeops.detach_volume.assert_called_once_with(
mock.sentinel.connection_info,
mock_instance.name)
def test_get_volume_connector(self):
self.driver.get_volume_connector(mock.sentinel.instance)
self.driver._volumeops.get_volume_connector.assert_called_once_with(
mock.sentinel.instance)
def test_get_available_resource(self):
self.driver.get_available_resource(mock.sentinel.nodename)
self.driver._hostops.get_available_resource.assert_called_once_with()
def test_get_available_nodes(self):
response = self.driver.get_available_nodes(mock.sentinel.refresh)
self.assertEqual([platform.node()], response)
def test_host_power_action(self):
self.driver.host_power_action(mock.sentinel.action)
self.driver._hostops.host_power_action.assert_called_once_with(
mock.sentinel.action)
def test_snapshot(self):
self.driver.snapshot(
mock.sentinel.context, mock.sentinel.instance,
mock.sentinel.image_id, mock.sentinel.update_task_state)
self.driver._snapshotops.snapshot.assert_called_once_with(
mock.sentinel.context, mock.sentinel.instance,
mock.sentinel.image_id, mock.sentinel.update_task_state)
def test_pause(self):
self.driver.pause(mock.sentinel.instance)
self.driver._vmops.pause.assert_called_once_with(
mock.sentinel.instance)
def test_unpause(self):
self.driver.unpause(mock.sentinel.instance)
self.driver._vmops.unpause.assert_called_once_with(
mock.sentinel.instance)
def test_suspend(self):
self.driver.suspend(mock.sentinel.context, mock.sentinel.instance)
self.driver._vmops.suspend.assert_called_once_with(
mock.sentinel.instance)
def test_resume(self):
self.driver.resume(
mock.sentinel.context, mock.sentinel.instance,
mock.sentinel.network_info, mock.sentinel.block_device_info)
self.driver._vmops.resume.assert_called_once_with(
mock.sentinel.instance)
def test_power_off(self):
self.driver.power_off(
mock.sentinel.instance, mock.sentinel.timeout,
mock.sentinel.retry_interval)
self.driver._vmops.power_off.assert_called_once_with(
mock.sentinel.instance, mock.sentinel.timeout,
mock.sentinel.retry_interval)
def test_power_on(self):
self.driver.power_on(
mock.sentinel.context, mock.sentinel.instance,
mock.sentinel.network_info, mock.sentinel.block_device_info)
self.driver._vmops.power_on.assert_called_once_with(
mock.sentinel.instance, mock.sentinel.block_device_info)
def test_resume_state_on_host_boot(self):
self.driver.resume_state_on_host_boot(
mock.sentinel.context, mock.sentinel.instance,
mock.sentinel.network_info, mock.sentinel.block_device_info)
self.driver._vmops.resume_state_on_host_boot.assert_called_once_with(
mock.sentinel.context, mock.sentinel.instance,
mock.sentinel.network_info, mock.sentinel.block_device_info)
def test_live_migration(self):
self.driver.live_migration(
mock.sentinel.context, mock.sentinel.instance,
mock.sentinel.dest, mock.sentinel.post_method,
mock.sentinel.recover_method, mock.sentinel.block_migration,
mock.sentinel.migrate_data)
self.driver._livemigrationops.live_migration.assert_called_once_with(
mock.sentinel.context, mock.sentinel.instance,
mock.sentinel.dest, mock.sentinel.post_method,
mock.sentinel.recover_method, mock.sentinel.block_migration,
mock.sentinel.migrate_data)
@mock.patch.object(driver.HyperVDriver, 'destroy')
def test_rollback_live_migration_at_destination(self, mock_destroy):
self.driver.rollback_live_migration_at_destination(
mock.sentinel.context, mock.sentinel.instance,
mock.sentinel.network_info, mock.sentinel.block_device_info,
mock.sentinel.destroy_disks, mock.sentinel.migrate_data)
mock_destroy.assert_called_once_with(
mock.sentinel.context, mock.sentinel.instance,
mock.sentinel.network_info, mock.sentinel.block_device_info,
destroy_disks=mock.sentinel.destroy_disks)
def test_pre_live_migration(self):
migrate_data = self.driver.pre_live_migration(
mock.sentinel.context, mock.sentinel.instance,
mock.sentinel.block_device_info, mock.sentinel.network_info,
mock.sentinel.disk_info, mock.sentinel.migrate_data)
self.assertEqual(mock.sentinel.migrate_data, migrate_data)
pre_live_migration = self.driver._livemigrationops.pre_live_migration
pre_live_migration.assert_called_once_with(
mock.sentinel.context, mock.sentinel.instance,
mock.sentinel.block_device_info, mock.sentinel.network_info)
def test_post_live_migration(self):
self.driver.post_live_migration(
mock.sentinel.context, mock.sentinel.instance,
mock.sentinel.block_device_info, mock.sentinel.migrate_data)
post_live_migration = self.driver._livemigrationops.post_live_migration
post_live_migration.assert_called_once_with(
mock.sentinel.context, mock.sentinel.instance,
mock.sentinel.block_device_info,
mock.sentinel.migrate_data)
def test_post_live_migration_at_destination(self):
self.driver.post_live_migration_at_destination(
mock.sentinel.context, mock.sentinel.instance,
mock.sentinel.network_info, mock.sentinel.block_migration,
mock.sentinel.block_device_info)
mtd = self.driver._livemigrationops.post_live_migration_at_destination
mtd.assert_called_once_with(
mock.sentinel.context, mock.sentinel.instance,
mock.sentinel.network_info, mock.sentinel.block_migration)
def test_check_can_live_migrate_destination(self):
self.driver.check_can_live_migrate_destination(
mock.sentinel.context, mock.sentinel.instance,
mock.sentinel.src_compute_info, mock.sentinel.dst_compute_info,
mock.sentinel.block_migration, mock.sentinel.disk_over_commit)
mtd = self.driver._livemigrationops.check_can_live_migrate_destination
mtd.assert_called_once_with(
mock.sentinel.context, mock.sentinel.instance,
mock.sentinel.src_compute_info, mock.sentinel.dst_compute_info,
mock.sentinel.block_migration, mock.sentinel.disk_over_commit)
def test_cleanup_live_migration_destination_check(self):
self.driver.cleanup_live_migration_destination_check(
mock.sentinel.context, mock.sentinel.dest_check_data)
_livemigrops = self.driver._livemigrationops
method = _livemigrops.cleanup_live_migration_destination_check
method.assert_called_once_with(
mock.sentinel.context, mock.sentinel.dest_check_data)
def test_check_can_live_migrate_source(self):
self.driver.check_can_live_migrate_source(
mock.sentinel.context, mock.sentinel.instance,
mock.sentinel.dest_check_data, mock.sentinel.block_device_info)
method = self.driver._livemigrationops.check_can_live_migrate_source
method.assert_called_once_with(
mock.sentinel.context, mock.sentinel.instance,
mock.sentinel.dest_check_data)
def test_plug_vifs(self):
self.assertRaises(NotImplementedError, self.driver.plug_vifs,
mock.sentinel.instance, mock.sentinel.network_info)
def test_unplug_vifs(self):
self.assertRaises(NotImplementedError, self.driver.unplug_vifs,
mock.sentinel.instance, mock.sentinel.network_info)
def test_refresh_instance_security_rules(self):
self.assertRaises(NotImplementedError,
self.driver.refresh_instance_security_rules,
instance=mock.sentinel.instance)
def test_migrate_disk_and_power_off(self):
self.driver.migrate_disk_and_power_off(
mock.sentinel.context, mock.sentinel.instance, mock.sentinel.dest,
mock.sentinel.flavor, mock.sentinel.network_info,
mock.sentinel.block_device_info, mock.sentinel.timeout,
mock.sentinel.retry_interval)
migr_power_off = self.driver._migrationops.migrate_disk_and_power_off
migr_power_off.assert_called_once_with(
mock.sentinel.context, mock.sentinel.instance, mock.sentinel.dest,
mock.sentinel.flavor, mock.sentinel.network_info,
mock.sentinel.block_device_info, mock.sentinel.timeout,
mock.sentinel.retry_interval)
def test_confirm_migration(self):
self.driver.confirm_migration(
mock.sentinel.migration, mock.sentinel.instance,
mock.sentinel.network_info)
self.driver._migrationops.confirm_migration.assert_called_once_with(
mock.sentinel.migration, mock.sentinel.instance,
mock.sentinel.network_info)
def test_finish_revert_migration(self):
self.driver.finish_revert_migration(
mock.sentinel.context, mock.sentinel.instance,
mock.sentinel.network_info, mock.sentinel.block_device_info,
mock.sentinel.power_on)
finish_revert_migr = self.driver._migrationops.finish_revert_migration
finish_revert_migr.assert_called_once_with(
mock.sentinel.context, mock.sentinel.instance,
mock.sentinel.network_info, mock.sentinel.block_device_info,
mock.sentinel.power_on)
def test_finish_migration(self):
self.driver.finish_migration(
mock.sentinel.context, mock.sentinel.migration,
mock.sentinel.instance, mock.sentinel.disk_info,
mock.sentinel.network_info, mock.sentinel.image_meta,
mock.sentinel.resize_instance, mock.sentinel.block_device_info,
mock.sentinel.power_on)
self.driver._migrationops.finish_migration.assert_called_once_with(
mock.sentinel.context, mock.sentinel.migration,
mock.sentinel.instance, mock.sentinel.disk_info,
mock.sentinel.network_info, mock.sentinel.image_meta,
mock.sentinel.resize_instance, mock.sentinel.block_device_info,
mock.sentinel.power_on)
def test_get_host_ip_addr(self):
self.driver.get_host_ip_addr()
self.driver._hostops.get_host_ip_addr.assert_called_once_with()
def test_get_host_uptime(self):
self.driver.get_host_uptime()
self.driver._hostops.get_host_uptime.assert_called_once_with()
def test_get_rdp_console(self):
self.driver.get_rdp_console(
mock.sentinel.context, mock.sentinel.instance)
self.driver._rdpconsoleops.get_rdp_console.assert_called_once_with(
mock.sentinel.instance)
def test_get_console_output(self):
mock_instance = fake_instance.fake_instance_obj(self.context)
self.driver.get_console_output(self.context, mock_instance)
mock_get_console_output = (
self.driver._serialconsoleops.get_console_output)
mock_get_console_output.assert_called_once_with(
mock_instance.name)
def test_get_serial_console(self):
mock_instance = fake_instance.fake_instance_obj(self.context)
self.driver.get_console_output(self.context, mock_instance)
mock_get_serial_console = (
self.driver._serialconsoleops.get_console_output)
mock_get_serial_console.assert_called_once_with(
mock_instance.name)
def test_manage_image_cache(self):
self.driver.manage_image_cache(mock.sentinel.context,
mock.sentinel.all_instances)
self.driver._imagecache.update.assert_called_once_with(
mock.sentinel.context, mock.sentinel.all_instances)
| apache-2.0 | -5,678,939,997,001,641,000 | 42.75162 | 79 | 0.669349 | false |
drayanaindra/shoop | shoop/admin/breadcrumbs.py | 6 | 3358 | # -*- coding: utf-8 -*-
# This file is part of Shoop.
#
# Copyright (c) 2012-2015, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import unicode_literals
from django.utils.encoding import force_text
from django.utils.translation import ugettext_lazy as _
from shoop.admin.base import MenuEntry
from shoop.admin.module_registry import get_modules
def _get_admin_module_for_url(url_names):
for module in get_modules():
for url in module.get_urls():
if url.name in url_names:
return module
class Breadcrumbs(object):
@classmethod
def infer(cls, context):
"""
Infer breadcrumbs from the rendering context.
:param context: Jinja Context
:type context: jinja2.runtime.Context
:return: Breadcrumbs object or None if things fail
:rtype: Breadcrumbs|None
"""
request = context["request"]
if not getattr(request, "resolver_match", None):
# If we don't have a resolver match, we can't infer anything.
return None
url_names = (
request.resolver_match.url_name,
"%s:%s" % (request.resolver_match.app_name, request.resolver_match.url_name)
)
url_admin_module = _get_admin_module_for_url(url_names)
# Synthesize a menu entry for the current view.
current_view_entry = MenuEntry(url=request.path, text="")
if url_admin_module:
# See if we have an idea for the title of this view from the menu entries
for entry in url_admin_module.get_menu_entries(request):
if entry.original_url in url_names:
current_view_entry.text = entry.text
break
# See if we have a title for the synthesized entry in the context.
view = context.get("view") # This should be the CBV view object.
title = (
context.get("title") or
context.get("breadcrumb_title") or
(view and getattr(view, "title", None))
)
if title:
current_view_entry.text = force_text(title)
# Begin building the entries...
entries = []
# See if we have the top level menu entry ("Contacts" for example).
if url_admin_module and url_admin_module.breadcrumbs_menu_entry:
# (But don't duplicate steps)
if url_admin_module.breadcrumbs_menu_entry.url != request.path or not current_view_entry.text:
entries.append(url_admin_module.breadcrumbs_menu_entry)
# See if the view declares parents...
parent_getter = getattr(view, "get_breadcrumb_parents", None)
if parent_getter:
entries.extend(parent_getter() or ())
# If the current entry seems valid (would be visible), then go for it!
if current_view_entry.text:
entries.append(current_view_entry)
return cls(entries)
def __init__(self, entries):
self.entries = list(entries)
def get_entries(self, request):
if not len(self.entries):
return
entries = ([
MenuEntry(_("Home"), url="shoop_admin:dashboard")
] + self.entries)
return entries
| agpl-3.0 | 5,737,444,948,274,382,000 | 33.265306 | 106 | 0.612865 | false |
qutebrowser/qutebrowser | qutebrowser/commands/__init__.py | 2 | 1700 | # vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2014-2021 Florian Bruhin (The Compiler) <[email protected]>
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <https://www.gnu.org/licenses/>.
"""In qutebrowser, all keybindings are mapped to commands.
Some commands are hidden, which means they don't show up in the command
completion when pressing `:`, as they're typically not useful to run by hand.
For command arguments, there are also some variables you can use:
- `{url}` expands to the URL of the current page
- `{url:pretty}` expands to the URL in decoded format
- `{url:host}`, `{url:domain}`, `{url:auth}`, `{url:scheme}`, `{url:username}`,
`{url:password}`, `{url:host}`, `{url:port}`, `{url:path}` and `{url:query}`
expand to the respective parts of the current URL
- `{title}` expands to the current page's title
- `{clipboard}` expands to the clipboard contents
- `{primary}` expands to the primary selection contents
Those variables can be escaped by doubling the braces, e.g. `{{url}}`. It is
possible to run or bind multiple commands by separating them with `;;`.
"""
| gpl-3.0 | 8,470,717,131,738,291,000 | 43.736842 | 79 | 0.734118 | false |
rr-/szurubooru | server/szurubooru/tests/func/test_mime.py | 1 | 3869 | import pytest
from szurubooru.func import mime
@pytest.mark.parametrize(
"input_path,expected_mime_type",
[
("mp4.mp4", "video/mp4"),
("webm.webm", "video/webm"),
("flash.swf", "application/x-shockwave-flash"),
("png.png", "image/png"),
("jpeg.jpg", "image/jpeg"),
("gif.gif", "image/gif"),
("webp.webp", "image/webp"),
("bmp.bmp", "image/bmp"),
("avif.avif", "image/avif"),
("avif-avis.avif", "image/avif"),
("heif.heif", "image/heif"),
("heic.heic", "image/heic"),
("heic-heix.heic", "image/heic"),
("text.txt", "application/octet-stream"),
],
)
def test_get_mime_type(read_asset, input_path, expected_mime_type):
assert mime.get_mime_type(read_asset(input_path)) == expected_mime_type
def test_get_mime_type_for_empty_file():
assert mime.get_mime_type(b"") == "application/octet-stream"
@pytest.mark.parametrize(
"mime_type,expected_extension",
[
("video/mp4", "mp4"),
("video/webm", "webm"),
("application/x-shockwave-flash", "swf"),
("image/png", "png"),
("image/jpeg", "jpg"),
("image/gif", "gif"),
("image/webp", "webp"),
("image/bmp", "bmp"),
("image/avif", "avif"),
("image/heif", "heif"),
("image/heic", "heic"),
("application/octet-stream", "dat"),
],
)
def test_get_extension(mime_type, expected_extension):
assert mime.get_extension(mime_type) == expected_extension
@pytest.mark.parametrize(
"input_mime_type,expected_state",
[
("application/x-shockwave-flash", True),
("APPLICATION/X-SHOCKWAVE-FLASH", True),
("application/x-shockwave", False),
],
)
def test_is_flash(input_mime_type, expected_state):
assert mime.is_flash(input_mime_type) == expected_state
@pytest.mark.parametrize(
"input_mime_type,expected_state",
[
("video/webm", True),
("VIDEO/WEBM", True),
("video/mp4", True),
("VIDEO/MP4", True),
("video/anything_else", False),
("application/ogg", True),
("not a video", False),
],
)
def test_is_video(input_mime_type, expected_state):
assert mime.is_video(input_mime_type) == expected_state
@pytest.mark.parametrize(
"input_mime_type,expected_state",
[
("image/gif", True),
("image/png", True),
("image/jpeg", True),
("image/bmp", True),
("image/avif", True),
("image/heic", True),
("image/heif", True),
("IMAGE/GIF", True),
("IMAGE/PNG", True),
("IMAGE/JPEG", True),
("IMAGE/BMP", True),
("IMAGE/AVIF", True),
("IMAGE/HEIC", True),
("IMAGE/HEIF", True),
("image/anything_else", False),
("not an image", False),
],
)
def test_is_image(input_mime_type, expected_state):
assert mime.is_image(input_mime_type) == expected_state
@pytest.mark.parametrize(
"input_path,expected_state",
[
("gif.gif", False),
("gif-animated.gif", True),
],
)
def test_is_animated_gif(read_asset, input_path, expected_state):
assert mime.is_animated_gif(read_asset(input_path)) == expected_state
@pytest.mark.parametrize(
"input_mime_type,expected_state",
[
("image/gif", False),
("image/png", False),
("image/jpeg", False),
("image/avif", True),
("image/heic", True),
("image/heif", True),
("IMAGE/GIF", False),
("IMAGE/PNG", False),
("IMAGE/JPEG", False),
("IMAGE/AVIF", True),
("IMAGE/HEIC", True),
("IMAGE/HEIF", True),
("image/anything_else", False),
("not an image", False),
],
)
def test_is_heif(input_mime_type, expected_state):
assert mime.is_heif(input_mime_type) == expected_state
| gpl-3.0 | -2,750,532,537,537,543,700 | 27.036232 | 75 | 0.551564 | false |
2013Commons/HUE-SHARK | desktop/core/ext-py/Babel-0.9.6/babel/messages/tests/mofile.py | 61 | 2585 | # -*- coding: utf-8 -*-
#
# Copyright (C) 2007 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://babel.edgewall.org/wiki/License.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://babel.edgewall.org/log/.
import doctest
import gettext
import unittest
from StringIO import StringIO
from babel.messages import mofile, Catalog
class WriteMoTestCase(unittest.TestCase):
def test_sorting(self):
# Ensure the header is sorted to the first entry so that its charset
# can be applied to all subsequent messages by GNUTranslations
# (ensuring all messages are safely converted to unicode)
catalog = Catalog(locale='en_US')
catalog.add(u'', '''\
"Content-Type: text/plain; charset=utf-8\n"
"Content-Transfer-Encoding: 8bit\n''')
catalog.add(u'foo', 'Voh')
catalog.add((u'There is', u'There are'), (u'Es gibt', u'Es gibt'))
catalog.add(u'Fizz', '')
catalog.add(('Fuzz', 'Fuzzes'), ('', ''))
buf = StringIO()
mofile.write_mo(buf, catalog)
buf.seek(0)
translations = gettext.GNUTranslations(fp=buf)
self.assertEqual(u'Voh', translations.ugettext('foo'))
assert isinstance(translations.ugettext('foo'), unicode)
self.assertEqual(u'Es gibt', translations.ungettext('There is', 'There are', 1))
assert isinstance(translations.ungettext('There is', 'There are', 1), unicode)
self.assertEqual(u'Fizz', translations.ugettext('Fizz'))
assert isinstance(translations.ugettext('Fizz'), unicode)
self.assertEqual(u'Fuzz', translations.ugettext('Fuzz'))
assert isinstance(translations.ugettext('Fuzz'), unicode)
self.assertEqual(u'Fuzzes', translations.ugettext('Fuzzes'))
assert isinstance(translations.ugettext('Fuzzes'), unicode)
def test_more_plural_forms(self):
catalog2 = Catalog(locale='ru_RU')
catalog2.add(('Fuzz', 'Fuzzes'), ('', '', ''))
buf = StringIO()
mofile.write_mo(buf, catalog2)
def suite():
suite = unittest.TestSuite()
suite.addTest(doctest.DocTestSuite(mofile))
suite.addTest(unittest.makeSuite(WriteMoTestCase))
return suite
if __name__ == '__main__':
unittest.main(defaultTest='suite')
| apache-2.0 | 6,890,375,844,375,850,000 | 38.390625 | 88 | 0.65764 | false |
DavidBord/mongoengine | tests/fields/geo.py | 23 | 16272 | # -*- coding: utf-8 -*-
import sys
sys.path[0:0] = [""]
import unittest
from mongoengine import *
from mongoengine.connection import get_db
__all__ = ("GeoFieldTest", )
class GeoFieldTest(unittest.TestCase):
def setUp(self):
connect(db='mongoenginetest')
self.db = get_db()
def _test_for_expected_error(self, Cls, loc, expected):
try:
Cls(loc=loc).validate()
self.fail('Should not validate the location {0}'.format(loc))
except ValidationError as e:
self.assertEqual(expected, e.to_dict()['loc'])
def test_geopoint_validation(self):
class Location(Document):
loc = GeoPointField()
invalid_coords = [{"x": 1, "y": 2}, 5, "a"]
expected = 'GeoPointField can only accept tuples or lists of (x, y)'
for coord in invalid_coords:
self._test_for_expected_error(Location, coord, expected)
invalid_coords = [[], [1], [1, 2, 3]]
for coord in invalid_coords:
expected = "Value (%s) must be a two-dimensional point" % repr(coord)
self._test_for_expected_error(Location, coord, expected)
invalid_coords = [[{}, {}], ("a", "b")]
for coord in invalid_coords:
expected = "Both values (%s) in point must be float or int" % repr(coord)
self._test_for_expected_error(Location, coord, expected)
def test_point_validation(self):
class Location(Document):
loc = PointField()
invalid_coords = {"x": 1, "y": 2}
expected = 'PointField can only accept a valid GeoJson dictionary or lists of (x, y)'
self._test_for_expected_error(Location, invalid_coords, expected)
invalid_coords = {"type": "MadeUp", "coordinates": []}
expected = 'PointField type must be "Point"'
self._test_for_expected_error(Location, invalid_coords, expected)
invalid_coords = {"type": "Point", "coordinates": [1, 2, 3]}
expected = "Value ([1, 2, 3]) must be a two-dimensional point"
self._test_for_expected_error(Location, invalid_coords, expected)
invalid_coords = [5, "a"]
expected = "PointField can only accept lists of [x, y]"
for coord in invalid_coords:
self._test_for_expected_error(Location, coord, expected)
invalid_coords = [[], [1], [1, 2, 3]]
for coord in invalid_coords:
expected = "Value (%s) must be a two-dimensional point" % repr(coord)
self._test_for_expected_error(Location, coord, expected)
invalid_coords = [[{}, {}], ("a", "b")]
for coord in invalid_coords:
expected = "Both values (%s) in point must be float or int" % repr(coord)
self._test_for_expected_error(Location, coord, expected)
Location(loc=[1, 2]).validate()
Location(loc={
"type": "Point",
"coordinates": [
81.4471435546875,
23.61432859499169
]}).validate()
def test_linestring_validation(self):
class Location(Document):
loc = LineStringField()
invalid_coords = {"x": 1, "y": 2}
expected = 'LineStringField can only accept a valid GeoJson dictionary or lists of (x, y)'
self._test_for_expected_error(Location, invalid_coords, expected)
invalid_coords = {"type": "MadeUp", "coordinates": [[]]}
expected = 'LineStringField type must be "LineString"'
self._test_for_expected_error(Location, invalid_coords, expected)
invalid_coords = {"type": "LineString", "coordinates": [[1, 2, 3]]}
expected = "Invalid LineString:\nValue ([1, 2, 3]) must be a two-dimensional point"
self._test_for_expected_error(Location, invalid_coords, expected)
invalid_coords = [5, "a"]
expected = "Invalid LineString must contain at least one valid point"
self._test_for_expected_error(Location, invalid_coords, expected)
invalid_coords = [[1]]
expected = "Invalid LineString:\nValue (%s) must be a two-dimensional point" % repr(invalid_coords[0])
self._test_for_expected_error(Location, invalid_coords, expected)
invalid_coords = [[1, 2, 3]]
expected = "Invalid LineString:\nValue (%s) must be a two-dimensional point" % repr(invalid_coords[0])
self._test_for_expected_error(Location, invalid_coords, expected)
invalid_coords = [[[{}, {}]], [("a", "b")]]
for coord in invalid_coords:
expected = "Invalid LineString:\nBoth values (%s) in point must be float or int" % repr(coord[0])
self._test_for_expected_error(Location, coord, expected)
Location(loc=[[1, 2], [3, 4], [5, 6], [1, 2]]).validate()
def test_polygon_validation(self):
class Location(Document):
loc = PolygonField()
invalid_coords = {"x": 1, "y": 2}
expected = 'PolygonField can only accept a valid GeoJson dictionary or lists of (x, y)'
self._test_for_expected_error(Location, invalid_coords, expected)
invalid_coords = {"type": "MadeUp", "coordinates": [[]]}
expected = 'PolygonField type must be "Polygon"'
self._test_for_expected_error(Location, invalid_coords, expected)
invalid_coords = {"type": "Polygon", "coordinates": [[[1, 2, 3]]]}
expected = "Invalid Polygon:\nValue ([1, 2, 3]) must be a two-dimensional point"
self._test_for_expected_error(Location, invalid_coords, expected)
invalid_coords = [[[5, "a"]]]
expected = "Invalid Polygon:\nBoth values ([5, 'a']) in point must be float or int"
self._test_for_expected_error(Location, invalid_coords, expected)
invalid_coords = [[[]]]
expected = "Invalid Polygon must contain at least one valid linestring"
self._test_for_expected_error(Location, invalid_coords, expected)
invalid_coords = [[[1, 2, 3]]]
expected = "Invalid Polygon:\nValue ([1, 2, 3]) must be a two-dimensional point"
self._test_for_expected_error(Location, invalid_coords, expected)
invalid_coords = [[[{}, {}]], [("a", "b")]]
expected = "Invalid Polygon:\nBoth values ([{}, {}]) in point must be float or int, Both values (('a', 'b')) in point must be float or int"
self._test_for_expected_error(Location, invalid_coords, expected)
invalid_coords = [[[1, 2], [3, 4]]]
expected = "Invalid Polygon:\nLineStrings must start and end at the same point"
self._test_for_expected_error(Location, invalid_coords, expected)
Location(loc=[[[1, 2], [3, 4], [5, 6], [1, 2]]]).validate()
def test_multipoint_validation(self):
class Location(Document):
loc = MultiPointField()
invalid_coords = {"x": 1, "y": 2}
expected = 'MultiPointField can only accept a valid GeoJson dictionary or lists of (x, y)'
self._test_for_expected_error(Location, invalid_coords, expected)
invalid_coords = {"type": "MadeUp", "coordinates": [[]]}
expected = 'MultiPointField type must be "MultiPoint"'
self._test_for_expected_error(Location, invalid_coords, expected)
invalid_coords = {"type": "MultiPoint", "coordinates": [[1, 2, 3]]}
expected = "Value ([1, 2, 3]) must be a two-dimensional point"
self._test_for_expected_error(Location, invalid_coords, expected)
invalid_coords = [[]]
expected = "Invalid MultiPoint must contain at least one valid point"
self._test_for_expected_error(Location, invalid_coords, expected)
invalid_coords = [[[1]], [[1, 2, 3]]]
for coord in invalid_coords:
expected = "Value (%s) must be a two-dimensional point" % repr(coord[0])
self._test_for_expected_error(Location, coord, expected)
invalid_coords = [[[{}, {}]], [("a", "b")]]
for coord in invalid_coords:
expected = "Both values (%s) in point must be float or int" % repr(coord[0])
self._test_for_expected_error(Location, coord, expected)
Location(loc=[[1, 2]]).validate()
Location(loc={
"type": "MultiPoint",
"coordinates": [
[1, 2],
[81.4471435546875, 23.61432859499169]
]}).validate()
def test_multilinestring_validation(self):
class Location(Document):
loc = MultiLineStringField()
invalid_coords = {"x": 1, "y": 2}
expected = 'MultiLineStringField can only accept a valid GeoJson dictionary or lists of (x, y)'
self._test_for_expected_error(Location, invalid_coords, expected)
invalid_coords = {"type": "MadeUp", "coordinates": [[]]}
expected = 'MultiLineStringField type must be "MultiLineString"'
self._test_for_expected_error(Location, invalid_coords, expected)
invalid_coords = {"type": "MultiLineString", "coordinates": [[[1, 2, 3]]]}
expected = "Invalid MultiLineString:\nValue ([1, 2, 3]) must be a two-dimensional point"
self._test_for_expected_error(Location, invalid_coords, expected)
invalid_coords = [5, "a"]
expected = "Invalid MultiLineString must contain at least one valid linestring"
self._test_for_expected_error(Location, invalid_coords, expected)
invalid_coords = [[[1]]]
expected = "Invalid MultiLineString:\nValue (%s) must be a two-dimensional point" % repr(invalid_coords[0][0])
self._test_for_expected_error(Location, invalid_coords, expected)
invalid_coords = [[[1, 2, 3]]]
expected = "Invalid MultiLineString:\nValue (%s) must be a two-dimensional point" % repr(invalid_coords[0][0])
self._test_for_expected_error(Location, invalid_coords, expected)
invalid_coords = [[[[{}, {}]]], [[("a", "b")]]]
for coord in invalid_coords:
expected = "Invalid MultiLineString:\nBoth values (%s) in point must be float or int" % repr(coord[0][0])
self._test_for_expected_error(Location, coord, expected)
Location(loc=[[[1, 2], [3, 4], [5, 6], [1, 2]]]).validate()
def test_multipolygon_validation(self):
class Location(Document):
loc = MultiPolygonField()
invalid_coords = {"x": 1, "y": 2}
expected = 'MultiPolygonField can only accept a valid GeoJson dictionary or lists of (x, y)'
self._test_for_expected_error(Location, invalid_coords, expected)
invalid_coords = {"type": "MadeUp", "coordinates": [[]]}
expected = 'MultiPolygonField type must be "MultiPolygon"'
self._test_for_expected_error(Location, invalid_coords, expected)
invalid_coords = {"type": "MultiPolygon", "coordinates": [[[[1, 2, 3]]]]}
expected = "Invalid MultiPolygon:\nValue ([1, 2, 3]) must be a two-dimensional point"
self._test_for_expected_error(Location, invalid_coords, expected)
invalid_coords = [[[[5, "a"]]]]
expected = "Invalid MultiPolygon:\nBoth values ([5, 'a']) in point must be float or int"
self._test_for_expected_error(Location, invalid_coords, expected)
invalid_coords = [[[[]]]]
expected = "Invalid MultiPolygon must contain at least one valid Polygon"
self._test_for_expected_error(Location, invalid_coords, expected)
invalid_coords = [[[[1, 2, 3]]]]
expected = "Invalid MultiPolygon:\nValue ([1, 2, 3]) must be a two-dimensional point"
self._test_for_expected_error(Location, invalid_coords, expected)
invalid_coords = [[[[{}, {}]]], [[("a", "b")]]]
expected = "Invalid MultiPolygon:\nBoth values ([{}, {}]) in point must be float or int, Both values (('a', 'b')) in point must be float or int"
self._test_for_expected_error(Location, invalid_coords, expected)
invalid_coords = [[[[1, 2], [3, 4]]]]
expected = "Invalid MultiPolygon:\nLineStrings must start and end at the same point"
self._test_for_expected_error(Location, invalid_coords, expected)
Location(loc=[[[[1, 2], [3, 4], [5, 6], [1, 2]]]]).validate()
def test_indexes_geopoint(self):
"""Ensure that indexes are created automatically for GeoPointFields.
"""
class Event(Document):
title = StringField()
location = GeoPointField()
geo_indicies = Event._geo_indices()
self.assertEqual(geo_indicies, [{'fields': [('location', '2d')]}])
def test_geopoint_embedded_indexes(self):
"""Ensure that indexes are created automatically for GeoPointFields on
embedded documents.
"""
class Venue(EmbeddedDocument):
location = GeoPointField()
name = StringField()
class Event(Document):
title = StringField()
venue = EmbeddedDocumentField(Venue)
geo_indicies = Event._geo_indices()
self.assertEqual(geo_indicies, [{'fields': [('venue.location', '2d')]}])
def test_indexes_2dsphere(self):
"""Ensure that indexes are created automatically for GeoPointFields.
"""
class Event(Document):
title = StringField()
point = PointField()
line = LineStringField()
polygon = PolygonField()
geo_indicies = Event._geo_indices()
self.assertTrue({'fields': [('line', '2dsphere')]} in geo_indicies)
self.assertTrue({'fields': [('polygon', '2dsphere')]} in geo_indicies)
self.assertTrue({'fields': [('point', '2dsphere')]} in geo_indicies)
def test_indexes_2dsphere_embedded(self):
"""Ensure that indexes are created automatically for GeoPointFields.
"""
class Venue(EmbeddedDocument):
name = StringField()
point = PointField()
line = LineStringField()
polygon = PolygonField()
class Event(Document):
title = StringField()
venue = EmbeddedDocumentField(Venue)
geo_indicies = Event._geo_indices()
self.assertTrue({'fields': [('venue.line', '2dsphere')]} in geo_indicies)
self.assertTrue({'fields': [('venue.polygon', '2dsphere')]} in geo_indicies)
self.assertTrue({'fields': [('venue.point', '2dsphere')]} in geo_indicies)
def test_geo_indexes_recursion(self):
class Location(Document):
name = StringField()
location = GeoPointField()
class Parent(Document):
name = StringField()
location = ReferenceField(Location)
Location.drop_collection()
Parent.drop_collection()
Parent(name='Berlin').save()
info = Parent._get_collection().index_information()
self.assertFalse('location_2d' in info)
info = Location._get_collection().index_information()
self.assertTrue('location_2d' in info)
self.assertEqual(len(Parent._geo_indices()), 0)
self.assertEqual(len(Location._geo_indices()), 1)
def test_geo_indexes_auto_index(self):
# Test just listing the fields
class Log(Document):
location = PointField(auto_index=False)
datetime = DateTimeField()
meta = {
'indexes': [[("location", "2dsphere"), ("datetime", 1)]]
}
self.assertEqual([], Log._geo_indices())
Log.drop_collection()
Log.ensure_indexes()
info = Log._get_collection().index_information()
self.assertEqual(info["location_2dsphere_datetime_1"]["key"],
[('location', '2dsphere'), ('datetime', 1)])
# Test listing explicitly
class Log(Document):
location = PointField(auto_index=False)
datetime = DateTimeField()
meta = {
'indexes': [
{'fields': [("location", "2dsphere"), ("datetime", 1)]}
]
}
self.assertEqual([], Log._geo_indices())
Log.drop_collection()
Log.ensure_indexes()
info = Log._get_collection().index_information()
self.assertEqual(info["location_2dsphere_datetime_1"]["key"],
[('location', '2dsphere'), ('datetime', 1)])
if __name__ == '__main__':
unittest.main()
| mit | -2,686,738,127,616,651,300 | 40.723077 | 152 | 0.596731 | false |
GuessWhoSamFoo/pandas | pandas/tests/generic/test_label_or_level_utils.py | 2 | 11936 | import pytest
from pandas.core.dtypes.missing import array_equivalent
import pandas as pd
import pandas.util.testing as tm
# Fixtures
# ========
@pytest.fixture
def df():
    """Return a DataFrame with three columns: 'L1', 'L2' and 'L3'."""
    data = {
        'L1': [1, 2, 3],
        'L2': [11, 12, 13],
        'L3': ['A', 'B', 'C'],
    }
    return pd.DataFrame(data)
@pytest.fixture(params=[[], ['L1'], ['L1', 'L2'], ['L1', 'L2', 'L3']])
def df_levels(request, df):
    """Return ``df`` with a parametrized subset of its columns
    ('L1', 'L2', 'L3') moved into the index as named levels."""
    level_names = request.param
    if not level_names:
        return df
    return df.set_index(level_names)
@pytest.fixture
def df_ambig(df):
    """Return a DataFrame with index levels 'L1'/'L2' and column labels
    'L3'/'L1', so that the name 'L1' is ambiguous (level and label)."""
    ambiguous = df.set_index(['L1', 'L2'])
    ambiguous['L1'] = ambiguous['L3']
    return ambiguous
@pytest.fixture
def df_duplabels(df):
    """Return a DataFrame with index level 'L1' and duplicated column
    labels 'L2', 'L3', 'L2'."""
    indexed = df.set_index(['L1'])
    return pd.concat([indexed, indexed['L2']], axis=1)
@pytest.fixture
def panel():
    """Return an empty pd.Panel.

    Panel is deprecated, so constructing one emits a FutureWarning;
    the warning is asserted and suppressed here so tests stay quiet.
    """
    with tm.assert_produces_warning(FutureWarning,
                                    check_stacklevel=False):
        return pd.Panel()
# Test is label/level reference
# =============================
def get_labels_levels(df_levels):
    """Return a tuple ``(labels, levels)`` for ``df_levels``: its column
    labels and the names of its named index levels (unnamed levels are
    dropped)."""
    labels = list(df_levels.columns)
    levels = [level_name
              for level_name in df_levels.index.names
              if level_name is not None]
    return labels, levels
def assert_label_reference(frame, labels, axis):
    """Assert that every name in ``labels`` is classified as a label
    reference (and not a level reference) along ``axis`` of ``frame``."""
    for name in labels:
        assert frame._is_label_reference(name, axis=axis)
        assert frame._is_label_or_level_reference(name, axis=axis)
        assert not frame._is_level_reference(name, axis=axis)
def assert_level_reference(frame, levels, axis):
    """Assert that every name in ``levels`` is classified as a level
    reference (and not a label reference) along ``axis`` of ``frame``."""
    for name in levels:
        assert frame._is_level_reference(name, axis=axis)
        assert frame._is_label_or_level_reference(name, axis=axis)
        assert not frame._is_label_reference(name, axis=axis)
# DataFrame
# ---------
def test_is_level_or_label_reference_df_simple(df_levels, axis):
    # Derive the expected labels/levels from the parametrized frame
    expected_labels, expected_levels = get_labels_levels(df_levels)

    # When exercising the columns axis, work on the transposed frame
    if axis in {1, 'columns'}:
        df_levels = df_levels.T

    # Labels and levels must each be classified accordingly
    assert_label_reference(df_levels, expected_labels, axis=axis)
    assert_level_reference(df_levels, expected_levels, axis=axis)
def test_is_level_reference_df_ambig(df_ambig, axis):
    # When exercising the columns axis, work on the transposed frame
    if axis in {1, 'columns'}:
        df_ambig = df_ambig.T

    # 'L1' names both an on-axis level and an off-axis label; the label
    # takes precedence, so 'L1' must be seen as a label reference.
    assert_label_reference(df_ambig, ['L1'], axis=axis)

    # 'L2' is an unambiguous on-axis level, hence a level reference.
    assert_level_reference(df_ambig, ['L2'], axis=axis)

    # 'L3' exists only as a column, hence a plain label reference.
    assert_label_reference(df_ambig, ['L3'], axis=axis)
# Series
# ------
def test_is_level_reference_series_simple_axis0(df):
    # Series with a single named index level 'L1'
    ser = df.set_index('L1').L2
    assert_level_reference(ser, ['L1'], axis=0)
    assert not ser._is_level_reference('L2')

    # Series with two named index levels 'L1' and 'L2'
    ser = df.set_index(['L1', 'L2']).L3
    assert_level_reference(ser, ['L1', 'L2'], axis=0)
    assert not ser._is_level_reference('L3')
def test_is_level_reference_series_axis1_error(df):
    # A Series has no axis 1, so asking about it must raise
    ser = df.set_index('L1').L2

    with pytest.raises(ValueError, match="No axis named 1"):
        ser._is_level_reference('L1', axis=1)
# Panel
# -----
def test_is_level_reference_panel_error(panel):
    # Panels do not implement level references at all
    expected_msg = "_is_level_reference is not implemented for {type}".format(
        type=type(panel))
    with pytest.raises(NotImplementedError, match=expected_msg):
        panel._is_level_reference('L1', axis=0)
def test_is_label_reference_panel_error(panel):
    # Panels do not implement label references at all
    expected_msg = "_is_label_reference is not implemented for {type}".format(
        type=type(panel))
    with pytest.raises(NotImplementedError, match=expected_msg):
        panel._is_label_reference('L1', axis=0)
def test_is_label_or_level_reference_panel_error(panel):
    # Panels do not implement label-or-level references at all
    expected_msg = ("_is_label_or_level_reference is not implemented for "
                    "{type}".format(type=type(panel)))
    with pytest.raises(NotImplementedError, match=expected_msg):
        panel._is_label_or_level_reference('L1', axis=0)
# Test _check_label_or_level_ambiguity_df
# =======================================
# DataFrame
# ---------
def test_check_label_or_level_ambiguity_df(df_ambig, axis):
    """Only names that exist as both a level and a label are ambiguous.

    ``df_ambig`` carries an on-axis level and an off-axis label both
    named 'L1', so checking 'L1' must raise; 'L2' (level only) and
    'L3' (label only) must pass silently.
    """
    # Transpose frame if axis == 1
    if axis in {1, "columns"}:
        df_ambig = df_ambig.T

    if axis in {0, "index"}:
        msg = "'L1' is both an index level and a column label"
    else:
        msg = "'L1' is both a column level and an index label"

    # df_ambig has both an on-axis level and an off-axis label named L1,
    # so 'L1' is ambiguous and the check must raise.
    with pytest.raises(ValueError, match=msg):
        df_ambig._check_label_or_level_ambiguity("L1", axis=axis)

    # df_ambig has an on-axis level named L2, and it is not ambiguous.
    df_ambig._check_label_or_level_ambiguity("L2", axis=axis)

    # df_ambig has an off-axis label named L3, and it is not ambiguous.
    # The method returns None on success, so asserting on its result
    # (as the old ``assert not`` did) is vacuous; a bare call matches
    # the 'L2' check above.
    df_ambig._check_label_or_level_ambiguity("L3", axis=axis)
# Series
# ------
def test_check_label_or_level_ambiguity_series(df):
    """A Series has no columns, so names can never be ambiguous."""
    # Index level 'L1' plus the series values from 'L2'.
    ser = df.set_index("L1").L2
    ser._check_label_or_level_ambiguity("L1", axis=0)
    ser._check_label_or_level_ambiguity("L2", axis=0)

    # Two index levels plus values from 'L3'; still nothing ambiguous.
    ser = df.set_index(["L1", "L2"]).L3
    for name in ("L1", "L2", "L3"):
        ser._check_label_or_level_ambiguity(name, axis=0)
def test_check_label_or_level_ambiguity_series_axis1_error(df):
    """A Series has no axis 1, so the ambiguity check there must raise."""
    ser = df.set_index("L1").L2
    with pytest.raises(ValueError, match="No axis named 1"):
        ser._check_label_or_level_ambiguity("L1", axis=1)
# Panel
# -----
def test_check_label_or_level_ambiguity_panel_error(panel):
    """The ambiguity check is unsupported for Panel objects."""
    expected = "_check_label_or_level_ambiguity is not implemented for {type}".format(
        type=type(panel)
    )
    with pytest.raises(NotImplementedError, match=expected):
        panel._check_label_or_level_ambiguity("L1", axis=0)
# Test _get_label_or_level_values
# ===============================
def assert_label_values(frame, labels, axis):
    """Check _get_label_or_level_values returns each label's data values.

    For axis 0 a label is a column; otherwise it is a row (the caller
    is expected to have transposed the frame already).
    """
    on_index_axis = axis in {0, "index"}
    for label in labels:
        expected = (frame[label] if on_index_axis else frame.loc[label])._values
        result = frame._get_label_or_level_values(label, axis=axis)
        assert array_equivalent(expected, result)
def assert_level_values(frame, levels, axis):
    """Check _get_label_or_level_values returns each axis level's values."""
    for level in levels:
        # Pick the axis index that carries the levels.
        if axis in {0, "index"}:
            axis_index = frame.index
        else:
            axis_index = frame.columns
        expected = axis_index.get_level_values(level=level)._values
        result = frame._get_label_or_level_values(level, axis=axis)
        assert array_equivalent(expected, result)
# DataFrame
# ---------
def test_get_label_or_level_values_df_simple(df_levels, axis):
    """Every label and level of a simple frame is retrievable by name."""
    # Expected names are derived from the untransposed frame.
    expected_labels, expected_levels = get_labels_levels(df_levels)

    if axis in {1, "columns"}:
        df_levels = df_levels.T

    assert_label_values(df_levels, expected_labels, axis=axis)
    assert_level_values(df_levels, expected_levels, axis=axis)
def test_get_label_or_level_values_df_ambig(df_ambig, axis):
    """Unambiguous names on an ambiguous frame still resolve to values."""
    if axis in {1, "columns"}:
        df_ambig = df_ambig.T

    # 'L2' names only an on-axis level, so its values come from the axis.
    assert_level_values(df_ambig, ["L2"], axis=axis)

    # 'L3' names only an off-axis label, so its values come from the data.
    assert_label_values(df_ambig, ["L3"], axis=axis)
def test_get_label_or_level_values_df_duplabels(df_duplabels, axis):
    """Duplicate labels cannot be retrieved and must raise ValueError."""
    if axis in {1, "columns"}:
        df_duplabels = df_duplabels.T

    # The unambiguous level 'L1' and the unique label 'L3' retrieve fine.
    assert_level_values(df_duplabels, ["L1"], axis=axis)
    assert_label_values(df_duplabels, ["L3"], axis=axis)

    # 'L2' appears more than once on the off-axis, so retrieval fails.
    if axis in {0, "index"}:
        expected_msg = "The column label 'L2' is not unique"
    else:
        expected_msg = "The index label 'L2' is not unique"
    with pytest.raises(ValueError, match=expected_msg):
        assert_label_values(df_duplabels, ["L2"], axis=axis)
# Series
# ------
def test_get_label_or_level_values_series_axis0(df):
    """Series index levels are retrievable by name along axis 0."""
    # Single-level index.
    ser = df.set_index("L1")["L2"]
    assert_level_values(ser, ["L1"], axis=0)

    # Two-level index.
    ser = df.set_index(["L1", "L2"])["L3"]
    assert_level_values(ser, ["L1", "L2"], axis=0)
def test_get_label_or_level_values_series_axis1_error(df):
    """A Series has no axis 1, so value retrieval there must raise."""
    ser = df.set_index("L1")["L2"]
    with pytest.raises(ValueError, match="No axis named 1"):
        ser._get_label_or_level_values("L1", axis=1)
# Panel
# -----
def test_get_label_or_level_values_panel_error(panel):
    """Value retrieval by label/level is unsupported for Panel objects."""
    expected = "_get_label_or_level_values is not implemented for {type}".format(
        type=type(panel)
    )
    with pytest.raises(NotImplementedError, match=expected):
        panel._get_label_or_level_values("L1", axis=0)
# Test _drop_labels_or_levels
# ===========================
def assert_labels_dropped(frame, labels, axis):
    """Check that dropping each label removes it from the off-axis of the
    result while leaving the original frame untouched."""
    for label in labels:
        dropped = frame._drop_labels_or_levels(label, axis=axis)
        # Labels live on the axis opposite to the one being queried.
        off_axis = "columns" if axis in {0, "index"} else "index"
        assert label in getattr(frame, off_axis)
        assert label not in getattr(dropped, off_axis)
def assert_levels_dropped(frame, levels, axis):
    """Check that dropping each level removes it from the axis names of the
    result while leaving the original frame untouched."""
    for level in levels:
        dropped = frame._drop_labels_or_levels(level, axis=axis)
        if axis in {0, "index"}:
            before, after = frame.index.names, dropped.index.names
        else:
            before, after = frame.columns.names, dropped.columns.names
        assert level in before
        assert level not in after
# DataFrame
# ---------
def test_drop_labels_or_levels_df(df_levels, axis):
    """Every label/level can be dropped; unknown names raise ValueError."""
    # Expected names come from the untransposed frame.
    expected_labels, expected_levels = get_labels_levels(df_levels)

    if axis in {1, "columns"}:
        df_levels = df_levels.T

    assert_labels_dropped(df_levels, expected_labels, axis=axis)
    assert_levels_dropped(df_levels, expected_levels, axis=axis)

    # 'L4' is neither a label nor a level anywhere on the frame.
    with pytest.raises(ValueError, match="not valid labels or levels"):
        df_levels._drop_labels_or_levels("L4", axis=axis)
# Series
# ------
def test_drop_labels_or_levels_series(df):
    """Index levels of a Series can be dropped; unknown names raise."""
    # Single index level.
    ser = df.set_index("L1")["L2"]
    assert_levels_dropped(ser, ["L1"], axis=0)
    with pytest.raises(ValueError, match="not valid labels or levels"):
        ser._drop_labels_or_levels("L4", axis=0)

    # Two index levels.
    ser = df.set_index(["L1", "L2"])["L3"]
    assert_levels_dropped(ser, ["L1", "L2"], axis=0)
    with pytest.raises(ValueError, match="not valid labels or levels"):
        ser._drop_labels_or_levels("L4", axis=0)
# Panel
# -----
def test_drop_labels_or_levels_panel_error(panel):
    """Dropping labels/levels is unsupported for Panel objects."""
    expected = "_drop_labels_or_levels is not implemented for {type}".format(
        type=type(panel)
    )
    with pytest.raises(NotImplementedError, match=expected):
        panel._drop_labels_or_levels("L1", axis=0)
| bsd-3-clause | -540,507,303,124,389,300 | 28.399015 | 74 | 0.629021 | false |
pradeep-aradhya/security_monkey | security_monkey/alerter.py | 9 | 3671 | # Copyright 2014 Netflix, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
.. module: security_monkey.alerter
:platform: Unix
.. version:: $$VERSION$$
.. moduleauthor:: Patrick Kelley <[email protected]> @monkeysecurity
"""
from security_monkey import app
from security_monkey.common.jinja import get_jinja_env
from security_monkey.datastore import User
from security_monkey.common.utils.utils import send_email
def get_subject(has_issues, has_new_issue, has_unjustified_issue, account, watcher_str):
if has_new_issue:
return "NEW ISSUE - [{}] Changes in {}".format(account, watcher_str)
elif has_issues and has_unjustified_issue:
return "[{}] Changes w/existing issues in {}".format(account, watcher_str)
elif has_issues and not has_unjustified_issue:
return "[{}] Changes w/justified issues in {}".format(account, watcher_str)
else:
return "[{}] Changes in {}".format(account, watcher_str)
def report_content(content):
jenv = get_jinja_env()
template = jenv.get_template('jinja_change_email.html')
body = template.render(content)
# app.logger.info(body)
return body
class Alerter(object):
def __init__(self, watchers_auditors=[], account=None, debug=False):
"""
envs are list of environments where we care about changes
"""
self.account = account
self.notifications = ""
self.new = []
self.delete = []
self.changed = []
self.watchers_auditors = watchers_auditors
users = User.query.filter(User.accounts.any(name=account)).filter(User.change_reports=='ALL').all()
self.emails = [user.email for user in users]
team_emails = app.config.get('SECURITY_TEAM_EMAIL')
self.emails.extend(team_emails)
def report(self):
"""
Collect change summaries from watchers defined and send out an email
"""
changed_watchers = [watcher_auditor[0] for watcher_auditor in self.watchers_auditors if watcher_auditor[0].is_changed()]
has_issues = has_new_issue = has_unjustified_issue = False
for watcher in changed_watchers:
(has_issues, has_new_issue, has_unjustified_issue) = watcher.issues_found()
if has_issues:
users = User.query.filter(User.accounts.any(name=self.account)).filter(User.change_reports=='ISSUES').all()
new_emails = [user.email for user in users]
self.emails.extend(new_emails)
break
watcher_types = [watcher.index for watcher in changed_watchers]
watcher_str = ', '.join(watcher_types)
if len(changed_watchers) == 0:
app.logger.info("Alerter: no changes found")
return
app.logger.info("Alerter: Found some changes in {}: {}".format(self.account, watcher_str))
content = {u'watchers': changed_watchers}
body = report_content(content)
subject = get_subject(has_issues, has_new_issue, has_unjustified_issue, self.account, watcher_str)
return send_email(subject=subject, recipients=self.emails, html=body)
| apache-2.0 | -8,288,664,222,119,310,000 | 39.788889 | 128 | 0.658404 | false |
KendyllD/boukenda-project | makahiki/apps/widgets/resource_scoreboard/views.py | 5 | 1858 | """Handle the rendering of the energy scoreboard widget."""
import datetime
from apps.managers.challenge_mgr import challenge_mgr
from apps.managers.resource_mgr import resource_mgr
from apps.widgets.resource_goal import resource_goal
def supply(request, page_name):
"""Supply the view_objects content."""
_ = request
_ = page_name
return {}
def resource_supply(request, resource, page_name):
"""Supply the view_objects content.
:return: team, goals_scoreboard, resource_round_ranks"""
user = request.user
team = user.get_profile().team
round_resource_ranks = {}
round_group_resource_ranks = {}
round_resource_goal_ranks = {}
today = datetime.datetime.today()
rounds = challenge_mgr.get_all_round_info()["rounds"]
for key in rounds.keys():
if rounds[key]["start"] <= today and\
(rounds[key]["display_scoreboard"] or page_name == "status"):
round_resource_ranks[key] = resource_mgr.resource_ranks(resource, key)
round_group_resource_ranks[key] = resource_mgr.group_resource_ranks(resource, key)
round_resource_goal_ranks[key] = resource_goal.resource_goal_ranks(resource, key)
round_resource_ranks["Overall"] = resource_mgr.resource_ranks(resource, "Overall")
round_group_resource_ranks["Overall"] = resource_mgr.group_resource_ranks(resource, "Overall")
round_resource_goal_ranks["Overall"] = resource_goal.resource_goal_ranks(resource, "Overall")
resource_setting = resource_mgr.get_resource_setting(resource)
return {
"profile": user.get_profile(),
"team": team,
"resource": resource_setting,
"round_resource_goal_ranks": round_resource_goal_ranks,
"round_resource_ranks": round_resource_ranks,
"round_group_resource_ranks": round_group_resource_ranks,
}
| mit | -7,291,200,034,319,812,000 | 37.708333 | 98 | 0.67761 | false |
tholoien/empiriciSN | empiriciSN/tests/test_empiriciSN.py | 1 | 2967 | """
Test code for empiriciSN class.
"""
import unittest
import numpy as np
import empiriciSN
import urllib
import os
class EmpiricistTestCase(unittest.TestCase):
"TestCase class for Empiricist class."
def setUp(self):
"""
Set up each test with a new empiriciSN object with existing model.
"""
url = 'https://raw.githubusercontent.com/tholoien/empiriciSN/' \
+ 'master/models/empiriciSN_model.fit'
path = './empiriciSN_model.fit'
urllib.urlretrieve(url, path)
self.empiricist = empiriciSN.Empiricist(model_file = 'empiriciSN_model.fit')
self.files = []
def tearDown(self):
"""
Clean up files saved by tests
"""
os.remove('empiriciSN_model.fit')
if os.path.isfile('snls_master.csv'):
os.remove('snls_master.csv')
if os.path.isfile('empiriciSN_model.fit'):
os.remove('empiriciSN_model.fit')
def test_get_SN(self):
url = 'https://raw.githubusercontent.com/tholoien/empiriciSN/' \
+'master/models/empiriciSN_model.fit'
path = './empiriciSN_model.fit'
urllib.urlretrieve(url, path)
self.empiricist.read_model('empiriciSN_model.fit')
sample = self.empiricist.XDGMM.sample()[0]
testdat = np.append(np.array([np.nan,np.nan,np.nan]),sample[3:])
sn = self.empiricist.get_SN(testdat)
self.assertEqual(sn.shape,(1,3))
def test_fit(self):
url = 'https://raw.githubusercontent.com/tholoien/empiriciSN/' \
+'master/Notebooks/data_files/snls_master.csv'
path = './snls_master.csv'
urllib.urlretrieve(url, path)
this_model_file = self.empiricist.model_file
self.empiricist.fit_from_files(['snls_master.csv'],n_components=1)
self.assertEqual(self.empiricist.model_file,this_model_file)
def test_get_logR(self):
url = 'https://raw.githubusercontent.com/tholoien/empiriciSN/' \
+'master/models/empiriciSN_model.fit'
path = './empiriciSN_model.fit'
urllib.urlretrieve(url, path)
self.empiricist.read_model('empiriciSN_model.fit')
sample = self.empiricist.XDGMM.sample()[0]
indeces = np.array([3,5,6,7,8,9,10,11,12,13,14])
X = sample[indeces]
logR = self.empiricist.get_logR(indeces,4,X)
self.assertNotEqual(logR,0.0)
def test_get_local_SB(self):
sb_params = np.array([1,21.402,0.22,0.8593072,0.7847312,20.426,
0.046,0.9257621,0.1127986,19.895,0.03,
0.6392374,0.08155595,19.619,0.034,
0.7765083,0.08438506,19.311,0.087,
1.039772,0.3918832])
logR = np.log10(1.76/0.6392374)
SB,SBerr = self.empiricist.get_local_SB(sb_params,logR)
self.assertNotEqual(0,len(SB))
if __name__ == '__main__':
unittest.main()
| mit | -7,696,211,258,160,001,000 | 34.321429 | 84 | 0.597236 | false |
Dave667/service | plugin.video.youtube/YouTubePlayer.py | 2 | 22145 | '''
YouTube plugin for XBMC
Copyright (C) 2010-2012 Tobias Ussing And Henrik Mosgaard Jensen
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import sys
import urllib
import cgi
try: import simplejson as json
except ImportError: import json
import urllib2, re
class YouTubePlayer():
fmt_value = {
5: "240p h263 flv container",
18: "360p h264 mp4 container | 270 for rtmpe?",
22: "720p h264 mp4 container",
26: "???",
33: "???",
34: "360p h264 flv container",
35: "480p h264 flv container",
37: "1080p h264 mp4 container",
38: "720p vp8 webm container",
43: "360p h264 flv container",
44: "480p vp8 webm container",
45: "720p vp8 webm container",
46: "520p vp8 webm stereo",
59: "480 for rtmpe",
78: "seems to be around 400 for rtmpe",
82: "360p h264 stereo",
83: "240p h264 stereo",
84: "720p h264 stereo",
85: "520p h264 stereo",
100: "360p vp8 webm stereo",
101: "480p vp8 webm stereo",
102: "720p vp8 webm stereo",
120: "hd720",
121: "hd1080"
}
# MAX RECURSION Depth for security
MAX_REC_DEPTH = 5
# YouTube Playback Feeds
urls = {}
urls['video_stream'] = "http://www.youtube.com/watch?v=%s&safeSearch=none"
urls['embed_stream'] = "http://www.youtube.com/get_video_info?video_id=%s"
urls['video_info'] = "http://gdata.youtube.com/feeds/api/videos/%s"
def __init__(self):
self.xbmcgui = sys.modules["__main__"].xbmcgui
self.xbmcplugin = sys.modules["__main__"].xbmcplugin
self.pluginsettings = sys.modules["__main__"].pluginsettings
self.storage = sys.modules["__main__"].storage
self.settings = sys.modules["__main__"].settings
self.language = sys.modules["__main__"].language
self.dbg = sys.modules["__main__"].dbg
self.common = sys.modules["__main__"].common
self.utils = sys.modules["__main__"].utils
self.cache = sys.modules["__main__"].cache
self.core = sys.modules["__main__"].core
self.login = sys.modules["__main__"].login
self.subtitles = sys.modules["__main__"].subtitles
self.algoCache = {}
self._cleanTmpVariables()
def playVideo(self, params={}):
self.common.log(repr(params), 3)
get = params.get
(video, status) = self.buildVideoObject(params)
if status != 200:
self.common.log(u"construct video url failed contents of video item " + repr(video))
self.utils.showErrorMessage(self.language(30603), video["apierror"], status)
return False
listitem = self.xbmcgui.ListItem(label=video['Title'], iconImage=video['thumbnail'], thumbnailImage=video['thumbnail'], path=video['video_url'])
listitem.setInfo(type='Video', infoLabels=video)
self.common.log(u"Playing video: " + repr(video['Title']) + " - " + repr(get('videoid')) + " - " + repr(video['video_url']))
self.xbmcplugin.setResolvedUrl(handle=int(sys.argv[1]), succeeded=True, listitem=listitem)
if self.settings.getSetting("lang_code") != "0" or self.settings.getSetting("annotations") == "true":
self.common.log("BLAAAAAAAAAAAAAAAAAAAAAA: " + repr(self.settings.getSetting("lang_code")))
self.subtitles.addSubtitles(video)
if (get("watch_later") == "true" and get("playlist_entry_id")):
self.common.log(u"removing video from watch later playlist")
self.core.remove_from_watch_later(params)
self.storage.storeValue("vidstatus-" + video['videoid'], "7")
def getInfo(self, params):
get = params.get
video = self.cache.get("videoidcache" + get("videoid"))
if len(video) > 0:
self.common.log(u"returning cache ")
return (eval(video), 200)
result = self.core._fetchPage({"link": self.urls["video_info"] % get("videoid"), "api": "true"})
if result["status"] == 200:
video = self.core.getVideoInfo(result["content"], params)
if len(video) == 0:
self.common.log(u"- Couldn't parse API output, YouTube doesn't seem to know this video id?")
video = {}
video["apierror"] = self.language(30608)
return (video, 303)
else:
self.common.log(u"- Got API Error from YouTube!")
video = {}
video["apierror"] = result["content"]
return (video, 303)
video = video[0]
self.cache.set("videoidcache" + get("videoid"), repr(video))
return (video, result["status"])
def selectVideoQuality(self, params, links):
get = params.get
print "links: " + repr(type(links).__name__)
link = links.get
video_url = ""
self.common.log(u"")
if get("action") == "download":
hd_quality = int(self.settings.getSetting("hd_videos_download"))
if (hd_quality == 0):
hd_quality = int(self.settings.getSetting("hd_videos"))
else:
if (not get("quality")):
hd_quality = int(self.settings.getSetting("hd_videos"))
else:
if (get("quality") == "1080p"):
hd_quality = 3
elif (get("quality") == "720p"):
hd_quality = 2
else:
hd_quality = 1
# SD videos are default, but we go for the highest res
if (link(35)):
video_url = link(35)
elif (link(59)):
video_url = link(59)
elif link(44):
video_url = link(44)
elif (link(78)):
video_url = link(78)
elif (link(34)):
video_url = link(34)
elif (link(43)):
video_url = link(43)
elif (link(26)):
video_url = link(26)
elif (link(18)):
video_url = link(18)
elif (link(33)):
video_url = link(33)
elif (link(5)):
video_url = link(5)
if hd_quality > 1: # <-- 720p
if (link(22)):
video_url = link(22)
elif (link(45)):
video_url = link(45)
elif link(120):
video_url = link(120)
if hd_quality > 2:
if (link(37)):
video_url = link(37)
elif link(121):
video_url = link(121)
if link(38) and False:
video_url = link(38)
for fmt_key in links.iterkeys():
if link(int(fmt_key)):
if self.dbg:
text = repr(fmt_key) + " - "
if fmt_key in self.fmt_value:
text += self.fmt_value[fmt_key]
else:
text += "Unknown"
if (link(int(fmt_key)) == video_url):
text += "*"
self.common.log(text)
else:
self.common.log(u"- Missing fmt_value: " + repr(fmt_key))
if hd_quality == 0 and not get("quality"):
return self.userSelectsVideoQuality(params, links)
if not len(video_url) > 0:
self.common.log(u"- construct_video_url failed, video_url not set")
return video_url
if get("action") != "download" and video_url.find("rtmp") == -1:
video_url += '|' + urllib.urlencode({'User-Agent':self.common.USERAGENT})
self.common.log(u"Done")
return video_url
def userSelectsVideoQuality(self, params, links):
levels = [([37,121], u"1080p"),
([22,45,120], u"720p"),
([35,44], u"480p"),
([18], u"380p"),
([34,43],u"360p"),
([5],u"240p"),
([17],u"144p")]
link = links.get
quality_list = []
choices = []
for qualities, name in levels:
for quality in qualities:
if link(quality):
quality_list.append((quality, name))
break
for (quality, name) in quality_list:
choices.append(name)
dialog = self.xbmcgui.Dialog()
selected = dialog.select(self.language(30518), choices)
if selected > -1:
(quality, name) = quality_list[selected]
return link(quality)
return u""
def checkForErrors(self, video):
status = 200
if "video_url" not in video or video[u"video_url"] == u"":
status = 303
if u"apierror" not in video:
vget = video.get
if vget(u"live_play"):
video[u'apierror'] = self.language(30612)
elif vget(u"stream_map"):
video[u'apierror'] = self.language(30620)
else:
video[u'apierror'] = self.language(30618)
return (video, status)
def buildVideoObject(self, params):
self.common.log(repr(params))
(video, status) = self.getInfo(params)
if status != 200:
video[u'apierror'] = self.language(30618)
return (video, 303)
video_url = self.subtitles.getLocalFileSource(params, video)
if video_url:
video[u'video_url'] = video_url
return (video, 200)
(links, video) = self.extractVideoLinksFromYoutube(video, params)
if len(links) != 0:
video[u"video_url"] = self.selectVideoQuality(params, links)
elif "hlsvp" in video:
#hls selects the quality based on available bitrate (adaptive quality), no need to select it here
video[u"video_url"] = video[u"hlsvp"]
self.common.log("Using hlsvp url %s" % video[u"video_url"])
(video, status) = self.checkForErrors(video)
self.common.log(u"Done")
return (video, status)
def removeAdditionalEndingDelimiter(self, data):
pos = data.find("};")
if pos != -1:
self.common.log(u"found extra delimiter, removing")
data = data[:pos + 1]
return data
def normalizeUrl(self, url):
if url[0:2] == "//":
url = "http:" + url
return url
def extractFlashVars(self, data, assets):
flashvars = {}
found = False
for line in data.split("\n"):
if line.strip().find(";ytplayer.config = ") > 0:
found = True
p1 = line.find(";ytplayer.config = ") + len(";ytplayer.config = ") - 1
p2 = line.rfind(";")
if p1 <= 0 or p2 <= 0:
continue
data = line[p1 + 1:p2]
break
data = self.removeAdditionalEndingDelimiter(data)
if found:
data = json.loads(data)
if assets:
flashvars = data["assets"]
else:
flashvars = data["args"]
for k in ["html", "css", "js"]:
if k in flashvars:
flashvars[k] = self.normalizeUrl(flashvars[k])
self.common.log("Step2: " + repr(data))
self.common.log(u"flashvars: " + repr(flashvars), 2)
return flashvars
def scrapeWebPageForVideoLinks(self, result, video):
self.common.log(u"")
links = {}
flashvars = self.extractFlashVars(result[u"content"], 0)
if not flashvars.has_key(u"url_encoded_fmt_stream_map"):
return links
if flashvars.has_key(u"ttsurl"):
video[u"ttsurl"] = flashvars[u"ttsurl"]
if flashvars.has_key(u"ttsurl"):
video[u"ttsurl"] = flashvars[u"ttsurl"]
for url_desc in flashvars[u"url_encoded_fmt_stream_map"].split(u","):
url_desc_map = cgi.parse_qs(url_desc)
self.common.log(u"url_map: " + repr(url_desc_map), 2)
if not (url_desc_map.has_key(u"url") or url_desc_map.has_key(u"stream")):
continue
key = int(url_desc_map[u"itag"][0])
url = u""
if url_desc_map.has_key(u"url"):
url = urllib.unquote(url_desc_map[u"url"][0])
elif url_desc_map.has_key(u"conn") and url_desc_map.has_key(u"stream"):
url = urllib.unquote(url_desc_map[u"conn"][0])
if url.rfind("/") < len(url) -1:
url = url + "/"
url = url + urllib.unquote(url_desc_map[u"stream"][0])
elif url_desc_map.has_key(u"stream") and not url_desc_map.has_key(u"conn"):
url = urllib.unquote(url_desc_map[u"stream"][0])
if url_desc_map.has_key(u"sig"):
url = url + u"&signature=" + url_desc_map[u"sig"][0]
elif url_desc_map.has_key(u"s"):
sig = url_desc_map[u"s"][0]
flashvars = self.extractFlashVars(result[u"content"], 1)
js = flashvars[u"js"]
url = url + u"&signature=" + self.decrypt_signature(sig, js)
links[key] = url
return links
@staticmethod
def printDBG(s):
print(s)
def _cleanTmpVariables(self):
self.fullAlgoCode = ''
self.allLocalFunNamesTab = []
self.playerData = ''
def _jsToPy(self, jsFunBody):
pythonFunBody = re.sub(r'function (\w*)\$(\w*)', r'function \1_S_\2', jsFunBody)
pythonFunBody = pythonFunBody.replace('function', 'def').replace('{', ':\n\t').replace('}', '').replace(';', '\n\t').replace('var ', '')
pythonFunBody = pythonFunBody.replace('.reverse()', '[::-1]')
lines = pythonFunBody.split('\n')
for i in range(len(lines)):
# a.split("") -> list(a)
match = re.search('(\w+?)\.split\(""\)', lines[i])
if match:
lines[i] = lines[i].replace( match.group(0), 'list(' + match.group(1) + ')')
# a.length -> len(a)
match = re.search('(\w+?)\.length', lines[i])
if match:
lines[i] = lines[i].replace( match.group(0), 'len(' + match.group(1) + ')')
# a.slice(3) -> a[3:]
match = re.search('(\w+?)\.slice\(([0-9]+?)\)', lines[i])
if match:
lines[i] = lines[i].replace( match.group(0), match.group(1) + ('[%s:]' % match.group(2)) )
# a.join("") -> "".join(a)
match = re.search('(\w+?)\.join\(("[^"]*?")\)', lines[i])
if match:
lines[i] = lines[i].replace( match.group(0), match.group(2) + '.join(' + match.group(1) + ')' )
return "\n".join(lines)
def _getLocalFunBody(self, funName):
# get function body
funName=funName.replace('$', '\\$')
match = re.search('(function %s\([^)]+?\){[^}]+?})' % funName, self.playerData)
if match:
# return jsFunBody
return match.group(1)
return ''
def _getAllLocalSubFunNames(self, mainFunBody):
match = re.compile('[ =(,]([\w\$_]+)\([^)]*\)').findall( mainFunBody )
if len(match):
# first item is name of main function, so omit it
funNameTab = set( match[1:] )
return funNameTab
return set()
def decrypt_signature(self, s, playerUrl):
self.printDBG("decrypt_signature sign_len[%d] playerUrl[%s]" % (len(s), playerUrl) )
# clear local data
self._cleanTmpVariables()
# use algoCache
if playerUrl not in self.algoCache:
# get player HTML 5 sript
request = urllib2.Request(playerUrl)
try:
self.playerData = urllib2.urlopen(request).read()
self.playerData = self.playerData.decode('utf-8', 'ignore')
except Exception as ex:
self.printDBG("Error: " + str(sys.exc_info()[0]) + " - " + str(ex))
self.printDBG('Unable to download playerUrl webpage')
return ''
# get main function name
match = re.search("signature=(\w+?)\([^)]\)", self.playerData)
if match:
mainFunName = match.group(1)
self.printDBG('Main signature function name = "%s"' % mainFunName)
else:
self.printDBG('Can not get main signature function name')
return ''
self._getfullAlgoCode( mainFunName )
# wrap all local algo function into one function extractedSignatureAlgo()
algoLines = self.fullAlgoCode.split('\n')
for i in range(len(algoLines)):
algoLines[i] = '\t' + algoLines[i]
self.fullAlgoCode = 'def extractedSignatureAlgo(param):'
self.fullAlgoCode += '\n'.join(algoLines)
self.fullAlgoCode += '\n\treturn %s(param)' % mainFunName
self.fullAlgoCode += '\noutSignature = extractedSignatureAlgo( inSignature )\n'
# after this function we should have all needed code in self.fullAlgoCode
self.printDBG( "---------------------------------------" )
self.printDBG( "| ALGO FOR SIGNATURE DECRYPTION |" )
self.printDBG( "---------------------------------------" )
self.printDBG( self.fullAlgoCode )
self.printDBG( "---------------------------------------" )
try:
algoCodeObj = compile(self.fullAlgoCode, '', 'exec')
except:
self.printDBG('decryptSignature compile algo code EXCEPTION')
return ''
else:
# get algoCodeObj from algoCache
self.printDBG('Algo taken from cache')
algoCodeObj = self.algoCache[playerUrl]
# for security alow only flew python global function in algo code
vGlobals = {"__builtins__": None, 'len': len, 'list': list}
# local variable to pass encrypted sign and get decrypted sign
vLocals = { 'inSignature': s, 'outSignature': '' }
# execute prepared code
try:
exec( algoCodeObj, vGlobals, vLocals )
except:
self.printDBG('decryptSignature exec code EXCEPTION')
return ''
self.printDBG('Decrypted signature = [%s]' % vLocals['outSignature'])
# if algo seems ok and not in cache, add it to cache
if playerUrl not in self.algoCache and '' != vLocals['outSignature']:
self.printDBG('Algo from player [%s] added to cache' % playerUrl)
self.algoCache[playerUrl] = algoCodeObj
# free not needed data
self._cleanTmpVariables()
return vLocals['outSignature']
# Note, this method is using a recursion
def _getfullAlgoCode( self, mainFunName, recDepth = 0 ):
if self.MAX_REC_DEPTH <= recDepth:
self.printDBG('_getfullAlgoCode: Maximum recursion depth exceeded')
return
funBody = self._getLocalFunBody( mainFunName )
if '' != funBody:
funNames = self._getAllLocalSubFunNames(funBody)
if len(funNames):
for funName in funNames:
funName_=funName.replace('$','_S_')
if funName not in self.allLocalFunNamesTab:
funBody=funBody.replace(funName,funName_)
self.allLocalFunNamesTab.append(funName)
self.printDBG("Add local function %s to known functions" % mainFunName)
self._getfullAlgoCode( funName, recDepth + 1 )
# conver code from javascript to python
funBody = self._jsToPy(funBody)
self.fullAlgoCode += '\n' + funBody + '\n'
return
def getVideoPageFromYoutube(self, get):
login = "false"
if self.pluginsettings.userHasProvidedValidCredentials():
login = "true"
page = self.core._fetchPage({u"link": self.urls[u"video_stream"] % get(u"videoid"), "login": login})
self.common.log("Step1: " + repr(page["content"].find("ytplayer")))
if not page:
page = {u"status":303}
return page
def isVideoAgeRestricted(self, result):
error = self.common.parseDOM(result['content'], "div", attrs={"id": "watch7-player-age-gate-content"})
self.common.log(repr(error))
return len(error) > 0
def extractVideoLinksFromYoutube(self, video, params):
self.common.log(u"trying website: " + repr(params))
get = params.get
result = self.getVideoPageFromYoutube(get)
if self.isVideoAgeRestricted(result):
self.common.log(u"Age restricted video")
if self.pluginsettings.userHasProvidedValidCredentials():
self.login._httpLogin({"new":"true"})
result = self.getVideoPageFromYoutube(get)
else:
video[u"apierror"] = self.language(30622)
if result[u"status"] != 200:
self.common.log(u"Couldn't get video page from YouTube")
return ({}, video)
links = self.scrapeWebPageForVideoLinks(result, video)
if len(links) == 0 and not( "hlsvp" in video ):
self.common.log(u"Couldn't find video url- or stream-map.")
if not u"apierror" in video:
video[u'apierror'] = self.core._findErrors(result)
self.common.log(u"Done")
return (links, video)
| gpl-2.0 | -2,177,420,282,263,230,700 | 36.281145 | 152 | 0.53529 | false |
arthurprs/sucredb | scripts/sanity.py | 1 | 4060 | import subprocess
import time
import sys
import random
from itertools import chain
from redis import StrictRedis
from rediscluster import StrictRedisCluster
from funcy import retry
from collections import defaultdict
import shutil
VERBOSE = False
class Instance(object):
BIND = "127.0.0.1"
PORT = 6379
FPORT = 16379
def __init__(self, i, ii):
super(Instance, self).__init__()
self.i = i
self.ii = ii
self.process = None
self.listen_addr = "{}:{}".format(self.BIND, self.PORT + self.i)
self.fabric_addr = "{}:{}".format(self.BIND, self.FPORT + self.i)
self.data_dir = "n{}".format(self.i)
@property
def client(self):
return StrictRedis(self.BIND, self.PORT + self.i)
def clear_data(self):
shutil.rmtree(self.data_dir, ignore_errors=True)
def cluster_init(self):
self.clear_data()
self.start("init")
def cluster_join(self):
self.clear_data()
self.start()
def wait_ready(self, callback=lambda c: c.ping(),
timeout=5, sleep=0.1):
@retry(int(timeout / float(sleep) + 0.5), timeout=sleep)
def inner():
assert callback(self.client)
inner()
def start(self, *args):
assert not self.process
self.process = subprocess.Popen(
["cargo", "run", "--",
"-l", self.listen_addr,
"-f", self.fabric_addr,
"-d", self.data_dir]
+ list(chain.from_iterable(
["-s", "{}:{}".format(self.BIND, self.FPORT + i)]
for i in range(self.ii)
if i != self.i
))
+ list(args),
stdin=sys.stdin if VERBOSE else None,
stdout=sys.stdout if VERBOSE else None,
stderr=sys.stderr if VERBOSE else None,
)
self.wait_ready()
def __del__(self):
if self.process:
self.process.kill()
def kill(self):
assert self.process
self.process.kill()
self.process.wait()
self.process = None
def restart(self):
self.kill()
self.start()
@property
def running(self):
return bool(self.process)
def execute(self, *args, **kwargs):
self.client.execute_command(*args, **kwargs)
def main():
global VERBOSE
VERBOSE = "verbose" in sys.argv[1:]
subprocess.check_call(["cargo", "build"])
cluster_sz = 3
cluster = [Instance(i, cluster_sz) for i in range(cluster_sz)]
cluster[0].cluster_init()
cluster[1].cluster_join()
cluster[2].cluster_join()
cluster[0].execute("CLUSTER", "REBALANCE")
time.sleep(5)
client = StrictRedisCluster(
startup_nodes=[
{"host": n.listen_addr.partition(":")[0],
"port": int(n.listen_addr.partition(":")[2])}
for n in cluster
],
decode_responses=False,
socket_timeout=0.5,
)
check_map = defaultdict(set)
items = 1000
groups = 100
for i in xrange(items):
k = str(i % groups)
v = str(i)
client.execute_command("SET", k, v, "", "Q")
check_map[k].add(v)
if random.random() < 0.1:
n = random.choice(cluster)
# restart and wait for it to connect to cluster
n.restart()
n.wait_ready(lambda c: c.execute_command("CLUSTER", "CONNECTIONS"))
# let the syncs settle
time.sleep(5)
@retry(2, timeout=5)
def test_all_nodes_complete():
for k, expected in check_map.items():
values = set(client.get(k)[:-1])
assert values == expected, "%s %s %s" % (k, expected, values)
for c in cluster:
values = set(c.client.execute_command("GET", k, "1")[:-1])
assert values == expected, \
"key %s expected %s got %s (diff %s)" % (
k, expected, values, expected ^ values)
test_all_nodes_complete()
if __name__ == '__main__':
main()
| mit | -6,592,725,881,096,650,000 | 26.808219 | 79 | 0.543103 | false |
aboood40091/Miyamoto | puzzle.py | 2 | 160869 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Puzzle NSMBU
# This is Puzzle 0.6, ported to Python 3 & PyQt5, and then ported to support the New Super Mario Bros. U tileset format.
# Puzzle 0.6 by Tempus; all improvements for Python 3, PyQt5 and NSMBU by RoadrunnerWMC and AboodXD
import json
import os
import os.path
import platform
import struct
import sys
from ctypes import create_string_buffer
from PyQt5 import QtCore, QtGui, QtWidgets
Qt = QtCore.Qt
import globals
from gtx import RAWtoGTX
import SarcLib
from tileset import HandleTilesetEdited, loadGTX, writeGTX
from tileset import updateCollisionOverlay
########################################################
# To Do:
#
# - Object Editor
# - Moving objects around
#
# - Make UI simpler for Pop
# - Animated Tiles
# - fix up conflicts with different types of parameters
# - C speed saving
# - quick settings for applying to multiple slopes
#
########################################################
Tileset = None
#############################################################################################
########################## Tileset Class and Tile/Object Subclasses #########################
class TilesetClass:
'''Contains Tileset data. Inits itself to a blank tileset.
Methods: addTile, removeTile, addObject, removeObject, clear'''
class Tile:
def __init__(self, image, nml, collision):
'''Tile Constructor'''
self.image = image
self.normalmap = nml
self.setCollision(collision)
def setCollision(self, collision):
self.coreType = (collision >> 0) & 0xFFFF
self.params = (collision >> 16) & 0xFF
self.params2 = (collision >> 24) & 0xFF
self.solidity = (collision >> 32) & 0xFF
self.terrain = (collision >> 40) & 0xFF
def getCollision(self):
return ((self.coreType << 0) |
(self.params << 16) |
(self.params2 << 24) |
(self.solidity << 32) |
(self.terrain << 40))
class Object:
def __init__(self, height, width, randByte, uslope, lslope, tilelist):
'''Tile Constructor'''
self.randX = (randByte >> 4) & 1
self.randY = (randByte >> 5) & 1
self.randLen = randByte & 0xF
self.upperslope = uslope
self.lowerslope = lslope
assert (width, height) != 0
self.height = height
self.width = width
self.tiles = tilelist
self.determineRepetition()
if self.repeatX or self.repeatY:
assert self.height == len(self.tiles)
assert self.width == max(len(self.tiles[y]) for y in range(self.height))
if self.repeatX:
self.determineRepetitionFinalize()
else:
# Fix a bug from a previous version of Puzzle
# where the actual width and height would
# mismatch with the number of tiles for the object
self.fillMissingTiles()
self.tilingMethodIdx = self.determineTilingMethod()
def determineRepetition(self):
self.repeatX = []
self.repeatY = []
if self.upperslope[0] != 0:
return
#### Find X Repetition ####
# You can have different X repetitions between rows, so we have to account for that
for y in range(self.height):
repeatXBn = -1
repeatXEd = -1
for x in range(len(self.tiles[y])):
if self.tiles[y][x][0] & 1 and repeatXBn == -1:
repeatXBn = x
elif not self.tiles[y][x][0] & 1 and repeatXBn != -1:
repeatXEd = x
break
if repeatXBn != -1:
if repeatXEd == -1:
repeatXEd = len(self.tiles[y])
self.repeatX.append((y, repeatXBn, repeatXEd))
#### Find Y Repetition ####
repeatYBn = -1
repeatYEd = -1
for y in range(self.height):
if len(self.tiles[y]) and self.tiles[y][0][0] & 2:
if repeatYBn == -1:
repeatYBn = y
elif repeatYBn != -1:
repeatYEd = y
break
if repeatYBn != -1:
if repeatYEd == -1:
repeatYEd = self.height
self.repeatY = [repeatYBn, repeatYEd]
def determineRepetitionFinalize(self):
if self.repeatX:
# If any X repetition is present, fill in rows which didn't have X repetition set
## Should never happen, unless the tileset is broken
## Additionally, sort the list
repeatX = []
for y in range(self.height):
for row, start, end in self.repeatX:
if y == row:
repeatX.append([start, end])
break
else:
# Get the start and end X offsets for the row
start = 0
end = len(self.tiles[y])
repeatX.append([start, end])
self.repeatX = repeatX
def fillMissingTiles(self):
realH = len(self.tiles)
while realH > self.height:
del self.tiles[-1]
realH -= 1
for row in self.tiles:
realW = len(row)
while realW > self.width:
del row[-1]
realW -= 1
for row in self.tiles:
realW = len(row)
while realW < self.width:
row.append((0, 0, 0))
realW += 1
while realH < self.height:
self.tiles.append([(0, 0, 0) for _ in range(self.width)])
realH += 1
def createRepetitionX(self):
self.repeatX = []
for y in range(self.height):
for x in range(len(self.tiles[y])):
self.tiles[y][x] = (self.tiles[y][x][0] | 1, self.tiles[y][x][1], self.tiles[y][x][2])
self.repeatX.append([0, len(self.tiles[y])])
def createRepetitionY(self, y1, y2):
self.clearRepetitionY()
for y in range(y1, y2):
for x in range(len(self.tiles[y])):
self.tiles[y][x] = (self.tiles[y][x][0] | 2, self.tiles[y][x][1], self.tiles[y][x][2])
self.repeatY = [y1, y2]
def clearRepetitionX(self):
self.fillMissingTiles()
for y in range(self.height):
for x in range(self.width):
self.tiles[y][x] = (self.tiles[y][x][0] & ~1, self.tiles[y][x][1], self.tiles[y][x][2])
self.repeatX = []
def clearRepetitionY(self):
for y in range(self.height):
for x in range(len(self.tiles[y])):
self.tiles[y][x] = (self.tiles[y][x][0] & ~2, self.tiles[y][x][1], self.tiles[y][x][2])
self.repeatY = []
def clearRepetitionXY(self):
self.clearRepetitionX()
self.clearRepetitionY()
def determineTilingMethod(self):
if self.upperslope[0] == 0x93:
return 7
elif self.upperslope[0] == 0x92:
return 6
elif self.upperslope[0] == 0x91:
return 5
elif self.upperslope[0] == 0x90:
return 4
elif self.repeatX and self.repeatY:
return 3
elif self.repeatY:
return 2
elif self.repeatX:
return 1
return 0
def getRandByte(self):
"""
Builds the Randomization byte.
"""
if (self.width, self.height) != (1, 1): return 0
if self.randX + self.randY == 0: return 0
byte = 0
if self.randX: byte |= 16
if self.randY: byte |= 32
return byte | (self.randLen & 0xF)
def __init__(self):
'''Constructor'''
self.tiles = []
self.objects = []
self.overrides = [None] * 256
self.slot = 0
self.placeNullChecked = False
def addTile(self, image, nml, collision=0):
'''Adds an tile class to the tile list with the passed image or parameters'''
self.tiles.append(self.Tile(image, nml, collision))
def addObject(self, height = 1, width = 1, randByte = 0, uslope = [0, 0], lslope = [0, 0], tilelist = None, new = False):
'''Adds a new object'''
if new:
tilelist = [[(0, 0, self.slot)]]
self.objects.append(self.Object(height, width, randByte, uslope, lslope, tilelist))
def removeObject(self, index):
'''Removes an Object by Index number. Don't use this much, because we want objects to preserve their ID.'''
try:
self.objects.pop(index)
except IndexError:
pass
def clear(self):
'''Clears the tileset for a new file'''
self.tiles = []
self.objects = []
def processOverrides(self):
if self.slot != 0:
return
try:
t = self.overrides
o = globals.Overrides
# Invisible, brick and ? blocks
## Invisible
replace = 3
for i in [3, 4, 5, 6, 7, 8, 9, 10, 13, 29]:
t[i] = o[replace].main
replace += 1
## Brick
for i in range(16, 28):
t[i] = o[i].main
## ?
t[49] = o[46].main
for i in range(32, 43):
t[i] = o[i].main
# Collisions
## Full block
t[1] = o[1].main
## Vine stopper
t[2] = o[2].main
## Solid-on-top
t[11] = o[13].main
## Half block
t[12] = o[14].main
## Muncher (hit)
t[45] = o[45].main
## Muncher (hit) 2
t[209] = o[44].main
## Donut lift
t[53] = o[43].main
## Conveyor belts
### Left
#### Fast
replace = 115
for i in range(163, 166):
t[i] = o[replace].main
replace += 1
#### Slow
replace = 99
for i in range(147, 150):
t[i] = o[replace].main
replace += 1
### Right
#### Fast
replace = 112
for i in range(160, 163):
t[i] = o[replace].main
replace += 1
#### Slow
replace = 96
for i in range(144, 147):
t[i] = o[replace].main
replace += 1
## Pipes
### Green
#### Vertical
t[64] = o[48].main
t[65] = o[49].main
t[80] = o[64].main
t[81] = o[65].main
t[96] = o[80].main
t[97] = o[81].main
#### Horizontal
t[87] = o[71].main
t[103] = o[87].main
t[88] = o[72].main
t[104] = o[88].main
t[89] = o[73].main
t[105] = o[89].main
### Yellow
#### Vertical
t[66] = o[50].main
t[67] = o[51].main
t[82] = o[66].main
t[83] = o[67].main
t[98] = o[82].main
t[99] = o[83].main
#### Horizontal
t[90] = o[74].main
t[106] = o[90].main
t[91] = o[75].main
t[107] = o[91].main
t[92] = o[76].main
t[108] = o[92].main
### Red
#### Vertical
t[68] = o[52].main
t[69] = o[53].main
t[84] = o[68].main
t[85] = o[69].main
t[100] = o[84].main
t[101] = o[85].main
#### Horizontal
t[93] = o[77].main
t[109] = o[93].main
t[94] = o[78].main
t[110] = o[94].main
t[95] = o[79].main
t[111] = o[95].main
### Mini (green)
#### Vertical
t[70] = o[54].main
t[86] = o[70].main
t[102] = o[86].main
#### Horizontal
t[120] = o[104].main
t[121] = o[105].main
t[137] = o[121].main
### Joints
#### Normal
t[118] = o[102].main
t[119] = o[103].main
t[134] = o[118].main
t[135] = o[119].main
#### Mini
t[136] = o[120].main
# Coins
t[30] = o[30].main
## Outline
t[31] = o[29].main
### Multiplayer
t[28] = o[28].main
## Blue
t[46] = o[47].main
# Flowers / Grass
grassType = 5
for sprite in globals.Area.sprites:
if sprite.type == 564:
grassType = min(sprite.spritedata[5] & 0xf, 5)
if grassType < 2:
grassType = 0
elif grassType in [3, 4]:
grassType = 3
if grassType == 0: # Forest
replace_flowers = 160
replace_grass = 163
replace_both = 168
elif grassType == 2: # Underground
replace_flowers = 55
replace_grass = 171
replace_both = 188
elif grassType == 3: # Sky
replace_flowers = 176
replace_grass = 179
replace_both = 184
else: # Normal
replace_flowers = 55
replace_grass = 58
replace_both = 106
## Flowers
replace = replace_flowers
for i in range(210, 213):
t[i] = o[replace].main
replace += 1
## Grass
replace = replace_grass
for i in range(178, 183):
t[i] = o[replace].main
replace += 1
## Flowers and grass
replace = replace_both
for i in range(213, 216):
t[i] = o[replace].main
replace += 1
# Lines
## Straight lines
### Normal
t[216] = o[128].main
t[217] = o[63].main
### Corners and diagonals
replace = 122
for i in range(218, 231):
if i != 224: # random empty tile
t[i] = o[replace].main
replace += 1
## Circles and stops
for i in range(231, 256):
t[i] = o[replace].main
replace += 1
except Exception:
warningBox = QtWidgets.QMessageBox(QtWidgets.QMessageBox.NoIcon, 'OH NO',
'Whoops, something went wrong while processing the overrides...')
warningBox.exec_()
def getUsedTiles(self):
usedTiles = []
if self.slot == 0:
usedTiles.append(0)
for i, tile in enumerate(self.overrides):
if tile is not None:
usedTiles.append(i)
for object in self.objects:
for i in range(len(object.tiles)):
for tile in object.tiles[i]:
if not tile[2] & 3 and self.slot: # Pa0 tile 0 used in another slot, don't count it
continue
if object.randLen > 0:
for i in range(object.randLen):
if tile[1] + i not in usedTiles:
usedTiles.append(tile[1] + i)
else:
if tile[1] not in usedTiles:
usedTiles.append(tile[1])
return usedTiles
#############################################################################################
######################### Palette for painting behaviours to tiles ##########################
class paletteWidget(QtWidgets.QWidget):
def __init__(self, window):
super(paletteWidget, self).__init__(window)
# Core Types Radio Buttons and Tooltips
self.coreType = QtWidgets.QGroupBox()
self.coreType.setTitle('Core Type:')
self.coreWidgets = []
coreLayout = QtWidgets.QGridLayout()
path = globals.miyamoto_path + '/miyamotodata/Icons/'
self.coreTypes = [
['Default', QtGui.QIcon(path + 'Core/Default.png'), 'The standard type for tiles.\n\nAny regular terrain or backgrounds\nshould be of this generic type.'],
['Rails', QtGui.QIcon(path + 'Core/Rails.png'), 'Used for all types of rails.\n\nRails are replaced in-game with\n3D models, so modifying these\ntiles with different graphics\nwill have no effect.'],
['Dash Coin', QtGui.QIcon(path + 'Core/DashCoin.png'), 'Creates a dash coin.\n\nDash coins, also known as\n"coin outlines," turn into\na coin a second or so after they\nare touched by the player.'],
['Coin', QtGui.QIcon(path + 'Core/Coin.png'), 'Creates a coin.\n\nCoins have no solid collision,\nand when touched will disappear\nand increment the coin counter.\nUnused Blue Coins go in this\ncategory, too.'],
['Blue Coin', QtGui.QIcon(path + 'Core/BlueCoin.png'), 'This is used for the blue coin in Pa0_jyotyu\nthat has a black checkerboard outline.'],
['Explodable Block', QtGui.QIcon(path + 'Core/UsedWoodStoneRed.png'), 'Defines a used item block, a stone\nblock, a wooden block or a red block.'],
['Brick Block', QtGui.QIcon(path + 'Core/Brick.png'), 'Defines a brick block.'],
['? Block', QtGui.QIcon(path + 'Core/Qblock.png'), 'Defines a ? block.'],
['Red Block Outline(?)', QtGui.QIcon(path + 'Unknown.png'), 'Looking at NSMB2, this is supposedly the core type for Red Block Outline.'],
['Partial Block', QtGui.QIcon(path + 'Core/Partial.png'), '<b>DOESN\'T WORK!</b>\n\nUsed for blocks with partial collisions.\n\nVery useful for Mini-Mario secret\nareas, but also for providing a more\naccurate collision map for your tiles.\nUse with the "Solid" setting.'],
['Invisible Block', QtGui.QIcon(path + 'Core/Invisible.png'), 'Used for invisible item blocks.'],
['Slope', QtGui.QIcon(path + 'Core/Slope.png'), 'Defines a sloped tile.\n\nSloped tiles have sloped collisions,\nwhich Mario can slide on.'],
['Reverse Slope', QtGui.QIcon(path + 'Core/RSlope.png'), 'Defines an upside-down slope.\n\nSloped tiles have sloped collisions,\nwhich Mario can hit.'],
['Liquid', QtGui.QIcon(path + 'Core/Quicksand.png'), 'Creates a liquid area. All seen to be non-functional, except for Quicksand, which should be used with the "Quicksand" terrain type.'],
['Climbable Terrian', QtGui.QIcon(path + 'Core/Climb.png'), 'Creates terrain that can be\nclimbed on.\n\nClimable terrain cannot be walked on.\n\nWhen Mario is on top of a climable\ntile and the player presses up, Mario\nwill enter a climbing state.'],
['Damage Tile', QtGui.QIcon(path + 'Core/Skull.png'), 'Various damaging tiles.\n\nIcicle/Spike tiles will damage Mario one hit\nwhen they are touched, whereas lava and poison water will instantly kill Mario and play the corresponding death animation.'],
['Pipe/Joint', QtGui.QIcon(path + 'Core/Pipe.png'), 'Denotes a pipe tile, or a pipe joint.\n\nPipe tiles are specified according to\nthe part of the pipe. It\'s important\nto specify the right parts or\nentrances may not function correctly.'],
['Conveyor Belt', QtGui.QIcon(path + 'Core/Conveyor.png'), 'Defines moving tiles.\n\nMoving tiles will move Mario in one\ndirection or another.'],
['Donut Block', QtGui.QIcon(path + 'Core/Donut.png'), 'Creates a falling donut block.\n\nThese blocks fall after they have been\nstood on for a few seconds, and then\nrespawn later. They are replaced by\nthe game with 3D models, so you can\'t\neasily make your own.'],
['Cave Entrance', QtGui.QIcon(path + 'Core/Cave.png'), 'Creates a cave entrance.\n\nCave entrances are used to mark secret\nareas hidden behind Layer 0 tiles.'],
['Hanging Ledge', QtGui.QIcon(path + 'Core/Ledge.png'), 'Creates a hanging ledge, or terrain that can be\nclimbed on with a ledge.\n\nYou cannot climb down from the <b>hanging</b> ledge\nif climable terrian is under it,\nand you cannot climb up from the climable terrian\nif the <b>hanging</b> ledge is above it.\n\nFor such behavior, you need the climbable wall with ledge.'],
['Rope', QtGui.QIcon(path + 'Core/Rope.png'), 'Unused type that produces a rope you can hang to. If solidity is set to "None," it will have no effect. "Solid on Top" and "Solid on Bottom" produce no useful behavior.'],
['Climbable Pole', QtGui.QIcon(path + 'Core/Pole.png'), 'Creates a pole that can be climbed. Use with "No Solidity."'],
]
for i, item in enumerate(self.coreTypes):
self.coreWidgets.append(QtWidgets.QRadioButton())
if i == 0:
self.coreWidgets[i].setText(item[0])
else:
self.coreWidgets[i].setIcon(item[1])
self.coreWidgets[i].setIconSize(QtCore.QSize(24, 24))
self.coreWidgets[i].setToolTip('<b>' + item[0] + ':</b><br><br>' + item[2].replace('\n', '<br>'))
self.coreWidgets[i].clicked.connect(self.swapParams)
coreLayout.addWidget(self.coreWidgets[i], i // 4, i % 4)
self.coreWidgets[0].setChecked(True) # make "Default" selected at first
self.coreType.setLayout(coreLayout)
# Parameters
self.parametersGroup = QtWidgets.QGroupBox()
self.parametersGroup.setTitle('Parameters:')
parametersLayout = QtWidgets.QVBoxLayout()
self.parameters1 = QtWidgets.QComboBox()
self.parameters2 = QtWidgets.QComboBox()
self.parameters1.setIconSize(QtCore.QSize(24, 24))
self.parameters2.setIconSize(QtCore.QSize(24, 24))
self.parameters1.setMinimumHeight(32)
self.parameters2.setMinimumHeight(32)
self.parameters1.hide()
self.parameters2.hide()
parametersLayout.addWidget(self.parameters1)
parametersLayout.addWidget(self.parameters2)
self.parametersGroup.setLayout(parametersLayout)
GenericParams = [
['Normal', QtGui.QIcon()],
['Beanstalk Stop', QtGui.QIcon(path + '/Generic/Beanstopper.png')],
]
RailParams = [
['Upslope', QtGui.QIcon(path + 'Rails/Upslope.png')],
['Downslope', QtGui.QIcon(path + 'Rails/Downslope.png')],
['Top-Left Corner', QtGui.QIcon(path + 'Rails/Top-Left Corner.png')],
['Bottom-Right Corner', QtGui.QIcon(path + 'Rails/Bottom-Right Corner.png')],
['Horizontal', QtGui.QIcon(path + 'Rails/Horizontal.png')],
['Vertical', QtGui.QIcon(path + 'Rails/Vertical.png')],
['Blank', QtGui.QIcon()],
['Gentle Upslope 2', QtGui.QIcon(path + 'Rails/Gentle Upslope 2.png')],
['Gentle Upslope 1', QtGui.QIcon(path + 'Rails/Gentle Upslope 1.png')],
['Gentle Downslope 2', QtGui.QIcon(path + 'Rails/Gentle Downslope 2.png')],
['Gentle Downslope 1', QtGui.QIcon(path + 'Rails/Gentle Downslope 1.png')],
['Steep Upslope 2', QtGui.QIcon(path + 'Rails/Steep Upslope 2.png')],
['Steep Upslope 1', QtGui.QIcon(path + 'Rails/Steep Upslope 1.png')],
['Steep Downslope 2', QtGui.QIcon(path + 'Rails/Steep Downslope 2.png')],
['Steep Downslope 1', QtGui.QIcon(path + 'Rails/Steep Downslope 1.png')],
['1x1 Circle', QtGui.QIcon(path + 'Rails/1x1 Circle.png')],
['2x2 Circle Upper Right', QtGui.QIcon(path + 'Rails/2x2 Circle Upper Right.png')],
['2x2 Circle Upper Left', QtGui.QIcon(path + 'Rails/2x2 Circle Upper Left.png')],
['2x2 Circle Lower Right', QtGui.QIcon(path + 'Rails/2x2 Circle Lower Right.png')],
['2x2 Circle Lower Left', QtGui.QIcon(path + 'Rails/2x2 Circle Lower Left.png')],
['4x4 Circle Top Left Corner', QtGui.QIcon(path + 'Rails/4x4 Circle Top Left Corner.png')],
['4x4 Circle Top Left', QtGui.QIcon(path + 'Rails/4x4 Circle Top Left.png')],
['4x4 Circle Top Right', QtGui.QIcon(path + 'Rails/4x4 Circle Top Right.png')],
['4x4 Circle Top Right Corner', QtGui.QIcon(path + 'Rails/4x4 Circle Top Right Corner.png')],
['4x4 Circle Upper Left Side', QtGui.QIcon(path + 'Rails/4x4 Circle Upper Left Side.png')],
['4x4 Circle Upper Right Side', QtGui.QIcon(path + 'Rails/4x4 Circle Upper Right Side.png')],
['4x4 Circle Lower Left Side', QtGui.QIcon(path + 'Rails/4x4 Circle Lower Left Side.png')],
['4x4 Circle Lower Right Side', QtGui.QIcon(path + 'Rails/4x4 Circle Lower Right Side.png')],
['4x4 Circle Bottom Left Corner', QtGui.QIcon(path + 'Rails/4x4 Circle Bottom Left Corner.png')],
['4x4 Circle Bottom Left', QtGui.QIcon(path + 'Rails/4x4 Circle Bottom Left.png')],
['4x4 Circle Bottom Right', QtGui.QIcon(path + 'Rails/4x4 Circle Bottom Right.png')],
['4x4 Circle Bottom Right Corner', QtGui.QIcon(path + 'Rails/4x4 Circle Bottom Right Corner.png')],
['End Stop', QtGui.QIcon()],
]
CoinParams = [
['Generic Coin', QtGui.QIcon(path + 'Core/Coin.png')],
['Nothing', QtGui.QIcon()],
['Blue Coin', QtGui.QIcon(path + 'Core/BlueCoin.png')],
]
ExplodableBlockParams = [
['Used Item Block', QtGui.QIcon(path + 'ExplodableBlock/Used.png')],
['Stone Block', QtGui.QIcon(path + 'ExplodableBlock/Stone.png')],
['Wooden Block', QtGui.QIcon(path + 'ExplodableBlock/Wooden.png')],
['Red Block', QtGui.QIcon(path + 'ExplodableBlock/Red.png')],
]
PartialParams = [
['Upper Left', QtGui.QIcon(path + 'Partial/UpLeft.png')],
['Upper Right', QtGui.QIcon(path + 'Partial/UpRight.png')],
['Top Half', QtGui.QIcon(path + 'Partial/TopHalf.png')],
['Lower Left', QtGui.QIcon(path + 'Partial/LowLeft.png')],
['Left Half', QtGui.QIcon(path + 'Partial/LeftHalf.png')],
['Diagonal Downwards', QtGui.QIcon(path + 'Partial/DiagDn.png')],
['Upper Left 3/4', QtGui.QIcon(path + 'Partial/UpLeft3-4.png')],
['Lower Right', QtGui.QIcon(path + 'Partial/LowRight.png')],
['Diagonal Downwards', QtGui.QIcon(path + 'Partial/DiagDn.png')],
['Right Half', QtGui.QIcon(path + 'Partial/RightHalf.png')],
['Upper Right 3/4', QtGui.QIcon(path + 'Partial/UpRig3-4.png')],
['Lower Half', QtGui.QIcon(path + 'Partial/LowHalf.png')],
['Lower Left 3/4', QtGui.QIcon(path + 'Partial/LowLeft3-4.png')],
['Lower Right 3/4', QtGui.QIcon(path + 'Partial/LowRight3-4.png')],
['Full Brick', QtGui.QIcon(path + 'Partial/Full.png')],
]
SlopeParams = [
['Steep Upslope', QtGui.QIcon(path + 'Slope/steepslopeleft.png')],
['Steep Downslope', QtGui.QIcon(path + 'Slope/steepsloperight.png')],
['Upslope 1', QtGui.QIcon(path + 'Slope/slopeleft.png')],
['Upslope 2', QtGui.QIcon(path + 'Slope/slope3left.png')],
['Downslope 1', QtGui.QIcon(path + 'Slope/slope3right.png')],
['Downslope 2', QtGui.QIcon(path + 'Slope/sloperight.png')],
['Very Steep Upslope 1', QtGui.QIcon(path + 'Slope/vsteepup1.png')],
['Very Steep Upslope 2', QtGui.QIcon(path + 'Slope/vsteepup2.png')],
['Very Steep Downslope 1', QtGui.QIcon(path + 'Slope/vsteepdown2.png')],
['Very Steep Downslope 2', QtGui.QIcon(path + 'Slope/vsteepdown1.png')],
['Slope Edge (solid)', QtGui.QIcon(path + 'Slope/edge.png')],
['Gentle Upslope 1', QtGui.QIcon(path + 'Slope/gentleupslope1.png')],
['Gentle Upslope 2', QtGui.QIcon(path + 'Slope/gentleupslope2.png')],
['Gentle Upslope 3', QtGui.QIcon(path + 'Slope/gentleupslope3.png')],
['Gentle Upslope 4', QtGui.QIcon(path + 'Slope/gentleupslope4.png')],
['Gentle Downslope 1', QtGui.QIcon(path + 'Slope/gentledownslope1.png')],
['Gentle Downslope 2', QtGui.QIcon(path + 'Slope/gentledownslope2.png')],
['Gentle Downslope 3', QtGui.QIcon(path + 'Slope/gentledownslope3.png')],
['Gentle Downslope 4', QtGui.QIcon(path + 'Slope/gentledownslope4.png')],
]
ReverseSlopeParams = [
['Steep Downslope', QtGui.QIcon(path + 'Slope/Rsteepslopeleft.png')],
['Steep Upslope', QtGui.QIcon(path + 'Slope/Rsteepsloperight.png')],
['Downslope 1', QtGui.QIcon(path + 'Slope/Rslopeleft.png')],
['Downslope 2', QtGui.QIcon(path + 'Slope/Rslope3left.png')],
['Upslope 1', QtGui.QIcon(path + 'Slope/Rslope3right.png')],
['Upslope 2', QtGui.QIcon(path + 'Slope/Rsloperight.png')],
['Very Steep Downslope 1', QtGui.QIcon(path + 'Slope/Rvsteepdown1.png')],
['Very Steep Downslope 2', QtGui.QIcon(path + 'Slope/Rvsteepdown2.png')],
['Very Steep Upslope 1', QtGui.QIcon(path + 'Slope/Rvsteepup2.png')],
['Very Steep Upslope 2', QtGui.QIcon(path + 'Slope/Rvsteepup1.png')],
['Slope Edge (solid)', QtGui.QIcon(path + 'Slope/edge.png')],
['Gentle Downslope 1', QtGui.QIcon(path + 'Slope/Rgentledownslope1.png')],
['Gentle Downslope 2', QtGui.QIcon(path + 'Slope/Rgentledownslope2.png')],
['Gentle Downslope 3', QtGui.QIcon(path + 'Slope/Rgentledownslope3.png')],
['Gentle Downslope 4', QtGui.QIcon(path + 'Slope/Rgentledownslope4.png')],
['Gentle Upslope 1', QtGui.QIcon(path + 'Slope/Rgentleupslope1.png')],
['Gentle Upslope 2', QtGui.QIcon(path + 'Slope/Rgentleupslope2.png')],
['Gentle Upslope 3', QtGui.QIcon(path + 'Slope/Rgentleupslope3.png')],
['Gentle Upslope 4', QtGui.QIcon(path + 'Slope/Rgentleupslope4.png')],
]
LiquidParams = [
['Unknown 0', QtGui.QIcon(path + 'Unknown.png')],
['Unknown 1', QtGui.QIcon(path + 'Unknown.png')],
['Unknown 2', QtGui.QIcon(path + 'Unknown.png')],
['Unknown 3', QtGui.QIcon(path + 'Unknown.png')],
['Quicksand', QtGui.QIcon(path + 'Core/Quicksand.png')],
]
ClimbableParams = [
['Vine', QtGui.QIcon(path + 'Climbable/Vine.png')],
['Climbable Wall', QtGui.QIcon(path + 'Core/Climb.png')],
['Climbable Fence', QtGui.QIcon(path + 'Climbable/Fence.png')],
]
DamageTileParams = [
['Icicle', QtGui.QIcon(path + 'Damage/Icicle1x1.png')],
['Long Icicle 1', QtGui.QIcon(path + 'Damage/Icicle1x2Top.png')],
['Long Icicle 2', QtGui.QIcon(path + 'Damage/Icicle1x2Bottom.png')],
['Left-Facing Spikes', QtGui.QIcon(path + 'Damage/SpikeLeft.png')],
['Right-Facing Spikes', QtGui.QIcon(path + 'Damage/SpikeRight.png')],
['Up-Facing Spikes', QtGui.QIcon(path + 'Damage/Spike.png')],
['Down-Facing Spikes', QtGui.QIcon(path + 'Damage/SpikeDown.png')],
['Instant Death', QtGui.QIcon(path + 'Core/Skull.png')],
['Lava', QtGui.QIcon(path + 'Damage/Lava.png')],
['Poison Water', QtGui.QIcon(path + 'Damage/Poison.png')],
]
PipeParams = [
['Vert. Top Entrance Left', QtGui.QIcon(path + 'Pipes/UpLeft.png')],
['Vert. Top Entrance Right', QtGui.QIcon(path + 'Pipes/UpRight.png')],
['Vert. Bottom Entrance Left', QtGui.QIcon(path + 'Pipes/DownLeft.png')],
['Vert. Bottom Entrance Right', QtGui.QIcon(path + 'Pipes/DownRight.png')],
['Horiz. Left Entrance Top', QtGui.QIcon(path + 'Pipes/LeftTop.png')],
['Horiz. Left Entrance Bottom', QtGui.QIcon(path + 'Pipes/LeftBottom.png')],
['Horiz. Right Entrance Top', QtGui.QIcon(path + 'Pipes/RightTop.png')],
['Horiz. Right Entrance Bottom', QtGui.QIcon(path + 'Pipes/RightBottom.png')],
['Vert. Mini Pipe Top', QtGui.QIcon(path + 'Pipes/MiniUp.png')],
['Vert. Mini Pipe Bottom', QtGui.QIcon(path + 'Pipes/MiniDown.png')],
['Horiz. Mini Pipe Left', QtGui.QIcon(path + 'Pipes/MiniLeft.png')],
['Horiz. Mini Pipe Right', QtGui.QIcon(path + 'Pipes/MiniRight.png')],
['Vert. Center Left', QtGui.QIcon(path + 'Pipes/VertCenterLeft.png')],
['Vert. Center Right', QtGui.QIcon(path + 'Pipes/VertCenterRight.png')],
['Vert. Intersection Left', QtGui.QIcon(path + 'Pipes/VertIntersectLeft.png')],
['Vert. Intersection Right', QtGui.QIcon(path + 'Pipes/VertIntersectRight.png')],
['Horiz. Center Top', QtGui.QIcon(path + 'Pipes/HorizCenterTop.png')],
['Horiz. Center Bottom', QtGui.QIcon(path + 'Pipes/HorizCenterBottom.png')],
['Horiz. Intersection Top', QtGui.QIcon(path + 'Pipes/HorizIntersectTop.png')],
['Horiz. Intersection Bottom', QtGui.QIcon(path + 'Pipes/HorizIntersectBottom.png')],
['UNUSED', QtGui.QIcon(path + 'Unknown.png')],
['UNUSED', QtGui.QIcon(path + 'Unknown.png')],
['UNUSED', QtGui.QIcon(path + 'Unknown.png')],
['Vert. Mini Pipe Center', QtGui.QIcon(path + 'Pipes/MiniVertCenter.png')],
['Horiz. Mini Pipe Center', QtGui.QIcon(path + 'Pipes/MiniHorizCenter.png')],
['UNUSED', QtGui.QIcon(path + 'Unknown.png')],
['UNUSED', QtGui.QIcon(path + 'Unknown.png')],
['UNUSED', QtGui.QIcon(path + 'Unknown.png')],
['UNUSED', QtGui.QIcon(path + 'Unknown.png')],
['UNUSED', QtGui.QIcon(path + 'Unknown.png')],
['UNUSED', QtGui.QIcon(path + 'Unknown.png')],
['UNUSED', QtGui.QIcon(path + 'Unknown.png')],
['UNUSED', QtGui.QIcon(path + 'Unknown.png')],
['UNUSED', QtGui.QIcon(path + 'Unknown.png')],
['UNUSED', QtGui.QIcon(path + 'Unknown.png')],
['Pipe Joint', QtGui.QIcon(path + 'Pipes/Joint.png')],
['Vert. Mini Pipe Intersection', QtGui.QIcon(path + 'Pipes/MiniVertIntersect.png')],
['Horiz. Mini Pipe Intersection', QtGui.QIcon(path + 'Pipes/MiniHorizIntersect.png')],
]
ConveyorParams = [
['Left', QtGui.QIcon(path + 'Conveyor/Left.png')],
['Right', QtGui.QIcon(path + 'Conveyor/Right.png')],
['Left Fast', QtGui.QIcon(path + 'Conveyor/LeftFast.png')],
['Right Fast', QtGui.QIcon(path + 'Conveyor/RightFast.png')],
]
CaveParams = [
['Left', QtGui.QIcon(path + 'Cave/Left.png')],
['Right', QtGui.QIcon(path + 'Cave/Right.png')],
]
ClimbLedgeParams = [
['Hanging Ledge', QtGui.QIcon(path + 'Core/Ledge.png')],
['Climbable Wall with Ledge', QtGui.QIcon(path + 'Core/ClimbLedge.png')],
]
self.ParameterList = [
GenericParams, # 0x00
RailParams, # 0x01
None, # 0x02
CoinParams, # 0x03
None, # 0x04
ExplodableBlockParams, # 0x05
None, # 0x06
None, # 0x07
None, # 0x08
PartialParams, # 0x09
None, # 0x0A
SlopeParams, # 0x0B
ReverseSlopeParams, # 0x0C
LiquidParams, # 0x0D
ClimbableParams, # 0x0E
DamageTileParams, # 0x0F
PipeParams, # 0x10
ConveyorParams, # 0x11
None, # 0x12
CaveParams, # 0x13
ClimbLedgeParams, # 0x14
None, # 0x15
None, # 0x16
]
DamageTileParams2 = [
['Default', QtGui.QIcon()],
['Muncher (no visible difference)', QtGui.QIcon(path + 'Damage/Muncher.png')],
]
PipeParams2 = [
['Green', QtGui.QIcon(path + 'PipeColors/Green.png')],
['Red', QtGui.QIcon(path + 'PipeColors/Red.png')],
['Yellow', QtGui.QIcon(path + 'PipeColors/Yellow.png')],
['Blue', QtGui.QIcon(path + 'PipeColors/Blue.png')],
]
self.ParameterList2 = [
None, # 0x0
None, # 0x1
None, # 0x2
None, # 0x3
None, # 0x4
None, # 0x5
None, # 0x6
None, # 0x7
None, # 0x8
None, # 0x9
None, # 0xA
None, # 0xB
None, # 0xC
None, # 0xD
None, # 0xE
DamageTileParams2, # 0xF
PipeParams2, # 0x10
None, # 0x11
None, # 0x12
None, # 0x13
None, # 0x14
None, # 0x15
None, # 0x16
]
# Collision Type
self.collsType = QtWidgets.QComboBox()
self.collsGroup = QtWidgets.QGroupBox('Collision Type')
L = QtWidgets.QVBoxLayout(self.collsGroup)
L.addWidget(self.collsType)
self.collsTypes = [
['No Solidity', QtGui.QIcon(path + 'Collisions/NoSolidity.png')],
['Solid', QtGui.QIcon(path + 'Collisions/Solid.png')],
['Solid-on-Top', QtGui.QIcon(path + 'Collisions/SolidOnTop.png')],
['Solid-on-Bottom', QtGui.QIcon(path + 'Collisions/SolidOnBottom.png')],
['Solid-on-Top and Bottom', QtGui.QIcon(path + 'Collisions/SolidOnTopBottom.png')],
['Slide (1)', QtGui.QIcon(path + 'Collisions/SlopedSlide.png')],
['Slide (2)', QtGui.QIcon(path + 'Collisions/SlopedSlide.png')],
['Staircase (1)', QtGui.QIcon(path + 'Collisions/SlopedSolidOnTop.png')],
['Staircase (2)', QtGui.QIcon(path + 'Collisions/SlopedSolidOnTop.png')],
]
for item in self.collsTypes:
self.collsType.addItem(item[1], item[0])
self.collsType.setIconSize(QtCore.QSize(24, 24))
self.collsType.setToolTip(
'Set the collision style of the terrain.\n\n'
'<b>No Solidity:</b>\nThe tile cannot be stood on or hit.\n\n'
'<b>Solid:</b>\nThe tile can be stood on and hit from all sides.\n\n'
'<b>Solid-on-Top:</b>\nThe tile can only be stood on.\n\n'
'<b>Solid-on-Bottom:</b>\nThe tile can only be hit from below.\n\n'
'<b>Solid-on-Top and Bottom:</b>\nThe tile can be stood on and hit from below, but not any other side.\n\n'
'<b>Slide:</b>\nThe player starts sliding without being able to jump when interacting with this solidity.\n\n'
'<b>Staircase:</b>\nUsed for staircases in Ghost Houses, Castle rooftop and in the main tilesets.\n\n'
'The difference between <b>Slide/Staircase (1)</b> and <b>Slide/Staircase (2)</b> is that (1) will\n'
'let you go past it by default (unless you add a solid tile edge), where as (2) will\n'
'force you to climb it (without the need of a solid tile edge).\n\n'.replace('\n', '<br>')
)
# Terrain Type
self.terrainType = QtWidgets.QComboBox()
self.terrainGroup = QtWidgets.QGroupBox('Terrain Type')
L = QtWidgets.QVBoxLayout(self.terrainGroup)
L.addWidget(self.terrainType)
# Quicksand is unused.
self.terrainTypes = [
['Default', QtGui.QIcon()], # 0x0
['Ice', QtGui.QIcon(path + 'Terrain/Ice.png')], # 0x1
['Snow', QtGui.QIcon(path + 'Terrain/Snow.png')], # 0x2
['Quicksand', QtGui.QIcon(path + 'Terrain/Quicksand.png')], # 0x3
['Desert Sand', QtGui.QIcon(path + 'Terrain/Sand.png')], # 0x4
['Grass', QtGui.QIcon(path + 'Terrain/Grass.png')], # 0x5
['Cloud', QtGui.QIcon(path + 'Terrain/Cloud.png')], # 0x6
['Beach Sand', QtGui.QIcon(path + 'Terrain/BeachSand.png')], # 0x7
['Carpet', QtGui.QIcon(path + 'Terrain/Carpet.png')], # 0x8
['Leaves', QtGui.QIcon(path + 'Terrain/Leaves.png')], # 0x9
['Wood', QtGui.QIcon(path + 'Terrain/Wood.png')], # 0xA
['Water', QtGui.QIcon(path + 'Terrain/Water.png')], # 0xB
['Beanstalk Leaf', QtGui.QIcon(path + 'Terrain/BeanstalkLeaf.png')], # 0xC
]
for item in range(len(self.terrainTypes)):
self.terrainType.addItem(self.terrainTypes[item][1], self.terrainTypes[item][0])
self.terrainType.setIconSize(QtCore.QSize(24, 24))
self.terrainType.setToolTip(
'Set the various types of terrain.\n\n'
'<b>Default:</b>\nTerrain with no paticular properties.\n\n'
'<b>Ice:</b>\nWill be slippery.\n\n'
'<b>Snow:</b>\nWill emit puffs of snow and snow noises.\n\n'
'<b>Quicksand:</b>\nWill emit puffs of sand. Use with the "Quicksand" core type.\n\n'
'<b>Sand:</b>\nWill create dark-colored sand tufts around\nMario\'s feet.\n\n'
'<b>Grass:</b>\nWill emit grass-like footstep noises.\n\n'
'<b>Cloud:</b>\nWill emit footstep noises for cloud platforms.\n\n'
'<b>Beach Sand:</b>\nWill create light-colored sand tufts around\nMario\'s feet.\n\n'
'<b>Carpet:</b>\nWill emit footstep noises for carpets.\n\n'
'<b>Leaves:</b>\nWill emit footstep noises for Palm Tree leaves.\n\n'
'<b>Wood:</b>\nWill emit footstep noises for wood.\n\n'
'<b>Water:</b>\nWill emit small splashes of water around\nMario\'s feet.\n\n'
'<b>Beanstalk Leaf:</b>\nWill emit footstep noises for Beanstalk leaves.'.replace('\n', '<br>')
)
layout = QtWidgets.QVBoxLayout()
layout.addWidget(self.coreType)
layout.addWidget(self.parametersGroup)
layout.addWidget(self.collsGroup)
layout.addWidget(self.terrainGroup)
self.setLayout(layout)
self.swapParams()
def swapParams(self):
for item in range(len(self.ParameterList)):
if self.coreWidgets[item].isChecked():
self.parameters1.clear()
if self.ParameterList[item] is not None:
for option in self.ParameterList[item]:
self.parameters1.addItem(option[1], option[0])
self.parameters1.show()
else:
self.parameters1.hide()
self.parameters2.clear()
if self.ParameterList2[item] is not None:
for option in self.ParameterList2[item]:
self.parameters2.addItem(option[1], option[0])
self.parameters2.show()
else:
self.parameters2.hide()
#############################################################################################
######################### InfoBox Custom Widget to display info to ##########################
class InfoBox(QtWidgets.QWidget):
    """Panel showing core/terrain/parameter icons and textual info for the selected tile."""

    def __init__(self, window):
        super(InfoBox, self).__init__(window)

        # InfoBox
        outerLayout = QtWidgets.QGridLayout()
        formLayout = QtWidgets.QFormLayout()
        self.imageBox = QtWidgets.QGroupBox()
        imageLayout = QtWidgets.QHBoxLayout()

        # All three preview labels start with the same shared transparent 24x24 pixmap.
        blank = QtGui.QPixmap(24, 24)
        blank.fill(Qt.transparent)

        self.coreImage = QtWidgets.QLabel()
        self.coreImage.setPixmap(blank)
        self.terrainImage = QtWidgets.QLabel()
        self.terrainImage.setPixmap(blank)
        self.parameterImage = QtWidgets.QLabel()
        self.parameterImage.setPixmap(blank)

        self.collisionOverlay = QtWidgets.QCheckBox('Overlay Collision')
        self.collisionOverlay.clicked.connect(InfoBox.updateCollision)

        self.coreInfo = QtWidgets.QLabel()
        # Blank lines reserve vertical space for multi-line property text.
        self.propertyInfo = QtWidgets.QLabel(' \n\n\n\n\n')
        self.terrainInfo = QtWidgets.QLabel()
        self.paramInfo = QtWidgets.QLabel()

        smallFont = self.font()
        smallFont.setPointSize(9)
        for infoLabel in (self.coreInfo, self.propertyInfo, self.terrainInfo, self.paramInfo):
            infoLabel.setFont(smallFont)

        self.LabelB = QtWidgets.QLabel('Properties:')
        self.LabelB.setFont(smallFont)

        self.hexdata = QtWidgets.QLabel('Hex Data: 0x0 0x0\n 0x0 0x0 0x0')
        self.hexdata.setFont(smallFont)

        # Build one vertical column per category: caption, icon, then description.
        imageLayout.setContentsMargins(0, 4, 4, 4)
        columns = (('Core', self.coreImage, self.coreInfo),
                   ('Terrain', self.terrainImage, self.terrainInfo),
                   ('Parameters', self.parameterImage, self.paramInfo))
        for caption, icon, info in columns:
            column = QtWidgets.QVBoxLayout()
            column.setGeometry(QtCore.QRect(0, 0, 40, 40))
            captionLabel = QtWidgets.QLabel(caption)
            captionLabel.setFont(smallFont)
            column.addWidget(captionLabel, 0, Qt.AlignCenter)
            column.addWidget(icon, 0, Qt.AlignCenter)
            column.addWidget(info, 0, Qt.AlignCenter)
            imageLayout.addLayout(column)

        self.imageBox.setLayout(imageLayout)

        outerLayout.addWidget(self.imageBox, 0, 0)
        outerLayout.addWidget(self.collisionOverlay, 1, 0)
        formLayout.addRow(self.LabelB, self.propertyInfo)
        formLayout.addRow(self.hexdata)
        outerLayout.addLayout(formLayout, 0, 1, 2, 1)
        self.setLayout(outerLayout)

    @staticmethod
    def updateCollision():
        """Repaint the tileset view and the current object so the overlay state takes effect."""
        window.setuptile()
        window.tileWidget.setObject(window.objectList.currentIndex())
        window.tileWidget.update()
#############################################################################################
##################### Object List Widget and Model Setup with Painter #######################
class objectList(QtWidgets.QListView):
    """Icon-mode list of the tileset's objects, each rendered as a 96x96 preview."""

    def __init__(self, parent=None):
        super(objectList, self).__init__(parent)

        self.setViewMode(QtWidgets.QListView.IconMode)
        self.setIconSize(QtCore.QSize(96, 96))
        self.setGridSize(QtCore.QSize(114, 114))
        self.setMovement(QtWidgets.QListView.Static)
        self.setBackgroundRole(QtGui.QPalette.BrightText)
        self.setWrapping(False)
        self.setMinimumHeight(140)
        self.setMaximumHeight(140)

        # Remember the initial (invalid) index so the selection can be cleared later.
        self.noneIdx = self.currentIndex()

    def clearCurrentIndex(self):
        """Deselect everything by restoring the remembered invalid index."""
        self.setCurrentIndex(self.noneIdx)

    def SetupObjectModel(self, objects, tiles):
        """Rebuild the model: render every object into a pixmap and append it as an item."""
        global Tileset
        self.clear()

        for count, obj in enumerate(objects):
            tex = QtGui.QPixmap(obj.width * 24, obj.height * 24)
            tex.fill(Qt.transparent)
            painter = QtGui.QPainter(tex)

            for rowIdx, row in enumerate(obj.tiles):
                for colIdx, tile in enumerate(row):
                    # Slot-0 tilesets draw every tile; other slots skip empty entries.
                    if (Tileset.slot == 0) or ((tile[2] & 3) != 0):
                        image = Tileset.overrides[tile[1]] if Tileset.slot == 0 and window.overrides else None
                        if not image:
                            image = tiles[tile[1]].image
                        painter.drawPixmap(colIdx * 24, rowIdx * 24,
                                           image.scaledToWidth(24, Qt.SmoothTransformation))

            painter.end()

            item = QtGui.QStandardItem(QtGui.QIcon(tex), 'Object {0}'.format(count))
            item.setFlags(item.flags() & ~Qt.ItemIsEditable)
            self.appendRow(item)
#############################################################################################
######################## List Widget with custom painter/MouseEvent #########################
class displayWidget(QtWidgets.QListView):
    """Grid view of 24x24 tiles; reports mouse position and paints optional collision overlays."""

    # Emitted with the cursor (x, y) on every mouse move (mouse tracking is enabled).
    mouseMoved = QtCore.pyqtSignal(int, int)

    def __init__(self, parent=None):
        super(displayWidget, self).__init__(parent)

        self.setMinimumWidth(426)
        self.setMaximumWidth(426)
        self.setMinimumHeight(404)
        self.setDragEnabled(True)
        self.setViewMode(QtWidgets.QListView.IconMode)
        self.setIconSize(QtCore.QSize(24, 24))
        self.setGridSize(QtCore.QSize(25, 25))
        self.setMovement(QtWidgets.QListView.Static)
        self.setAcceptDrops(False)
        self.setDropIndicatorShown(True)
        self.setResizeMode(QtWidgets.QListView.Adjust)
        self.setUniformItemSizes(True)
        self.setBackgroundRole(QtGui.QPalette.BrightText)
        self.setMouseTracking(True)
        self.setSelectionMode(QtWidgets.QAbstractItemView.ExtendedSelection)

        self.setItemDelegate(self.TileItemDelegate())

    def mouseMoveEvent(self, event):
        """Let the base class handle the event, then broadcast the cursor position."""
        QtWidgets.QWidget.mouseMoveEvent(self, event)
        self.mouseMoved.emit(event.x(), event.y())

    class TileItemDelegate(QtWidgets.QAbstractItemDelegate):
        """Handles tiles and their rendering"""

        def __init__(self):
            """Initialises the delegate"""
            QtWidgets.QAbstractItemDelegate.__init__(self)

        def paint(self, painter, option, index):
            """Paints an object"""
            global Tileset

            decoration = index.model().data(index, Qt.DecorationRole)
            painter.drawPixmap(option.rect.x(), option.rect.y(), decoration.pixmap(24, 24))

            left = option.rect.x()
            top = option.rect.y()

            # Collision Overlays — skip indexes past the end of the tile list.
            if index.row() >= len(Tileset.tiles):
                return
            curTile = Tileset.tiles[index.row()]

            if window.infoDisplay.collisionOverlay.isChecked():
                updateCollisionOverlay(curTile, left, top, 24, painter)

            # Tint the cell with the (translucent) palette highlight colour when selected.
            tint = QtGui.QColor(option.palette.highlight())
            tint.setAlpha(80)
            if option.state & QtWidgets.QStyle.State_Selected:
                painter.fillRect(option.rect, tint)

        def sizeHint(self, option, index):
            """Returns the size for the object"""
            return QtCore.QSize(24, 24)
#############################################################################################
############################ Tile widget for drag n'drop Objects ############################
class RepeatXModifiers(QtWidgets.QWidget):
    # Column of per-row controls for editing an object's horizontal repetition:
    # one (start, end) spinbox pair plus '+'/'-' buttons per object row.

    def __init__(self):
        super().__init__()
        self.setVisible(False)

        self.layout = QtWidgets.QVBoxLayout(self)
        self.layout.setSpacing(0)
        self.layout.setContentsMargins(0,0,0,0)

        # Parallel lists, one entry per visible row:
        # spinboxes[i] = (startSpinbox, endSpinbox); buttons[i] = (addButton, removeButton)
        self.spinboxes = []
        self.buttons = []

        # True while update() is programmatically setting spinbox values, so the
        # valueChanged handlers can ignore those synthetic signals.
        self.updating = False

    def update(self):
        """Grow/shrink the rows of controls to match the object's height and sync values."""
        global Tileset

        index = window.objectList.currentIndex().row()
        if index < 0 or index >= len(Tileset.objects):
            return

        object = Tileset.objects[index]
        if not object.repeatX:
            return

        self.updating = True

        assert len(self.spinboxes) == len(self.buttons)

        height = object.height
        numRows = len(self.spinboxes)

        if numRows < height:
            # Grow: build one HBox row (2 spinboxes + 2 buttons) per missing row.
            for i in range(numRows, height):
                layout = QtWidgets.QHBoxLayout()
                layout.setSpacing(0)
                layout.setContentsMargins(0,0,0,0)

                spinbox1 = QtWidgets.QSpinBox()
                spinbox1.setFixedSize(32, 24)
                # i=i binds the row index at creation time (avoids the late-binding
                # closure pitfall with lambdas in a loop).
                spinbox1.valueChanged.connect(lambda val, i=i: self.startValChanged(val, i))
                layout.addWidget(spinbox1)

                spinbox2 = QtWidgets.QSpinBox()
                spinbox2.setFixedSize(32, 24)
                spinbox2.valueChanged.connect(lambda val, i=i: self.endValChanged(val, i))
                layout.addWidget(spinbox2)

                button1 = QtWidgets.QPushButton('+')
                button1.setFixedSize(24, 24)
                button1.released.connect(lambda i=i: self.addTile(i))
                layout.addWidget(button1)

                button2 = QtWidgets.QPushButton('-')
                button2.setFixedSize(24, 24)
                button2.released.connect(lambda i=i: self.removeTile(i))
                layout.addWidget(button2)

                self.layout.addLayout(layout)
                self.spinboxes.append((spinbox1, spinbox2))
                self.buttons.append((button1, button2))

        elif height < numRows:
            # Shrink: tear down surplus rows from the bottom up so indexes stay valid.
            for i in reversed(range(height, numRows)):
                layout = self.layout.itemAt(i).layout()
                self.layout.removeItem(layout)

                spinbox1, spinbox2 = self.spinboxes[i]
                layout.removeWidget(spinbox1)
                layout.removeWidget(spinbox2)
                spinbox1.setParent(None)
                spinbox2.setParent(None)
                del self.spinboxes[i]

                button1, button2 = self.buttons[i]
                layout.removeWidget(button1)
                layout.removeWidget(button2)
                button1.setParent(None)
                button2.setParent(None)
                del self.buttons[i]

        for y in range(height):
            spinbox1, spinbox2 = self.spinboxes[y]

            # Keep start < end: start may reach end-1, end may reach the row width.
            spinbox1.setRange(0, object.repeatX[y][1]-1)
            spinbox2.setRange(object.repeatX[y][0]+1, len(object.tiles[y]))

            spinbox1.setValue(object.repeatX[y][0])
            spinbox2.setValue(object.repeatX[y][1])

        self.updating = False

        self.setFixedHeight(height * 24)

    def startValChanged(self, val, y):
        """Move the left edge of row *y*'s X-repeat span to *val* and re-flag its tiles."""
        if self.updating:
            return

        global Tileset

        index = window.objectList.currentIndex().row()
        if index < 0 or index >= len(Tileset.objects):
            return

        object = Tileset.objects[index]
        object.repeatX[y][0] = val

        # Bit 0 of a tile's first element marks "inside the X-repeat span".
        for x in range(len(object.tiles[y])):
            if x >= val and x < object.repeatX[y][1]:
                object.tiles[y][x] = (object.tiles[y][x][0] | 1, object.tiles[y][x][1], object.tiles[y][x][2])
            else:
                object.tiles[y][x] = (object.tiles[y][x][0] & ~1, object.tiles[y][x][1], object.tiles[y][x][2])

        # Re-tighten the spinbox ranges so start stays < end.
        spinbox1, spinbox2 = self.spinboxes[y]
        spinbox1.setRange(0, object.repeatX[y][1]-1)
        spinbox2.setRange(val+1, len(object.tiles[y]))

        window.tileWidget.tiles.update()

    def endValChanged(self, val, y):
        """Move the right edge of row *y*'s X-repeat span to *val* and re-flag its tiles."""
        if self.updating:
            return

        global Tileset

        index = window.objectList.currentIndex().row()
        if index < 0 or index >= len(Tileset.objects):
            return

        object = Tileset.objects[index]
        object.repeatX[y][1] = val

        # Bit 0 of a tile's first element marks "inside the X-repeat span".
        for x in range(len(object.tiles[y])):
            if x >= object.repeatX[y][0] and x < val:
                object.tiles[y][x] = (object.tiles[y][x][0] | 1, object.tiles[y][x][1], object.tiles[y][x][2])
            else:
                object.tiles[y][x] = (object.tiles[y][x][0] & ~1, object.tiles[y][x][1], object.tiles[y][x][2])

        # Re-tighten the spinbox ranges so start stays < end.
        spinbox1, spinbox2 = self.spinboxes[y]
        spinbox1.setRange(0, val-1)
        spinbox2.setRange(object.repeatX[y][0]+1, len(object.tiles[y]))

        window.tileWidget.tiles.update()

    def addTile(self, y):
        """Append one (transparent) tile to row *y* only, widening the object if needed."""
        if self.updating:
            return

        global Tileset

        index = window.objectList.currentIndex().row()
        if index < 0 or index >= len(Tileset.objects):
            return

        pix = QtGui.QPixmap(24,24)
        pix.fill(QtGui.QColor(0,0,0,0))

        window.tileWidget.tiles.tiles[y].append(pix)

        object = Tileset.objects[index]
        if object.repeatY and y >= object.repeatY[0] and y < object.repeatY[1]:
            # Rows inside the Y-repeat span get bit 1 set on new tiles.
            object.tiles[y].append((2, 0, 0))
        else:
            object.tiles[y].append((0, 0, 0))

        # Rows can have differing lengths; the object is as wide as its longest row.
        object.width = max(len(object.tiles[y]), object.width)

        self.update()

        window.tileWidget.tiles.size[0] = object.width
        window.tileWidget.tiles.setMinimumSize(window.tileWidget.tiles.size[0]*24 + 12, window.tileWidget.tiles.size[1]*24 + 12)
        window.tileWidget.tiles.update()
        window.tileWidget.tiles.updateList()

        # Randomization controls only apply to 1x1 objects.
        window.tileWidget.randStuff.setVisible(window.tileWidget.tiles.size == [1, 1])

    def removeTile(self, y):
        """Remove one tile from row *y*, clamping that row's X-repeat span to the new length."""
        if self.updating:
            return

        if window.tileWidget.tiles.size[0] == 1:
            return

        global Tileset

        index = window.objectList.currentIndex().row()
        if index < 0 or index >= len(Tileset.objects):
            return

        object = Tileset.objects[index]

        # Pop the preview pixmap first; bail if the row is already a single tile.
        row = window.tileWidget.tiles.tiles[y]
        if len(row) > 1:
            row.pop()
        else:
            return

        row = object.tiles[y]
        if len(row) > 1:
            row.pop()
        else:
            return

        # Clamp the repeat span to the shortened row, keeping start < end.
        start, end = object.repeatX[y]
        end = min(end, len(row))
        start = min(start, end - 1)

        if [start, end] != object.repeatX[y]:
            object.repeatX[y] = [start, end]

            # Bit 0 marks tiles inside the (clamped) X-repeat span.
            for x in range(len(row)):
                if x >= start and x < end:
                    row[x] = (row[x][0] | 1, row[x][1], row[x][2])
                else:
                    row[x] = (row[x][0] & ~1, row[x][1], row[x][2])

        object.width = max(len(row) for row in object.tiles)

        self.update()

        window.tileWidget.tiles.size[0] = object.width
        window.tileWidget.tiles.setMinimumSize(window.tileWidget.tiles.size[0]*24 + 12, window.tileWidget.tiles.size[1]*24 + 12)
        window.tileWidget.tiles.update()
        window.tileWidget.tiles.updateList()

        # Randomization controls only apply to 1x1 objects.
        window.tileWidget.randStuff.setVisible(window.tileWidget.tiles.size == [1, 1])
class RepeatYModifiers(QtWidgets.QWidget):
    """Pair of spinboxes editing the (start, end) rows of an object's vertical repetition."""

    def __init__(self):
        super().__init__()
        self.setVisible(False)

        row = QtWidgets.QHBoxLayout(self)
        row.setSpacing(0)
        row.setContentsMargins(0, 0, 0, 0)

        startBox = QtWidgets.QSpinBox()
        startBox.setFixedSize(32, 24)
        startBox.valueChanged.connect(self.startValChanged)
        row.addWidget(startBox)

        endBox = QtWidgets.QSpinBox()
        endBox.setFixedSize(32, 24)
        endBox.valueChanged.connect(self.endValChanged)
        row.addWidget(endBox)

        self.spinboxes = (startBox, endBox)
        # Suppresses valueChanged handling while update() sets values programmatically.
        self.updating = False

        self.setFixedWidth(64)

    def _currentObject(self):
        """Return the object selected in the list, or None when the selection is invalid."""
        global Tileset
        idx = window.objectList.currentIndex().row()
        if 0 <= idx < len(Tileset.objects):
            return Tileset.objects[idx]
        return None

    def update(self):
        """Sync spinbox ranges and values with the selected object's repeatY span."""
        obj = self._currentObject()
        if obj is None or not obj.repeatY:
            return

        self.updating = True
        startBox, endBox = self.spinboxes
        # Keep start < end: start may reach end-1, end may reach the object height.
        startBox.setRange(0, obj.repeatY[1] - 1)
        endBox.setRange(obj.repeatY[0] + 1, obj.height)
        startBox.setValue(obj.repeatY[0])
        endBox.setValue(obj.repeatY[1])
        self.updating = False

    def startValChanged(self, val):
        """Move the top edge of the vertical repeat span to *val*."""
        if self.updating:
            return
        obj = self._currentObject()
        if obj is None:
            return

        obj.createRepetitionY(val, obj.repeatY[1])

        startBox, endBox = self.spinboxes
        startBox.setRange(0, obj.repeatY[1] - 1)
        endBox.setRange(obj.repeatY[0] + 1, obj.height)

        window.tileWidget.tiles.update()

    def endValChanged(self, val):
        """Move the bottom edge of the vertical repeat span to *val*."""
        if self.updating:
            return
        obj = self._currentObject()
        if obj is None:
            return

        obj.createRepetitionY(obj.repeatY[0], val)

        startBox, endBox = self.spinboxes
        startBox.setRange(0, obj.repeatY[1] - 1)
        endBox.setRange(obj.repeatY[0] + 1, obj.height)

        window.tileWidget.tiles.update()
class SlopeLineModifier(QtWidgets.QWidget):
    """Single spinbox controlling how many rows belong to a slope's upper section."""

    def __init__(self):
        super().__init__()
        self.setVisible(False)

        box = QtWidgets.QHBoxLayout(self)
        box.setSpacing(0)
        box.setContentsMargins(0, 0, 0, 0)

        self.spinbox = QtWidgets.QSpinBox()
        self.spinbox.setFixedSize(32, 24)
        self.spinbox.valueChanged.connect(self.valChanged)
        box.addWidget(self.spinbox)

        # Suppresses valChanged handling while update() sets the value programmatically.
        self.updating = False

        self.setFixedWidth(32)

    def _currentObject(self):
        """Return the currently selected object, or None if the selection is invalid."""
        global Tileset
        idx = window.objectList.currentIndex().row()
        if 0 <= idx < len(Tileset.objects):
            return Tileset.objects[idx]
        return None

    def update(self):
        """Sync the spinbox with the selected object's upper-slope height (slopes only)."""
        obj = self._currentObject()
        if obj is None or obj.upperslope[0] == 0:
            return

        self.updating = True
        self.spinbox.setRange(1, obj.height)
        self.spinbox.setValue(obj.upperslope[1])
        self.updating = False

    def valChanged(self, val):
        """Apply the new upper-slope height and refresh the tile preview."""
        if self.updating:
            return
        obj = self._currentObject()
        if obj is None:
            return

        if obj.height == 1:
            # A one-row object is all upper section; there is no lower part.
            obj.upperslope[1] = 1
            obj.lowerslope = [0, 0]
        else:
            obj.upperslope[1] = val
            obj.lowerslope = [0x84, obj.height - val]

        tiles = window.tileWidget.tiles
        # Reverse slope types (0x92/0x93, i.e. bit 1 set) are drawn with a negative slope.
        tiles.slope = -obj.upperslope[1] if obj.upperslope[0] & 2 else obj.upperslope[1]
        tiles.update()
class tileOverlord(QtWidgets.QWidget):
    # Control panel for the object editor: the tile grid (tileWidget) plus the
    # add/remove buttons, tiling-method selector and randomization options.

    def __init__(self):
        super(tileOverlord, self).__init__()

        # Setup Widgets
        self.tiles = tileWidget()

        self.addObject = QtWidgets.QPushButton('Add')
        self.removeObject = QtWidgets.QPushButton('Remove')

        global Tileset

        # "Null" toggle: when checked, clicks on the grid place the null tile.
        self.placeNull = QtWidgets.QPushButton('Null')
        self.placeNull.setCheckable(True)
        self.placeNull.setChecked(Tileset.placeNullChecked)

        self.addRow = QtWidgets.QPushButton('+')
        self.removeRow = QtWidgets.QPushButton('-')

        self.addColumn = QtWidgets.QPushButton('+')
        self.removeColumn = QtWidgets.QPushButton('-')

        self.tilingMethod = QtWidgets.QComboBox()
        self.tilesetType = QtWidgets.QLabel('Pa%d' % Tileset.slot)

        # NOTE(review): items 6/7 are labeled 'Downward reverse slope' then
        # 'Upward reverse slope', but the branch comments in setTiling() use the
        # opposite order — confirm which is correct.
        self.tilingMethod.addItems(['No Repetition',
                                    'Repeat X',
                                    'Repeat Y',
                                    'Repeat X and Y',
                                    'Upward slope',
                                    'Downward slope',
                                    'Downward reverse slope',
                                    'Upward reverse slope'])

        self.randX = QtWidgets.QCheckBox('Randomize Horizontally')
        self.randY = QtWidgets.QCheckBox('Randomize Vertically')
        self.randX.setToolTip('<b>Randomize Horizontally:</b><br><br>'
            'Check this if you want to use randomized replacements for '
            'this tile, in the <u>horizontal</u> direction. Examples: '
            'floor tiles and ceiling tiles.')
        self.randY.setToolTip('<b>Randomize Vertically:</b><br><br>'
            'Check this if you want to use randomized replacements for '
            'this tile, in the <u>vertical</u> direction. Example: '
            'edge tiles.')

        self.randLenLbl = QtWidgets.QLabel('Total Randomizable Tiles:')
        self.randLen = QtWidgets.QSpinBox()
        self.randLen.setRange(1, 15)
        self.randLen.setEnabled(False)
        self.randLen.setToolTip('<b>Total Randomizable Tiles:</b><br><br>'
            'This specifies the total number of tiles the game may '
            'use for randomized replacements of this tile. This '
            'will be the tile itself, and <i>(n - 1)</i> tiles after it, '
            'where <i>n</i> is the number in this box. Tiles "after" this one '
            'are tiles to the right of it in the tileset image, wrapping '
            'to the next line if the right edge of the image is reached.')

        # Connections
        self.addObject.released.connect(self.addObj)
        self.removeObject.released.connect(self.removeObj)
        self.placeNull.toggled.connect(self.doPlaceNull)
        self.addRow.released.connect(self.addRowHandler)
        self.removeRow.released.connect(self.removeRowHandler)
        self.addColumn.released.connect(self.addColumnHandler)
        self.removeColumn.released.connect(self.removeColumnHandler)
        self.tilingMethod.currentIndexChanged.connect(self.setTiling)
        self.randX.toggled.connect(self.changeRandX)
        self.randY.toggled.connect(self.changeRandY)
        self.randLen.valueChanged.connect(self.changeRandLen)

        # Layout
        self.randStuff = QtWidgets.QWidget()
        randLyt = QtWidgets.QGridLayout(self.randStuff)
        randLyt.addWidget(self.randX, 0, 0)
        randLyt.addWidget(self.randY, 1, 0)
        randLyt.addWidget(self.randLenLbl, 0, 1)
        randLyt.addWidget(self.randLen, 1, 1)

        self.repeatX = RepeatXModifiers()
        repeatXLyt = QtWidgets.QVBoxLayout()
        repeatXLyt.addWidget(self.repeatX)

        self.repeatY = RepeatYModifiers()
        repeatYLyt = QtWidgets.QHBoxLayout()
        repeatYLyt.addWidget(self.repeatY)

        self.slopeLine = SlopeLineModifier()
        slopeLineLyt = QtWidgets.QVBoxLayout()
        slopeLineLyt.addWidget(self.slopeLine)

        tilesLyt = QtWidgets.QGridLayout()
        tilesLyt.setSpacing(0)
        tilesLyt.setContentsMargins(0,0,0,0)
        tilesLyt.addWidget(self.tiles, 0, 0, 3, 4)
        tilesLyt.addLayout(repeatXLyt, 0, 4, 3, 1)
        tilesLyt.addLayout(repeatYLyt, 3, 0, 1, 4)
        tilesLyt.addLayout(slopeLineLyt, 0, 5, 3, 1)

        layout = QtWidgets.QGridLayout()
        layout.addWidget(self.tilesetType, 0, 0, 1, 3)
        layout.addWidget(self.tilingMethod, 0, 3, 1, 3)
        layout.addWidget(self.addObject, 0, 6, 1, 1)
        layout.addWidget(self.removeObject, 0, 7, 1, 1)
        layout.setRowMinimumHeight(1, 40)
        layout.addWidget(self.randStuff, 1, 0, 1, 8)
        layout.setRowStretch(2, 1)
        layout.setRowStretch(3, 5)
        layout.setRowStretch(6, 5)
        layout.addLayout(tilesLyt, 3, 1, 4, 6)
        layout.addWidget(self.placeNull, 3, 7, 1, 1)
        layout.addWidget(self.addColumn, 4, 7, 1, 1)
        layout.addWidget(self.removeColumn, 5, 7, 1, 1)
        layout.addWidget(self.addRow, 7, 3, 1, 1)
        layout.addWidget(self.removeRow, 7, 4, 1, 1)
        self.setLayout(layout)

    def addObj(self):
        """Append a new object to the tileset and mirror it in the object list model."""
        global Tileset

        Tileset.addObject(new=True)

        # New objects preview as tile 0, scaled to the 24px grid.
        pix = QtGui.QPixmap(24, 24)
        pix.fill(Qt.transparent)

        painter = QtGui.QPainter(pix)
        painter.drawPixmap(0, 0, Tileset.tiles[0].image.scaledToWidth(24, Qt.SmoothTransformation))
        painter.end()
        del painter

        count = len(Tileset.objects)
        item = QtGui.QStandardItem(QtGui.QIcon(pix), 'Object {0}'.format(count-1))
        item.setFlags(item.flags() & ~Qt.ItemIsEditable)
        window.objmodel.appendRow(item)
        # NOTE(review): this re-applies the *current* selection rather than
        # selecting the newly added row — confirm that is intentional.
        index = window.objectList.currentIndex()
        window.objectList.setCurrentIndex(index)
        self.setObject(index)

        window.objectList.update()
        self.update()

    def removeObj(self):
        """Delete the selected object from the tileset and rebuild the list model."""
        global Tileset

        index = window.objectList.currentIndex().row()
        if index < 0 or index >= len(Tileset.objects):
            return

        Tileset.removeObject(index)
        window.objmodel.removeRow(index)

        self.tiles.clear()

        SetupObjectModel(window.objmodel, Tileset.objects, Tileset.tiles)

        window.objectList.update()
        self.update()

    def doPlaceNull(self, checked):
        """Remember whether grid clicks should place the null tile (see tileWidget)."""
        global Tileset
        Tileset.placeNullChecked = checked

    def setObject(self, index):
        """Load the object at model *index* into the editor widgets."""
        global Tileset
        self.tiles.object = index.row()

        if self.tiles.object < 0 or self.tiles.object >= len(Tileset.objects):
            return

        object = Tileset.objects[index.row()]

        # Randomization only applies to 1x1 objects.
        self.randStuff.setVisible((object.width, object.height) == (1, 1))
        self.randX.setChecked(object.randX == 1)
        self.randY.setChecked(object.randY == 1)
        self.randLen.setValue(object.randLen)
        self.randLen.setEnabled(object.randX + object.randY > 0)

        self.tilingMethod.setCurrentIndex(object.determineTilingMethod())

        self.tiles.setObject(object)

    @QtCore.pyqtSlot(int)
    def setTiling(self, listindex):
        """Switch the object's tiling mode: update modifier-widget visibility, then
        rewrite the object's repetition/slope data for the chosen mode."""
        # Visibility first — this runs even when the object itself is unchanged.
        if listindex == 0: # No Repetition
            self.repeatX.setVisible(False)
            self.repeatY.setVisible(False)
            self.slopeLine.setVisible(False)

        elif listindex == 1: # Repeat X
            self.repeatX.setVisible(True)
            self.repeatY.setVisible(False)
            self.slopeLine.setVisible(False)

        elif listindex == 2: # Repeat Y
            self.repeatX.setVisible(False)
            self.repeatY.setVisible(True)
            self.slopeLine.setVisible(False)

        elif listindex == 3: # Repeat X and Y
            self.repeatX.setVisible(True)
            self.repeatY.setVisible(True)
            self.slopeLine.setVisible(False)

        elif listindex == 4: # Upward Slope
            self.repeatX.setVisible(False)
            self.repeatY.setVisible(False)
            self.slopeLine.setVisible(True)

        elif listindex == 5: # Downward Slope
            self.repeatX.setVisible(False)
            self.repeatY.setVisible(False)
            self.slopeLine.setVisible(True)

        elif listindex == 6: # Upward Reverse Slope
            self.repeatX.setVisible(False)
            self.repeatY.setVisible(False)
            self.slopeLine.setVisible(True)

        elif listindex == 7: # Downward Reverse Slope
            self.repeatX.setVisible(False)
            self.repeatY.setVisible(False)
            self.slopeLine.setVisible(True)

        global Tileset

        index = window.objectList.currentIndex().row()
        if index < 0 or index >= len(Tileset.objects):
            return

        object = Tileset.objects[index]
        if object.tilingMethodIdx == listindex:
            return

        object.tilingMethodIdx = listindex
        self.tiles.slope = 0

        if listindex == 0: # No Repetition
            object.clearRepetitionXY()

            object.upperslope = [0, 0]
            object.lowerslope = [0, 0]

        elif listindex == 1: # Repeat X
            object.clearRepetitionY()

            if not object.repeatX:
                object.createRepetitionX()
                self.repeatX.update()

            object.upperslope = [0, 0]
            object.lowerslope = [0, 0]

        elif listindex == 2: # Repeat Y
            object.clearRepetitionX()

            if not object.repeatY:
                object.createRepetitionY(0, object.height)
                self.repeatY.update()

            object.upperslope = [0, 0]
            object.lowerslope = [0, 0]

        elif listindex == 3: # Repeat X and Y
            if not object.repeatX:
                object.createRepetitionX()
                self.repeatX.update()

            if not object.repeatY:
                object.createRepetitionY(0, object.height)
                self.repeatY.update()

            object.upperslope = [0, 0]
            object.lowerslope = [0, 0]

        elif listindex == 4: # Upward Slope
            object.clearRepetitionXY()

            # Only (re)initialize the slope data when switching to this slope type.
            if object.upperslope[0] != 0x90:
                object.upperslope = [0x90, 1]

                if object.height == 1:
                    object.lowerslope = [0, 0]
                else:
                    object.lowerslope = [0x84, object.height - 1]

            self.tiles.slope = object.upperslope[1]
            self.slopeLine.update()

        elif listindex == 5: # Downward Slope
            object.clearRepetitionXY()

            if object.upperslope[0] != 0x91:
                object.upperslope = [0x91, 1]

                if object.height == 1:
                    object.lowerslope = [0, 0]
                else:
                    object.lowerslope = [0x84, object.height - 1]

            self.tiles.slope = object.upperslope[1]
            self.slopeLine.update()

        elif listindex == 6: # Upward Reverse Slope
            object.clearRepetitionXY()

            if object.upperslope[0] != 0x92:
                object.upperslope = [0x92, 1]

                if object.height == 1:
                    object.lowerslope = [0, 0]
                else:
                    object.lowerslope = [0x84, object.height - 1]

            # Reverse slopes draw with a negative slope value.
            self.tiles.slope = -object.upperslope[1]
            self.slopeLine.update()

        elif listindex == 7: # Downward Reverse Slope
            object.clearRepetitionXY()

            if object.upperslope[0] != 0x93:
                object.upperslope = [0x93, 1]

                if object.height == 1:
                    object.lowerslope = [0, 0]
                else:
                    object.lowerslope = [0x84, object.height - 1]

            self.tiles.slope = -object.upperslope[1]
            self.slopeLine.update()

        self.tiles.update()

    def addRowHandler(self):
        """Forward the '+' row button to the grid after validating the selection."""
        index = window.objectList.currentIndex()
        self.tiles.object = index.row()

        if self.tiles.object < 0 or self.tiles.object >= len(Tileset.objects):
            return
        self.tiles.addRow()
        self.randStuff.setVisible(self.tiles.size == [1, 1])

    def removeRowHandler(self):
        """Forward the '-' row button to the grid after validating the selection."""
        index = window.objectList.currentIndex()
        self.tiles.object = index.row()

        if self.tiles.object < 0 or self.tiles.object >= len(Tileset.objects):
            return
        self.tiles.removeRow()
        self.randStuff.setVisible(self.tiles.size == [1, 1])

    def addColumnHandler(self):
        """Forward the '+' column button to the grid after validating the selection."""
        index = window.objectList.currentIndex()
        self.tiles.object = index.row()

        if self.tiles.object < 0 or self.tiles.object >= len(Tileset.objects):
            return
        self.tiles.addColumn()
        self.randStuff.setVisible(self.tiles.size == [1, 1])

    def removeColumnHandler(self):
        """Forward the '-' column button to the grid after validating the selection."""
        index = window.objectList.currentIndex()
        self.tiles.object = index.row()

        if self.tiles.object < 0 or self.tiles.object >= len(Tileset.objects):
            return
        self.tiles.removeColumn()
        self.randStuff.setVisible(self.tiles.size == [1, 1])

    def changeRandX(self, toggled):
        """Toggle horizontal randomization for the selected object."""
        index = window.objectList.currentIndex()
        self.tiles.object = index.row()

        if self.tiles.object < 0 or self.tiles.object >= len(Tileset.objects):
            return

        object = Tileset.objects[self.tiles.object]

        object.randX = 1 if toggled else 0
        # The length spinbox only matters when at least one direction is enabled.
        self.randLen.setEnabled(object.randX + object.randY > 0)

    def changeRandY(self, toggled):
        """Toggle vertical randomization for the selected object."""
        index = window.objectList.currentIndex()
        self.tiles.object = index.row()

        if self.tiles.object < 0 or self.tiles.object >= len(Tileset.objects):
            return

        object = Tileset.objects[self.tiles.object]

        object.randY = 1 if toggled else 0
        self.randLen.setEnabled(object.randX + object.randY > 0)

    def changeRandLen(self, val):
        """Store the number of tiles the game may use as randomized replacements."""
        index = window.objectList.currentIndex()
        self.tiles.object = index.row()

        if self.tiles.object < 0 or self.tiles.object >= len(Tileset.objects):
            return

        object = Tileset.objects[self.tiles.object]

        object.randLen = val
class tileWidget(QtWidgets.QWidget):
def __init__(self):
super(tileWidget, self).__init__()
self.tiles = []
self.size = [1, 1]
self.setMinimumSize(36, 36) # (24, 24) + padding
self.slope = 0
self.highlightedRect = QtCore.QRect()
self.setAcceptDrops(True)
self.object = -1
def clear(self):
self.tiles = []
self.size = [1, 1] # [width, height]
self.slope = 0
self.highlightedRect = QtCore.QRect()
self.update()
    def addColumn(self):
        """Widen the object by one column, extending both the preview grid and the tile data."""
        global Tileset

        # Hard cap of 24 columns.
        if self.size[0] >= 24:
            return

        if self.object < 0 or self.object >= len(Tileset.objects):
            return

        self.size[0] += 1
        self.setMinimumSize(self.size[0]*24 + 12, self.size[1]*24 + 12)

        curObj = Tileset.objects[self.object]
        curObj.width += 1

        # New preview cells are transparent.
        pix = QtGui.QPixmap(24,24)
        pix.fill(QtGui.QColor(0,0,0,0))

        for row in self.tiles:
            row.append(pix)

        if curObj.repeatY:
            # Rows inside the vertical-repeat span get bit 1 set on the new tile.
            for y, row in enumerate(curObj.tiles):
                if y >= curObj.repeatY[0] and y < curObj.repeatY[1]:
                    row.append((2, 0, 0))
                else:
                    row.append((0, 0, 0))
        else:
            for row in curObj.tiles:
                row.append((0, 0, 0))

        self.update()
        self.updateList()

        window.tileWidget.repeatX.update()
    def removeColumn(self):
        """Narrow the object by one column, clamping each row's X-repeat span to the new width."""
        global Tileset

        if self.size[0] == 1:
            return

        if self.object < 0 or self.object >= len(Tileset.objects):
            return

        self.size[0] -= 1
        self.setMinimumSize(self.size[0]*24 + 12, self.size[1]*24 + 12)

        curObj = Tileset.objects[self.object]
        curObj.width -= 1

        for row in self.tiles:
            if len(row) > 1:
                row.pop()

        for row in curObj.tiles:
            if len(row) > 1:
                row.pop()

        if curObj.repeatX:
            for y, row in enumerate(curObj.tiles):
                # Clamp the repeat span to the shortened row, keeping start < end.
                start, end = curObj.repeatX[y]
                end = min(end, len(row))
                start = min(start, end - 1)

                if [start, end] != curObj.repeatX[y]:
                    curObj.repeatX[y] = [start, end]

                    # Bit 0 marks tiles inside the (clamped) X-repeat span.
                    for x in range(len(row)):
                        if x >= start and x < end:
                            row[x] = (row[x][0] | 1, row[x][1], row[x][2])
                        else:
                            row[x] = (row[x][0] & ~1, row[x][1], row[x][2])

        self.update()
        self.updateList()

        window.tileWidget.repeatX.update()
    def addRow(self):
        """Grow the object by one row, extending repeat data and slope bookkeeping."""
        global Tileset

        # Hard cap of 24 rows.
        if self.size[1] >= 24:
            return

        if self.object < 0 or self.object >= len(Tileset.objects):
            return

        self.size[1] += 1
        self.setMinimumSize(self.size[0]*24 + 12, self.size[1]*24 + 12)

        curObj = Tileset.objects[self.object]
        curObj.height += 1

        # New preview cells are transparent.
        pix = QtGui.QPixmap(24,24)
        pix.fill(QtGui.QColor(0,0,0,0))

        self.tiles.append([pix for _ in range(curObj.width)])

        if curObj.repeatX:
            # The new row repeats over its full width (bit 0 set on every tile).
            curObj.tiles.append([(1, 0, 0) for _ in range(curObj.width)])
            curObj.repeatX.append([0, curObj.width])
        else:
            curObj.tiles.append([(0, 0, 0) for _ in range(curObj.width)])

        if curObj.upperslope[0] != 0:
            # For slopes, each added row enlarges the lower section.
            curObj.lowerslope = [0x84, curObj.lowerslope[1] + 1]

        self.update()
        self.updateList()

        window.tileWidget.repeatX.update()
        window.tileWidget.repeatY.update()
        window.tileWidget.slopeLine.update()
    def removeRow(self):
        """Shrink the object by one row, clamping Y-repeat and slope data to the new height."""
        global Tileset

        if self.size[1] == 1:
            return

        if self.object < 0 or self.object >= len(Tileset.objects):
            return

        self.tiles.pop()

        self.size[1] -= 1
        self.setMinimumSize(self.size[0]*24 + 12, self.size[1]*24 + 12)

        curObj = Tileset.objects[self.object]
        # NOTE(review): copies the row list before mutating — presumably to avoid
        # aliasing with another reference to the same list; confirm.
        curObj.tiles = list(curObj.tiles)
        curObj.height -= 1

        curObj.tiles.pop()

        if curObj.repeatX:
            curObj.repeatX.pop()

        if curObj.repeatY:
            # Clamp the vertical repeat span to the new height, keeping start < end.
            start, end = curObj.repeatY
            end = min(end, curObj.height)
            start = min(start, end - 1)

            if [start, end] != curObj.repeatY:
                curObj.createRepetitionY(start, end)

        if curObj.upperslope[0] != 0:
            if curObj.upperslope[1] > curObj.height or curObj.height == 1:
                # The upper section no longer fits: it absorbs the whole object.
                curObj.upperslope[1] = curObj.height
                curObj.lowerslope = [0, 0]

                # Reverse slope types (bit 1 set) draw with a negative slope value.
                if curObj.upperslope[0] & 2:
                    self.slope = -curObj.upperslope[1]
                else:
                    self.slope = curObj.upperslope[1]
            else:
                curObj.lowerslope = [0x84, curObj.lowerslope[1] - 1]

        self.update()
        self.updateList()

        window.tileWidget.repeatX.update()
        window.tileWidget.repeatY.update()
        window.tileWidget.slopeLine.update()
    def setObject(self, object):
        """Render *object* into the preview grid and adopt its size and slope settings."""
        self.clear()

        global Tileset

        self.size = [object.width, object.height]
        self.setMinimumSize(self.size[0]*24 + 12, self.size[1]*24 + 12)

        if not object.upperslope[1] == 0:
            # Reverse slope types (bit 1 set) draw with a negative slope value.
            if object.upperslope[0] & 2:
                self.slope = -object.upperslope[1]
            else:
                self.slope = object.upperslope[1]

        # NOTE(review): x and y are maintained below but never read — dead counters.
        x = 0
        y = 0
        for row in object.tiles:
            self.tiles.append([])
            for tile in row:
                # Slot-0 tilesets draw every tile; other slots skip empty entries.
                if (Tileset.slot == 0) or ((tile[2] & 3) != 0):
                    # Slot-0 overrides take priority over the tile's own image.
                    image = Tileset.overrides[tile[1]] if Tileset.slot == 0 and window.overrides else None
                    if not image:
                        image = Tileset.tiles[tile[1]].image
                    self.tiles[-1].append(image.scaledToWidth(24, Qt.SmoothTransformation))
                else:
                    # Empty entries render as a transparent cell.
                    pix = QtGui.QPixmap(24,24)
                    pix.fill(QtGui.QColor(0,0,0,0))
                    self.tiles[-1].append(pix)
                x += 1
            y += 1
            x = 0

        self.object = window.objectList.currentIndex().row()

        self.update()
        self.updateList()

        window.tileWidget.repeatX.update()
        window.tileWidget.repeatY.update()
        window.tileWidget.slopeLine.update()
def mousePressEvent(self, event):
    """Place the selected palette tile(s) into the object at the clicked
    cell, or blank cells out when "place null" is checked."""
    global Tileset

    # Ignore right-clicks (button 2).
    if event.button() == 2:
        return

    index = window.objectList.currentIndex()
    self.object = index.row()

    if self.object < 0 or self.object >= len(Tileset.objects):
        return

    if Tileset.placeNullChecked:
        # Null placement: blank out the clicked cell only.
        centerPoint = self.contentsRect().center()

        upperLeftX = centerPoint.x() - self.size[0]*12
        upperLeftY = centerPoint.y() - self.size[1]*12

        lowerRightX = centerPoint.x() + self.size[0]*12
        lowerRightY = centerPoint.y() + self.size[1]*12

        # Grid cell under the cursor (24px cells).
        x = int((event.x() - upperLeftX)/24)
        y = int((event.y() - upperLeftY)/24)

        if event.x() < upperLeftX or event.y() < upperLeftY or event.x() > lowerRightX or event.y() > lowerRightY:
            return

        if Tileset.slot == 0:
            try:
                # Pa0: "null" shows tile 0 of this tileset.
                self.tiles[y][x] = Tileset.tiles[0].image.scaledToWidth(24, Qt.SmoothTransformation)
                Tileset.objects[self.object].tiles[y][x] = (Tileset.objects[self.object].tiles[y][x][0], 0, 0)
            except IndexError:
                pass
        else:
            # Other slots: "null" is a fully transparent cell.
            pix = QtGui.QPixmap(24,24)
            pix.fill(QtGui.QColor(0,0,0,0))

            try:
                self.tiles[y][x] = pix
                Tileset.objects[self.object].tiles[y][x] = (Tileset.objects[self.object].tiles[y][x][0], 0, 0)
            except IndexError:
                pass
    else:
        if window.tileDisplay.selectedIndexes() == []:
            return

        currentSelected = window.tileDisplay.selectedIndexes()

        ix = 0
        iy = 0

        # Paint the whole palette selection starting at the clicked cell,
        # wrapping at the object's right edge.
        for modelItem in currentSelected:
            # Update yourself!
            centerPoint = self.contentsRect().center()

            tile = modelItem.row()

            upperLeftX = centerPoint.x() - self.size[0]*12
            upperLeftY = centerPoint.y() - self.size[1]*12

            lowerRightX = centerPoint.x() + self.size[0]*12
            lowerRightY = centerPoint.y() + self.size[1]*12

            x = int((event.x() - upperLeftX)/24 + ix)
            y = int((event.y() - upperLeftY)/24 + iy)

            if event.x() < upperLeftX or event.y() < upperLeftY or event.x() > lowerRightX or event.y() > lowerRightY:
                return

            try:
                # Pa0 with overrides on: prefer the override image if set.
                image = Tileset.overrides[tile] if Tileset.slot == 0 and window.overrides else None
                if not image:
                    image = Tileset.tiles[tile].image

                self.tiles[y][x] = image.scaledToWidth(24, Qt.SmoothTransformation)
                Tileset.objects[self.object].tiles[y][x] = (Tileset.objects[self.object].tiles[y][x][0], tile, Tileset.slot)
            except IndexError:
                pass

            ix += 1
            if self.size[0]-1 < ix:
                ix = 0
                iy += 1
            if iy > self.size[1]-1:
                break

    self.update()
    self.updateList()
def updateList(self):
    """Redraw the current object's icon in the object list.

    Composites this widget's 24x24 tile pixmaps onto a transparent
    canvas and installs the result as the selected list item's icon.
    """
    # Update the list >.>
    object = window.objmodel.itemFromIndex(window.objectList.currentIndex())
    # Nothing selected (or a stale index): nothing to refresh.
    if not object:
        return

    tex = QtGui.QPixmap(self.size[0] * 24, self.size[1] * 24)
    tex.fill(Qt.transparent)
    painter = QtGui.QPainter(tex)

    # (The original also kept unused Xoffset/Yoffset counters; removed.)
    for y, row in enumerate(self.tiles):
        for x, tile in enumerate(row):
            painter.drawPixmap(x*24, y*24, tile)

    painter.end()

    object.setIcon(QtGui.QIcon(tex))

    window.objectList.update()
def paintEvent(self, event):
    """Paint the object editor: backdrop, tile pixmaps, the slope divider
    line and the X/Y repetition guide lines for the selected object."""
    painter = QtGui.QPainter()
    painter.begin(self)

    centerPoint = self.contentsRect().center()
    upperLeftX = centerPoint.x() - self.size[0]*12
    lowerRightX = centerPoint.x() + self.size[0]*12

    upperLeftY = centerPoint.y() - self.size[1]*12
    lowerRightY = centerPoint.y() + self.size[1]*12

    index = window.objectList.currentIndex()
    self.object = index.row()

    if self.object < 0 or self.object >= len(Tileset.objects):
        painter.end()
        return

    object = Tileset.objects[self.object]

    # Light-blue backdrop, one 24px band per row (rows may differ in length).
    for y, row in enumerate(object.tiles):
        painter.fillRect(upperLeftX, upperLeftY + y * 24, len(row) * 24, 24, QtGui.QColor(205, 205, 255))

    for y, row in enumerate(self.tiles):
        for x, pix in enumerate(row):
            painter.drawPixmap(upperLeftX + (x * 24), upperLeftY + (y * 24), pix)

    if object.upperslope[0] & 0x80:
        # Slope object: dashed blue divider between main and sub sections.
        pen = QtGui.QPen()
        pen.setStyle(Qt.DashLine)
        pen.setWidth(2)
        pen.setColor(Qt.blue)
        painter.setPen(QtGui.QPen(pen))

        slope = self.slope
        if slope < 0:
            # Negative slope values count from the bottom (ceiling slopes).
            slope += self.size[1]

        painter.drawLine(upperLeftX, upperLeftY + (slope * 24), lowerRightX, upperLeftY + (slope * 24))

        font = painter.font()
        font.setPixelSize(8)
        font.setFamily('Monaco')
        painter.setFont(font)

        if self.slope > 0:
            painter.drawText(upperLeftX+1, upperLeftY+10, 'Main')
            painter.drawText(upperLeftX+1, upperLeftY + (slope * 24) + 9, 'Sub')
        else:
            painter.drawText(upperLeftX+1, upperLeftY + self.size[1]*24 - 4, 'Main')
            painter.drawText(upperLeftX+1, upperLeftY + (slope * 24) - 3, 'Sub')

    # NOTE(review): defensive only -- the early return above already
    # guarantees self.object is in range here.
    if 0 <= self.object < len(Tileset.objects):
        object = Tileset.objects[self.object]
        if object.repeatX:
            # Blue dashed verticals mark each row's X-repetition range.
            pen = QtGui.QPen()
            pen.setStyle(Qt.DashLine)
            pen.setWidth(2)
            pen.setColor(Qt.blue)
            painter.setPen(QtGui.QPen(pen))

            for y in range(object.height):
                startX, endX = object.repeatX[y]
                painter.drawLine(upperLeftX + startX * 24, upperLeftY + y * 24, upperLeftX + startX * 24, upperLeftY + y * 24 + 24)
                painter.drawLine(upperLeftX + endX * 24, upperLeftY + y * 24, upperLeftX + endX * 24, upperLeftY + y * 24 + 24)

        if object.repeatY:
            # Red dashed horizontals mark the Y-repetition range.
            pen = QtGui.QPen()
            pen.setStyle(Qt.DashLine)
            pen.setWidth(2)
            pen.setColor(Qt.red)
            painter.setPen(QtGui.QPen(pen))

            painter.drawLine(upperLeftX, upperLeftY + object.repeatY[0] * 24, lowerRightX, upperLeftY + object.repeatY[0] * 24)
            painter.drawLine(upperLeftX, upperLeftY + object.repeatY[1] * 24, lowerRightX, upperLeftY + object.repeatY[1] * 24)

    painter.end()
#############################################################################################
################################## Pa0 Tileset Animation Tab ################################
class frameTileWidget(QtWidgets.QWidget):
    """Base widget that paints an animation-frame preview.

    Subclasses override width(), height() and pixmap() to describe the
    block size (in tiles) and the pixmap to draw; this base class only
    handles centering and painting.
    """

    def __init__(self, parent):
        super().__init__()
        self.parent = parent

    def width(self):
        # Block width in tiles; overridden by subclasses.
        return 0

    def height(self):
        # Block height in tiles; overridden by subclasses.
        return 0

    def pixmap(self):
        # Pixmap to paint; overridden by subclasses.
        return None

    def paintEvent(self, event):
        # With no loaded frames there is nothing to show.
        if not self.parent.frames:
            return

        p = QtGui.QPainter()
        p.begin(self)

        w = self.width()
        h = self.height()
        frame = self.pixmap()

        center = self.contentsRect().center()
        left = center.x() - w * 30
        top = center.y() - h * 30

        # Light-blue backdrop behind the (possibly transparent) frame.
        p.fillRect(left, top, w * 60, h * 60, QtGui.QColor(205, 205, 255))
        p.drawPixmap(left, top, frame)
class frameByFrameTab(QtWidgets.QWidget):
    """Animation tab showing one frame at a time, with import/export,
    add/delete buttons and a timer-driven animated preview."""

    class tileWidget(frameTileWidget):
        # Displays the single frame selected by `idx`.
        def __init__(self, parent):
            super().__init__(parent)
            self.idx = 0

        def width(self):
            return self.parent.blockWidth

        def height(self):
            return self.parent.blockHeight

        def pixmap(self):
            return self.parent.frames[self.idx]

    def __init__(self, parent):
        """Build the controls; *parent* is the owning tileAnime widget."""
        super().__init__()
        self.parent = parent

        self.importButton = QtWidgets.QPushButton('Import')
        self.importButton.released.connect(self.importFrame)
        self.importButton.setEnabled(False)

        self.exportButton = QtWidgets.QPushButton('Export')
        self.exportButton.released.connect(self.exportFrame)
        self.exportButton.setEnabled(False)

        self.addButton = QtWidgets.QPushButton('Add Frame')
        self.addButton.released.connect(self.addFrame)

        self.deleteButton = QtWidgets.QPushButton('Delete Frame')
        self.deleteButton.released.connect(self.deleteFrame)
        self.deleteButton.setEnabled(False)

        self.playButton = QtWidgets.QPushButton('Play Preview')
        self.playButton.setCheckable(True)
        self.playButton.toggled.connect(self.playPreview)
        self.playButton.setEnabled(False)

        self.tiles = frameByFrameTab.tileWidget(parent)

        self.frameIdx = QtWidgets.QSpinBox()
        self.frameIdx.setRange(0, 0)
        self.frameIdx.valueChanged.connect(self.frameIdxChanged)
        self.frameIdx.setEnabled(False)

        layout = QtWidgets.QGridLayout()
        layout.addWidget(self.tiles, 0, 1, 2, 3)
        layout.addWidget(self.frameIdx, 3, 2, 1, 1)
        layout.addWidget(self.importButton, 3, 0, 1, 1)
        layout.addWidget(self.exportButton, 4, 0, 1, 1)
        layout.addWidget(self.addButton, 3, 4, 1, 1)
        layout.addWidget(self.deleteButton, 4, 4, 1, 1)
        layout.addWidget(self.playButton, 4, 1, 1, 3)
        self.setLayout(layout)

        # Preview timer: every tick advances to the next frame.
        self.previewTimer = QtCore.QTimer()
        self.previewTimer.timeout.connect(lambda: self.frameIdxChanged(self.getNextFrame()))

    def update(self):
        self.tiles.update()
        super().update()

    def frameIdxChanged(self, idx):
        # Spin box (or preview timer) selected a different frame.
        self.tiles.idx = idx
        self.update()

    def importPixmap(self):
        """Ask for a PNG sized for one block and return it as a QPixmap.

        Accepts either the exact 60px-per-tile size, or the padded
        64px-per-tile size (the 2px borders are then cropped away).
        Returns None if the user cancels or the size is wrong.
        """
        path = QtWidgets.QFileDialog.getOpenFileName(self, "Open Image", '',
                                                     '.png (*.png)')[0]
        if not path:
            return None

        pixmap = QtGui.QPixmap(path)
        width = pixmap.width()
        height = pixmap.height()

        blockWidth = self.parent.blockWidth
        blockHeight = self.parent.blockHeight

        requiredWidth = blockWidth * 60
        requiredHeight = blockHeight * 60

        try:
            assert width == requiredWidth
            assert height == requiredHeight
        except AssertionError:
            # Not the exact size; maybe it is the padded (64px/tile) layout.
            requiredWidthPadded = blockWidth * 64
            requiredHeightPadded = blockHeight * 64

            try:
                assert width == requiredWidthPadded
                assert height == requiredHeightPadded
            except AssertionError:
                QtWidgets.QMessageBox.warning(self, "Open Image",
                                              "The image was not the proper dimensions.\n"
                                              "Please resize the image to %dx%d pixels." % (requiredWidth, requiredHeight),
                                              QtWidgets.QMessageBox.Cancel)
                return None

            # Crop each padded 64x64 cell down to its central 60x60 area.
            paddedPixmap = pixmap
            pixmap = QtGui.QPixmap(requiredWidth, requiredHeight)
            pixmap.fill(Qt.transparent)

            for y in range(height // 64):
                for x in range(width // 64):
                    painter = QtGui.QPainter(pixmap)
                    painter.drawPixmap(x * 60, y * 60, paddedPixmap.copy(x*64 + 2, y*64 + 2, 60, 60))
                    painter.end()

            del paddedPixmap

        return pixmap

    def importFrame(self):
        """Replace the currently shown frame with an imported image."""
        pixmap = self.importPixmap()
        if not pixmap:
            return

        del self.parent.frames[self.tiles.idx]
        self.parent.frames.insert(self.tiles.idx, pixmap)

        self.parent.update()

    def exportFrame(self):
        """Save the currently shown frame as a PNG."""
        path = QtWidgets.QFileDialog.getSaveFileName(self, "Save Image", ''
                                                     , '.png (*.png)')[0]
        if not path:
            return

        self.tiles.pixmap().save(path)

    def addFrame(self):
        """Append an imported image as a new frame and select it."""
        pixmap = self.importPixmap()
        if not pixmap:
            return

        newIdx = len(self.parent.frames)
        self.parent.frames.append(pixmap)

        self.parent.update()
        self.frameIdx.setValue(newIdx)

    def deleteFrame(self):
        """Delete the currently shown frame, clamping the selection."""
        idx = self.tiles.idx
        frames = self.parent.frames

        del frames[idx]
        self.frameIdx.setValue(min(idx, max(len(frames), 1) - 1))

        self.parent.update()

    def getNextFrame(self):
        # Wrap-around; max() guards against an empty frame list.
        return (self.tiles.idx + 1) % max(len(self.parent.frames), 1)

    def playPreview(self, checked):
        """Toggle the animated preview; editing is locked out while playing."""
        if checked:
            self.importButton.setEnabled(False)
            self.exportButton.setEnabled(False)
            self.addButton.setEnabled(False)
            self.deleteButton.setEnabled(False)
            self.frameIdx.setEnabled(False)
            self.parent.allFramesTab.importButton.setEnabled(False)

            # 62.5 ms per frame == 16 frames per second.
            self.previewTimer.start(62.5)
        else:
            self.importButton.setEnabled(True)
            self.exportButton.setEnabled(True)
            self.addButton.setEnabled(True)
            self.deleteButton.setEnabled(True)
            self.frameIdx.setEnabled(True)
            self.parent.allFramesTab.importButton.setEnabled(True)

            self.previewTimer.stop()
            self.frameIdx.setValue(self.tiles.idx)
class scrollArea(QtWidgets.QScrollArea):
    """Scroll area whose size hint tracks its widget plus the scroll bar."""

    def __init__(self, widget):
        super().__init__()

        self.setWidgetResizable(True)
        self.setVerticalScrollBarPolicy(Qt.ScrollBarAlwaysOn)
        self.setHorizontalScrollBarPolicy(Qt.ScrollBarAlwaysOff)
        self.setWidget(widget)

        # Extra room reserved for the always-on vertical scroll bar.
        self.deltaWidth = globals.app.style().pixelMetric(QtWidgets.QStyle.PM_ScrollBarExtent)

        hint = widget.sizeHint()
        self.width = hint.width() + self.deltaWidth
        self.height = hint.height() + self.deltaWidth

    def sizeHint(self):
        return QtCore.QSize(self.width, self.height)

    def update(self):
        # Re-derive the cached size from the wrapped widget before repainting.
        hint = self.widget().sizeHint()
        self.width = hint.width() + self.deltaWidth
        self.height = hint.height() + self.deltaWidth

        super().update()
class allFramesTab(QtWidgets.QWidget):
    """Animation tab showing every frame stacked vertically, with
    whole-strip import and export."""

    class tileWidget(frameTileWidget):
        # Renders all frames stacked top-to-bottom into one pixmap.
        def width(self):
            return self.parent.blockWidth

        def height(self):
            return self.parent.blockHeight * len(self.parent.frames)

        def pixmap(self):
            pixmap = QtGui.QPixmap(self.width() * 60, self.height() * 60)
            pixmap.fill(Qt.transparent)

            blockHeight = self.parent.blockHeight
            for i, frame in enumerate(self.parent.frames):
                painter = QtGui.QPainter(pixmap)
                painter.drawPixmap(0, i * blockHeight * 60, frame)
                painter.end()

            return pixmap

    def __init__(self, parent):
        """Build the controls; *parent* is the owning tileAnime widget."""
        super().__init__()
        self.parent = parent

        self.importButton = QtWidgets.QPushButton('Import')
        self.importButton.released.connect(self.importFrame)

        self.exportButton = QtWidgets.QPushButton('Export')
        self.exportButton.released.connect(self.exportFrame)

        self.tiles = allFramesTab.tileWidget(parent)
        self.tilesScroll = scrollArea(self.tiles)

        layout = QtWidgets.QGridLayout()
        layout.addWidget(self.importButton, 0, 0, 1, 2)
        layout.addWidget(self.exportButton, 0, 2, 1, 2)
        layout.addWidget(self.tilesScroll, 1, 0, 1, 4)
        self.setLayout(layout)

    def update(self):
        self.tiles.update()
        self.tilesScroll.update()
        super().update()

    def importFrame(self):
        """Import a full frame strip and replace all frames with it.

        The strip must be exactly one block wide and a whole multiple of
        one block tall, either at 60px/tile or padded 64px/tile (padding
        is cropped away).
        """
        path = QtWidgets.QFileDialog.getOpenFileName(self, "Open Image", '',
                                                     '.png (*.png)')[0]
        if not path:
            return

        pixmap = QtGui.QPixmap(path)
        width = pixmap.width()
        height = pixmap.height()

        blockWidth = self.parent.blockWidth
        blockHeight = self.parent.blockHeight

        requiredWidth = blockWidth * 60
        requiredHeight = blockHeight * 60

        padded = False

        try:
            assert width == requiredWidth
            assert height % requiredHeight == 0
        except AssertionError:
            # Not the exact layout; maybe it is the padded (64px/tile) one.
            requiredWidthPadded = blockWidth * 64
            requiredHeightPadded = blockHeight * 64

            try:
                assert width == requiredWidthPadded
                assert height % requiredHeightPadded == 0
            except AssertionError:
                QtWidgets.QMessageBox.warning(self, "Open Image",
                                              "The image was not the proper dimensions.\n"
                                              "Please resize the image to a width of %d and height multiple of %d." % (requiredWidth, requiredHeight),
                                              QtWidgets.QMessageBox.Cancel)
                return

            padded = True

        if padded:
            # Crop each 64x64 cell's 2px border while splitting into frames.
            frames = [QtGui.QPixmap(requiredWidth, requiredHeight) for _ in range(height // requiredHeightPadded)]
            for frame in frames:
                frame.fill(Qt.transparent)

            for y in range(height // 64):
                for x in range(width // 64):
                    painter = QtGui.QPainter(frames[y // blockHeight])
                    painter.drawPixmap(x * 60, y % blockHeight * 60, pixmap.copy(x*64 + 2, y*64 + 2, 60, 60))
                    painter.end()
        else:
            # Exact size: split the strip into block-height slices.
            frames = [QtGui.QPixmap(requiredWidth, requiredHeight) for _ in range(height // requiredHeight)]
            for frame in frames:
                frame.fill(Qt.transparent)

            for y in range(0, height, requiredHeight):
                painter = QtGui.QPainter(frames[y // requiredHeight])
                painter.drawPixmap(0, 0, pixmap.copy(0, y, requiredWidth, requiredHeight))
                painter.end()

        del pixmap

        del self.parent.frames
        self.parent.frames = frames

        self.parent.update()

    def exportFrame(self):
        """Save the full frame strip as one PNG."""
        path = QtWidgets.QFileDialog.getSaveFileName(self, "Save Image", ''
                                                     , '.png (*.png)')[0]
        if not path:
            return

        self.tiles.pixmap().save(path)
class tileAnime(QtWidgets.QTabWidget):
    """Editor for one animated tile texture (e.g. block_anime.gtx).

    Holds the animation as a list of 60px-per-tile QPixmaps in
    self.frames and exposes a frame-by-frame view and an all-frames view.
    """

    def __init__(self, name, blockWidth, blockHeight, tiles):
        """*name* is the GTX base name; *blockWidth*/*blockHeight* are the
        block dimensions in tiles; *tiles* lists the affected tile numbers."""
        super().__init__()

        self.name = name
        self.blockWidth = blockWidth
        self.blockHeight = blockHeight
        self.tiles = tiles  # TODO: Highlight tiles in the palette when this tab is selected
        self.frames = []

        self.frameByFrameTab = frameByFrameTab(self)
        self.allFramesTab = allFramesTab(self)

        self.addTab(self.frameByFrameTab, "Frame-by-frame View")
        self.addTab(self.allFramesTab, "All-Frames View")

        self.setStyleSheet("""
            QTabWidget::tab-bar {
                alignment: center;
            }
        """)
        self.setTabPosition(QtWidgets.QTabWidget.South)

    def load(self, useAddrLib=False):
        """Load this animation's GTX from the open archive's BG_tex folder.

        Splits the texture into per-frame pixmaps, cropping the 2px border
        padding around every 64x64 tile down to the visible 60x60 area.
        On any failure the frame list is left empty.
        """
        global window
        arc = window.arc

        data = b''
        for folder in arc.contents:
            if folder.name == 'BG_tex':
                for file in folder.contents:
                    if file.name == '%s.gtx' % self.name:
                        data = file.data

        if not data:
            # BUGFIX: message previously read "Failed to acquired".
            print("Failed to acquire %s.gtx" % self.name)
            frames = []
        else:
            image = QtGui.QPixmap.fromImage(loadGTX(data, useAddrLib))
            width = image.width()
            height = image.height()

            blockWidth = self.blockWidth
            blockHeight = self.blockHeight

            try:
                # Texture must be exactly one block wide and a whole number
                # of blocks tall.
                # BUGFIX: was `height % blockHeight * 64 == 0`, which parses
                # as `(height % blockHeight) * 64` and accepted textures
                # whose height is not a multiple of a full padded block.
                assert width == blockWidth * 64
                assert height % (blockHeight * 64) == 0
            except AssertionError:
                print("Invalid dimensions for %s.gtx: (%d, %d)" % (self.name, width, height))
                frames = []
            else:
                frames = [QtGui.QPixmap(blockWidth * 60, blockHeight * 60) for _ in range(height // (blockHeight * 64))]
                for frame in frames:
                    frame.fill(Qt.transparent)

                # Crop each 64x64 cell to 60x60 and place it in its frame.
                for y in range(height // 64):
                    for x in range(width // 64):
                        painter = QtGui.QPainter(frames[y // blockHeight])
                        painter.drawPixmap(x * 60, y % blockHeight * 60, image.copy(x*64 + 2, y*64 + 2, 60, 60))
                        painter.end()

        del self.frames
        self.frames = frames
        self.update()

    def update(self):
        """Sync both child tabs' widget sizes and enabled states."""
        nFrames = len(self.frames)
        hasFrames = bool(nFrames)  # setEnabled expects a bool, not an int

        tab = self.frameByFrameTab
        tab.tiles.setMinimumSize(tab.tiles.width() * 60, tab.tiles.height() * 60)
        tab.importButton.setEnabled(hasFrames)
        tab.exportButton.setEnabled(hasFrames)
        tab.deleteButton.setEnabled(hasFrames)
        tab.playButton.setEnabled(hasFrames)
        tab.frameIdx.setRange(0, max(nFrames, 1) - 1)
        tab.frameIdx.setEnabled(hasFrames)
        tab.update()

        tab = self.allFramesTab
        tab.tiles.setMinimumSize(tab.tiles.width() * 60, tab.tiles.height() * 60)
        tab.exportButton.setEnabled(hasFrames)
        tab.update()

        super().update()
class animWidget(QtWidgets.QTabWidget):
    """Container for all Pa0 tile-animation editors, one tab per animated
    texture. Construction is a no-op for non-Pa0 slots."""

    def __init__(self):
        super().__init__()

        global window
        # Only slot Pa0 has animated tiles.
        if window.slot:
            return

        # (gtx base name, block width, block height, affected tile numbers)
        self.block = tileAnime('block_anime', 1, 1, (48,))
        self.hatena = tileAnime('hatena_anime', 1, 1, (49,))
        self.blockL = tileAnime('block_anime_L', 2, 2, (112, 113, 128, 129))
        self.hatenaL = tileAnime('hatena_anime_L', 2, 2, (114, 115, 130, 131))
        self.tuka = tileAnime('tuka_coin_anime', 1, 1, (31,))
        self.belt = tileAnime('belt_conveyor_anime', 3, 1, (144, 145, 146, 147, 148, 149,
                                                            160, 161, 162, 163, 164, 165))

        path = globals.miyamoto_path + '/miyamotodata/Icons/'
        self.addTab(self.block, QtGui.QIcon(path + 'Core/Brick.png'), 'Brick Block')
        self.addTab(self.hatena, QtGui.QIcon(path + 'Core/Qblock.png'), '? Block')
        self.addTab(self.blockL, QtGui.QIcon(path + 'Core/Brick.png'), 'Big Brick Block')
        self.addTab(self.hatenaL, QtGui.QIcon(path + 'Core/Qblock.png'), 'Big ? Block')
        self.addTab(self.tuka, QtGui.QIcon(path + 'Core/DashCoin.png'), 'Dash Coin')
        self.addTab(self.belt, QtGui.QIcon(path + 'Core/Conveyor.png'), 'Conveyor Belt')

        self.setTabToolTip(0, "Brick Block animation.<br><b>Needs to be 16 frames!")
        self.setTabToolTip(1, "Question Block animation.<br><b>Needs to be 16 frames!")
        self.setTabToolTip(2, "Big Brick Block animation.<br><b>Needs to be 16 frames!")
        self.setTabToolTip(3, "Big Question Block animation.<br><b>Needs to be 16 frames!")
        self.setTabToolTip(4, "Dash Coin animation.<br><b>Needs to be 8 frames!")
        self.setTabToolTip(5, "Conveyor Belt animation.<br><b>Needs to be 8 frames!")

        #self.setTabShape(QtWidgets.QTabWidget.Triangular)
        self.setTabPosition(QtWidgets.QTabWidget.South)

    def load(self):
        """Load every animation from the open archive (Pa0 only).

        The conveyor belt texture is loaded with useAddrLib=True; the
        reason is not visible here -- see loadGTX.
        """
        global window
        if window.slot:
            return

        self.block.load()
        self.hatena.load()
        self.blockL.load()
        self.hatenaL.load()
        self.tuka.load()
        self.belt.load(True)

    def save(self):
        """Return a list of (name, gtx data) pairs for every animation
        that has frames; empty list for non-Pa0 slots."""
        global window
        if window.slot:
            return []

        packTexture = self.packTexture

        anime = []

        if self.block.frames:
            anime.append((self.block.name, packTexture(self.block.allFramesTab.tiles.pixmap())))
        if self.hatena.frames:
            anime.append((self.hatena.name, packTexture(self.hatena.allFramesTab.tiles.pixmap())))
        if self.blockL.frames:
            anime.append((self.blockL.name, packTexture(self.blockL.allFramesTab.tiles.pixmap())))
        if self.hatenaL.frames:
            anime.append((self.hatenaL.name, packTexture(self.hatenaL.allFramesTab.tiles.pixmap())))
        if self.tuka.frames:
            anime.append((self.tuka.name, packTexture(self.tuka.allFramesTab.tiles.pixmap())))
        if self.belt.frames:
            anime.append((self.belt.name, packTexture(self.belt.allFramesTab.tiles.pixmap())))

        return anime

    @staticmethod
    def packTexture(pixmap):
        """Convert a 60px-per-tile pixmap into padded GTX file data.

        Each 60x60 tile is centered in a 64x64 cell and its outermost
        rows/columns are replicated into the 2px border, so texture
        filtering at tile edges does not bleed into neighbours.
        """
        width = pixmap.width() // 60
        height = pixmap.height() // 60

        tex = QtGui.QImage(width * 64, height * 64, QtGui.QImage.Format_RGBA8888)
        tex.fill(Qt.transparent)
        painter = QtGui.QPainter(tex)

        for y in range(height):
            for x in range(width):
                tile = QtGui.QImage(64, 64, QtGui.QImage.Format_RGBA8888)
                tile.fill(Qt.transparent)
                tilePainter = QtGui.QPainter(tile)
                tilePainter.drawPixmap(2, 2, pixmap.copy(x * 60, y * 60, 60, 60))
                tilePainter.end()

                # Replicate the four edge rows/columns into the 2px border...
                for i in range(2, 62):
                    color = tile.pixel(i, 2)
                    for pix in range(0,2):
                        tile.setPixel(i, pix, color)

                    color = tile.pixel(2, i)
                    for p in range(0,2):
                        tile.setPixel(p, i, color)

                    color = tile.pixel(i, 61)
                    for p in range(62,64):
                        tile.setPixel(i, p, color)

                    color = tile.pixel(61, i)
                    for p in range(62,64):
                        tile.setPixel(p, i, color)

                # ...and the four corner pixels into the 2x2 corner blocks.
                color = tile.pixel(2, 2)
                for a in range(0, 2):
                    for b in range(0, 2):
                        tile.setPixel(a, b, color)

                color = tile.pixel(61, 2)
                for a in range(62, 64):
                    for b in range(0, 2):
                        tile.setPixel(a, b, color)

                color = tile.pixel(2, 61)
                for a in range(0, 2):
                    for b in range(62, 64):
                        tile.setPixel(a, b, color)

                color = tile.pixel(61, 61)
                for a in range(62, 64):
                    for b in range(62, 64):
                        tile.setPixel(a, b, color)

                painter.drawImage(x * 64, y * 64, tile)

        painter.end()

        bits = tex.bits()
        bits.setsize(tex.byteCount())
        data = bits.asstring()

        return RAWtoGTX(width * 64, height * 64, 0x1a, bytes(4), len(data), [0, 1, 2, 3], 1, data)
#############################################################################################
############################ Subclassed one dimension Item Model ############################
class PiecesModel(QtCore.QAbstractListModel):
    """One-dimensional list model holding the tileset's tile pixmaps.

    Each row exposes its pixmap as the decoration (icon) and, via
    Qt.UserRole, as the raw pixmap for drag-and-drop.
    """

    def __init__(self, parent=None):
        super(PiecesModel, self).__init__(parent)
        self.pixmaps = []

    def supportedDragActions(self):
        # BUGFIX: the class used to define this method twice; the later
        # definition (Copy | Move) was the one in effect, so that is the
        # behavior kept here and the dead duplicate is removed.
        return Qt.CopyAction | Qt.MoveAction

    def data(self, index, role=Qt.DisplayRole):
        """Return the pixmap at *index* as an icon (DecorationRole) or as
        the raw pixmap (UserRole); None for anything else."""
        if not index.isValid():
            return None

        if role == Qt.DecorationRole:
            return QtGui.QIcon(self.pixmaps[index.row()])

        if role == Qt.UserRole:
            return self.pixmaps[index.row()]

        return None

    def addPieces(self, pixmap):
        """Append *pixmap* as a new row, notifying attached views."""
        row = len(self.pixmaps)
        self.beginInsertRows(QtCore.QModelIndex(), row, row)
        self.pixmaps.insert(row, pixmap)
        self.endInsertRows()

    def flags(self, index):
        if index.isValid():
            return (Qt.ItemIsEnabled | Qt.ItemIsSelectable |
                    Qt.ItemIsDragEnabled)
        # BUGFIX: previously fell through and returned None, which is not
        # a valid Qt.ItemFlags value.
        return Qt.NoItemFlags

    def clear(self):
        """Remove every row, notifying attached views of the reset.

        BUGFIX: the original deleted the list without emitting any model
        signals (and computed an unused row count), leaving views unaware
        of the change.
        """
        self.beginResetModel()
        del self.pixmaps[:]
        self.endResetModel()

    def mimeTypes(self):
        return ['image/x-tile-piece']

    def mimeData(self, indexes):
        """Serialize the pixmaps of *indexes* into the custom MIME type."""
        mimeData = QtCore.QMimeData()
        encodedData = QtCore.QByteArray()

        stream = QtCore.QDataStream(encodedData, QtCore.QIODevice.WriteOnly)

        for index in indexes:
            if index.isValid():
                pixmap = QtGui.QPixmap(self.data(index, Qt.UserRole))
                stream << pixmap

        mimeData.setData('image/x-tile-piece', encodedData)
        return mimeData

    def rowCount(self, parent):
        # Flat list model: children only exist under the root index.
        if parent.isValid():
            return 0
        return len(self.pixmaps)
#############################################################################################
############ Main Window Class. Takes care of menu functions and widget creation ############
class MainWindow(QtWidgets.QMainWindow):
def __init__(self, name, data, slot, con, flags, parent=None):
    """Set up the tileset editor window.

    Args:
        name: tileset name; used for the window title.
        data: path to the tileset SARC on disk, or the string 'None' to
            start from a blank tileset.
        slot: tileset slot number; stored as int.
        con: flag checked on close together with `saved`; exact meaning
            not visible here -- presumably "connected to the editor".
        flags: Qt window flags forwarded to QMainWindow.
        parent: optional parent widget.
    """
    super().__init__(parent, flags)

    # This module communicates through a module-level `window` singleton.
    global window
    window = self

    self.overrides = True
    self.saved = False

    self.con = con
    self.slot = int(slot)
    self.tileImage = QtGui.QPixmap()
    self.normalmap = False

    global Tileset
    Tileset = TilesetClass()

    self.name = name

    self.forceClose = False

    self.setupMenus()
    self.setupWidgets()

    self.setuptile()

    if data == 'None':
        self.newTileset()
    else:
        with open(data, 'rb') as fn:
            self.data = fn.read()

        if not self.openTileset():
            # Parsing failed; caller is expected to check forceClose.
            self.forceClose = True

    self.setSizePolicy(QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed,
                                             QtWidgets.QSizePolicy.Fixed))
    self.setWindowTitle(name + ' - Puzzle NSMBU')
def closeEvent(self, event):
    """Clean up on window close: remove the platform temp file, clear the
    slot's tileset reference if editing was abandoned, and stop any
    running animation preview timers."""
    if platform.system() == 'Windows':
        tile_path = globals.miyamoto_path + '/Tools'
    elif platform.system() == 'Linux':
        tile_path = globals.miyamoto_path + '/linuxTools'
    else:
        tile_path = globals.miyamoto_path + '/macTools'

    if os.path.isfile(tile_path + '/tmp.tmp'):
        os.remove(tile_path + '/tmp.tmp')

    """
    # Object-duplicates-related
    if self.saved:
        toDelete = []
        for folderIndex in globals.ObjectAddedtoEmbedded[globals.CurrentArea]:
            for objNum in globals.ObjectAddedtoEmbedded[globals.CurrentArea][folderIndex]:
                idx, _ = globals.ObjectAddedtoEmbedded[globals.CurrentArea][folderIndex][objNum]
                if idx == self.slot:
                    toDelete.append([folderIndex, objNum])

        for (folderIndex, objNum) in toDelete:
            del globals.ObjectAddedtoEmbedded[globals.CurrentArea][folderIndex][objNum]
    """

    if not self.saved and self.con:
        # Editing was abandoned: drop the area's reference to this slot's
        # tileset. (Replaces the original exec() call with setattr.)
        setattr(globals.Area, 'tileset%d' % self.slot, '')

    if self.slot == 0:
        # Pa0 is the only slot with animations; stop their preview timers
        # so they don't keep firing after the window is gone.
        for anim in (self.animWidget.block, self.animWidget.hatena,
                     self.animWidget.blockL, self.animWidget.hatenaL,
                     self.animWidget.tuka, self.animWidget.belt):
            anim.frameByFrameTab.previewTimer.stop()

    super().closeEvent(event)
def setuptile(self):
    """Repopulate the tile palette model from the current Tileset."""
    self.tileWidget.tiles.clear()
    self.model.clear()

    if self.normalmap:
        # Normal-map view: show each tile's normal map instead.
        sources = (tile.normalmap for tile in Tileset.tiles)
    elif Tileset.slot == 0 and window.overrides:
        # Pa0 with overrides enabled: prefer the override image when set.
        sources = ((Tileset.overrides[i] or Tileset.tiles[i].image)
                   for i in range(len(Tileset.tiles)))
    else:
        sources = (tile.image for tile in Tileset.tiles)

    for source in sources:
        self.model.addPieces(source.scaledToWidth(24, Qt.SmoothTransformation))
def newTileset(self):
    '''Creates a new, blank tileset: 256 black tiles with flat normal maps.'''
    global Tileset
    Tileset.clear()
    Tileset = TilesetClass()

    # Solid-black tile image...
    blank = QtGui.QPixmap(60, 60)
    blank.fill(Qt.black)

    # ...and a neutral "flat" normal map color (0x8080ff).
    flatNormal = QtGui.QPixmap(60, 60)
    flatNormal.fill(QtGui.QColor(0x80, 0x80, 0xff))

    for _ in range(256):
        Tileset.addTile(blank, flatNormal)

    Tileset.slot = self.slot
    Tileset.processOverrides()
    self.tileWidget.tilesetType.setText('Pa%d' % Tileset.slot)

    self.setuptile()
@staticmethod
def getData(arc):
Image = None
NmlMap = None
behaviourdata = None
objstrings = None
metadata = None
for folder in arc.contents:
if folder.name == 'BG_tex':
for file in folder.contents:
if file.name.endswith('_nml.gtx') and len(file.data) in (1421344, 4196384):
NmlMap = file.data
elif file.name.endswith('.gtx') and len(file.data) in (1421344, 4196384):
Image = file.data
elif folder.name == 'BG_chk':
for file in folder.contents:
if file.name.startswith('d_bgchk_') and file.name.endswith('.bin'):
behaviourdata = file.data
elif folder.name == 'BG_unt':
for file in folder.contents:
if file.name.endswith('_hd.bin'):
metadata = file.data
elif file.name.endswith('.bin'):
objstrings = file.data
return Image, NmlMap, behaviourdata, objstrings, metadata
def openTileset(self):
    '''Opens a Nintendo tileset sarc and parses the heck out of it.

    Parses self.data. On any validation failure, shows a warning dialog
    and returns None; returns True on success.
    '''
    data = self.data

    if not data.startswith(b'SARC'):
        QtWidgets.QMessageBox.warning(None, 'Error', 'Error - this is not a SARC file.\n\nNot a valid tileset, sadly.')
        return

    arc = SarcLib.SARC_Archive(data)

    Image, NmlMap, behaviourdata, objstrings, metadata = self.getData(arc)

    if not Image:
        QtWidgets.QMessageBox.warning(None, 'Error', 'Error - Couldn\'t load the image data')
        return

    elif not NmlMap:
        QtWidgets.QMessageBox.warning(None, 'Error', 'Error - Couldn\'t load the normal map data')
        return

    elif None in (behaviourdata, objstrings, metadata):
        QtWidgets.QMessageBox.warning(None, 'Error', 'Error - the necessary files were not found.\n\nNot a valid tileset, sadly.')
        return

    global Tileset
    Tileset.clear()

    self.arc = arc

    # Loads the Image Data.
    dest = loadGTX(Image)
    destnml = loadGTX(NmlMap)

    self.tileImage = QtGui.QPixmap.fromImage(dest)
    self.nmlImage = QtGui.QPixmap.fromImage(destnml)

    # Loads Tile Behaviours (one little-endian u64 per tile).
    behaviours = []
    for entry in range(256):
        behaviours.append(struct.unpack('<Q', behaviourdata[entry*8:entry*8+8])[0])

    # Makes us some nice Tile Classes!
    # Tiles are 60x60 inside 64x64 cells (2px padding), wrapping at the
    # 2048px-wide texture.
    Xoffset = 2
    Yoffset = 2
    for i in range(256):
        Tileset.addTile(
            self.tileImage.copy(Xoffset,Yoffset,60,60),
            self.nmlImage.copy(Xoffset,Yoffset,60,60),
            behaviours[i])
        Xoffset += 64
        if Xoffset >= 2048:
            Xoffset = 2
            Yoffset += 64

    # Load Objects: one 6-byte big-endian record per object
    # (offset into objstrings, then three fields passed to addObject).
    meta = []
    for i in range(len(metadata) // 6):
        meta.append(struct.unpack_from('>HBBH', metadata, i * 6))

    tilelist = [[]]
    upperslope = [0, 0]
    lowerslope = [0, 0]
    byte = 0

    for entry in meta:
        offset = entry[0]
        byte = struct.unpack_from('>B', objstrings, offset)[0]
        row = 0

        # Object byte strings: 0xFF terminates, 0xFE ends a row, bytes
        # with the high bit set are slope markers, anything else starts
        # a 3-byte tile record.
        while byte != 0xFF:
            if byte == 0xFE:
                tilelist.append([])

                # Count how many rows belong to the upper / lower slope
                # sections as rows go by.
                if (upperslope[0] != 0) and (lowerslope[0] == 0):
                    upperslope[1] = upperslope[1] + 1
                if lowerslope[0] != 0:
                    lowerslope[1] = lowerslope[1] + 1

                offset += 1
                byte = struct.unpack_from('>B', objstrings, offset)[0]

            elif (byte & 0x80):
                # First slope marker opens the upper section, the second
                # opens the lower section.
                if upperslope[0] == 0:
                    upperslope[0] = byte
                else:
                    lowerslope[0] = byte

                offset += 1
                byte = struct.unpack_from('>B', objstrings, offset)[0]

            else:
                tilelist[-1].append(struct.unpack_from('>3B', objstrings, offset))

                offset += 3
                byte = struct.unpack_from('>B', objstrings, offset)[0]

        # The trailing 0xFE leaves one empty row; drop it.
        tilelist.pop()

        # Ceiling slopes (0x80 | 0x02): rotate the lower-section rows to
        # the front of the row list.
        if (upperslope[0] & 0x80) and (upperslope[0] & 0x2):
            for i in range(lowerslope[1]):
                pop = tilelist.pop()
                tilelist.insert(0, pop)

        Tileset.addObject(entry[2], entry[1], entry[3], upperslope, lowerslope, tilelist)

        tilelist = [[]]
        upperslope = [0, 0]
        lowerslope = [0, 0]

    Tileset.slot = self.slot; Tileset.processOverrides()
    self.tileWidget.tilesetType.setText('Pa%d' % Tileset.slot)

    self.animWidget.load()

    # Stamp this slot number into every tile reference that belongs to a
    # tileset (low two bits set), or into everything for Pa0.
    cobj = 0
    crow = 0
    ctile = 0
    for object in Tileset.objects:
        for row in object.tiles:
            for tile in row:
                if tile[2] & 3 or not Tileset.slot:
                    Tileset.objects[cobj].tiles[crow][ctile] = (tile[0], tile[1], (tile[2] & 0xFC) | Tileset.slot)
                ctile += 1
            crow += 1
            ctile = 0
        cobj += 1
        crow = 0
        ctile = 0

    self.setuptile()
    SetupObjectModel(self.objmodel, Tileset.objects, Tileset.tiles)

    return True
def openTilesetfromFile(self):
    '''Opens a NSMBU tileset sarc from a file and parses the heck out of it.

    NOTE(review): largely duplicates openTileset(), but reads from a
    user-chosen file, does not call Tileset.processOverrides(), and
    resets the object selection at the end. Returns None in all cases.
    '''
    path = QtWidgets.QFileDialog.getOpenFileName(self, "Open NSMBU Tileset", '',
                                                 "All files (*)")[0]
    if not path: return

    # NOTE(review): `name` is computed here but never used afterwards.
    name = '.'.join(os.path.basename(path).split('.')[:-1])

    with open(path, 'rb') as file:
        data = file.read()

    if not data.startswith(b'SARC'):
        QtWidgets.QMessageBox.warning(None, 'Error', 'Error - this is not a SARC file.\n\nNot a valid tileset, sadly.')
        return

    arc = SarcLib.SARC_Archive(data)

    Image, NmlMap, behaviourdata, objstrings, metadata = self.getData(arc)

    if not Image:
        QtWidgets.QMessageBox.warning(None, 'Error', 'Error - Couldn\'t load the image data')
        return

    elif not NmlMap:
        QtWidgets.QMessageBox.warning(None, 'Error', 'Error - Couldn\'t load the normal map data')
        return

    elif None in (behaviourdata, objstrings, metadata):
        QtWidgets.QMessageBox.warning(None, 'Error', 'Error - the necessary files were not found.\n\nNot a valid tileset, sadly.')
        return

    global Tileset
    Tileset.clear()

    self.arc = arc

    # Loads the Image Data.
    dest = loadGTX(Image)
    destnml = loadGTX(NmlMap)

    self.tileImage = QtGui.QPixmap.fromImage(dest)
    self.nmlImage = QtGui.QPixmap.fromImage(destnml)

    # Loads Tile Behaviours (one little-endian u64 per tile).
    behaviours = []
    for entry in range(256):
        behaviours.append(struct.unpack('<Q', behaviourdata[entry*8:entry*8+8])[0])

    # Makes us some nice Tile Classes!
    # Tiles are 60x60 inside 64x64 cells (2px padding), wrapping at the
    # 2048px-wide texture.
    Xoffset = 2
    Yoffset = 2
    for i in range(256):
        Tileset.addTile(
            self.tileImage.copy(Xoffset,Yoffset,60,60),
            self.nmlImage.copy(Xoffset,Yoffset,60,60),
            behaviours[i])
        Xoffset += 64
        if Xoffset >= 2048:
            Xoffset = 2
            Yoffset += 64

    # Load Objects (same byte-string format as in openTileset).
    meta = []
    for i in range(len(metadata) // 6):
        meta.append(struct.unpack_from('>HBBH', metadata, i * 6))

    tilelist = [[]]
    upperslope = [0, 0]
    lowerslope = [0, 0]
    byte = 0

    for entry in meta:
        offset = entry[0]
        byte = struct.unpack_from('>B', objstrings, offset)[0]
        row = 0

        # 0xFF terminates, 0xFE ends a row, high-bit bytes are slope
        # markers, anything else starts a 3-byte tile record.
        while byte != 0xFF:
            if byte == 0xFE:
                tilelist.append([])

                if (upperslope[0] != 0) and (lowerslope[0] == 0):
                    upperslope[1] = upperslope[1] + 1
                if lowerslope[0] != 0:
                    lowerslope[1] = lowerslope[1] + 1

                offset += 1
                byte = struct.unpack_from('>B', objstrings, offset)[0]

            elif (byte & 0x80):
                if upperslope[0] == 0:
                    upperslope[0] = byte
                else:
                    lowerslope[0] = byte

                offset += 1
                byte = struct.unpack_from('>B', objstrings, offset)[0]

            else:
                tilelist[-1].append(struct.unpack_from('>3B', objstrings, offset))

                offset += 3
                byte = struct.unpack_from('>B', objstrings, offset)[0]

        # The trailing 0xFE leaves one empty row; drop it.
        tilelist.pop()

        # Ceiling slopes (0x80 | 0x02): rotate lower-section rows to the front.
        if (upperslope[0] & 0x80) and (upperslope[0] & 0x2):
            for i in range(lowerslope[1]):
                pop = tilelist.pop()
                tilelist.insert(0, pop)

        Tileset.addObject(entry[2], entry[1], entry[3], upperslope, lowerslope, tilelist)

        tilelist = [[]]
        upperslope = [0, 0]
        lowerslope = [0, 0]

    Tileset.slot = self.slot
    self.tileWidget.tilesetType.setText('Pa%d' % Tileset.slot)

    self.animWidget.load()

    # Stamp this slot number into every tile reference that belongs to a
    # tileset (low two bits set), or into everything for Pa0.
    cobj = 0
    crow = 0
    ctile = 0
    for object in Tileset.objects:
        for row in object.tiles:
            for tile in row:
                if tile[2] & 3 or not Tileset.slot:
                    Tileset.objects[cobj].tiles[crow][ctile] = (tile[0], tile[1], (tile[2] & 0xFC) | Tileset.slot)
                ctile += 1
            crow += 1
            ctile = 0
        cobj += 1
        crow = 0
        ctile = 0

    self.setuptile()
    SetupObjectModel(self.objmodel, Tileset.objects, Tileset.tiles)

    self.objectList.clearCurrentIndex()
    self.tileWidget.setObject(self.objectList.currentIndex())

    self.objectList.update()
    self.tileWidget.update()
def openImage(self, nml=False):
    '''Opens an Image from png, and creates a new tileset from it.

    nml: if True the image is imported as the tiles' normal maps
    instead of their regular textures.
    '''
    path = QtWidgets.QFileDialog.getOpenFileName(self, "Open Image", '',
        "Image Files (*.png)")[0]
    if not path: return
    newImage = QtGui.QPixmap()
    if not newImage.load(path):
        QtWidgets.QMessageBox.warning(self, "Open Image",
            "The image file could not be loaded.",
            QtWidgets.QMessageBox.Cancel)
        return
    # The sheet must be exactly 16x16 tiles of 60px (was a bitwise '&'
    # on booleans; 'and' is the intended logical operator).
    if not (newImage.width() == 960 and newImage.height() == 960):
        QtWidgets.QMessageBox.warning(self, "Open Image",
            "The image was not the proper dimensions.\n"
            "Please resize the image to 960x960 pixels.",
            QtWidgets.QMessageBox.Cancel)
        return
    # Bug fix: only adopt the new pixmap once it passed both checks, so a
    # failed or wrong-sized open no longer clobbers the current image.
    # NOTE(review): like the original, this also replaces self.tileImage
    # when importing a normal map (nml=True) -- confirm that is intended.
    self.tileImage = newImage
    x = 0
    y = 0
    for i in range(256):
        if nml:
            Tileset.tiles[i].normalmap = self.tileImage.copy(x*60, y*60, 60, 60)
        else:
            Tileset.tiles[i].image = self.tileImage.copy(x*60, y*60, 60, 60)
        x += 1
        if (x * 60) >= 960:
            y += 1
            x = 0
    # Refresh the tile/object views while keeping the current selection.
    index = self.objectList.currentIndex()
    self.setuptile()
    SetupObjectModel(self.objmodel, Tileset.objects, Tileset.tiles)
    self.objectList.setCurrentIndex(index)
    self.tileWidget.setObject(index)
    self.objectList.update()
    self.tileWidget.update()
def saveImage(self, nml=False):
    """Export all 256 tiles (or their normal maps when *nml*) as one 960x960 PNG."""
    fn = QtWidgets.QFileDialog.getSaveFileName(
        self, 'Choose a new filename', '', '.png (*.png)')[0]
    if fn == '':
        return
    sheet = QtGui.QPixmap(960, 960)
    sheet.fill(Qt.transparent)
    painter = QtGui.QPainter(sheet)
    # 16 tiles of 60px per row, laid out in tile-index order.
    for idx, tile in enumerate(Tileset.tiles):
        source = tile.normalmap if nml else tile.image
        painter.drawPixmap((idx % 16) * 60, (idx // 16) * 60, source)
    painter.end()
    sheet.save(fn)
def openNml(self):
    """Import a normal-map sheet; thin wrapper around openImage."""
    self.openImage(nml=True)
def saveNml(self):
    """Export the normal-map sheet; thin wrapper around saveImage."""
    self.saveImage(nml=True)
def saveTileset(self):
    """Pack the tileset, store it in the level archive, refresh the editor
    state and close this window."""
    outdata = self.saving(os.path.basename(self.name))
    # Idiom fix: look the per-slot attribute up with getattr() instead of
    # building a string for eval().
    globals.szsData[getattr(globals.Area, 'tileset%d' % self.slot)] = outdata
    if self.slot == 0:
        # Slot 0 (the main tileset) is reloaded in place.
        import loading
        loading.LoadTileset(0, globals.Area.tileset0)
        del loading
        import verifications
        verifications.SetDirty()
        del verifications
        HandleTilesetEdited(True)
        globals.mainWindow.objAllTab.setTabEnabled(0, True)
        globals.mainWindow.objAllTab.setCurrentIndex(0)
    else:
        globals.mainWindow.ReloadTilesets()
        import verifications
        verifications.SetDirty()
        del verifications
        HandleTilesetEdited(True)
        if globals.ObjectDefinitions[self.slot] == [None] * 256:
            # The slot ended up empty: unload it and clear its name.
            import tileset
            tileset.UnloadTileset(self.slot)
            del tileset
            # Idiom fix: setattr() instead of exec() on a format string.
            setattr(globals.Area, 'tileset%d' % self.slot, '')
        else:
            globals.mainWindow.objAllTab.setCurrentIndex(2)
    # Repaint every placed object with the fresh tile graphics.
    for layer in globals.Area.layers:
        for obj in layer:
            obj.updateObjCache()
    globals.mainWindow.scene.update()
    self.saved = True
    self.close()
def saveTilesetAs(self):
    """Ask for a destination path and write the packed tileset archive there."""
    fn = QtWidgets.QFileDialog.getSaveFileName(
        self, 'Choose a new filename', '', 'All files (*)')[0]
    if fn == '':
        return
    # Pack first so a failure cannot leave a truncated file behind.
    data = self.saving(os.path.basename(str(fn)))
    with open(fn, 'wb') as outf:
        outf.write(data)
def saving(self, name):
    """Serialize the whole tileset into a SARC archive and return its bytes.

    name: base filename (no extension) used for every file in the archive.
    """
    # Prepare tiles, objects, object metadata, and textures and stuff into buffers.
    textureBuffer = self.PackTexture()          # tile graphics (GTX)
    textureBufferNml = self.PackTexture(True)   # normal-map variant
    tileBuffer = self.PackTiles()               # per-tile collision data
    objectBuffers = self.PackObjects()
    objectBuffer = objectBuffers[0]             # object layout strings
    objectMetaBuffer = objectBuffers[1]         # per-object metadata records
    # Make an arc and pack up the files!
    arc = SarcLib.SARC_Archive()
    tex = SarcLib.Folder('BG_tex'); arc.addFolder(tex)
    tex.addFile(SarcLib.File('%s.gtx' % name, textureBuffer))
    tex.addFile(SarcLib.File('%s_nml.gtx' % name, textureBufferNml))
    # Animation frames saved alongside the main textures.
    for (animName, data) in self.animWidget.save():
        tex.addFile(SarcLib.File('%s.gtx' % animName, data))
    chk = SarcLib.Folder('BG_chk'); arc.addFolder(chk)
    chk.addFile(SarcLib.File('d_bgchk_%s.bin' % name, tileBuffer))
    unt = SarcLib.Folder('BG_unt'); arc.addFolder(unt)
    unt.addFile(SarcLib.File('%s.bin' % name, objectBuffer))
    unt.addFile(SarcLib.File('%s_hd.bin' % name, objectMetaBuffer))
    # arc.save() returns (data, ...); only the raw bytes are needed.
    return arc.save()[0]
def PackTexture(self, normalmap=False):
    """Render all 256 tiles into a 2048x512 RGBA atlas and encode it as GTX.

    Each tile occupies a 64x64 cell: the 60x60 tile image sits at (2, 2)
    and the 2px border is clamped (edge pixels copied outward) so bilinear
    sampling never bleeds between neighbouring tiles.
    """
    tex = QtGui.QImage(2048, 512, QtGui.QImage.Format_RGBA8888)
    tex.fill(Qt.transparent)
    painter = QtGui.QPainter(tex)
    Xoffset = 0
    Yoffset = 0
    for tile in Tileset.tiles:
        minitex = QtGui.QImage(64, 64, QtGui.QImage.Format_RGBA8888)
        minitex.fill(Qt.transparent)
        minipainter = QtGui.QPainter(minitex)
        minipainter.drawPixmap(2, 2, tile.normalmap if normalmap else tile.image)
        minipainter.end()
        # Read colours and DESTROY THEM (or copy them to the edges, w/e)
        for i in range(2, 62):
            # Top Clamp (copy row 2 into rows 0-1)
            colour = minitex.pixel(i, 2)
            for p in range(0,2):
                minitex.setPixel(i, p, colour)
            # Left Clamp (copy column 2 into columns 0-1)
            colour = minitex.pixel(2, i)
            for p in range(0,2):
                minitex.setPixel(p, i, colour)
            # Bottom Clamp (copy row 61 into rows 62-63)
            colour = minitex.pixel(i, 61)
            for p in range(62,64):
                minitex.setPixel(i, p, colour)
            # Right Clamp (copy column 61 into columns 62-63)
            colour = minitex.pixel(61, i)
            for p in range(62,64):
                minitex.setPixel(p, i, colour)
        # UpperLeft Corner Clamp
        colour = minitex.pixel(2, 2)
        for x in range(0,2):
            for y in range(0,2):
                minitex.setPixel(x, y, colour)
        # UpperRight Corner Clamp
        colour = minitex.pixel(61, 2)
        for x in range(62,64):
            for y in range(0,2):
                minitex.setPixel(x, y, colour)
        # LowerLeft Corner Clamp
        colour = minitex.pixel(2, 61)
        for x in range(0,2):
            for y in range(62,64):
                minitex.setPixel(x, y, colour)
        # LowerRight Corner Clamp
        colour = minitex.pixel(61, 61)
        for x in range(62,64):
            for y in range(62,64):
                minitex.setPixel(x, y, colour)
        # Blit the padded cell into the atlas: 32 cells per 2048px row.
        painter.drawImage(Xoffset, Yoffset, minitex)
        Xoffset += 64
        if Xoffset >= 2048:
            Xoffset = 0
            Yoffset += 64
    painter.end()
    return writeGTX(tex, Tileset.slot, normalmap)
def PackTiles(self):
    """Serialize the collision values of all 256 tiles.

    Returns the same 2048-byte buffer (256 tiles x 8 bytes, little-endian
    unsigned 64-bit each) as the previous ctypes-based implementation,
    but built with plain struct packing instead of a mutable C buffer.
    """
    packer = struct.Struct('<Q')
    return b''.join(packer.pack(tile.getCollision()) for tile in Tileset.tiles)
def PackObjects(self):
    """Serialize all objects into (layout buffer, metadata buffer).

    Layout format per object: optional slope-control bytes (high bit set),
    then 3 bytes per tile, 0xFE terminating each row and 0xFF terminating
    the object. Metadata is one '>HBBH' record per object: offset into the
    layout buffer, width, height, randomization byte.
    """
    objectStrings = []
    o = 0
    for object in Tileset.objects:
        # Slopes
        if object.upperslope[0] != 0:
            # Reverse Slopes
            if object.upperslope[0] & 0x2:
                # Main section first (rows below the slope), then the
                # lower-slope section prefixed with its control byte.
                a = struct.pack('>B', object.upperslope[0])
                for row in range(object.lowerslope[1], object.height):
                    for tile in object.tiles[row]:
                        a += struct.pack('>BBB', tile[0], tile[1], tile[2])
                    a += b'\xfe'
                if object.height > 1 and object.lowerslope[1]:
                    a += struct.pack('>B', object.lowerslope[0])
                    for row in range(0, object.lowerslope[1]):
                        for tile in object.tiles[row]:
                            a += struct.pack('>BBB', tile[0], tile[1], tile[2])
                        a += b'\xfe'
                a += b'\xff'
                objectStrings.append(a)
            # Regular Slopes
            else:
                a = struct.pack('>B', object.upperslope[0])
                for row in range(0, object.upperslope[1]):
                    for tile in object.tiles[row]:
                        a += struct.pack('>BBB', tile[0], tile[1], tile[2])
                    a += b'\xfe'
                if object.height > 1 and object.lowerslope[1]:
                    a += struct.pack('>B', object.lowerslope[0])
                    for row in range(object.upperslope[1], object.height):
                        for tile in object.tiles[row]:
                            a += struct.pack('>BBB', tile[0], tile[1], tile[2])
                        a += b'\xfe'
                a += b'\xff'
                objectStrings.append(a)
        # Not slopes!
        else:
            a = b''
            for tilerow in object.tiles:
                for tile in tilerow:
                    a += struct.pack('>BBB', tile[0], tile[1], tile[2])
                a += b'\xfe'
            a += b'\xff'
            objectStrings.append(a)
        o += 1
    # Concatenate the layouts; each metadata record stores the offset at
    # which its object's layout begins.
    Objbuffer = b''
    Metabuffer = b''
    i = 0
    for a in objectStrings:
        Metabuffer += struct.pack('>HBBH', len(Objbuffer), Tileset.objects[i].width, Tileset.objects[i].height, Tileset.objects[i].getRandByte())
        Objbuffer += a
        i += 1
    return (Objbuffer, Metabuffer)
def setupMenus(self):
    """Build the File and Tasks menus with their keyboard shortcuts."""
    fileMenu = self.menuBar().addMenu("&File")
    fileMenu.addAction("Import Tileset from file...", self.openTilesetfromFile, QtGui.QKeySequence.Open)
    fileMenu.addAction("Export Tileset...", self.saveTilesetAs, QtGui.QKeySequence.SaveAs)
    fileMenu.addAction("Import Image...", self.openImage, QtGui.QKeySequence('Ctrl+I'))
    fileMenu.addAction("Export Image...", self.saveImage, QtGui.QKeySequence('Ctrl+E'))
    fileMenu.addAction("Import Normal Map...", self.openNml, QtGui.QKeySequence('Ctrl+Shift+I'))
    fileMenu.addAction("Export Normal Map...", self.saveNml, QtGui.QKeySequence('Ctrl+Shift+E'))
    fileMenu.addAction("Save and Quit", self.saveTileset, QtGui.QKeySequence.Save)
    # Bug fix: Qt key-sequence strings are '+'-separated; 'Ctrl-Q' was not
    # a valid shortcut, so the Quit accelerator never worked.
    fileMenu.addAction("Quit", self.close, QtGui.QKeySequence('Ctrl+Q'))
    # (An unused black QPixmap/QIcon pair was previously created here.)
    taskMenu = self.menuBar().addMenu("&Tasks")
    taskMenu.addAction("Toggle Normal Map", self.toggleNormal, QtGui.QKeySequence('Ctrl+Shift+N'))
    taskMenu.addAction("Toggle Overrides", self.toggleOverrides, QtGui.QKeySequence('Ctrl+Shift+O'))
    taskMenu.addAction("Show Tiles info...", self.showInfo, QtGui.QKeySequence('Ctrl+P'))
    taskMenu.addAction("Import object from file...", self.importObjFromFile, '')
    taskMenu.addAction("Export object...", self.saveObject, '')
    taskMenu.addAction("Export all objects...", self.saveAllObjects, '')
    taskMenu.addAction("Clear Collision Data", self.clearCollisions, QtGui.QKeySequence('Ctrl+Shift+Backspace'))
    taskMenu.addAction("Clear Object Data", self.clearObjects, QtGui.QKeySequence('Ctrl+Alt+Backspace'))
def toggleNormal(self):
    """Swap the tile display between regular images and normal maps."""
    # Replace regular image with normalmap images in model
    self.normalmap = not self.normalmap
    self.setuptile()
    self.tileWidget.setObject(self.objectList.currentIndex())
    self.tileWidget.update()
def toggleOverrides(self):
    """Flip override display on/off, rebuilding the models around the toggle."""
    self.overrides = not self.overrides
    selected = self.objectList.currentIndex()
    self.setuptile()
    SetupObjectModel(self.objmodel, Tileset.objects, Tileset.tiles)
    # Restore the selection the rebuild just discarded.
    self.objectList.setCurrentIndex(selected)
    self.tileWidget.setObject(selected)
    self.objectList.update()
    self.tileWidget.update()
def showInfo(self):
    """Pop up a message box reporting used and free tile counts."""
    used = len(Tileset.getUsedTiles())
    free = 256 - used
    usedNoun = " tile.\n" if used == 1 else " tiles.\n"
    freeNoun = " tile." if free == 1 else " tiles."
    QtWidgets.QMessageBox.information(
        self, "Tiles info",
        "Used Tiles: " + str(used) + usedNoun
        + "Free Tiles: " + str(free) + freeNoun,
        QtWidgets.QMessageBox.Ok)
def importObjFromFile(self):
    """Import a single object (a .json plus its image/nml/layout/collision
    companion files) into the tileset, remapping its tiles onto free slots.
    """
    usedTiles = Tileset.getUsedTiles()
    if len(usedTiles) >= 256:  # It can't be more than 256, oh well
        QtWidgets.QMessageBox.warning(self, "Open Object",
            "There isn't enough room in the Tileset.",
            QtWidgets.QMessageBox.Cancel)
        return
    file = QtWidgets.QFileDialog.getOpenFileName(self, "Open Object", '',
        "Object files (*.json)")[0]
    if not file: return
    with open(file) as inf:
        jsonData = json.load(inf)
    dirName = os.path.dirname(file)  # renamed from 'dir' (shadowed builtin)
    tilelist = [[]]
    upperslope = [0, 0]
    lowerslope = [0, 0]
    # Bug fix: the companion files were opened with bare open(...).read()
    # and never closed; use context managers so the handles are released.
    with open(dirName + "/" + jsonData["meta"], "rb") as f:
        metaData = f.read()
    with open(dirName + "/" + jsonData["objlyt"], "rb") as f:
        objstrings = f.read()
    with open(dirName + "/" + jsonData["colls"], "rb") as f:
        colls = f.read()
    randLen = 0
    if "randLen" in jsonData:
        # Randomized object: the tile count comes from the metadata nibble.
        randLen = (metaData[5] & 0xF)
        numTiles = randLen
    else:
        # Count the distinct tile numbers referenced by the layout.
        tilesUsed = []
        pos = 0
        while objstrings[pos] != 0xFF:
            if objstrings[pos] & 0x80:
                pos += 1
                continue
            tile = objstrings[pos:pos+3]
            if tile != b'\0\0\0':
                if tile[1] not in tilesUsed:
                    tilesUsed.append(tile[1])
            pos += 3
        numTiles = len(tilesUsed)
    if numTiles + len(usedTiles) > 256:
        QtWidgets.QMessageBox.warning(self, "Open Object",
            "There isn't enough room for the object.",
            QtWidgets.QMessageBox.Cancel)
        return
    freeTiles = [i for i in range(256) if i not in usedTiles]
    if randLen:
        # Randomized objects need a CONTIGUOUS run of free tile slots.
        found = False
        for i in freeTiles:
            for z in range(randLen):
                if i + z not in freeTiles:
                    break
                if z == randLen - 1:
                    tileNum = i
                    found = True
                    break
            if found:
                break
        if not found:
            QtWidgets.QMessageBox.warning(self, "Open Object",
                "There isn't enough room for the object.",
                QtWidgets.QMessageBox.Cancel)
            return
    # Walk the layout bytes: 0xFF ends the object, 0xFE ends a row, a set
    # high bit is a slope-control byte, anything else starts a 3-byte tile.
    tilesUsed = {}
    offset = 0
    byte = struct.unpack_from('>B', objstrings, offset)[0]
    i = 0
    row = 0
    while byte != 0xFF:
        if byte == 0xFE:
            tilelist.append([])
            if (upperslope[0] != 0) and (lowerslope[0] == 0):
                upperslope[1] = upperslope[1] + 1
            if lowerslope[0] != 0:
                lowerslope[1] = lowerslope[1] + 1
            offset += 1
            byte = struct.unpack_from('>B', objstrings, offset)[0]
        elif (byte & 0x80):
            # First control byte is the upper slope, second the lower.
            if upperslope[0] == 0:
                upperslope[0] = byte
            else:
                lowerslope[0] = byte
            offset += 1
            byte = struct.unpack_from('>B', objstrings, offset)[0]
        else:
            tileBytes = objstrings[offset:offset + 3]
            if tileBytes == b'\0\0\0':
                tile = [0, 0, 0]
            else:
                tile = []
                tile.append(byte)
                # Remap the tile number onto this tileset's free slots.
                if randLen:
                    tile.append(tileNum + i)
                    if i < randLen: i += 1
                else:
                    if tileBytes[1] not in tilesUsed:
                        tilesUsed[tileBytes[1]] = i
                        tile.append(freeTiles[i])
                        i += 1
                    else:
                        tile.append(freeTiles[tilesUsed[tileBytes[1]]])
                # Keep the upper flag bits but stamp in this tileset's slot.
                byte2 = (struct.unpack_from('>B', objstrings, offset + 2)[0]) & 0xFC
                byte2 |= Tileset.slot
                tile.append(byte2)
            tilelist[-1].append(tile)
            offset += 3
            byte = struct.unpack_from('>B', objstrings, offset)[0]
    tilelist.pop()
    # Reverse slopes store their sections swapped; rotate them back.
    if (upperslope[0] & 0x80) and (upperslope[0] & 0x2):
        for i in range(lowerslope[1]):
            pop = tilelist.pop()
            tilelist.insert(0, pop)
    if randLen:
        Tileset.addObject(metaData[3], metaData[2], metaData[5], upperslope, lowerslope, tilelist)
    else:
        Tileset.addObject(metaData[3], metaData[2], 0, upperslope, lowerslope, tilelist)
    count = len(Tileset.objects)
    object = Tileset.objects[count-1]
    tileImage = QtGui.QPixmap(dirName + "/" + jsonData["img"])
    nmlImage = QtGui.QPixmap(dirName + "/" + jsonData["nml"])
    if randLen:
        # Randomized: one row of frames, copied into the contiguous run.
        tex = tileImage.copy(0,0,60,60)
        colls_off = 0
        for z in range(randLen):
            Tileset.tiles[tileNum + z].image = tileImage.copy(z*60,0,60,60)
            Tileset.tiles[tileNum + z].normalmap = nmlImage.copy(z*60,0,60,60)
            Tileset.tiles[tileNum + z].setCollision(struct.unpack_from('<Q', colls, colls_off)[0])
            colls_off += 8
    else:
        # Regular: copy each referenced tile once and build a preview.
        tex = QtGui.QPixmap(object.width * 60, object.height * 60)
        tex.fill(Qt.transparent)
        painter = QtGui.QPainter(tex)
        Xoffset = 0
        Yoffset = 0
        colls_off = 0
        tilesReplaced = []
        for row in object.tiles:
            for tile in row:
                if tile[2] & 3 or not Tileset.slot:
                    if tile[1] not in tilesReplaced:
                        tilesReplaced.append(tile[1])
                        Tileset.tiles[tile[1]].image = tileImage.copy(Xoffset,Yoffset,60,60)
                        Tileset.tiles[tile[1]].normalmap = nmlImage.copy(Xoffset,Yoffset,60,60)
                        Tileset.tiles[tile[1]].setCollision(struct.unpack_from('<Q', colls, colls_off)[0])
                    painter.drawPixmap(Xoffset, Yoffset, Tileset.tiles[tile[1]].image)
                Xoffset += 60
                colls_off += 8
            Xoffset = 0
            Yoffset += 60
        painter.end()
    self.setuptile()
    self.objmodel.appendRow(QtGui.QStandardItem(QtGui.QIcon(tex.scaledToWidth(tex.width() / 60 * 24, Qt.SmoothTransformation)), 'Object {0}'.format(count-1)))
    index = self.objectList.currentIndex()
    self.objectList.setCurrentIndex(index)
    self.tileWidget.setObject(index)
    self.objectList.update()
    self.tileWidget.update()
@staticmethod
def exportObject(name, baseName, n):
    """Export object *n* to disk as <name>.png/_nml.png/.colls/.objlyt/.meta/.json.

    name: output path prefix (no extension); baseName: filename prefix
    recorded inside the .json so the files can be located on import.
    """
    object = Tileset.objects[n]
    object.jsonData = {}
    # Randomized 1x1 objects export one frame per random variant in a row.
    if object.randLen and (object.width, object.height) == (1, 1):
        tex = QtGui.QPixmap(object.randLen * 60, object.height * 60)
    else:
        tex = QtGui.QPixmap(object.width * 60, object.height * 60)
    tex.fill(Qt.transparent)
    painter = QtGui.QPainter(tex)
    Xoffset = 0
    Yoffset = 0
    Tilebuffer = b''
    # First pass: regular tile images + collision data.
    for i in range(len(object.tiles)):
        for tile in object.tiles[i]:
            if object.randLen and (object.width, object.height) == (1, 1):
                for z in range(object.randLen):
                    if (Tileset.slot == 0) or ((tile[2] & 3) != 0):
                        painter.drawPixmap(Xoffset, Yoffset, Tileset.tiles[tile[1] + z].image)
                        Tilebuffer += struct.pack('<Q', Tileset.tiles[tile[1] + z].getCollision())
                    Xoffset += 60
                break
            else:
                if (Tileset.slot == 0) or ((tile[2] & 3) != 0):
                    painter.drawPixmap(Xoffset, Yoffset, Tileset.tiles[tile[1]].image)
                    Tilebuffer += struct.pack('<Q', Tileset.tiles[tile[1]].getCollision())
                Xoffset += 60
        Xoffset = 0
        Yoffset += 60
    painter.end()
    # Build the layout string: same byte format as PackObjects (0xFE ends
    # a row, 0xFF ends the object, high-bit bytes control slopes).
    # Slopes
    if object.upperslope[0] != 0:
        # Reverse Slopes
        if object.upperslope[0] & 0x2:
            a = struct.pack('>B', object.upperslope[0])
            for row in range(object.lowerslope[1], object.height):
                for tile in object.tiles[row]:
                    a += struct.pack('>BBB', tile[0], tile[1], tile[2])
                a += b'\xfe'
            if object.height > 1 and object.lowerslope[1]:
                a += struct.pack('>B', object.lowerslope[0])
                for row in range(0, object.lowerslope[1]):
                    for tile in object.tiles[row]:
                        a += struct.pack('>BBB', tile[0], tile[1], tile[2])
                    a += b'\xfe'
            a += b'\xff'
        # Regular Slopes
        else:
            a = struct.pack('>B', object.upperslope[0])
            for row in range(0, object.upperslope[1]):
                for tile in object.tiles[row]:
                    a += struct.pack('>BBB', tile[0], tile[1], tile[2])
                a += b'\xfe'
            if object.height > 1 and object.lowerslope[1]:
                a += struct.pack('>B', object.lowerslope[0])
                for row in range(object.upperslope[1], object.height):
                    for tile in object.tiles[row]:
                        a += struct.pack('>BBB', tile[0], tile[1], tile[2])
                    a += b'\xfe'
            a += b'\xff'
    # Not slopes!
    else:
        a = b''
        for tilerow in object.tiles:
            for tile in tilerow:
                a += struct.pack('>BBB', tile[0], tile[1], tile[2])
            a += b'\xfe'
        a += b'\xff'
    Objbuffer = a
    # NOTE(review): the first metadata field is normally the offset into a
    # multi-object layout buffer; writing len(Objbuffer) for n != 0 looks
    # suspicious for a single-object export -- confirm against the importer.
    Metabuffer = struct.pack('>HBBH', (0 if n == 0 else len(Objbuffer)), object.width, object.height, object.getRandByte())
    tex.save(name + ".png", "PNG")
    object.jsonData['img'] = baseName + ".png"
    with open(name + ".colls", "wb+") as colls:
        colls.write(Tilebuffer)
    object.jsonData['colls'] = baseName + ".colls"
    with open(name + ".objlyt", "wb+") as objlyt:
        objlyt.write(Objbuffer)
    object.jsonData['objlyt'] = baseName + ".objlyt"
    with open(name + ".meta", "wb+") as meta:
        meta.write(Metabuffer)
    object.jsonData['meta'] = baseName + ".meta"
    if object.randLen and (object.width, object.height) == (1, 1):
        object.jsonData['randLen'] = object.randLen
    # Second pass: same layout, but rendering the normal-map images.
    if object.randLen and (object.width, object.height) == (1, 1):
        tex = QtGui.QPixmap(object.randLen * 60, object.height * 60)
    else:
        tex = QtGui.QPixmap(object.width * 60, object.height * 60)
    tex.fill(Qt.transparent)
    painter = QtGui.QPainter(tex)
    Xoffset = 0
    Yoffset = 0
    for i in range(len(object.tiles)):
        for tile in object.tiles[i]:
            if object.randLen and (object.width, object.height) == (1, 1):
                for z in range(object.randLen):
                    if (Tileset.slot == 0) or ((tile[2] & 3) != 0):
                        painter.drawPixmap(Xoffset, Yoffset, Tileset.tiles[tile[1] + z].normalmap)
                    Xoffset += 60
                break
            else:
                if (Tileset.slot == 0) or ((tile[2] & 3) != 0):
                    painter.drawPixmap(Xoffset, Yoffset, Tileset.tiles[tile[1]].normalmap)
                Xoffset += 60
        Xoffset = 0
        Yoffset += 60
    painter.end()
    tex.save(name + "_nml.png", "PNG")
    object.jsonData['nml'] = baseName + "_nml.png"
    with open(name + ".json", 'w+') as outfile:
        json.dump(object.jsonData, outfile)
def saveAllObjects(self):
    """Export every object in the tileset into a user-chosen folder."""
    folder = QtWidgets.QFileDialog.getExistingDirectory(
        None, "Choose where to save the Object folder")
    if not folder:
        return
    for index in range(len(Tileset.objects)):
        baseName = "object_%d" % index
        self.exportObject(os.path.join(folder, baseName), baseName, index)
def saveObject(self):
    """Ask which object to export, then export it to a chosen .json path."""
    if not Tileset.objects:
        return
    dlg = getObjNum()
    if dlg.exec_() != QtWidgets.QDialog.Accepted:
        return
    n = dlg.objNum.value()
    fn = QtWidgets.QFileDialog.getSaveFileName(
        None, "Save Objects", "", "Object files (*.json)")[0]
    if not fn:
        return
    prefix = os.path.splitext(fn)[0]
    self.exportObject(prefix, os.path.basename(prefix), n)
def clearObjects(self):
    """Drop every object definition and refresh the object views."""
    Tileset.objects = []
    SetupObjectModel(self.objmodel, Tileset.objects, Tileset.tiles)
    self.objectList.update()
    self.tileWidget.update()
def clearCollisions(self):
    """Reset every tile's collision value to zero and refresh the display."""
    for tile in Tileset.tiles:
        tile.setCollision(0)
    self.updateInfo(0, 0)
    self.tileDisplay.update()
def setupWidgets(self):
    """Create and wire up the main window's widgets and layout."""
    frame = QtWidgets.QFrame()
    frameLayout = QtWidgets.QGridLayout(frame)
    # Displays the tiles
    self.tileDisplay = displayWidget()
    # Info Box for tile information
    self.infoDisplay = InfoBox(self)
    # Sets up the model for the tile pieces
    self.model = PiecesModel(self)
    self.tileDisplay.setModel(self.model)
    # Object List
    self.objectList = objectList()
    self.objmodel = QtGui.QStandardItemModel()
    SetupObjectModel(self.objmodel, Tileset.objects, Tileset.tiles)
    self.objectList.setModel(self.objmodel)
    # Creates the Tab Widget for behaviours and objects
    self.tabWidget = QtWidgets.QTabWidget()
    self.tileWidget = tileOverlord()
    self.paletteWidget = paletteWidget(self)
    # Objects Tab
    self.container = QtWidgets.QWidget()
    layout = QtWidgets.QVBoxLayout()
    layout.addWidget(self.objectList)
    layout.addWidget(self.tileWidget)
    self.container.setLayout(layout)
    # Animations Tab
    self.animWidget = animWidget()
    # Sets the Tabs
    self.tabWidget.addTab(self.paletteWidget, 'Behaviours')
    self.tabWidget.addTab(self.container, 'Objects')
    self.tabWidget.addTab(self.animWidget, 'Animations')
    # Animations only exist for the main (slot 0) tileset.
    self.tabWidget.setTabEnabled(2, self.slot == 0)
    # Connections do things!
    self.tileDisplay.clicked.connect(self.paintFormat)
    self.tileDisplay.mouseMoved.connect(self.updateInfo)
    self.objectList.clicked.connect(self.tileWidget.setObject)
    # Layout
    frameLayout.addWidget(self.infoDisplay, 0, 0, 1, 1)
    frameLayout.addWidget(self.tileDisplay, 1, 0)
    frameLayout.addWidget(self.tabWidget, 0, 1, 2, 1)
    self.setCentralWidget(frame)
def updateInfo(self, x, y):
    """Refresh the info box for the tile under widget coordinates (x, y)."""
    cell = self.tileDisplay.indexAt(QtCore.QPoint(x, y))
    curTile = Tileset.tiles[cell.row()]
    info = self.infoDisplay
    palette = self.paletteWidget
    # Human-readable names for the known solidity values.
    solidityNames = {
        1: 'Solid',
        2: 'Solid-on-Top',
        3: 'Solid-on-Bottom',
        4: 'Solid-on-Top and Bottom',
        0x11: 'Slide (1)',
        0x12: 'Slide (2)',
        0x21: 'Staircase (1)',
        0x22: 'Staircase (2)',
    }
    propertyList = []
    if curTile.solidity in solidityNames:
        propertyList.append(solidityNames[curTile.solidity])
    propertyText = ', '.join(propertyList) if propertyList else 'None'
    # Resolve the parameter entry, guarding against out-of-range indices.
    if palette.ParameterList[curTile.coreType] is not None:
        if curTile.params < len(palette.ParameterList[curTile.coreType]):
            parameter = palette.ParameterList[curTile.coreType][curTile.params]
        else:
            print('Error 1: %d, %d, %d' % (cell.row(), curTile.coreType, curTile.params))
            parameter = ['', QtGui.QIcon()]
    else:
        parameter = ['', QtGui.QIcon()]
    info.coreImage.setPixmap(palette.coreTypes[curTile.coreType][1].pixmap(24, 24))
    info.terrainImage.setPixmap(palette.terrainTypes[curTile.terrain][1].pixmap(24, 24))
    info.parameterImage.setPixmap(parameter[1].pixmap(24, 24))
    info.coreInfo.setText(palette.coreTypes[curTile.coreType][0])
    info.propertyInfo.setText(propertyText)
    info.terrainInfo.setText(palette.terrainTypes[curTile.terrain][0])
    info.paramInfo.setText(parameter[0])
    info.hexdata.setText('Hex Data: {0} {1}\n {2} {3} {4}'.format(
        hex(curTile.coreType), hex(curTile.params),
        hex(curTile.params2), hex(curTile.solidity), hex(curTile.terrain)))
def paintFormat(self, index):
    """Apply the palette's current settings to the clicked tile."""
    # The Objects tab is active: clicks select objects, not behaviours.
    if self.tabWidget.currentIndex() == 1:
        return
    curTile = Tileset.tiles[index.row()]
    palette = self.paletteWidget
    # Find the checked Core widget
    for i, w in enumerate(palette.coreWidgets):
        if w.isChecked():
            curTile.coreType = i
            break
    # NOTE(review): 'i' is reused below after the loop; if no widget is
    # checked it silently holds the last index -- confirm exactly one core
    # widget is always checked.
    if palette.ParameterList[i] is not None:
        curTile.params = palette.parameters1.currentIndex()
    else:
        curTile.params = 0
    if palette.ParameterList2[i] is not None:
        curTile.params2 = palette.parameters2.currentIndex()
    else:
        curTile.params2 = 0
    curTile.solidity = palette.collsType.currentIndex()
    # Combo indices 5-8 map onto the sparse slide/staircase codes
    # (0x11, 0x12, 0x21, 0x22).
    if curTile.solidity in [5, 6]:
        curTile.solidity += 0xC
    elif curTile.solidity in [7, 8]:
        curTile.solidity += 0x1A
    curTile.terrain = palette.terrainType.currentIndex()
    self.updateInfo(0, 0)
    self.tileDisplay.update()
#############################################################################################
######################## Widget for selecting the object to export ##########################
class getObjNum(QtWidgets.QDialog):
    """Modal dialog that asks which object index to export."""

    def __init__(self):
        """Build the spin box (0 .. object count - 1) and OK/Cancel buttons."""
        super().__init__()
        self.setWindowTitle('Choose Object')
        self.objNum = QtWidgets.QSpinBox()
        self.objNum.setRange(0, len(Tileset.objects) - 1)
        self.objNum.setValue(0)
        buttons = QtWidgets.QDialogButtonBox(
            QtWidgets.QDialogButtonBox.Ok | QtWidgets.QDialogButtonBox.Cancel)
        buttons.accepted.connect(self.accept)
        buttons.rejected.connect(self.reject)
        layout = QtWidgets.QVBoxLayout()
        layout.addWidget(self.objNum)
        layout.addWidget(buttons)
        self.setLayout(layout)
| gpl-3.0 | -3,503,609,789,299,445,000 | 33.373718 | 389 | 0.540328 | false |
nathanielvarona/airflow | airflow/contrib/operators/docker_swarm_operator.py | 1 | 1181 | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This module is deprecated. Please use :mod:`airflow.providers.docker.operators.docker_swarm`."""
import warnings
# pylint: disable=unused-import
from airflow.providers.docker.operators.docker_swarm import DockerSwarmOperator # noqa
# Emit a DeprecationWarning at import time so callers migrate to the
# provider package; stacklevel=2 attributes the warning to the importer.
warnings.warn(
    "This module is deprecated. Please use `airflow.providers.docker.operators.docker_swarm`.",
    DeprecationWarning,
    stacklevel=2,
)
| apache-2.0 | -4,171,271,246,935,747,000 | 39.724138 | 99 | 0.77138 | false |
jsilhan/dnf | dnf/cli/completion_helper.py | 3 | 6949 | #!/usr/bin/env python
#
# This file is part of dnf.
#
# Copyright 2015 (C) Igor Gnatenko <[email protected]>
# Copyright 2016 (C) Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
import dnf.exceptions
import dnf.cli
import dnf.cli.commands.clean
import sys
def filter_list_by_kw(kw, lst):
    """Return the items of *lst* whose string form starts with *kw*.

    A list comprehension replaces the old filter()+lambda: it is more
    idiomatic and the returned list can be iterated more than once.
    """
    return [item for item in lst if str(item).startswith(kw)]
def listpkg_to_setstr(pkgs):
    """Return the set of str() representations of the given packages."""
    # Set comprehension: clearer than building a list first and wrapping
    # it in set().
    return {str(pkg) for pkg in pkgs}
class RemoveCompletionCommand(dnf.cli.commands.remove.RemoveCommand):
    """Completion helper for `dnf remove`: prints matching installed packages."""

    def __init__(self, args):
        # Delegates straight to the real RemoveCommand.
        super(RemoveCompletionCommand, self).__init__(args)

    def configure(self):
        # Completion must work without root; only the installed sack is needed.
        self.cli.demands.root_user = False
        self.cli.demands.sack_activation = True

    def run(self):
        """Print installed packages whose name matches the partial spec."""
        for pkg in ListCompletionCommand.installed(self.base, self.opts.pkg_specs):
            print(str(pkg))
class InstallCompletionCommand(dnf.cli.commands.install.InstallCommand):
    """Completion helper for `dnf install`: prints available-but-not-installed
    packages matching the partial spec."""

    def __init__(self, args):
        # Delegates straight to the real InstallCommand.
        super(InstallCompletionCommand, self).__init__(args)

    def configure(self):
        # Completion must work without root; needs the repo metadata too.
        self.cli.demands.root_user = False
        self.cli.demands.available_repos = True
        self.cli.demands.sack_activation = True

    def run(self):
        """Print packages that could be installed (available minus installed)."""
        installed = listpkg_to_setstr(ListCompletionCommand.installed(self.base, self.opts.pkg_specs))
        available = listpkg_to_setstr(ListCompletionCommand.available(self.base, self.opts.pkg_specs))
        for pkg in (available - installed):
            print(str(pkg))
class ReinstallCompletionCommand(dnf.cli.commands.reinstall.ReinstallCommand):
    """Completion helper for `dnf reinstall`: prints packages that are both
    installed and still available in the repositories."""

    def __init__(self, args):
        # Delegates straight to the real ReinstallCommand.
        super(ReinstallCompletionCommand, self).__init__(args)

    def configure(self):
        # Completion must work without root; needs the repo metadata too.
        self.cli.demands.root_user = False
        self.cli.demands.available_repos = True
        self.cli.demands.sack_activation = True

    def run(self):
        """Print reinstallable packages (intersection of installed and available)."""
        installed = listpkg_to_setstr(ListCompletionCommand.installed(self.base, self.opts.packages))
        available = listpkg_to_setstr(ListCompletionCommand.available(self.base, self.opts.packages))
        for pkg in (installed & available):
            print(str(pkg))
class ListCompletionCommand(dnf.cli.commands.ListCommand):
    """Completion helper for `dnf list`; also provides the package-query
    static helpers reused by the other completion commands."""

    def __init__(self, args):
        # Delegates straight to the real ListCommand.
        super(ListCompletionCommand, self).__init__(args)

    def run(self):
        """Complete either the pkgnarrow subcommand or the package name."""
        subcmds = self.pkgnarrows
        args = self.opts.packages
        if args[0] not in subcmds:
            # The first word is still being typed: complete the subcommand.
            print("\n".join(filter_list_by_kw(args[1], subcmds)))
        else:
            if args[0] == "installed":
                pkgs = self.installed(self.base, args[1])
            elif args[0] == "available":
                pkgs = self.available(self.base, args[1])
            elif args[0] == "updates":
                pkgs = self.updates(self.base, args[1])
            else:
                # Other pkgnarrows are not completed.
                return
            for pkg in pkgs:
                print(str(pkg))

    @staticmethod
    def installed(base, arg):
        """Installed packages whose name starts with *arg*."""
        return base.sack.query().installed().filter(name__glob="{}*".format(arg))

    @staticmethod
    def available(base, arg):
        """Available packages whose name starts with *arg*."""
        return base.sack.query().available().filter(name__glob="{}*".format(arg))

    @staticmethod
    def updates(base, arg):
        """Upgradable packages whose name starts with *arg*."""
        return base.check_updates(["{}*".format(arg)], print_=False)
class RepoListCompletionCommand(dnf.cli.commands.repolist.RepoListCommand):
    """Completion helper for `dnf repolist`: prints matching repo ids."""

    def __init__(self, args):
        # Delegates straight to the real RepoListCommand.
        super(RepoListCompletionCommand, self).__init__(args)

    def run(self):
        """Print enabled or disabled repo ids matching the partial id."""
        args = self.opts.extcmds
        if args[0] == "enabled":
            print("\n".join(filter_list_by_kw(args[1], [r.id for r in self.base.repos.iter_enabled()])))
        elif args[0] == "disabled":
            print("\n".join(filter_list_by_kw(args[1], [r.id for r in self.base.repos.all() if not r.enabled])))
class UpgradeCompletionCommand(dnf.cli.commands.upgrade.UpgradeCommand):
    """Completion helper for `dnf upgrade`: prints matching upgradable packages."""

    def __init__(self, args):
        # Delegates straight to the real UpgradeCommand.
        super(UpgradeCompletionCommand, self).__init__(args)

    def configure(self):
        # Completion must work without root; needs the repo metadata too.
        self.cli.demands.root_user = False
        self.cli.demands.available_repos = True
        self.cli.demands.sack_activation = True

    def run(self):
        """Print packages with pending updates matching the partial spec."""
        for pkg in ListCompletionCommand.updates(self.base, self.opts.pkg_specs):
            print(str(pkg))
class DowngradeCompletionCommand(dnf.cli.commands.downgrade.DowngradeCommand):
    """Completion helper for `dnf downgrade`: prints matching downgradable
    packages."""

    def __init__(self, args):
        # Delegates straight to the real DowngradeCommand.
        super(DowngradeCompletionCommand, self).__init__(args)

    def configure(self):
        # Completion must work without root; needs the repo metadata too.
        self.cli.demands.root_user = False
        self.cli.demands.available_repos = True
        self.cli.demands.sack_activation = True

    def run(self):
        """Print available downgrades matching the partial spec."""
        for pkg in ListCompletionCommand.available(self.base, self.opts.package).downgrades():
            print(str(pkg))
class CleanCompletionCommand(dnf.cli.commands.clean.CleanCommand):
    """Completion helper for `dnf clean`: prints the matching cache types."""

    def __init__(self, args):
        # Delegates straight to the real CleanCommand.
        super(CleanCompletionCommand, self).__init__(args)

    def run(self):
        """Print the cache-type keywords matching the partial argument."""
        subcmds = dnf.cli.commands.clean._CACHE_TYPES.keys()
        print("\n".join(filter_list_by_kw(self.opts.type[1], subcmds)))
class HistoryCompletionCommand(dnf.cli.commands.HistoryCommand):
    """Completion helper for `dnf history`: prints matching subcommands."""

    def __init__(self, args):
        # Delegates straight to the real HistoryCommand.
        super(HistoryCompletionCommand, self).__init__(args)

    # NOTE(review): unlike the other completion commands, run() takes an
    # extra 'args' parameter and also reads self.opts.tid -- this does not
    # match the sibling classes' run(self) signature; confirm how the CLI
    # actually invokes it.
    def run(self, args):
        """Print the history subcommands matching the partial argument."""
        # Parses the subcommand names out of the base class's usage string.
        subcmds = self.__class__.__base__.usage[1:-1].split("|")
        if args[0] not in subcmds:
            print("\n".join(filter_list_by_kw(self.opts.tid, subcmds)))
def main(args):
    """Completion entry point.

    args[0] is either '_cmds' (complete the dnf command names) or a command
    name whose completion class should handle the rest of the arguments.
    """
    base = dnf.cli.cli.BaseCli()
    cli = dnf.cli.Cli(base)
    if args[0] == "_cmds":
        base.init_plugins([], cli)
        print("\n".join(filter_list_by_kw(args[1], cli.cli_commands)))
        return
    # Replace the normal command set with the completion variants.
    cli.cli_commands.clear()
    for command in (RemoveCompletionCommand, InstallCompletionCommand,
                    ReinstallCompletionCommand, ListCompletionCommand,
                    RepoListCompletionCommand, UpgradeCompletionCommand,
                    DowngradeCompletionCommand, CleanCompletionCommand,
                    HistoryCompletionCommand):
        cli.register_command(command)
    cli.configure(args)
    try:
        cli.run()
    except dnf.exceptions.Error:
        # Completion should stay quiet on dnf errors.
        sys.exit(0)
if __name__ == "__main__":
try:
main(sys.argv[1:])
except KeyboardInterrupt:
sys.exit(1)
| gpl-2.0 | 3,336,101,952,810,042,400 | 33.919598 | 112 | 0.660095 | false |
mishfit/ZeroNet | src/lib/websocket/_logging.py | 16 | 1926 | """
websocket - WebSocket client library for Python
Copyright (C) 2010 Hiroki Ohtani(liris)
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1335 USA
"""
import logging

# One shared logger for the whole websocket package.
_logger = logging.getLogger('websocket')
# Tracing is off until enableTrace(True) is called.
_traceEnabled = False

__all__ = ["enableTrace", "dump", "error", "warning", "debug", "trace",
           "isEnabledForError", "isEnabledForDebug"]


def enableTrace(traceable):
    """Turn tracing on or off.

    traceable: boolean value. When True, the package logger is raised to
    DEBUG level and a StreamHandler is attached (once) so trace output is
    actually emitted.
    """
    global _traceEnabled
    _traceEnabled = traceable
    if not traceable:
        return
    # Attach a handler only on first activation to avoid duplicate output.
    if not _logger.handlers:
        _logger.addHandler(logging.StreamHandler())
    _logger.setLevel(logging.DEBUG)


def dump(title, message):
    """Log *message* framed by a *title* banner, but only while tracing."""
    if not _traceEnabled:
        return
    _logger.debug("--- " + title + " ---")
    _logger.debug(message)
    _logger.debug("-----------------------")


def error(msg):
    """Log *msg* at ERROR level."""
    _logger.error(msg)


def warning(msg):
    """Log *msg* at WARNING level."""
    _logger.warning(msg)


def debug(msg):
    """Log *msg* at DEBUG level."""
    _logger.debug(msg)


def trace(msg):
    """Log *msg* at DEBUG level, but only while tracing is enabled."""
    if _traceEnabled:
        _logger.debug(msg)


def isEnabledForError():
    """Return True when ERROR records would be processed."""
    return _logger.isEnabledFor(logging.ERROR)


def isEnabledForDebug():
    """Return True when DEBUG records would be processed."""
    return _logger.isEnabledFor(logging.DEBUG)
| gpl-2.0 | 1,187,330,186,203,141,600 | 25.027027 | 71 | 0.67757 | false |
swordmaster2k/robotics | framework/model/simulated_robot.py | 1 | 2475 | import time
from .robot import Robot
from framework.event.events import ScanResult
from framework.event.events import OdometryReport
'''
A generic Robot class which (may) represent(s) a hardware robot that
implements the communications interface defined by the robonav tool.
It is possible to use this class for simulations where no hardware
robot exists.
It can communicate with a hardware robot using Bluetooth, WiFi,
Ethernet, Serial, InfraRed, etc. using the abstracted connection
approach.
This class keeps track of the robots position, orientation, the path it
has traversed, physical dimensions, state, and the cell resolution
it is operating in.
It does not matter if the robot is a wheeled, tracked, bipod, etc. as
long as the hardware conforms to the generic interface required by
the robonav tool.
'''
class SimulatedRobot(Robot):
    """A purely software robot used for simulations.

    Exposes the same movement/scan interface as a hardware robot but does
    no real I/O: the movement commands are no-ops, ping() fabricates a
    zero-distance ScanResult, and go_to() teleports straight to the
    target cell.
    """

    def __init__(self, connection):
        """Initialise the robot using the connection specified."""
        Robot.__init__(self, connection)

    def go_forward(self):
        """Forward motion is a no-op in simulation."""
        return

    def go_backward(self):
        """Backward motion is a no-op in simulation."""
        return

    def rotate_left(self):
        """Left rotation is a no-op in simulation."""
        return

    def rotate_right(self):
        """Right rotation is a no-op in simulation."""
        return

    def halt(self):
        """Halting is a no-op in simulation."""
        return

    def scan(self):
        """Scanning is a no-op in simulation."""
        return

    def ping(self):
        """Fake a ping by sending listeners a zero-distance scan result.

        A distance of 0 has no effect on the map, so this acts as a
        harmless keep-alive event.
        """
        for listener in self.connection.listeners:
            listener.handle_event(ScanResult([0]))

    def change_odometry(self, x, y, heading):
        """Overwrite the robot's odometry and record the new cell on the trail."""
        self.x = x
        self.y = y
        self.heading = heading
        self.trail.append([self.get_cell_x(), self.get_cell_y()])

    def go_to(self, x, y):
        """Teleport to cell (x, y), record it, and emit an odometry report.

        The one-second sleep crudely simulates travel time before the
        OdometryReport is delivered to listeners.
        """
        self.x = x * self.cell_size
        self.y = y * self.cell_size
        self.trail.append([self.get_cell_x(), self.get_cell_y()])
        self.state = "Travelled"
        time.sleep(1)
        self.notify_listeners(OdometryReport(self.x, self.y, self.heading))
| gpl-3.0 | 3,415,311,852,639,462,000 | 21.098214 | 75 | 0.633535 | false |
wadadaaa/marta | docs/conf.py | 1 | 7775 | # -*- coding: utf-8 -*-
#
# marta documentation build configuration file, created by
# sphinx-quickstart.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import os
import sys
# NOTE(review): os/sys are only needed by the (commented-out) sys.path tweak
# below; they are part of the standard sphinx-quickstart layout.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = []
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'marta'
# NOTE(review): "{{ cookiecutter.now[:4] }}" is an unrendered cookiecutter
# template placeholder — this conf.py appears to have been copied out of a
# project template without rendering; confirm and replace with the real year.
copyright = u"{{ cookiecutter.now[:4] }}, wadadaaa"
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1'
# The full version, including alpha/beta/rc tags.
release = '0.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
# language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages.  See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further.  For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents.  If None, it defaults to
# "<project> v<release> documentation".
# html_title = None
# A shorter title for the navigation bar.  Default is the same as html_title.
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
# html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
# If false, no module index is generated.
# html_domain_indices = True
# If false, no index is generated.
# html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it.  The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'martadoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    # 'papersize': 'letterpaper',
    # The font size ('10pt', '11pt' or '12pt').
    # 'pointsize': '10pt',
    # Additional stuff for the LaTeX preamble.
    # 'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
    ('index',
     'marta.tex',
     u'marta Documentation',
     u"wadadaaa", 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
# If true, show page references after internal links.
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
# latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    ('index', 'marta', u'marta Documentation',
     [u"wadadaaa"], 1)
]
# If true, show URL addresses after external links.
# man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
    ('index', 'marta', u'marta Documentation',
     u"wadadaaa", 'marta',
     # NOTE(review): placeholder description left over from the template.
     'A short description of the project.', 'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
# texinfo_appendices = []
# If false, no module index is generated.
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
# texinfo_show_urls = 'footnote'
| bsd-3-clause | 2,505,709,013,565,696,500 | 30.734694 | 80 | 0.694662 | false |
jmartinm/inspire-next | inspire/dojson/hep/fields/bd3xx.py | 5 | 1291 | # -*- coding: utf-8 -*-
#
# This file is part of INSPIRE.
# Copyright (C) 2014, 2015 CERN.
#
# INSPIRE is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# INSPIRE is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with INSPIRE. If not, see <http://www.gnu.org/licenses/>.
#
# In applying this licence, CERN does not waive the privileges and immunities
# granted to it by virtue of its status as an Intergovernmental Organization
# or submit itself to any jurisdiction.
"""MARC 21 model definition."""
from dojson import utils
from ..model import hep, hep2marc
@hep.over('page_nr', '^300..')
@utils.for_each_value
def page_nr(self, key, value):
    """Extract the page number from MARC field 300, subfield $a."""
    return value.get('a')
@hep2marc.over('300', 'page_nr')
@utils.for_each_value
def page_nr2marc(self, key, value):
    """Inverse mapping: put the stored page number back into MARC 300 $a."""
    return {
        'a': value,
    }
| gpl-2.0 | 6,874,719,130,527,898,000 | 29.023256 | 77 | 0.711851 | false |
synctree/synctree-awsebcli | ebcli/lib/elasticbeanstalk.py | 2 | 18489 | # Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import datetime
from cement.utils.misc import minimal_logger
from ..objects.solutionstack import SolutionStack
from ..objects.exceptions import NotFoundError, InvalidStateError, \
AlreadyExistsError
from ..objects.tier import Tier
from ..lib import aws
from ..lib.aws import InvalidParameterValueError
from ..objects.event import Event
from ..objects.environment import Environment
from ..objects.application import Application
from ..resources.strings import strings, responses
LOG = minimal_logger(__name__)
DEFAULT_ROLE_NAME = 'aws-elasticbeanstalk-ec2-role'
def _make_api_call(operation_name, **operation_options):
    # Thin shim that pins the service name, so every wrapper below only has
    # to supply the operation name and its parameters.
    return aws.make_api_call('elasticbeanstalk',
                             operation_name,
                             **operation_options)
def create_application(app_name, descrip):
    """Create an Elastic Beanstalk application.

    Raises AlreadyExistsError when the service reports that an application
    with this name already exists; any other InvalidParameterValueError is
    re-raised untouched.
    """
    LOG.debug('Inside create_application api wrapper')
    try:
        result = _make_api_call('create_application',
                                ApplicationName=app_name,
                                Description=descrip)
    except InvalidParameterValueError as e:
        # The service signals "name already taken" only via the message
        # text, so compare against the known response template.
        string = responses['app.exists'].replace('{app-name}', app_name)
        if e.message == string:
            raise AlreadyExistsError(e)
        else:
            raise e
    return result
def create_application_version(app_name, vers_label, descrip, s3_bucket,
                               s3_key):
    """Create an application version from a bundle already uploaded to S3.

    The description is optional, and the source bundle is attached only
    when BOTH s3_bucket and s3_key are given.
    """
    kwargs = dict()
    if descrip is not None:
        kwargs['Description'] = descrip
    if s3_bucket and s3_key:
        kwargs['SourceBundle'] = {'S3Bucket': s3_bucket,
                                  'S3Key': s3_key}
    LOG.debug('Inside create_application_version api wrapper')
    return _make_api_call('create_application_version',
                          ApplicationName=app_name,
                          VersionLabel=vers_label,
                          **kwargs)
def create_environment(environment):
    """
    Creates an Elastic Beanstalk environment.

    Returns a tuple (Environment object, request id).
    """
    LOG.debug('Inside create_environment api wrapper')
    kwargs = environment.convert_to_kwargs()
    if environment.database:
        # need to know region for database string
        region = aws.get_region_name()
        # Database is a dictionary; attach the region-specific RDS snippet
        # so the environment is created with a database resource.
        kwargs['TemplateSpecification'] = {
            'TemplateSnippets': [
                {'SnippetName': 'RdsExtensionEB',
                 'Order': 10000,
                 'SourceUrl': 'https://s3.amazonaws.com/'
                              'elasticbeanstalk-env-resources-' + region +
                              '/eb_snippets/rds/rds.json'}
            ]
        }
    result = _make_api_call('create_environment', **kwargs)
    # convert to object
    env = _api_to_environment(result)
    request_id = result['ResponseMetadata']['RequestId']
    return env, request_id
def clone_environment(clone):
    """Create a new environment cloned from clone.original_name.

    Returns a tuple (Environment object, request id).
    """
    LOG.debug('Inside clone_environment api wrapper')
    kwargs = clone.convert_to_kwargs()
    kwargs['TemplateSpecification'] = \
        {'TemplateSource': {'EnvironmentName': clone.original_name}}
    result = _make_api_call('create_environment', **kwargs)
    # convert to object
    env = _api_to_environment(result)
    request_id = result['ResponseMetadata']['RequestId']
    return env, request_id
def _api_to_environment(api_dict):
    """Convert a raw environment description dict into an Environment object.

    NOTE: 'SolutionStackName' and 'Tier' are accessed unconditionally and
    will raise KeyError if absent; every other field is optional.
    """
    # Convert solution_stack and tier to objects
    solution_stack = SolutionStack(api_dict['SolutionStackName'])
    tier = api_dict['Tier']
    tier = Tier(tier['Name'], tier['Type'], tier['Version'])
    env = Environment(
        version_label=api_dict.get('VersionLabel'),
        status=api_dict.get('Status'),
        app_name=api_dict.get('ApplicationName'),
        health=api_dict.get('Health'),
        id=api_dict.get('EnvironmentId'),
        date_updated=api_dict.get('DateUpdated'),
        platform=solution_stack,
        description=api_dict.get('Description'),
        name=api_dict.get('EnvironmentName'),
        date_created=api_dict.get('DateCreated'),
        tier=tier,
        cname=api_dict.get('CNAME', 'UNKNOWN'),
        option_settings=api_dict.get('OptionSettings'),
        is_abortable=api_dict.get('AbortableOperationInProgress', False)
    )
    return env
def delete_application(app_name):
    """Delete an application (fails if it still has environments).

    Returns the service request id.
    """
    LOG.debug('Inside delete_application api wrapper')
    result = _make_api_call('delete_application',
                            ApplicationName=app_name)
    return result['ResponseMetadata']['RequestId']
def delete_application_version(app_name, version_label):
    """Delete an application version and its source bundle from S3.

    Returns the service request id, for consistency with the other
    delete_* wrappers in this module.
    """
    LOG.debug('Inside delete_application_version api wrapper')
    result = _make_api_call('delete_application_version',
                            ApplicationName=app_name,
                            VersionLabel=version_label,
                            DeleteSourceBundle=True)
    # Previously the result was assigned but dropped; return the request id
    # like delete_application() and delete_application_and_envs() do.
    return result['ResponseMetadata']['RequestId']
def delete_application_and_envs(app_name):
    """Force-delete an application, terminating its environments first.

    Returns the service request id.
    """
    LOG.debug('Inside delete_application_and_envs')
    result = _make_api_call('delete_application',
                            ApplicationName=app_name,
                            TerminateEnvByForce=True)
    return result['ResponseMetadata']['RequestId']
def describe_application(app_name):
    """Return the raw description dict for a single application.

    Raises NotFoundError unless exactly one application matches.
    """
    LOG.debug('Inside describe_application api wrapper')
    result = _make_api_call('describe_applications',
                            ApplicationNames=[app_name])
    apps = result['Applications']
    if len(apps) != 1:
        raise NotFoundError('Application "' + app_name + '" not found.')
    return apps[0]
def is_cname_available(cname):
    """Return True when the CNAME prefix is still available."""
    LOG.debug('Inside is_cname_available api wrapper')
    result = _make_api_call('check_dns_availability',
                            CNAMEPrefix=cname)
    return result['Available']
def swap_environment_cnames(source_env, dest_env):
    """Swap the CNAMEs of two environments; returns the request id."""
    LOG.debug('Inside swap_environment_cnames api wrapper')
    result = _make_api_call('swap_environment_cnames',
                            SourceEnvironmentName=source_env,
                            DestinationEnvironmentName=dest_env)
    return result['ResponseMetadata']['RequestId']
def describe_applications():
    """Return the raw list of all application description dicts."""
    LOG.debug('Inside describe_applications api wrapper')
    result = _make_api_call('describe_applications')
    return result['Applications']
def describe_configuration_settings(app_name, env_name):
    """Return the first configuration-settings dict for an environment."""
    LOG.debug('Inside describe_configuration_settings api wrapper')
    result = _make_api_call('describe_configuration_settings',
                            ApplicationName=app_name,
                            EnvironmentName=env_name)
    return result['ConfigurationSettings'][0]
def get_option_setting(option_settings, namespace, option):
    """Look up an option value in a list of option-setting dicts.

    :param option_settings: list of dicts with 'Namespace', 'OptionName'
        and (optionally) 'Value' keys, as returned by the EB API.
    :param namespace: the option namespace to match.
    :param option: the option name to match.
    :return: the 'Value' of the first matching setting, or None when the
        matching setting has no 'Value' key or no setting matches.
    """
    for setting in option_settings:
        if (setting['Namespace'] == namespace
                and setting['OptionName'] == option):
            # .get replaces the old try/except KeyError -> None dance.
            return setting.get('Value')
    return None
def create_option_setting(namespace, option, value):
    """Build a single option-setting dict in the shape the EB API expects."""
    setting = dict(Namespace=namespace,
                   OptionName=option,
                   Value=value)
    return setting
def get_specific_configuration(env_config, namespace, option):
    """Look up one option value inside an already-fetched config dict."""
    return get_option_setting(env_config['OptionSettings'], namespace, option)
def get_specific_configuration_for_env(app_name, env_name, namespace, option):
    """Fetch an environment's configuration and look up one option value."""
    env_config = describe_configuration_settings(app_name, env_name)
    return get_specific_configuration(env_config, namespace, option)
def get_available_solution_stacks():
    """Return all available platforms as SolutionStack objects.

    Raises NotFoundError when the service returns an empty list.
    """
    LOG.debug('Inside get_available_solution_stacks api wrapper')
    result = _make_api_call('list_available_solution_stacks')
    stack_strings = result['SolutionStacks']
    LOG.debug('Solution Stack result size = ' + str(len(stack_strings)))
    if len(stack_strings) == 0:
        raise NotFoundError(strings['sstacks.notfound'])
    solution_stacks = [SolutionStack(s) for s in stack_strings]
    return solution_stacks
def get_application_versions(app_name, version_labels=None):
    """Return the raw version dicts for an application.

    version_labels, when given, restricts the result to those labels.
    """
    LOG.debug('Inside get_application_versions api wrapper')
    kwargs = {}
    if version_labels:
        kwargs['VersionLabels'] = version_labels
    result = _make_api_call('describe_application_versions',
                            ApplicationName=app_name,
                            **kwargs)
    return result['ApplicationVersions']
def get_all_applications():
    """Return every application as an Application object.

    'Description' and 'Versions' are optional in the service response and
    default to None when absent (same behavior as the previous
    try/except KeyError handling, expressed with dict.get()).
    """
    LOG.debug('Inside get_all_applications api wrapper')
    result = _make_api_call('describe_applications')
    app_list = []
    for app in result['Applications']:
        app_list.append(
            Application(
                name=app['ApplicationName'],
                date_created=app['DateCreated'],
                date_updated=app['DateUpdated'],
                description=app.get('Description'),
                versions=app.get('Versions'),
                templates=app['ConfigurationTemplates'],
            )
        )
    return app_list
def get_app_environments(app_name):
    """Return all non-deleted environments of an app as Environment objects."""
    LOG.debug('Inside get_app_environments api wrapper')
    result = _make_api_call('describe_environments',
                            ApplicationName=app_name,
                            IncludeDeleted=False)
    # convert to objects
    envs = [_api_to_environment(env) for env in result['Environments']]
    return envs
def get_all_environments():
    """Return every non-deleted environment as an Environment object."""
    LOG.debug('Inside get_all_environments api wrapper')
    result = _make_api_call('describe_environments',
                            IncludeDeleted=False)
    # convert to object
    envs = []
    for env in result['Environments']:
        envs.append(_api_to_environment(env))
    return envs
def get_environment(app_name, env_name):
    """Return one environment as an Environment object.

    Raises NotFoundError when the environment does not exist.
    """
    LOG.debug('Inside get_environment api wrapper')
    result = _make_api_call('describe_environments',
                            ApplicationName=app_name,
                            EnvironmentNames=[env_name],
                            IncludeDeleted=False)
    envs = result['Environments']
    if len(envs) < 1:
        raise NotFoundError('Environment "' + env_name + '" not Found.')
    else:
        return _api_to_environment(envs[0])
def get_environment_settings(app_name, env_name):
    """Return the environment's configuration settings as an Environment.

    NOTE(review): this feeds a configuration-settings dict (not an
    environment-description dict) into _api_to_environment, which reads
    'SolutionStackName' and 'Tier' unconditionally — verify the response
    actually carries those keys.
    """
    LOG.debug('Inside get_environment_settings api wrapper')
    result = _make_api_call('describe_configuration_settings',
                            ApplicationName=app_name,
                            EnvironmentName=env_name)
    return _api_to_environment(result['ConfigurationSettings'][0])
def get_environment_resources(env_name):
    """Return the raw resource description for an environment."""
    LOG.debug('Inside get_environment_resources api wrapper')
    result = _make_api_call('describe_environment_resources',
                            EnvironmentName=env_name)
    return result
def get_new_events(app_name, env_name, request_id,
                   last_event_time=None):
    """Return events newer than *last_event_time* as Event objects.

    app_name, env_name and request_id are each optional filters; falsy
    values are simply omitted from the request.
    """
    LOG.debug('Inside get_new_events api wrapper')
    kwargs = {}
    if app_name:
        kwargs['ApplicationName'] = app_name
    if env_name:
        kwargs['EnvironmentName'] = env_name
    if request_id:
        kwargs['RequestId'] = request_id
    if last_event_time is not None:
        # Ask only for events strictly after the newest one we have seen by
        # nudging the start time forward by one millisecond.
        # str() keeps the value serializable on both Python 2 and 3, where
        # last_event_time may be a datetime or an ISO string respectively.
        new_time = last_event_time + datetime.timedelta(0, 0, 1000)
        kwargs['StartTime'] = str(new_time)
    result = _make_api_call('describe_events',
                            **kwargs)
    # convert to objects; 'VersionLabel' and 'EnvironmentName' are optional
    # in the service response, so default them to None via dict.get().
    events = []
    for event in result['Events']:
        events.append(
            Event(message=event['Message'],
                  event_date=event['EventDate'],
                  version_label=event.get('VersionLabel'),
                  app_name=event['ApplicationName'],
                  environment_name=event.get('EnvironmentName'),
                  severity=event['Severity'],
                  )
        )
    return events
def get_storage_location():
    """Return the account's Elastic Beanstalk S3 bucket name.

    The underlying API is idempotent: it creates the bucket only if needed.
    """
    LOG.debug('Inside get_storage_location api wrapper')
    response = _make_api_call('create_storage_location')
    return response['S3Bucket']
def update_environment(env_name, options, remove=None,
                       template=None, template_body=None,
                       solution_stack_name=None):
    """Update an environment's settings; returns the request id.

    Raises InvalidStateError when the service reports the environment is
    not in a state that can be updated.
    """
    LOG.debug('Inside update_environment api wrapper')
    if remove is None:
        remove = []
    kwargs = {
        'EnvironmentName': env_name,
    }
    if options:
        kwargs['OptionSettings'] = options
    if remove:
        kwargs['OptionsToRemove'] = remove
    if template:
        kwargs['TemplateName'] = template
    if template_body:
        kwargs['TemplateSpecification'] = \
            {'TemplateSource':
                {'SourceContents': template_body}}
    if solution_stack_name:
        kwargs['SolutionStackName'] = solution_stack_name
    try:
        response = _make_api_call('update_environment',
                                  **kwargs)
    except aws.InvalidParameterValueError as e:
        # "invalid state" is only distinguishable by its message text.
        if e.message == responses['env.invalidstate'].replace('{env-name}',
                                                              env_name):
            raise InvalidStateError(e)
        else:
            raise
    return response['ResponseMetadata']['RequestId']
def abort_environment_update(env_name):
    """Abort an in-progress environment update; returns the request id."""
    LOG.debug('Inside abort_environment_update')
    result = _make_api_call('abort_environment_update',
                            EnvironmentName=env_name)
    return result['ResponseMetadata']['RequestId']
def update_env_application_version(env_name,
                                   version_label):
    """Deploy an existing application version; returns the request id."""
    LOG.debug('Inside update_env_application_version api wrapper')
    response = _make_api_call('update_environment',
                              EnvironmentName=env_name,
                              VersionLabel=version_label)
    return response['ResponseMetadata']['RequestId']
def request_environment_info(env_name, info_type):
    """Ask the environment's instances to compile logs/info of *info_type*."""
    result = _make_api_call('request_environment_info',
                            EnvironmentName=env_name,
                            InfoType=info_type)
    return result
def retrieve_environment_info(env_name, info_type):
    """Retrieve previously requested logs/info of *info_type*."""
    result = _make_api_call('retrieve_environment_info',
                            EnvironmentName=env_name,
                            InfoType=info_type)
    return result
def terminate_environment(env_name):
    """Terminate an environment; returns the request id."""
    result = _make_api_call('terminate_environment',
                            EnvironmentName=env_name)
    return result['ResponseMetadata']['RequestId']
def create_configuration_template(app_name, env_name, template_name,
                                  description):
    """Save an environment's current configuration as a named template.

    Raises AlreadyExistsError when a template with this name exists.
    """
    kwargs = {
        'TemplateName': template_name,
        'ApplicationName': app_name,
        'Description': description,
        'TemplateSpecification':
            {'TemplateSource':
                {'EnvironmentName': env_name}},
    }
    try:
        result = _make_api_call('create_configuration_template', **kwargs)
    except InvalidParameterValueError as e:
        # "name exists" is only distinguishable by its message text.
        if e.message == responses['cfg.nameexists'].replace('{name}',
                                                            template_name):
            raise AlreadyExistsError(e.message)
        else:
            raise
    return result
def delete_configuration_template(app_name, template_name):
    """Delete a saved configuration template (no return value)."""
    _make_api_call('delete_configuration_template',
                   ApplicationName=app_name,
                   TemplateName=template_name)
def validate_template(app_name, template_name, platform=None):
    """Validate a configuration template, optionally against *platform*."""
    kwargs = {}
    if platform:
        kwargs['TemplateSpecification'] = \
            {'TemplateSource':
                {'SolutionStackName': platform}}
    result = _make_api_call('validate_configuration_settings',
                            ApplicationName=app_name,
                            TemplateName=template_name,
                            **kwargs)
    return result
def describe_template(app_name, template_name, platform=None):
    """Return the first configuration-settings dict for a saved template.

    When *platform* is given, a TemplateSpecification naming that solution
    stack is attached to the request (mirroring validate_template above).
    """
    kwargs = {}
    if platform:
        kwargs['TemplateSpecification'] = \
            {'TemplateSource':
                {'SolutionStackName': platform}}
    LOG.debug('Inside describe_template api wrapper')
    # Bug fix: kwargs was built but never passed to the call, so the
    # platform argument was silently ignored.
    result = _make_api_call('describe_configuration_settings',
                            ApplicationName=app_name,
                            TemplateName=template_name,
                            **kwargs)
    return result['ConfigurationSettings'][0]
def get_environment_health(env_name, attributes=None):
    """Return enhanced-health information for an environment.

    attributes: optional list of attribute names to request; defaults to
    the full set below. (The None-default pattern avoids a shared mutable
    default list.)
    """
    if attributes is None:
        attributes = [
            "HealthStatus",
            "Status",
            "Color",
            "Causes",
            "ApplicationMetrics",
            "InstancesHealth",
            "RefreshedAt",
        ]
    result = _make_api_call('describe_environment_health',
                            EnvironmentName=env_name,
                            AttributeNames=attributes)
    return result
def get_instance_health(env_name, next_token=None, attributes=None):
if attributes is None:
attributes = [
"HealthStatus",
"Color",
"Causes",
"ApplicationMetrics",
"RefreshedAt",
"LaunchedAt",
"System"
]
kwargs = {}
if next_token:
kwargs['NextToken'] = next_token
result = _make_api_call('describe_instances_health',
EnvironmentName=env_name,
AttributeNames=attributes,
**kwargs)
return result | apache-2.0 | -2,746,825,408,057,453,000 | 32.255396 | 78 | 0.599708 | false |
zurwolf/dotfiles | home/.ipython/profile_base16-railscasts-dark/ipython_notebook_config.py | 1 | 24710 | # Configuration file for ipython-notebook.
c = get_config()
#------------------------------------------------------------------------------
# NotebookApp configuration
#------------------------------------------------------------------------------
# NotebookApp will inherit config from: BaseIPythonApplication, Application
# The url for MathJax.js.
# c.NotebookApp.mathjax_url = ''
# Supply extra arguments that will be passed to Jinja environment.
# c.NotebookApp.jinja_environment_options = {}
# The IP address the notebook server will listen on.
# c.NotebookApp.ip = 'localhost'
# DEPRECATED use base_url
# c.NotebookApp.base_project_url = '/'
# Create a massive crash report when IPython encounters what may be an internal
# error. The default is to append a short message to the usual traceback
# c.NotebookApp.verbose_crash = False
# The random bytes used to secure cookies. By default this is a new random
# number every time you start the Notebook. Set it to a value in a config file
# to enable logins to persist across server sessions.
#
# Note: Cookie secrets should be kept private, do not share config files with
# cookie_secret stored in plaintext (you can read the value from a file).
# c.NotebookApp.cookie_secret = ''
# The number of additional ports to try if the specified port is not available.
# c.NotebookApp.port_retries = 50
# Whether to open in a browser after starting. The specific browser used is
# platform dependent and determined by the python standard library `webbrowser`
# module, unless it is overridden using the --browser (NotebookApp.browser)
# configuration option.
c.NotebookApp.open_browser = True
# The notebook manager class to use.
# c.NotebookApp.notebook_manager_class = 'IPython.html.services.notebooks.filenbmanager.FileNotebookManager'
# The date format used by logging formatters for %(asctime)s
# c.NotebookApp.log_datefmt = '%Y-%m-%d %H:%M:%S'
# The port the notebook server will listen on.
# c.NotebookApp.port = 8888
# Whether to overwrite existing config files when copying
# c.NotebookApp.overwrite = False
# Set the Access-Control-Allow-Origin header
#
# Use '*' to allow any origin to access your server.
#
# Takes precedence over allow_origin_pat.
# c.NotebookApp.allow_origin = ''
# Whether to enable MathJax for typesetting math/TeX
#
# MathJax is the javascript library IPython uses to render math/LaTeX. It is
# very large, so you may want to disable it if you have a slow internet
# connection, or for offline use of the notebook.
#
# When disabled, equations etc. will appear as their untransformed TeX source.
# c.NotebookApp.enable_mathjax = True
# Use a regular expression for the Access-Control-Allow-Origin header
#
# Requests from an origin matching the expression will get replies with:
#
# Access-Control-Allow-Origin: origin
#
# where `origin` is the origin of the request.
#
# Ignored if allow_origin is set.
# c.NotebookApp.allow_origin_pat = ''
# The full path to an SSL/TLS certificate file.
# c.NotebookApp.certfile = u''
# The base URL for the notebook server.
#
# Leading and trailing slashes can be omitted, and will automatically be added.
# c.NotebookApp.base_url = '/'
# The directory to use for notebooks and kernels.
c.NotebookApp.notebook_dir = u'/home/zubieta/Documents/Notebooks'
#
# c.NotebookApp.file_to_run = ''
# The IPython profile to use.
# c.NotebookApp.profile = u'default'
# paths for Javascript extensions. By default, this is just
# IPYTHONDIR/nbextensions
# c.NotebookApp.nbextensions_path = []
# The Logging format template
# c.NotebookApp.log_format = '[%(name)s]%(highlevel)s %(message)s'
# The name of the IPython directory. This directory is used for logging
# configuration (through profiles), history storage, etc. The default is usually
# $HOME/.ipython. This options can also be specified through the environment
# variable IPYTHONDIR.
# c.NotebookApp.ipython_dir = u''
# Set the log level by value or name.
# c.NotebookApp.log_level = 30
# Hashed password to use for web authentication.
#
# To generate, type in a python/IPython shell:
#
# from IPython.lib import passwd; passwd()
#
# The string should be of the form type:salt:hashed-password.
# c.NotebookApp.password = u''
# Set the Access-Control-Allow-Credentials: true header
# c.NotebookApp.allow_credentials = False
# Path to an extra config file to load.
#
# If specified, load this config file in addition to any other IPython config.
# c.NotebookApp.extra_config_file = u''
# Extra paths to search for serving static files.
#
# This allows adding javascript/css to be available from the notebook server
# machine, or overriding individual files in the IPython
# c.NotebookApp.extra_static_paths = []
# Whether to trust or not X-Scheme/X-Forwarded-Proto and X-Real-Ip/X-Forwarded-
# For headerssent by the upstream reverse proxy. Necessary if the proxy handles
# SSL
# c.NotebookApp.trust_xheaders = False
# Whether to install the default config files into the profile dir. If a new
# profile is being created, and IPython contains config files for that profile,
# then they will be staged into the new directory. Otherwise, default config
# files will be automatically generated.
# c.NotebookApp.copy_config_files = False
# The full path to a private key file for usage with SSL/TLS.
# c.NotebookApp.keyfile = u''
# Supply overrides for the tornado.web.Application that the IPython notebook
# uses.
# c.NotebookApp.webapp_settings = {}
# Specify what command to use to invoke a web browser when opening the notebook.
# If not specified, the default browser will be determined by the `webbrowser`
# standard library module, which allows setting of the BROWSER environment
# variable to override it.
c.NotebookApp.browser = u'luakit %s'
#------------------------------------------------------------------------------
# IPKernelApp configuration
#------------------------------------------------------------------------------
# IPython: an enhanced interactive Python shell.
# IPKernelApp will inherit config from: BaseIPythonApplication, Application,
# InteractiveShellApp
# Run the file referenced by the PYTHONSTARTUP environment variable at IPython
# startup.
# c.IPKernelApp.exec_PYTHONSTARTUP = True
# The importstring for the DisplayHook factory
# c.IPKernelApp.displayhook_class = 'IPython.kernel.zmq.displayhook.ZMQDisplayHook'
# Set the IP or interface on which the kernel will listen.
# c.IPKernelApp.ip = u''
# Pre-load matplotlib and numpy for interactive use, selecting a particular
# matplotlib backend and loop integration.
c.IPKernelApp.pylab = u'inline'
# Create a massive crash report when IPython encounters what may be an internal
# error. The default is to append a short message to the usual traceback
# c.IPKernelApp.verbose_crash = False
# The Kernel subclass to be used.
#
# This should allow easy re-use of the IPKernelApp entry point to configure and
# launch kernels other than IPython's own.
# c.IPKernelApp.kernel_class = 'IPython.kernel.zmq.ipkernel.Kernel'
# Run the module as a script.
# c.IPKernelApp.module_to_run = ''
# The date format used by logging formatters for %(asctime)s
# c.IPKernelApp.log_datefmt = '%Y-%m-%d %H:%M:%S'
# set the shell (ROUTER) port [default: random]
# c.IPKernelApp.shell_port = 0
# set the control (ROUTER) port [default: random]
# c.IPKernelApp.control_port = 0
# Whether to overwrite existing config files when copying
# c.IPKernelApp.overwrite = False
# Execute the given command string.
# c.IPKernelApp.code_to_run = ''
# set the stdin (ROUTER) port [default: random]
# c.IPKernelApp.stdin_port = 0
# Set the log level by value or name.
# c.IPKernelApp.log_level = 30
# lines of code to run at IPython startup.
# c.IPKernelApp.exec_lines = []
# Path to an extra config file to load.
#
# If specified, load this config file in addition to any other IPython config.
# c.IPKernelApp.extra_config_file = u''
# The importstring for the OutStream factory
# c.IPKernelApp.outstream_class = 'IPython.kernel.zmq.iostream.OutStream'
# Whether to create profile dir if it doesn't exist
# c.IPKernelApp.auto_create = False
# set the heartbeat port [default: random]
# c.IPKernelApp.hb_port = 0
#
# c.IPKernelApp.transport = 'tcp'
# redirect stdout to the null device
# c.IPKernelApp.no_stdout = False
# Should variables loaded at startup (by startup files, exec_lines, etc.) be
# hidden from tools like %who?
# c.IPKernelApp.hide_initial_ns = True
# dotted module name of an IPython extension to load.
# c.IPKernelApp.extra_extension = ''
# A file to be run
# c.IPKernelApp.file_to_run = ''
# The IPython profile to use.
# c.IPKernelApp.profile = u'default'
#
# c.IPKernelApp.parent_appname = u''
# kill this process if its parent dies. On Windows, the argument specifies the
# HANDLE of the parent process, otherwise it is simply boolean.
# c.IPKernelApp.parent_handle = 0
# JSON file in which to store connection info [default: kernel-<pid>.json]
#
# This file will contain the IP, ports, and authentication key needed to connect
# clients to this kernel. By default, this file will be created in the security
# dir of the current profile, but can be specified by absolute path.
# c.IPKernelApp.connection_file = ''
# If true, IPython will populate the user namespace with numpy, pylab, etc. and
# an ``import *`` is done from numpy and pylab, when using pylab mode.
#
# When False, pylab mode should not import any names into the user namespace.
# c.IPKernelApp.pylab_import_all = True
# The name of the IPython directory. This directory is used for logging
# configuration (through profiles), history storage, etc. The default is usually
# $HOME/.ipython. This options can also be specified through the environment
# variable IPYTHONDIR.
# c.IPKernelApp.ipython_dir = u''
# Configure matplotlib for interactive use with the default matplotlib backend.
# c.IPKernelApp.matplotlib = None
# ONLY USED ON WINDOWS Interrupt this process when the parent is signaled.
# c.IPKernelApp.interrupt = 0
# Whether to install the default config files into the profile dir. If a new
# profile is being created, and IPython contains config files for that profile,
# then they will be staged into the new directory. Otherwise, default config
# files will be automatically generated.
# c.IPKernelApp.copy_config_files = False
# List of files to run at IPython startup.
# c.IPKernelApp.exec_files = []
# Enable GUI event loop integration with any of ('glut', 'gtk', 'gtk3', 'none',
# 'osx', 'pyglet', 'qt', 'qt4', 'tk', 'wx').
# c.IPKernelApp.gui = None
# A list of dotted module names of IPython extensions to load.
c.IPKernelApp.extensions = ['base16_mplrc']
# redirect stderr to the null device
# c.IPKernelApp.no_stderr = False
# The Logging format template
# c.IPKernelApp.log_format = '[%(name)s]%(highlevel)s %(message)s'
# set the iopub (PUB) port [default: random]
# c.IPKernelApp.iopub_port = 0
#------------------------------------------------------------------------------
# ZMQInteractiveShell configuration
#------------------------------------------------------------------------------
# A subclass of InteractiveShell for ZMQ.
# ZMQInteractiveShell will inherit config from: InteractiveShell
# Use colors for displaying information about objects. Because this information
# is passed through a pager (like 'less'), and some pagers get confused with
# color codes, this capability can be turned off.
# c.ZMQInteractiveShell.color_info = True
# A list of ast.NodeTransformer subclass instances, which will be applied to
# user input before code is run.
# c.ZMQInteractiveShell.ast_transformers = []
#
# c.ZMQInteractiveShell.history_length = 10000
# Don't call post-execute functions that have failed in the past.
# c.ZMQInteractiveShell.disable_failing_post_execute = False
# Show rewritten input, e.g. for autocall.
# c.ZMQInteractiveShell.show_rewritten_input = True
# Set the color scheme (NoColor, Linux, or LightBG).
# c.ZMQInteractiveShell.colors = 'Linux'
#
# c.ZMQInteractiveShell.separate_in = '\n'
# Deprecated, use PromptManager.in2_template
# c.ZMQInteractiveShell.prompt_in2 = ' .\\D.: '
#
# c.ZMQInteractiveShell.separate_out = ''
# Deprecated, use PromptManager.in_template
# c.ZMQInteractiveShell.prompt_in1 = 'In [\\#]: '
# Enable deep (recursive) reloading by default. IPython can use the deep_reload
# module which reloads changes in modules recursively (it replaces the reload()
# function, so you don't need to change anything to use it). deep_reload()
# forces a full reload of modules whose code may have changed, which the default
# reload() function does not. When deep_reload is off, IPython will use the
# normal reload(), but deep_reload will still be available as dreload().
# c.ZMQInteractiveShell.deep_reload = False
# Make IPython automatically call any callable object even if you didn't type
# explicit parentheses. For example, 'str 43' becomes 'str(43)' automatically.
# The value can be '0' to disable the feature, '1' for 'smart' autocall, where
# it is not applied if there are no more arguments on the line, and '2' for
# 'full' autocall, where all callable objects are automatically called (even if
# no arguments are present).
# c.ZMQInteractiveShell.autocall = 0
#
# c.ZMQInteractiveShell.separate_out2 = ''
# Deprecated, use PromptManager.justify
# c.ZMQInteractiveShell.prompts_pad_left = True
#
# c.ZMQInteractiveShell.readline_parse_and_bind = ['tab: complete', '"\\C-l": clear-screen', 'set show-all-if-ambiguous on', '"\\C-o": tab-insert', '"\\C-r": reverse-search-history', '"\\C-s": forward-search-history', '"\\C-p": history-search-backward', '"\\C-n": history-search-forward', '"\\e[A": history-search-backward', '"\\e[B": history-search-forward', '"\\C-k": kill-line', '"\\C-u": unix-line-discard']
# Enable magic commands to be called without the leading %.
# c.ZMQInteractiveShell.automagic = True
#
# c.ZMQInteractiveShell.debug = False
#
# c.ZMQInteractiveShell.object_info_string_level = 0
#
# c.ZMQInteractiveShell.ipython_dir = ''
#
# c.ZMQInteractiveShell.readline_remove_delims = '-/~'
# Start logging to the default log file.
# c.ZMQInteractiveShell.logstart = False
# The name of the logfile to use.
# c.ZMQInteractiveShell.logfile = ''
#
# c.ZMQInteractiveShell.wildcards_case_sensitive = True
# Save multi-line entries as one entry in readline history
# c.ZMQInteractiveShell.multiline_history = True
# Start logging to the given file in append mode.
# c.ZMQInteractiveShell.logappend = ''
#
# c.ZMQInteractiveShell.xmode = 'Context'
#
# c.ZMQInteractiveShell.quiet = False
# Deprecated, use PromptManager.out_template
# c.ZMQInteractiveShell.prompt_out = 'Out[\\#]: '
# Set the size of the output cache. The default is 1000, you can change it
# permanently in your config file. Setting it to 0 completely disables the
# caching system, and the minimum value accepted is 20 (if you provide a value
# less than 20, it is reset to 0 and a warning is issued). This limit is
# defined because otherwise you'll spend more time re-flushing a too small cache
# than working
# c.ZMQInteractiveShell.cache_size = 1000
# 'all', 'last', 'last_expr' or 'none', specifying which nodes should be run
# interactively (displaying output from expressions).
# c.ZMQInteractiveShell.ast_node_interactivity = 'last_expr'
# Automatically call the pdb debugger after every exception.
# c.ZMQInteractiveShell.pdb = False
#------------------------------------------------------------------------------
# KernelManager configuration
#------------------------------------------------------------------------------
# Manages a single kernel in a subprocess on this host.
#
# This version starts kernels with Popen.
# KernelManager will inherit config from: ConnectionFileMixin
# The Popen Command to launch the kernel. Override this if you have a custom
# kernel. If kernel_cmd is specified in a configuration file, IPython does not
# pass any arguments to the kernel, because it cannot make any assumptions about
# the arguments that the kernel understands. In particular, this means that the
# kernel does not receive the option --debug if it given on the IPython command
# line.
# c.KernelManager.kernel_cmd = []
# Set the kernel's IP address [default localhost]. If the IP address is
# something other than localhost, then Consoles on other machines will be able
# to connect to the Kernel, so be careful!
# c.KernelManager.ip = u''
#
# c.KernelManager.transport = 'tcp'
# Should we autorestart the kernel if it dies.
# c.KernelManager.autorestart = False
#------------------------------------------------------------------------------
# ProfileDir configuration
#------------------------------------------------------------------------------
# An object to manage the profile directory and its resources.
#
# The profile directory is used by all IPython applications, to manage
# configuration, logging and security.
#
# This object knows how to find, create and manage these directories. This
# should be used by any code that wants to handle profiles.
# Set the profile location directly. This overrides the logic used by the
# `profile` option.
# c.ProfileDir.location = u''
#------------------------------------------------------------------------------
# Session configuration
#------------------------------------------------------------------------------
# Object for handling serialization and sending of messages.
#
# The Session object handles building messages and sending them with ZMQ sockets
# or ZMQStream objects. Objects can communicate with each other over the
# network via Session objects, and only need to work with the dict-based IPython
# message spec. The Session will handle serialization/deserialization, security,
# and metadata.
#
# Sessions support configurable serialization via packer/unpacker traits, and
# signing with HMAC digests via the key/keyfile traits.
#
# Parameters ----------
#
# debug : bool
# whether to trigger extra debugging statements
# packer/unpacker : str : 'json', 'pickle' or import_string
# importstrings for methods to serialize message parts. If just
# 'json' or 'pickle', predefined JSON and pickle packers will be used.
# Otherwise, the entire importstring must be used.
#
# The functions must accept at least valid JSON input, and output *bytes*.
#
# For example, to use msgpack:
# packer = 'msgpack.packb', unpacker='msgpack.unpackb'
# pack/unpack : callables
# You can also set the pack/unpack callables for serialization directly.
# session : bytes
# the ID of this Session object. The default is to generate a new UUID.
# username : unicode
# username added to message headers. The default is to ask the OS.
# key : bytes
# The key used to initialize an HMAC signature. If unset, messages
# will not be signed or checked.
# keyfile : filepath
# The file containing a key. If this is set, `key` will be initialized
# to the contents of the file.
# Username for the Session. Default is your system username.
# c.Session.username = u'zubieta'
# The name of the unpacker for unserializing messages. Only used with custom
# functions for `packer`.
# c.Session.unpacker = 'json'
# Threshold (in bytes) beyond which a buffer should be sent without copying.
# c.Session.copy_threshold = 65536
# The name of the packer for serializing messages. Should be one of 'json',
# 'pickle', or an import name for a custom callable serializer.
# c.Session.packer = 'json'
# The maximum number of digests to remember.
#
# The digest history will be culled when it exceeds this value.
# c.Session.digest_history_size = 65536
# The UUID identifying this session.
# c.Session.session = u''
# The digest scheme used to construct the message signatures. Must have the form
# 'hmac-HASH'.
# c.Session.signature_scheme = 'hmac-sha256'
# execution key, for extra authentication.
# c.Session.key = ''
# Debug output in the Session
# c.Session.debug = False
# The maximum number of items for a container to be introspected for custom
# serialization. Containers larger than this are pickled outright.
# c.Session.item_threshold = 64
# path to file containing execution key.
# c.Session.keyfile = ''
# Threshold (in bytes) beyond which an object's buffer should be extracted to
# avoid pickling.
# c.Session.buffer_threshold = 1024
# Metadata dictionary, which serves as the default top-level metadata dict for
# each message.
# c.Session.metadata = {}
#------------------------------------------------------------------------------
# InlineBackend configuration
#------------------------------------------------------------------------------
# An object to store configuration of the inline backend.
# The figure format to enable (deprecated use `figure_formats` instead)
# c.InlineBackend.figure_format = u''
# A set of figure formats to enable: 'png', 'retina', 'jpeg', 'svg', 'pdf'.
# c.InlineBackend.figure_formats = set(['png'])
# Extra kwargs to be passed to fig.canvas.print_figure.
#
# Logical examples include: bbox_inches, quality (for jpeg figures), etc.
# c.InlineBackend.print_figure_kwargs = {'bbox_inches': 'tight'}
# Close all figures at the end of each cell.
#
# When True, ensures that each cell starts with no active figures, but it also
# means that one must keep track of references in order to edit or redraw
# figures in subsequent cells. This mode is ideal for the notebook, where
# residual plots from other cells might be surprising.
#
# When False, one must call figure() to create new figures. This means that
# gcf() and getfigs() can reference figures created in other cells, and the
# active figure can continue to be edited with pylab/pyplot methods that
# reference the current active figure. This mode facilitates iterative editing
# of figures, and behaves most consistently with other matplotlib backends, but
# figure barriers between cells must be explicit.
# c.InlineBackend.close_figures = True
# Subset of matplotlib rcParams that should be different for the inline backend.
# c.InlineBackend.rc = {'font.size': 10, 'figure.figsize': (6.0, 4.0), 'figure.facecolor': (1, 1, 1, 0), 'savefig.dpi': 72, 'figure.subplot.bottom': 0.125, 'figure.edgecolor': (1, 1, 1, 0)}
#------------------------------------------------------------------------------
# MappingKernelManager configuration
#------------------------------------------------------------------------------
# A KernelManager that handles notebook mapping and HTTP error handling
# MappingKernelManager will inherit config from: MultiKernelManager
#
# c.MappingKernelManager.root_dir = u'/home/zubieta/.ipython'
# The kernel manager class. This is configurable to allow subclassing of the
# KernelManager for customized behavior.
# c.MappingKernelManager.kernel_manager_class = 'IPython.kernel.ioloop.IOLoopKernelManager'
#------------------------------------------------------------------------------
# NotebookManager configuration
#------------------------------------------------------------------------------
# Glob patterns to hide in file and directory listings.
# c.NotebookManager.hide_globs = [u'__pycache__']
#------------------------------------------------------------------------------
# FileNotebookManager configuration
#------------------------------------------------------------------------------
# FileNotebookManager will inherit config from: NotebookManager
# The directory name in which to keep notebook checkpoints
#
# This is a path relative to the notebook's own directory.
#
# By default, it is .ipynb_checkpoints
# c.FileNotebookManager.checkpoint_dir = '.ipynb_checkpoints'
# Glob patterns to hide in file and directory listings.
# c.FileNotebookManager.hide_globs = [u'__pycache__']
# Automatically create a Python script when saving the notebook.
#
# For easier use of import, %run and %load across notebooks, a <notebook-
# name>.py script will be created next to any <notebook-name>.ipynb on each
# save. This can also be set with the short `--script` flag.
# c.FileNotebookManager.save_script = False
#
c.FileNotebookManager.notebook_dir = u'/home/zubieta/Documents/Notebooks'
#------------------------------------------------------------------------------
# NotebookNotary configuration
#------------------------------------------------------------------------------
# A class for computing and verifying notebook signatures.
# The secret key with which notebooks are signed.
# c.NotebookNotary.secret = ''
# The file where the secret key is stored.
# c.NotebookNotary.secret_file = u''
# The hashing algorithm used to sign notebooks.
# c.NotebookNotary.algorithm = 'sha256'
| mit | 375,358,943,412,812,800 | 36.326284 | 411 | 0.695629 | false |
soarpenguin/ansible | lib/ansible/modules/system/beadm.py | 9 | 11657 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2016, Adam Števko <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: beadm
short_description: Manage ZFS boot environments on FreeBSD/Solaris/illumos systems.
description:
- Create, delete or activate ZFS boot environments.
- Mount and unmount ZFS boot environments.
version_added: "2.3"
author: Adam Števko (@xen0l)
options:
name:
description:
- ZFS boot environment name.
aliases: [ "be" ]
required: True
snapshot:
description:
- If specified, the new boot environment will be cloned from the given
snapshot or inactive boot environment.
required: false
default: false
description:
description:
- Associate a description with a new boot environment. This option is
available only on Solarish platforms.
required: false
default: false
options:
description:
- Create the datasets for new BE with specific ZFS properties. Multiple
options can be specified. This option is available only on
Solarish platforms.
required: false
default: false
mountpoint:
description:
        - Path where to mount the ZFS boot environment.
required: false
default: false
state:
description:
- Create or delete ZFS boot environment.
required: false
default: "present"
choices: [ "present", "absent", "activated", "mounted", "unmounted" ]
force:
description:
- Specifies if the unmount should be forced.
required: false
default: false
choices: [ "true", "false" ]
'''
EXAMPLES = '''
- name: Create ZFS boot environment
beadm:
name: upgrade-be
state: present
- name: Create ZFS boot environment from existing inactive boot environment
beadm:
name: upgrade-be
snapshot: be@old
state: present
- name: Create ZFS boot environment with compression enabled and description "upgrade"
beadm:
name: upgrade-be
options: "compression=on"
description: upgrade
state: present
- name: Delete ZFS boot environment
beadm:
name: old-be
state: absent
- name: Mount ZFS boot environment on /tmp/be
beadm:
name: BE
mountpoint: /tmp/be
state: mounted
- name: Unmount ZFS boot environment
beadm:
name: BE
state: unmounted
- name: Activate ZFS boot environment
beadm:
name: upgrade-be
state: activated
'''
RETURN = '''
name:
description: BE name
returned: always
type: string
sample: pre-upgrade
snapshot:
description: ZFS snapshot to create BE from
returned: always
type: string
sample: rpool/ROOT/oi-hipster@fresh
description:
description: BE description
returned: always
type: string
sample: Upgrade from 9.0 to 10.0
options:
description: BE additional options
returned: always
type: string
sample: compression=on
mountpoint:
description: BE mountpoint
returned: always
type: string
sample: /mnt/be
state:
description: state of the target
returned: always
type: string
sample: present
force:
description: if forced action is wanted
returned: always
type: boolean
sample: False
'''
import os
from ansible.module_utils.basic import AnsibleModule
class BE(object):
    """Thin wrapper around the beadm(1M/8) utility for managing ZFS boot
    environments on FreeBSD and Solarish (Solaris/illumos) platforms.

    The two platform families differ in 'beadm list -H' output format:
    FreeBSD emits tab-separated fields, Solarish emits semicolon-separated
    fields, so most query methods branch on ``self.is_freebsd``.
    """

    def __init__(self, module):
        self.module = module

        self.name = module.params['name']
        self.snapshot = module.params['snapshot']
        self.description = module.params['description']
        self.options = module.params['options']
        self.mountpoint = module.params['mountpoint']
        self.state = module.params['state']
        self.force = module.params['force']

        # Platform detection drives both command construction and output parsing.
        self.is_freebsd = os.uname()[0] == 'FreeBSD'

    def _beadm_list(self):
        """Run 'beadm list -H' and return (rc, stdout, stderr).

        On Solarish platforms 'beadm list' accepts a BE name as a filter;
        FreeBSD's beadm does not, so there we list everything and filter
        afterwards with _find_be_by_name().
        """
        cmd = [self.module.get_bin_path('beadm'), 'list', '-H']
        if not self.is_freebsd:
            cmd.append(self.name)
        return self.module.run_command(cmd)

    def _find_be_by_name(self, out):
        """Return the tab-separated listing line for this BE, or None (FreeBSD)."""
        for line in out.splitlines():
            if line.split('\t')[0] == self.name:
                return line
        return None

    def exists(self):
        """Return True if the boot environment exists."""
        (rc, out, _) = self._beadm_list()

        if rc != 0:
            return False
        if self.is_freebsd:
            # FreeBSD lists all BEs, so search for ours by name.
            return self._find_be_by_name(out) is not None
        # Solarish: a zero exit status from 'beadm list <name>' means it exists.
        return True

    def is_activated(self):
        """Return True if the BE is flagged active on reboot ('R' flag)."""
        (rc, out, _) = self._beadm_list()

        if rc != 0:
            return False
        if self.is_freebsd:
            line = self._find_be_by_name(out)
            return line is not None and 'R' in line.split('\t')[1]
        # Solarish: the third ';'-separated field holds the activity flags.
        return 'R' in out.split(';')[2]

    def activate_be(self):
        """Activate the BE; return (rc, stdout, stderr)."""
        cmd = [self.module.get_bin_path('beadm'), 'activate', self.name]
        return self.module.run_command(cmd)

    def create_be(self):
        """Create the BE, optionally from a snapshot; return (rc, stdout, stderr)."""
        cmd = [self.module.get_bin_path('beadm'), 'create']

        if self.snapshot:
            cmd.extend(['-e', self.snapshot])
        if not self.is_freebsd:
            # Description and ZFS property options exist only in Solarish beadm.
            if self.description:
                cmd.extend(['-d', self.description])
            if self.options:
                cmd.extend(['-o', self.options])

        cmd.append(self.name)
        return self.module.run_command(cmd)

    def destroy_be(self):
        """Forcefully destroy the BE; return (rc, stdout, stderr)."""
        cmd = [self.module.get_bin_path('beadm'), 'destroy', '-F', self.name]
        return self.module.run_command(cmd)

    def is_mounted(self):
        """Return True if the BE is mounted anywhere other than '/'."""
        (rc, out, _) = self._beadm_list()

        if rc != 0:
            return False
        if self.is_freebsd:
            line = self._find_be_by_name(out)
            if line is None:
                # A nonexistent BE cannot be mounted. (The original code
                # dereferenced None here and raised AttributeError.)
                return False
            # On FreeBSD, we exclude currently mounted BE on /, as it is
            # special and can be activated even if it is mounted. That is not
            # possible with non-root BEs.
            # NOTE: string comparison must use '!=', not 'is not' — identity
            # comparison on str literals is implementation-dependent.
            mountpoint = line.split('\t')[2]
            return mountpoint != '-' and mountpoint != '/'
        # Solarish: the fourth ';'-separated field is the mountpoint
        # (empty string when the BE is not mounted).
        return bool(out.split(';')[3])

    def mount_be(self):
        """Mount the BE (at self.mountpoint if given); return (rc, stdout, stderr)."""
        cmd = [self.module.get_bin_path('beadm'), 'mount', self.name]

        if self.mountpoint:
            cmd.append(self.mountpoint)

        return self.module.run_command(cmd)

    def unmount_be(self):
        """Unmount the BE (forcefully if self.force); return (rc, stdout, stderr)."""
        cmd = [self.module.get_bin_path('beadm'), 'unmount']

        if self.force:
            cmd.append('-f')
        cmd.append(self.name)

        return self.module.run_command(cmd)
def main():
    """Module entry point: parse parameters and converge the boot
    environment to the requested state, reporting changed/failed status
    back to Ansible via exit_json/fail_json."""
    module = AnsibleModule(
        argument_spec=dict(
            name=dict(required=True, aliases=['be'], type='str'),
            snapshot=dict(type='str'),
            description=dict(type='str'),
            options=dict(type='str'),
            # NOTE(review): default=False for a 'path' option looks odd;
            # presumably treated as "no mountpoint given" — confirm.
            mountpoint=dict(default=False, type='path'),
            state=dict(
                default='present',
                choices=['present', 'absent', 'activated',
                         'mounted', 'unmounted']),
            force=dict(default=False, type='bool'),
        ),
        supports_check_mode=True
    )

    be = BE(module)

    # rc stays None when no beadm command was run; used at the end to
    # decide the `changed` flag.
    rc = None
    out = ''
    err = ''
    result = {}
    result['name'] = be.name
    result['state'] = be.state

    # Echo back the optional parameters that were supplied.
    if be.snapshot:
        result['snapshot'] = be.snapshot

    if be.description:
        result['description'] = be.description

    if be.options:
        result['options'] = be.options

    if be.mountpoint:
        result['mountpoint'] = be.mountpoint

    if be.state == 'absent':
        # beadm on FreeBSD and Solarish systems differs in delete behaviour in
        # that we are not allowed to delete activated BE on FreeBSD while on
        # Solarish systems we cannot delete BE if it is mounted. We add mount
        # check for both platforms as BE should be explicitly unmounted before
        # being deleted. On FreeBSD, we also check if the BE is activated.
        if be.exists():
            if not be.is_mounted():
                if module.check_mode:
                    module.exit_json(changed=True)

                if be.is_freebsd:
                    if be.is_activated():
                        module.fail_json(msg='Unable to remove active BE!')

                (rc, out, err) = be.destroy_be()

                if rc != 0:
                    module.fail_json(msg='Error while destroying BE: "%s"' % err,
                                     name=be.name,
                                     stderr=err,
                                     rc=rc)
            else:
                module.fail_json(msg='Unable to remove BE as it is mounted!')
    elif be.state == 'present':
        if not be.exists():
            if module.check_mode:
                module.exit_json(changed=True)

            (rc, out, err) = be.create_be()

            if rc != 0:
                module.fail_json(msg='Error while creating BE: "%s"' % err,
                                 name=be.name,
                                 stderr=err,
                                 rc=rc)
    elif be.state == 'activated':
        if not be.is_activated():
            if module.check_mode:
                module.exit_json(changed=True)

            # On FreeBSD, beadm is unable to activate mounted BEs, so we add
            # an explicit check for that case.
            if be.is_freebsd:
                if be.is_mounted():
                    module.fail_json(msg='Unable to activate mounted BE!')

            (rc, out, err) = be.activate_be()

            if rc != 0:
                module.fail_json(msg='Error while activating BE: "%s"' % err,
                                 name=be.name,
                                 stderr=err,
                                 rc=rc)
    elif be.state == 'mounted':
        if not be.is_mounted():
            if module.check_mode:
                module.exit_json(changed=True)

            (rc, out, err) = be.mount_be()

            if rc != 0:
                module.fail_json(msg='Error while mounting BE: "%s"' % err,
                                 name=be.name,
                                 stderr=err,
                                 rc=rc)
    elif be.state == 'unmounted':
        if be.is_mounted():
            if module.check_mode:
                module.exit_json(changed=True)

            (rc, out, err) = be.unmount_be()

            if rc != 0:
                module.fail_json(msg='Error while unmounting BE: "%s"' % err,
                                 name=be.name,
                                 stderr=err,
                                 rc=rc)

    # rc is still None only when every branch above was a no-op.
    if rc is None:
        result['changed'] = False
    else:
        result['changed'] = True

    if out:
        result['stdout'] = out

    if err:
        result['stderr'] = err

    module.exit_json(**result)
# Ansible executes modules as standalone scripts, so dispatch to main().
if __name__ == '__main__':
    main()
| gpl-3.0 | 1,704,357,489,766,518,000 | 26.618483 | 92 | 0.532218 | false |
terbolous/CouchPotatoServer | libs/tornado/web.py | 12 | 120144 | #!/usr/bin/env python
#
# Copyright 2009 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""``tornado.web`` provides a simple web framework with asynchronous
features that allow it to scale to large numbers of open connections,
making it ideal for `long polling
<http://en.wikipedia.org/wiki/Push_technology#Long_polling>`_.
Here is a simple "Hello, world" example app::
import tornado.ioloop
import tornado.web
class MainHandler(tornado.web.RequestHandler):
def get(self):
self.write("Hello, world")
if __name__ == "__main__":
application = tornado.web.Application([
(r"/", MainHandler),
])
application.listen(8888)
tornado.ioloop.IOLoop.instance().start()
See the :doc:`Tornado overview <overview>` for more details and a good getting
started guide.
Thread-safety notes
-------------------
In general, methods on `RequestHandler` and elsewhere in Tornado are
not thread-safe. In particular, methods such as
`~RequestHandler.write()`, `~RequestHandler.finish()`, and
`~RequestHandler.flush()` must only be called from the main thread. If
you use multiple threads it is important to use `.IOLoop.add_callback`
to transfer control back to the main thread before finishing the
request.
"""
from __future__ import absolute_import, division, print_function, with_statement
import base64
import binascii
import datetime
import email.utils
import functools
import gzip
import hashlib
import hmac
import mimetypes
import numbers
import os.path
import re
import stat
import sys
import threading
import time
import tornado
import traceback
import types
from tornado.concurrent import Future, is_future
from tornado import escape
from tornado import gen
from tornado import httputil
from tornado import iostream
from tornado import locale
from tornado.log import access_log, app_log, gen_log
from tornado import stack_context
from tornado import template
from tornado.escape import utf8, _unicode
from tornado.util import bytes_type, import_object, ObjectDict, raise_exc_info, unicode_type, _websocket_mask
try:
from io import BytesIO # python 3
except ImportError:
from cStringIO import StringIO as BytesIO # python 2
try:
import Cookie # py2
except ImportError:
import http.cookies as Cookie # py3
try:
import urlparse # py2
except ImportError:
import urllib.parse as urlparse # py3
try:
from urllib import urlencode # py2
except ImportError:
from urllib.parse import urlencode # py3
MIN_SUPPORTED_SIGNED_VALUE_VERSION = 1
"""The oldest signed value version supported by this version of Tornado.
Signed values older than this version cannot be decoded.
.. versionadded:: 3.2.1
"""
MAX_SUPPORTED_SIGNED_VALUE_VERSION = 2
"""The newest signed value version supported by this version of Tornado.
Signed values newer than this version cannot be decoded.
.. versionadded:: 3.2.1
"""
DEFAULT_SIGNED_VALUE_VERSION = 2
"""The signed value version produced by `.RequestHandler.create_signed_value`.
May be overridden by passing a ``version`` keyword argument.
.. versionadded:: 3.2.1
"""
DEFAULT_SIGNED_VALUE_MIN_VERSION = 1
"""The oldest signed value accepted by `.RequestHandler.get_secure_cookie`.
May be overrided by passing a ``min_version`` keyword argument.
.. versionadded:: 3.2.1
"""
class RequestHandler(object):
"""Subclass this class and define `get()` or `post()` to make a handler.
If you want to support more methods than the standard GET/HEAD/POST, you
should override the class variable ``SUPPORTED_METHODS`` in your
`RequestHandler` subclass.
"""
SUPPORTED_METHODS = ("GET", "HEAD", "POST", "DELETE", "PATCH", "PUT",
"OPTIONS")
_template_loaders = {} # {path: template.BaseLoader}
_template_loader_lock = threading.Lock()
_remove_control_chars_regex = re.compile(r"[\x00-\x08\x0e-\x1f]")
    def __init__(self, application, request, **kwargs):
        # Bind the handler to its Application and incoming request, set up
        # per-request state, then hand control to the subclass hook
        # `initialize()` with the url-spec kwargs.
        super(RequestHandler, self).__init__()

        self.application = application
        self.request = request
        self._headers_written = False
        self._finished = False
        self._auto_finish = True
        self._transforms = None  # will be set in _execute
        self._prepared_future = None
        self.path_args = None
        self.path_kwargs = None
        self.ui = ObjectDict((n, self._ui_method(m)) for n, m in
                             application.ui_methods.items())
        # UIModules are available as both `modules` and `_tt_modules` in the
        # template namespace.  Historically only `modules` was available
        # but could be clobbered by user additions to the namespace.
        # The template {% module %} directive looks in `_tt_modules` to avoid
        # possible conflicts.
        self.ui["_tt_modules"] = _UIModuleNamespace(self,
                                                    application.ui_modules)
        self.ui["modules"] = self.ui["_tt_modules"]
        # clear() installs the default response headers/status; must run
        # before any code writes output.
        self.clear()
        self.request.connection.set_close_callback(self.on_connection_close)
        self.initialize(**kwargs)
    def initialize(self):
        """Hook for subclass initialization.

        A dictionary passed as the third argument of a url spec will be
        supplied as keyword arguments to initialize().

        Example::

            class ProfileHandler(RequestHandler):
                def initialize(self, database):
                    self.database = database

                def get(self, username):
                    ...

            app = Application([
                (r'/user/(.*)', ProfileHandler, dict(database=database)),
                ])
        """
        # Intentionally a no-op; subclasses override as needed.
        pass
@property
def settings(self):
"""An alias for `self.application.settings <Application.settings>`."""
return self.application.settings
    # Default verb implementations: every HTTP method is rejected with
    # 405 (Method Not Allowed) until a subclass overrides it.  Which verbs
    # are dispatched at all is controlled by SUPPORTED_METHODS above.
    def head(self, *args, **kwargs):
        raise HTTPError(405)

    def get(self, *args, **kwargs):
        raise HTTPError(405)

    def post(self, *args, **kwargs):
        raise HTTPError(405)

    def delete(self, *args, **kwargs):
        raise HTTPError(405)

    def patch(self, *args, **kwargs):
        raise HTTPError(405)

    def put(self, *args, **kwargs):
        raise HTTPError(405)

    def options(self, *args, **kwargs):
        raise HTTPError(405)
    def prepare(self):
        """Called at the beginning of a request before `get`/`post`/etc.

        Override this method to perform common initialization regardless
        of the request method.

        Asynchronous support: Decorate this method with `.gen.coroutine`
        or `.return_future` to make it asynchronous (the
        `asynchronous` decorator cannot be used on `prepare`).
        If this method returns a `.Future` execution will not proceed
        until the `.Future` is done.

        .. versionadded:: 3.1
           Asynchronous support.
        """
        # Default implementation does nothing.
        pass
    def on_finish(self):
        """Called after the end of a request.

        Override this method to perform cleanup, logging, etc.
        This method is a counterpart to `prepare`.  ``on_finish`` may
        not produce any output, as it is called after the response
        has been sent to the client.
        """
        # Default implementation does nothing.
        pass
def on_connection_close(self):
"""Called in async handlers if the client closed the connection.
Override this to clean up resources associated with
long-lived connections. Note that this method is called only if
the connection was closed during asynchronous processing; if you
need to do cleanup after every request override `on_finish`
instead.
Proxies may keep a connection open for a time (perhaps
indefinitely) after the client has gone away, so this method
may not be called promptly after the end user closes their
connection.
"""
if _has_stream_request_body(self.__class__):
if not self.request.body.done():
self.request.body.set_exception(iostream.StreamClosedError())
    def clear(self):
        """Resets all headers and content for this response."""
        # Rebuild the default header set from scratch; any headers set
        # earlier in the request are discarded.
        self._headers = httputil.HTTPHeaders({
            "Server": "TornadoServer/%s" % tornado.version,
            "Content-Type": "text/html; charset=UTF-8",
            "Date": httputil.format_timestamp(time.time()),
        })
        # Give subclasses a chance to add their own defaults (e.g. a
        # custom Server header) before the body/status are reset.
        self.set_default_headers()
        self._write_buffer = []
        self._status_code = 200
        self._reason = httputil.responses[200]
    def set_default_headers(self):
        """Override this to set HTTP headers at the beginning of the request.

        For example, this is the place to set a custom ``Server`` header.
        Note that setting such headers in the normal flow of request
        processing may not do what you want, since headers may be reset
        during error handling.
        """
        # Default implementation does nothing; called from clear().
        pass
def set_status(self, status_code, reason=None):
"""Sets the status code for our response.
:arg int status_code: Response status code. If ``reason`` is ``None``,
it must be present in `httplib.responses <http.client.responses>`.
:arg string reason: Human-readable reason phrase describing the status
code. If ``None``, it will be filled in from
`httplib.responses <http.client.responses>`.
"""
self._status_code = status_code
if reason is not None:
self._reason = escape.native_str(reason)
else:
try:
self._reason = httputil.responses[status_code]
except KeyError:
raise ValueError("unknown status code %d", status_code)
def get_status(self):
"""Returns the status code for our response."""
return self._status_code
def set_header(self, name, value):
"""Sets the given response header name and value.
If a datetime is given, we automatically format it according to the
HTTP specification. If the value is not a string, we convert it to
a string. All header values are then encoded as UTF-8.
"""
self._headers[name] = self._convert_header_value(value)
def add_header(self, name, value):
"""Adds the given response header and value.
Unlike `set_header`, `add_header` may be called multiple times
to return multiple values for the same header.
"""
self._headers.add(name, self._convert_header_value(value))
def clear_header(self, name):
"""Clears an outgoing header, undoing a previous `set_header` call.
Note that this method does not apply to multi-valued headers
set by `add_header`.
"""
if name in self._headers:
del self._headers[name]
_INVALID_HEADER_CHAR_RE = re.compile(br"[\x00-\x1f]")
def _convert_header_value(self, value):
if isinstance(value, bytes_type):
pass
elif isinstance(value, unicode_type):
value = value.encode('utf-8')
elif isinstance(value, numbers.Integral):
# return immediately since we know the converted value will be safe
return str(value)
elif isinstance(value, datetime.datetime):
return httputil.format_timestamp(value)
else:
raise TypeError("Unsupported header value %r" % value)
# If \n is allowed into the header, it is possible to inject
# additional headers or split the request. Also cap length to
# prevent obviously erroneous values.
if (len(value) > 4000 or
RequestHandler._INVALID_HEADER_CHAR_RE.search(value)):
raise ValueError("Unsafe header value %r", value)
return value
_ARG_DEFAULT = []
    # The six public argument accessors below are thin wrappers around
    # _get_argument/_get_arguments, differing only in which parsed-argument
    # dict of the request they consult (combined, body-only, or query-only).
    def get_argument(self, name, default=_ARG_DEFAULT, strip=True):
        """Returns the value of the argument with the given name.

        If default is not provided, the argument is considered to be
        required, and we raise a `MissingArgumentError` if it is missing.

        If the argument appears in the url more than once, we return the
        last value.

        The returned value is always unicode.
        """
        return self._get_argument(name, default, self.request.arguments, strip)

    def get_arguments(self, name, strip=True):
        """Returns a list of the arguments with the given name.

        If the argument is not present, returns an empty list.

        The returned values are always unicode.
        """
        return self._get_arguments(name, self.request.arguments, strip)

    def get_body_argument(self, name, default=_ARG_DEFAULT, strip=True):
        """Returns the value of the argument with the given name
        from the request body.

        If default is not provided, the argument is considered to be
        required, and we raise a `MissingArgumentError` if it is missing.

        If the argument appears in the url more than once, we return the
        last value.

        The returned value is always unicode.

        .. versionadded:: 3.2
        """
        return self._get_argument(name, default, self.request.body_arguments, strip)

    def get_body_arguments(self, name, strip=True):
        """Returns a list of the body arguments with the given name.

        If the argument is not present, returns an empty list.

        The returned values are always unicode.

        .. versionadded:: 3.2
        """
        return self._get_arguments(name, self.request.body_arguments, strip)

    def get_query_argument(self, name, default=_ARG_DEFAULT, strip=True):
        """Returns the value of the argument with the given name
        from the request query string.

        If default is not provided, the argument is considered to be
        required, and we raise a `MissingArgumentError` if it is missing.

        If the argument appears in the url more than once, we return the
        last value.

        The returned value is always unicode.

        .. versionadded:: 3.2
        """
        return self._get_argument(name, default, self.request.query_arguments, strip)

    def get_query_arguments(self, name, strip=True):
        """Returns a list of the query arguments with the given name.

        If the argument is not present, returns an empty list.

        The returned values are always unicode.

        .. versionadded:: 3.2
        """
        return self._get_arguments(name, self.request.query_arguments, strip)
def _get_argument(self, name, default, source, strip=True):
args = self._get_arguments(name, source, strip=strip)
if not args:
if default is self._ARG_DEFAULT:
raise MissingArgumentError(name)
return default
return args[-1]
def _get_arguments(self, name, source, strip=True):
values = []
for v in source.get(name, []):
v = self.decode_argument(v, name=name)
if isinstance(v, unicode_type):
# Get rid of any weird control chars (unless decoding gave
# us bytes, in which case leave it alone)
v = RequestHandler._remove_control_chars_regex.sub(" ", v)
if strip:
v = v.strip()
values.append(v)
return values
    def decode_argument(self, value, name=None):
        """Decodes an argument from the request.

        The argument has been percent-decoded and is now a byte string.
        By default, this method decodes the argument as utf-8 and returns
        a unicode string, but this may be overridden in subclasses.

        This method is used as a filter for both `get_argument()` and for
        values extracted from the url and passed to `get()`/`post()`/etc.

        The name of the argument is provided if known, but may be None
        (e.g. for unnamed groups in the url regex).
        """
        try:
            return _unicode(value)
        except UnicodeDecodeError:
            # Only the first 40 bytes of the offending value are echoed
            # back, to keep the error message bounded.
            raise HTTPError(400, "Invalid unicode in %s: %r" %
                            (name or "url", value[:40]))
@property
def cookies(self):
"""An alias for `self.request.cookies <.httputil.HTTPServerRequest.cookies>`."""
return self.request.cookies
def get_cookie(self, name, default=None):
"""Gets the value of the cookie with the given name, else default."""
if self.request.cookies is not None and name in self.request.cookies:
return self.request.cookies[name].value
return default
    def set_cookie(self, name, value, domain=None, expires=None, path="/",
                   expires_days=None, **kwargs):
        """Sets the given cookie name/value with the given options.

        Additional keyword arguments are set on the Cookie.Morsel
        directly.
        See http://docs.python.org/library/cookie.html#morsel-objects
        for available attributes.
        """
        # The cookie library only accepts type str, in both python 2 and 3
        name = escape.native_str(name)
        value = escape.native_str(value)
        if re.search(r"[\x00-\x20]", name + value):
            # Don't let us accidentally inject bad stuff
            raise ValueError("Invalid cookie %r: %r" % (name, value))
        # Outgoing cookies are staged in a side SimpleCookie and only
        # emitted as Set-Cookie headers at flush time.
        if not hasattr(self, "_new_cookie"):
            self._new_cookie = Cookie.SimpleCookie()
        if name in self._new_cookie:
            del self._new_cookie[name]
        self._new_cookie[name] = value
        morsel = self._new_cookie[name]
        if domain:
            morsel["domain"] = domain
        if expires_days is not None and not expires:
            # An explicit `expires` timestamp wins over `expires_days`.
            expires = datetime.datetime.utcnow() + datetime.timedelta(
                days=expires_days)
        if expires:
            morsel["expires"] = httputil.format_timestamp(expires)
        if path:
            morsel["path"] = path
        for k, v in kwargs.items():
            # Morsel spells this attribute "max-age"; accept the
            # Python-identifier-friendly "max_age" as a convenience.
            if k == 'max_age':
                k = 'max-age'
            morsel[k] = v
def clear_cookie(self, name, path="/", domain=None):
"""Deletes the cookie with the given name.
Due to limitations of the cookie protocol, you must pass the same
path and domain to clear a cookie as were used when that cookie
was set (but there is no way to find out on the server side
which values were used for a given cookie).
"""
expires = datetime.datetime.utcnow() - datetime.timedelta(days=365)
self.set_cookie(name, value="", path=path, expires=expires,
domain=domain)
def clear_all_cookies(self, path="/", domain=None):
"""Deletes all the cookies the user sent with this request.
See `clear_cookie` for more information on the path and domain
parameters.
.. versionchanged:: 3.2
Added the ``path`` and ``domain`` parameters.
"""
for name in self.request.cookies:
self.clear_cookie(name, path=path, domain=domain)
    def set_secure_cookie(self, name, value, expires_days=30, version=None,
                          **kwargs):
        """Signs and timestamps a cookie so it cannot be forged.

        You must specify the ``cookie_secret`` setting in your Application
        to use this method. It should be a long, random sequence of bytes
        to be used as the HMAC secret for the signature.

        To read a cookie set with this method, use `get_secure_cookie()`.

        Note that the ``expires_days`` parameter sets the lifetime of the
        cookie in the browser, but is independent of the ``max_age_days``
        parameter to `get_secure_cookie`.

        Secure cookies may contain arbitrary byte values, not just unicode
        strings (unlike regular cookies)

        .. versionchanged:: 3.2.1

           Added the ``version`` argument.  Introduced cookie version 2
           and made it the default.
        """
        # Delegate signing to create_signed_value; browser-side lifetime is
        # handled separately by set_cookie's expires_days.
        self.set_cookie(name, self.create_signed_value(name, value,
                                                       version=version),
                        expires_days=expires_days, **kwargs)
    def create_signed_value(self, name, value, version=None):
        """Signs and timestamps a string so it cannot be forged.

        Normally used via set_secure_cookie, but provided as a separate
        method for non-cookie uses.  To decode a value not stored
        as a cookie use the optional value argument to get_secure_cookie.

        .. versionchanged:: 3.2.1

           Added the ``version`` argument.  Introduced cookie version 2
           and made it the default.
        """
        # Fails with a helpful error if the application has no
        # cookie_secret configured.
        self.require_setting("cookie_secret", "secure cookies")
        return create_signed_value(self.application.settings["cookie_secret"],
                                   name, value, version=version)
    def get_secure_cookie(self, name, value=None, max_age_days=31,
                          min_version=None):
        """Returns the given signed cookie if it validates, or None.

        The decoded cookie value is returned as a byte string (unlike
        `get_cookie`).

        .. versionchanged:: 3.2.1

           Added the ``min_version`` argument.  Introduced cookie version 2;
           both versions 1 and 2 are accepted by default.
        """
        self.require_setting("cookie_secret", "secure cookies")
        # An explicit `value` bypasses the cookie jar, allowing signed
        # values from other sources to be verified.
        if value is None:
            value = self.get_cookie(name)
        return decode_signed_value(self.application.settings["cookie_secret"],
                                   name, value, max_age_days=max_age_days,
                                   min_version=min_version)
def redirect(self, url, permanent=False, status=None):
"""Sends a redirect to the given (optionally relative) URL.
If the ``status`` argument is specified, that value is used as the
HTTP status code; otherwise either 301 (permanent) or 302
(temporary) is chosen based on the ``permanent`` argument.
The default is 302 (temporary).
"""
if self._headers_written:
raise Exception("Cannot redirect after headers have been written")
if status is None:
status = 301 if permanent else 302
else:
assert isinstance(status, int) and 300 <= status <= 399
self.set_status(status)
self.set_header("Location", urlparse.urljoin(utf8(self.request.uri),
utf8(url)))
self.finish()
def write(self, chunk):
"""Writes the given chunk to the output buffer.
To write the output to the network, use the flush() method below.
If the given chunk is a dictionary, we write it as JSON and set
the Content-Type of the response to be ``application/json``.
(if you want to send JSON as a different ``Content-Type``, call
set_header *after* calling write()).
Note that lists are not converted to JSON because of a potential
cross-site security vulnerability. All JSON output should be
wrapped in a dictionary. More details at
http://haacked.com/archive/2009/06/25/json-hijacking.aspx/ and
https://github.com/facebook/tornado/issues/1009
"""
if self._finished:
raise RuntimeError("Cannot write() after finish(). May be caused "
"by using async operations without the "
"@asynchronous decorator.")
if not isinstance(chunk, (bytes_type, unicode_type, dict)):
raise TypeError("write() only accepts bytes, unicode, and dict objects")
if isinstance(chunk, dict):
chunk = escape.json_encode(chunk)
self.set_header("Content-Type", "application/json; charset=UTF-8")
chunk = utf8(chunk)
self._write_buffer.append(chunk)
    def render(self, template_name, **kwargs):
        """Renders the template with the given arguments as the response.

        After generating the template output, the JS/CSS/head/body
        contributions of every active `UIModule` are spliced into the
        HTML byte string before finishing the response.
        """
        html = self.render_string(template_name, **kwargs)

        # Insert the additional JS and CSS added by the modules on the page
        js_embed = []
        js_files = []
        css_embed = []
        css_files = []
        html_heads = []
        html_bodies = []
        for module in getattr(self, "_active_modules", {}).values():
            embed_part = module.embedded_javascript()
            if embed_part:
                js_embed.append(utf8(embed_part))
            file_part = module.javascript_files()
            if file_part:
                # A single string means one file; otherwise an iterable.
                if isinstance(file_part, (unicode_type, bytes_type)):
                    js_files.append(file_part)
                else:
                    js_files.extend(file_part)
            embed_part = module.embedded_css()
            if embed_part:
                css_embed.append(utf8(embed_part))
            file_part = module.css_files()
            if file_part:
                if isinstance(file_part, (unicode_type, bytes_type)):
                    css_files.append(file_part)
                else:
                    css_files.extend(file_part)
            head_part = module.html_head()
            if head_part:
                html_heads.append(utf8(head_part))
            body_part = module.html_body()
            if body_part:
                html_bodies.append(utf8(body_part))

        def is_absolute(path):
            # Paths that are already absolute (or full URLs) bypass static_url.
            return any(path.startswith(x) for x in ["/", "http:", "https:"])
        if js_files:
            # Maintain order of JavaScript files given by modules
            paths = []
            unique_paths = set()
            for path in js_files:
                if not is_absolute(path):
                    path = self.static_url(path)
                if path not in unique_paths:
                    paths.append(path)
                    unique_paths.add(path)
            js = ''.join('<script src="' + escape.xhtml_escape(p) +
                         '" type="text/javascript"></script>'
                         for p in paths)
            # Script tags are inserted just before the closing </body>.
            sloc = html.rindex(b'</body>')
            html = html[:sloc] + utf8(js) + b'\n' + html[sloc:]
        if js_embed:
            js = b'<script type="text/javascript">\n//<![CDATA[\n' + \
                b'\n'.join(js_embed) + b'\n//]]>\n</script>'
            sloc = html.rindex(b'</body>')
            html = html[:sloc] + js + b'\n' + html[sloc:]
        if css_files:
            paths = []
            unique_paths = set()
            for path in css_files:
                if not is_absolute(path):
                    path = self.static_url(path)
                if path not in unique_paths:
                    paths.append(path)
                    unique_paths.add(path)
            css = ''.join('<link href="' + escape.xhtml_escape(p) + '" '
                          'type="text/css" rel="stylesheet"/>'
                          for p in paths)
            # Stylesheets go just before the closing </head>.
            hloc = html.index(b'</head>')
            html = html[:hloc] + utf8(css) + b'\n' + html[hloc:]
        if css_embed:
            css = b'<style type="text/css">\n' + b'\n'.join(css_embed) + \
                b'\n</style>'
            hloc = html.index(b'</head>')
            html = html[:hloc] + css + b'\n' + html[hloc:]
        if html_heads:
            hloc = html.index(b'</head>')
            html = html[:hloc] + b''.join(html_heads) + b'\n' + html[hloc:]
        if html_bodies:
            hloc = html.index(b'</body>')
            html = html[:hloc] + b''.join(html_bodies) + b'\n' + html[hloc:]
        self.finish(html)
    def render_string(self, template_name, **kwargs):
        """Generate the given template with the given arguments.

        We return the generated byte string (in utf8). To generate and
        write a template as a response, use render() above.
        """
        # If no template_path is specified, use the path of the calling file
        template_path = self.get_template_path()
        if not template_path:
            frame = sys._getframe(0)
            web_file = frame.f_code.co_filename
            # Walk up the stack past frames from this module; the first
            # foreign frame is the caller whose directory we use.
            while frame.f_code.co_filename == web_file:
                frame = frame.f_back
            template_path = os.path.dirname(frame.f_code.co_filename)
        # Template loaders are cached per directory; the cache is shared
        # class-wide, so access is guarded by a lock.
        with RequestHandler._template_loader_lock:
            if template_path not in RequestHandler._template_loaders:
                loader = self.create_template_loader(template_path)
                RequestHandler._template_loaders[template_path] = loader
            else:
                loader = RequestHandler._template_loaders[template_path]
        t = loader.load(template_name)
        namespace = self.get_template_namespace()
        namespace.update(kwargs)
        return t.generate(**namespace)
def get_template_namespace(self):
"""Returns a dictionary to be used as the default template namespace.
May be overridden by subclasses to add or modify values.
The results of this method will be combined with additional
defaults in the `tornado.template` module and keyword arguments
to `render` or `render_string`.
"""
namespace = dict(
handler=self,
request=self.request,
current_user=self.current_user,
locale=self.locale,
_=self.locale.translate,
static_url=self.static_url,
xsrf_form_html=self.xsrf_form_html,
reverse_url=self.reverse_url
)
namespace.update(self.ui)
return namespace
def create_template_loader(self, template_path):
"""Returns a new template loader for the given path.
May be overridden by subclasses. By default returns a
directory-based loader on the given path, using the
``autoescape`` application setting. If a ``template_loader``
application setting is supplied, uses that instead.
"""
settings = self.application.settings
if "template_loader" in settings:
return settings["template_loader"]
kwargs = {}
if "autoescape" in settings:
# autoescape=None means "no escaping", so we have to be sure
# to only pass this kwarg if the user asked for it.
kwargs["autoescape"] = settings["autoescape"]
return template.Loader(template_path, **kwargs)
    def flush(self, include_footers=False, callback=None):
        """Flushes the current output buffer to the network.

        The ``callback`` argument, if given, can be used for flow control:
        it will be run when all flushed data has been written to the socket.
        Note that only one flush callback can be outstanding at a time;
        if another flush occurs before the previous flush's callback
        has been run, the previous callback will be discarded.

        .. versionchanged:: 3.3
           Now returns a `.Future` if no callback is given.
        """
        chunk = b"".join(self._write_buffer)
        self._write_buffer = []
        if not self._headers_written:
            self._headers_written = True
            # The first flush carries the headers; give each registered
            # output transform a chance to rewrite status/headers/body.
            for transform in self._transforms:
                self._status_code, self._headers, chunk = \
                    transform.transform_first_chunk(
                        self._status_code, self._headers, chunk, include_footers)
            # Ignore the chunk and only write the headers for HEAD requests
            if self.request.method == "HEAD":
                chunk = None

            # Finalize the cookie headers (which have been stored in a side
            # object so an outgoing cookie could be overwritten before it
            # is sent).
            if hasattr(self, "_new_cookie"):
                for cookie in self._new_cookie.values():
                    self.add_header("Set-Cookie", cookie.OutputString(None))

            start_line = httputil.ResponseStartLine(self.request.version,
                                                    self._status_code,
                                                    self._reason)
            return self.request.connection.write_headers(
                start_line, self._headers, chunk, callback=callback)
        else:
            for transform in self._transforms:
                chunk = transform.transform_chunk(chunk, include_footers)
            # Ignore the chunk and only write the headers for HEAD requests
            if self.request.method != "HEAD":
                return self.request.connection.write(chunk, callback=callback)
            else:
                # Nothing to write; hand back an already-resolved Future so
                # callers can still yield on the result uniformly.
                future = Future()
                future.set_result(None)
                return future
    def finish(self, chunk=None):
        """Finishes this response, ending the HTTP request.

        Optionally writes ``chunk`` first, then flushes remaining output,
        logs the request and runs the `on_finish` hook.  Calling it twice
        is an error.
        """
        if self._finished:
            raise RuntimeError("finish() called twice. May be caused "
                               "by using async operations without the "
                               "@asynchronous decorator.")

        if chunk is not None:
            self.write(chunk)

        # Automatically support ETags and add the Content-Length header if
        # we have not flushed any content yet.
        if not self._headers_written:
            if (self._status_code == 200 and
                    self.request.method in ("GET", "HEAD") and
                    "Etag" not in self._headers):
                self.set_etag_header()
                # If the client already has this entity, answer 304 with
                # an empty body instead.
                if self.check_etag_header():
                    self._write_buffer = []
                    self.set_status(304)
            if self._status_code == 304:
                assert not self._write_buffer, "Cannot send body with 304"
                self._clear_headers_for_304()
            elif "Content-Length" not in self._headers:
                content_length = sum(len(part) for part in self._write_buffer)
                self.set_header("Content-Length", content_length)

        if hasattr(self.request, "connection"):
            # Now that the request is finished, clear the callback we
            # set on the HTTPConnection (which would otherwise prevent the
            # garbage collection of the RequestHandler when there
            # are keepalive connections)
            self.request.connection.set_close_callback(None)

        self.flush(include_footers=True)
        self.request.finish()
        self._log()
        self._finished = True
        self.on_finish()
        # Break up a reference cycle between this handler and the
        # _ui_module closures to allow for faster GC on CPython.
        self.ui = None
    def send_error(self, status_code=500, **kwargs):
        """Sends the given HTTP error code to the browser.

        If `flush()` has already been called, it is not possible to send
        an error, so this method will simply terminate the response.
        If output has been written but not yet flushed, it will be discarded
        and replaced with the error page.

        Override `write_error()` to customize the error page that is returned.
        Additional keyword arguments are passed through to `write_error`.
        """
        if self._headers_written:
            # Too late to change status/headers; just close out the request.
            gen_log.error("Cannot send error response after headers written")
            if not self._finished:
                self.finish()
            return
        # Discard any buffered output and previously set headers.
        self.clear()

        reason = None
        if 'exc_info' in kwargs:
            exception = kwargs['exc_info'][1]
            # Propagate a custom reason phrase from an HTTPError, if any.
            if isinstance(exception, HTTPError) and exception.reason:
                reason = exception.reason
        self.set_status(status_code, reason=reason)
        try:
            self.write_error(status_code, **kwargs)
        except Exception:
            # A failing error page must not take down the request; log and
            # fall through to finish with whatever was produced.
            app_log.error("Uncaught exception in write_error", exc_info=True)
        if not self._finished:
            self.finish()
    def write_error(self, status_code, **kwargs):
        """Override to implement custom error pages.

        ``write_error`` may call `write`, `render`, `set_header`, etc
        to produce output as usual.

        If this error was caused by an uncaught exception (including
        HTTPError), an ``exc_info`` triple will be available as
        ``kwargs["exc_info"]``. Note that this exception may not be
        the "current" exception for purposes of methods like
        ``sys.exc_info()`` or ``traceback.format_exc``.

        For historical reasons, if a method ``get_error_html`` exists,
        it will be used instead of the default ``write_error`` implementation.
        ``get_error_html`` returned a string instead of producing output
        normally, and had different semantics for exception handling.
        Users of ``get_error_html`` are encouraged to convert their code
        to override ``write_error`` instead.
        """
        if hasattr(self, 'get_error_html'):
            # Legacy interface: translate the exc_info triple into the single
            # ``exception`` argument that get_error_html historically took.
            if 'exc_info' in kwargs:
                exc_info = kwargs.pop('exc_info')
                kwargs['exception'] = exc_info[1]
                try:
                    # Put the traceback into sys.exc_info()
                    raise_exc_info(exc_info)
                except Exception:
                    self.finish(self.get_error_html(status_code, **kwargs))
            else:
                self.finish(self.get_error_html(status_code, **kwargs))
            return
        if self.settings.get("serve_traceback") and "exc_info" in kwargs:
            # in debug mode, try to send a traceback
            self.set_header('Content-Type', 'text/plain')
            for line in traceback.format_exception(*kwargs["exc_info"]):
                self.write(line)
            self.finish()
        else:
            # Minimal default error page, using the reason phrase that
            # set_status() stored in self._reason.
            self.finish("<html><title>%(code)d: %(message)s</title>"
                        "<body>%(code)d: %(message)s</body></html>" % {
                            "code": status_code,
                            "message": self._reason,
                        })
@property
def locale(self):
"""The local for the current session.
Determined by either `get_user_locale`, which you can override to
set the locale based on, e.g., a user preference stored in a
database, or `get_browser_locale`, which uses the ``Accept-Language``
header.
"""
if not hasattr(self, "_locale"):
self._locale = self.get_user_locale()
if not self._locale:
self._locale = self.get_browser_locale()
assert self._locale
return self._locale
    def get_user_locale(self):
        """Override to determine the locale from the authenticated user.

        If None is returned, we fall back to `get_browser_locale()`.

        This method should return a `tornado.locale.Locale` object,
        most likely obtained via a call like ``tornado.locale.get("en")``
        """
        # Base implementation has no per-user preference.
        return None
def get_browser_locale(self, default="en_US"):
"""Determines the user's locale from ``Accept-Language`` header.
See http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.4
"""
if "Accept-Language" in self.request.headers:
languages = self.request.headers["Accept-Language"].split(",")
locales = []
for language in languages:
parts = language.strip().split(";")
if len(parts) > 1 and parts[1].startswith("q="):
try:
score = float(parts[1][2:])
except (ValueError, TypeError):
score = 0.0
else:
score = 1.0
locales.append((parts[0], score))
if locales:
locales.sort(key=lambda pair: pair[1], reverse=True)
codes = [l[0] for l in locales]
return locale.get(*codes)
return locale.get(default)
@property
def current_user(self):
"""The authenticated user for this request.
This is a cached version of `get_current_user`, which you can
override to set the user based on, e.g., a cookie. If that
method is not overridden, this method always returns None.
We lazy-load the current user the first time this method is called
and cache the result after that.
"""
if not hasattr(self, "_current_user"):
self._current_user = self.get_current_user()
return self._current_user
    @current_user.setter
    def current_user(self, value):
        # Allow code (e.g. an authentication layer) to set the user
        # explicitly, bypassing get_current_user().
        self._current_user = value
    def get_current_user(self):
        """Override to determine the current user from, e.g., a cookie."""
        # No authentication by default.
        return None
    def get_login_url(self):
        """Override to customize the login URL based on the request.

        By default, we use the ``login_url`` application setting.
        """
        # Fails loudly if the application was configured without login_url.
        self.require_setting("login_url", "@tornado.web.authenticated")
        return self.application.settings["login_url"]
    def get_template_path(self):
        """Override to customize template path for each handler.

        By default, we use the ``template_path`` application setting.
        Return None to load templates relative to the calling file.
        """
        return self.application.settings.get("template_path")
@property
def xsrf_token(self):
"""The XSRF-prevention token for the current user/session.
To prevent cross-site request forgery, we set an '_xsrf' cookie
and include the same '_xsrf' value as an argument with all POST
requests. If the two do not match, we reject the form submission
as a potential forgery.
See http://en.wikipedia.org/wiki/Cross-site_request_forgery
.. versionchanged:: 3.2.2
The xsrf token will now be have a random mask applied in every
request, which makes it safe to include the token in pages
that are compressed. See http://breachattack.com for more
information on the issue fixed by this change. Old (version 1)
cookies will be converted to version 2 when this method is called
unless the ``xsrf_cookie_version`` `Application` setting is
set to 1.
"""
if not hasattr(self, "_xsrf_token"):
version, token, timestamp = self._get_raw_xsrf_token()
output_version = self.settings.get("xsrf_cookie_version", 2)
if output_version == 1:
self._xsrf_token = binascii.b2a_hex(token)
elif output_version == 2:
mask = os.urandom(4)
self._xsrf_token = b"|".join([
b"2",
binascii.b2a_hex(mask),
binascii.b2a_hex(_websocket_mask(mask, token)),
utf8(str(int(timestamp)))])
else:
raise ValueError("unknown xsrf cookie version %d",
output_version)
if version is None:
expires_days = 30 if self.current_user else None
self.set_cookie("_xsrf", self._xsrf_token,
expires_days=expires_days)
return self._xsrf_token
def _get_raw_xsrf_token(self):
"""Read or generate the xsrf token in its raw form.
The raw_xsrf_token is a tuple containing:
* version: the version of the cookie from which this token was read,
or None if we generated a new token in this request.
* token: the raw token data; random (non-ascii) bytes.
* timestamp: the time this token was generated (will not be accurate
for version 1 cookies)
"""
if not hasattr(self, '_raw_xsrf_token'):
cookie = self.get_cookie("_xsrf")
if cookie:
version, token, timestamp = self._decode_xsrf_token(cookie)
else:
version, token, timestamp = None, None, None
if token is None:
version = None
token = os.urandom(16)
timestamp = time.time()
self._raw_xsrf_token = (version, token, timestamp)
return self._raw_xsrf_token
def _decode_xsrf_token(self, cookie):
"""Convert a cookie string into a the tuple form returned by
_get_raw_xsrf_token.
"""
m = _signed_value_version_re.match(utf8(cookie))
if m:
version = int(m.group(1))
if version == 2:
_, mask, masked_token, timestamp = cookie.split("|")
mask = binascii.a2b_hex(utf8(mask))
token = _websocket_mask(
mask, binascii.a2b_hex(utf8(masked_token)))
timestamp = int(timestamp)
return version, token, timestamp
else:
# Treat unknown versions as not present instead of failing.
return None, None, None
elif len(cookie) == 32:
version = 1
token = binascii.a2b_hex(utf8(cookie))
# We don't have a usable timestamp in older versions.
timestamp = int(time.time())
return (version, token, timestamp)
else:
return None, None, None
    def check_xsrf_cookie(self):
        """Verifies that the ``_xsrf`` cookie matches the ``_xsrf`` argument.

        To prevent cross-site request forgery, we set an ``_xsrf``
        cookie and include the same value as a non-cookie
        field with all ``POST`` requests. If the two do not match, we
        reject the form submission as a potential forgery.

        The ``_xsrf`` value may be set as either a form field named ``_xsrf``
        or in a custom HTTP header named ``X-XSRFToken`` or ``X-CSRFToken``
        (the latter is accepted for compatibility with Django).

        See http://en.wikipedia.org/wiki/Cross-site_request_forgery

        Prior to release 1.1.1, this check was ignored if the HTTP header
        ``X-Requested-With: XMLHTTPRequest`` was present. This exception
        has been shown to be insecure and has been removed. For more
        information please see
        http://www.djangoproject.com/weblog/2011/feb/08/security/
        http://weblog.rubyonrails.org/2011/2/8/csrf-protection-bypass-in-ruby-on-rails

        .. versionchanged:: 3.2.2
           Added support for cookie version 2. Both versions 1 and 2 are
           supported.
        """
        # The submitted token may arrive as a form field or in either of two
        # headers (the X-Csrftoken spelling is for Django compatibility).
        token = (self.get_argument("_xsrf", None) or
                 self.request.headers.get("X-Xsrftoken") or
                 self.request.headers.get("X-Csrftoken"))
        if not token:
            raise HTTPError(403, "'_xsrf' argument missing from POST")
        # Decode both sides to their raw form so tokens written with
        # different cookie versions still compare equal.
        _, token, _ = self._decode_xsrf_token(token)
        _, expected_token, _ = self._get_raw_xsrf_token()
        # Constant-time comparison to avoid leaking the token via timing.
        if not _time_independent_equals(utf8(token), utf8(expected_token)):
            raise HTTPError(403, "XSRF cookie does not match POST argument")
def xsrf_form_html(self):
"""An HTML ``<input/>`` element to be included with all POST forms.
It defines the ``_xsrf`` input value, which we check on all POST
requests to prevent cross-site request forgery. If you have set
the ``xsrf_cookies`` application setting, you must include this
HTML within all of your HTML forms.
In a template, this method should be called with ``{% module
xsrf_form_html() %}``
See `check_xsrf_cookie()` above for more information.
"""
return '<input type="hidden" name="_xsrf" value="' + \
escape.xhtml_escape(self.xsrf_token) + '"/>'
def static_url(self, path, include_host=None, **kwargs):
"""Returns a static URL for the given relative static file path.
This method requires you set the ``static_path`` setting in your
application (which specifies the root directory of your static
files).
This method returns a versioned url (by default appending
``?v=<signature>``), which allows the static files to be
cached indefinitely. This can be disabled by passing
``include_version=False`` (in the default implementation;
other static file implementations are not required to support
this, but they may support other options).
By default this method returns URLs relative to the current
host, but if ``include_host`` is true the URL returned will be
absolute. If this handler has an ``include_host`` attribute,
that value will be used as the default for all `static_url`
calls that do not pass ``include_host`` as a keyword argument.
"""
self.require_setting("static_path", "static_url")
get_url = self.settings.get("static_handler_class",
StaticFileHandler).make_static_url
if include_host is None:
include_host = getattr(self, "include_host", False)
if include_host:
base = self.request.protocol + "://" + self.request.host
else:
base = ""
return base + get_url(self.settings, path, **kwargs)
    def async_callback(self, callback, *args, **kwargs):
        """Obsolete - catches exceptions from the wrapped function.

        This function is unnecessary since Tornado 1.1.
        """
        if callback is None:
            return None
        if args or kwargs:
            # Pre-bind the extra arguments; the wrapper then only forwards
            # whatever the eventual caller passes.
            callback = functools.partial(callback, *args, **kwargs)

        def wrapper(*args, **kwargs):
            try:
                return callback(*args, **kwargs)
            except Exception as e:
                if self._headers_written:
                    # Response already started; all we can do is log.
                    app_log.error("Exception after headers written",
                                  exc_info=True)
                else:
                    self._handle_request_exception(e)
        return wrapper
    def require_setting(self, name, feature="this feature"):
        """Raises an exception if the given app setting is not defined."""
        # A missing, empty, or otherwise falsy setting counts as undefined.
        if not self.application.settings.get(name):
            raise Exception("You must define the '%s' setting in your "
                            "application to use %s" % (name, feature))
    def reverse_url(self, name, *args):
        """Alias for `Application.reverse_url`."""
        # Convenience delegate so templates/handlers need not reach through
        # self.application themselves.
        return self.application.reverse_url(name, *args)
def compute_etag(self):
"""Computes the etag header to be used for this request.
By default uses a hash of the content written so far.
May be overridden to provide custom etag implementations,
or may return None to disable tornado's default etag support.
"""
hasher = hashlib.sha1()
for part in self._write_buffer:
hasher.update(part)
return '"%s"' % hasher.hexdigest()
def set_etag_header(self):
"""Sets the response's Etag header using ``self.compute_etag()``.
Note: no header will be set if ``compute_etag()`` returns ``None``.
This method is called automatically when the request is finished.
"""
etag = self.compute_etag()
if etag is not None:
self.set_header("Etag", etag)
def check_etag_header(self):
"""Checks the ``Etag`` header against requests's ``If-None-Match``.
Returns ``True`` if the request's Etag matches and a 304 should be
returned. For example::
self.set_etag_header()
if self.check_etag_header():
self.set_status(304)
return
This method is called automatically when the request is finished,
but may be called earlier for applications that override
`compute_etag` and want to do an early check for ``If-None-Match``
before completing the request. The ``Etag`` header should be set
(perhaps with `set_etag_header`) before calling this method.
"""
etag = self._headers.get("Etag")
inm = utf8(self.request.headers.get("If-None-Match", ""))
return bool(etag and inm and inm.find(etag) >= 0)
    def _stack_context_handle_exception(self, type, value, traceback):
        """Stack-context exception handler installed around handler callbacks.

        Always returns True to signal that the exception was consumed.
        """
        try:
            # For historical reasons _handle_request_exception only takes
            # the exception value instead of the full triple,
            # so re-raise the exception to ensure that it's in
            # sys.exc_info()
            raise_exc_info((type, value, traceback))
        except Exception:
            self._handle_request_exception(value)
        return True
    @gen.coroutine
    def _execute(self, transforms, *args, **kwargs):
        """Executes this request with the given output transforms."""
        self._transforms = transforms
        try:
            if self.request.method not in self.SUPPORTED_METHODS:
                raise HTTPError(405)
            # Decode percent-encoded path components before handing them to
            # the handler's verb method.
            self.path_args = [self.decode_argument(arg) for arg in args]
            self.path_kwargs = dict((k, self.decode_argument(v, name=k))
                                    for (k, v) in kwargs.items())
            # If XSRF cookies are turned on, reject form submissions without
            # the proper cookie
            if self.request.method not in ("GET", "HEAD", "OPTIONS") and \
                    self.application.settings.get("xsrf_cookies"):
                self.check_xsrf_cookie()

            # prepare() may be synchronous or return a Future.
            result = self.prepare()
            if is_future(result):
                result = yield result
            if result is not None:
                raise TypeError("Expected None, got %r" % result)
            if self._prepared_future is not None:
                # Tell the Application we've finished with prepare()
                # and are ready for the body to arrive.
                self._prepared_future.set_result(None)
            if self._finished:
                return

            if _has_stream_request_body(self.__class__):
                # In streaming mode request.body is a Future that signals
                # the body has been completely received. The Future has no
                # result; the data has been passed to self.data_received
                # instead.
                try:
                    yield self.request.body
                except iostream.StreamClosedError:
                    return

            # Dispatch to get/post/put/... by lower-cased HTTP method name.
            method = getattr(self, self.request.method.lower())
            result = method(*self.path_args, **self.path_kwargs)
            if is_future(result):
                result = yield result
            if result is not None:
                raise TypeError("Expected None, got %r" % result)
            if self._auto_finish and not self._finished:
                self.finish()
        except Exception as e:
            self._handle_request_exception(e)
            if (self._prepared_future is not None and
                    not self._prepared_future.done()):
                # In case we failed before setting _prepared_future, do it
                # now (to unblock the HTTP server).  Note that this is not
                # in a finally block to avoid GC issues prior to Python 3.4.
                self._prepared_future.set_result(None)
    def data_received(self, chunk):
        """Implement this method to handle streamed request data.

        Requires the `.stream_request_body` decorator.
        """
        # Deliberately abstract: streaming handlers must override this.
        raise NotImplementedError()
    def _log(self):
        """Logs the current request.

        Sort of deprecated since this functionality was moved to the
        Application, but left in place for the benefit of existing apps
        that have overridden this method.
        """
        # Delegate to the application-level hook (see Application.log_request).
        self.application.log_request(self)
def _request_summary(self):
return self.request.method + " " + self.request.uri + \
" (" + self.request.remote_ip + ")"
def _handle_request_exception(self, e):
self.log_exception(*sys.exc_info())
if self._finished:
# Extra errors after the request has been finished should
# be logged, but there is no reason to continue to try and
# send a response.
return
if isinstance(e, HTTPError):
if e.status_code not in httputil.responses and not e.reason:
gen_log.error("Bad HTTP status code: %d", e.status_code)
self.send_error(500, exc_info=sys.exc_info())
else:
self.send_error(e.status_code, exc_info=sys.exc_info())
else:
self.send_error(500, exc_info=sys.exc_info())
def log_exception(self, typ, value, tb):
"""Override to customize logging of uncaught exceptions.
By default logs instances of `HTTPError` as warnings without
stack traces (on the ``tornado.general`` logger), and all
other exceptions as errors with stack traces (on the
``tornado.application`` logger).
.. versionadded:: 3.1
"""
if isinstance(value, HTTPError):
if value.log_message:
format = "%d %s: " + value.log_message
args = ([value.status_code, self._request_summary()] +
list(value.args))
gen_log.warning(format, *args)
else:
app_log.error("Uncaught exception %s\n%r", self._request_summary(),
self.request, exc_info=(typ, value, tb))
def _ui_module(self, name, module):
def render(*args, **kwargs):
if not hasattr(self, "_active_modules"):
self._active_modules = {}
if name not in self._active_modules:
self._active_modules[name] = module(self)
rendered = self._active_modules[name].render(*args, **kwargs)
return rendered
return render
def _ui_method(self, method):
return lambda *args, **kwargs: method(self, *args, **kwargs)
def _clear_headers_for_304(self):
# 304 responses should not contain entity headers (defined in
# http://www.w3.org/Protocols/rfc2616/rfc2616-sec7.html#sec7.1)
# not explicitly allowed by
# http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html#sec10.3.5
headers = ["Allow", "Content-Encoding", "Content-Language",
"Content-Length", "Content-MD5", "Content-Range",
"Content-Type", "Last-Modified"]
for h in headers:
self.clear_header(h)
def asynchronous(method):
    """Wrap request handler methods with this if they are asynchronous.

    This decorator is unnecessary if the method is also decorated with
    ``@gen.coroutine`` (it is legal but unnecessary to use the two
    decorators together, in which case ``@asynchronous`` must be
    first).

    This decorator should only be applied to the :ref:`HTTP verb
    methods <verbs>`; its behavior is undefined for any other method.

    This decorator does not *make* a method asynchronous; it tells
    the framework that the method *is* asynchronous. For this decorator
    to be useful the method must (at least sometimes) do something
    asynchronous.

    If this decorator is given, the response is not finished when the
    method returns. It is up to the request handler to call
    `self.finish() <RequestHandler.finish>` to finish the HTTP
    request. Without this decorator, the request is automatically
    finished when the ``get()`` or ``post()`` method returns. Example::

        class MyRequestHandler(web.RequestHandler):
            @web.asynchronous
            def get(self):
                http = httpclient.AsyncHTTPClient()
                http.fetch("http://friendfeed.com/", self._on_download)

            def _on_download(self, response):
                self.write("Downloaded!")
                self.finish()

    .. versionadded:: 3.1
       The ability to use ``@gen.coroutine`` without ``@asynchronous``.
    """
    # Delay the IOLoop import because it's not available on app engine.
    from tornado.ioloop import IOLoop

    @functools.wraps(method)
    def wrapper(self, *args, **kwargs):
        # Disable the automatic finish() that normally runs when the verb
        # method returns.
        self._auto_finish = False
        with stack_context.ExceptionStackContext(
                self._stack_context_handle_exception):
            result = method(self, *args, **kwargs)
            if isinstance(result, Future):
                # If @asynchronous is used with @gen.coroutine, (but
                # not @gen.engine), we can automatically finish the
                # request when the future resolves. Additionally,
                # the Future will swallow any exceptions so we need
                # to throw them back out to the stack context to finish
                # the request.
                def future_complete(f):
                    f.result()
                    if not self._finished:
                        self.finish()
                IOLoop.current().add_future(result, future_complete)
                # Once we have done this, hide the Future from our
                # caller (i.e. RequestHandler._when_complete), which
                # would otherwise set up its own callback and
                # exception handler (resulting in exceptions being
                # logged twice).
                return None
            return result
    return wrapper
def stream_request_body(cls):
    """Apply to `RequestHandler` subclasses to enable streaming body support.

    This decorator implies the following changes:

    * `.HTTPServerRequest.body` is undefined, and body arguments will not
      be included in `RequestHandler.get_argument`.
    * `RequestHandler.prepare` is called when the request headers have been
      read instead of after the entire body has been read.
    * The subclass must define a method ``data_received(self, data):``, which
      will be called zero or more times as data is available. Note that
      if the request has an empty body, ``data_received`` may not be called.
    * ``prepare`` and ``data_received`` may return Futures (such as via
      ``@gen.coroutine``, in which case the next method will not be called
      until those futures have completed.
    * The regular HTTP method (``post``, ``put``, etc) will be called after
      the entire body has been read.

    There is a subtle interaction between ``data_received`` and asynchronous
    ``prepare``: The first call to ``data_received`` may occur at any point
    after the call to ``prepare`` has returned *or yielded*.
    """
    if not issubclass(cls, RequestHandler):
        # Bug fix: the class was previously passed as a separate TypeError
        # argument, so the %r placeholder was never interpolated.
        raise TypeError("expected subclass of RequestHandler, got %r" % cls)
    cls._stream_request_body = True
    return cls
def _has_stream_request_body(cls):
    """Return True if *cls* was decorated with `stream_request_body`."""
    if not issubclass(cls, RequestHandler):
        # Bug fix: interpolate the message; TypeError does not apply
        # %-formatting to extra constructor arguments.
        raise TypeError("expected subclass of RequestHandler, got %r" % cls)
    return getattr(cls, '_stream_request_body', False)
def removeslash(method):
    """Use this decorator to remove trailing slashes from the request path.

    For example, a request to ``/foo/`` would redirect to ``/foo`` with this
    decorator. Your request handler mapping should use a regular expression
    like ``r'/foo/*'`` in conjunction with using the decorator.
    """
    @functools.wraps(method)
    def wrapper(self, *args, **kwargs):
        if not self.request.path.endswith("/"):
            # Nothing to strip; run the handler method directly.
            return method(self, *args, **kwargs)
        if self.request.method not in ("GET", "HEAD"):
            # Only safe methods may be redirected.
            raise HTTPError(404)
        uri = self.request.path.rstrip("/")
        if uri:  # don't try to redirect '/' to ''
            if self.request.query:
                uri = "%s?%s" % (uri, self.request.query)
            self.redirect(uri, permanent=True)
            return
        return method(self, *args, **kwargs)
    return wrapper
def addslash(method):
    """Use this decorator to add a missing trailing slash to the request path.

    For example, a request to ``/foo`` would redirect to ``/foo/`` with this
    decorator. Your request handler mapping should use a regular expression
    like ``r'/foo/?'`` in conjunction with using the decorator.
    """
    @functools.wraps(method)
    def wrapper(self, *args, **kwargs):
        if self.request.path.endswith("/"):
            # Already canonical; run the handler method directly.
            return method(self, *args, **kwargs)
        if self.request.method not in ("GET", "HEAD"):
            # Only safe methods may be redirected.
            raise HTTPError(404)
        uri = self.request.path + "/"
        if self.request.query:
            uri = "%s?%s" % (uri, self.request.query)
        self.redirect(uri, permanent=True)
    return wrapper
class Application(httputil.HTTPServerConnectionDelegate):
    """A collection of request handlers that make up a web application.

    Instances of this class are callable and can be passed directly to
    HTTPServer to serve the application::

        application = web.Application([
            (r"/", MainPageHandler),
        ])
        http_server = httpserver.HTTPServer(application)
        http_server.listen(8080)
        ioloop.IOLoop.instance().start()

    The constructor for this class takes in a list of `URLSpec` objects
    or (regexp, request_class) tuples. When we receive requests, we
    iterate over the list in order and instantiate an instance of the
    first request class whose regexp matches the request path.
    The request class can be specified as either a class object or a
    (fully-qualified) name.

    Each tuple can contain additional elements, which correspond to the
    arguments to the `URLSpec` constructor. (Prior to Tornado 3.2, this
    only tuples of two or three elements were allowed).

    A dictionary may be passed as the third element of the tuple,
    which will be used as keyword arguments to the handler's
    constructor and `~RequestHandler.initialize` method. This pattern
    is used for the `StaticFileHandler` in this example (note that a
    `StaticFileHandler` can be installed automatically with the
    static_path setting described below)::

        application = web.Application([
            (r"/static/(.*)", web.StaticFileHandler, {"path": "/var/www"}),
        ])

    We support virtual hosts with the `add_handlers` method, which takes in
    a host regular expression as the first argument::

        application.add_handlers(r"www\.myhost\.com", [
            (r"/article/([0-9]+)", ArticleHandler),
        ])

    You can serve static files by sending the ``static_path`` setting
    as a keyword argument. We will serve those files from the
    ``/static/`` URI (this is configurable with the
    ``static_url_prefix`` setting), and we will serve ``/favicon.ico``
    and ``/robots.txt`` from the same directory. A custom subclass of
    `StaticFileHandler` can be specified with the
    ``static_handler_class`` setting.
    """
    def __init__(self, handlers=None, default_host="", transforms=None,
                 **settings):
        # Default transform list: gzip output when enabled via settings.
        if transforms is None:
            self.transforms = []
            if settings.get("gzip"):
                self.transforms.append(GZipContentEncoding)
        else:
            self.transforms = transforms
        self.handlers = []        # list of (compiled host pattern, [URLSpec])
        self.named_handlers = {}  # name -> URLSpec, used by reverse_url()
        self.default_host = default_host
        self.settings = settings
        # Built-in UI modules, always available in templates.
        self.ui_modules = {'linkify': _linkify,
                           'xsrf_form_html': _xsrf_form_html,
                           'Template': TemplateModule,
                           }
        self.ui_methods = {}
        self._load_ui_modules(settings.get("ui_modules", {}))
        self._load_ui_methods(settings.get("ui_methods", {}))
        if self.settings.get("static_path"):
            # Install static-file routes ahead of user handlers so they
            # take precedence over catch-all patterns.
            path = self.settings["static_path"]
            handlers = list(handlers or [])
            static_url_prefix = settings.get("static_url_prefix",
                                             "/static/")
            static_handler_class = settings.get("static_handler_class",
                                                StaticFileHandler)
            static_handler_args = settings.get("static_handler_args", {})
            static_handler_args['path'] = path
            for pattern in [re.escape(static_url_prefix) + r"(.*)",
                            r"/(favicon\.ico)", r"/(robots\.txt)"]:
                handlers.insert(0, (pattern, static_handler_class,
                                    static_handler_args))
        if handlers:
            self.add_handlers(".*$", handlers)

        if self.settings.get('debug'):
            # Debug mode implies several development conveniences unless
            # each was explicitly configured.
            self.settings.setdefault('autoreload', True)
            self.settings.setdefault('compiled_template_cache', False)
            self.settings.setdefault('static_hash_cache', False)
            self.settings.setdefault('serve_traceback', True)

        # Automatically reload modified modules
        if self.settings.get('autoreload'):
            from tornado import autoreload
            autoreload.start()

    def listen(self, port, address="", **kwargs):
        """Starts an HTTP server for this application on the given port.

        This is a convenience alias for creating an `.HTTPServer`
        object and calling its listen method. Keyword arguments not
        supported by `HTTPServer.listen <.TCPServer.listen>` are passed to the
        `.HTTPServer` constructor. For advanced uses
        (e.g. multi-process mode), do not use this method; create an
        `.HTTPServer` and call its
        `.TCPServer.bind`/`.TCPServer.start` methods directly.

        Note that after calling this method you still need to call
        ``IOLoop.instance().start()`` to start the server.
        """
        # import is here rather than top level because HTTPServer
        # is not importable on appengine
        from tornado.httpserver import HTTPServer
        server = HTTPServer(self, **kwargs)
        server.listen(port, address)

    def add_handlers(self, host_pattern, host_handlers):
        """Appends the given handlers to our handler list.

        Host patterns are processed sequentially in the order they were
        added. All matching patterns will be considered.
        """
        if not host_pattern.endswith("$"):
            # Anchor the pattern so it must match the entire host name.
            host_pattern += "$"
        handlers = []
        # The handlers with the wildcard host_pattern are a special
        # case - they're added in the constructor but should have lower
        # precedence than the more-precise handlers added later.
        # If a wildcard handler group exists, it should always be last
        # in the list, so insert new groups just before it.
        if self.handlers and self.handlers[-1][0].pattern == '.*$':
            self.handlers.insert(-1, (re.compile(host_pattern), handlers))
        else:
            self.handlers.append((re.compile(host_pattern), handlers))

        for spec in host_handlers:
            if isinstance(spec, (tuple, list)):
                # Normalize tuple specs into URLSpec objects.
                assert len(spec) in (2, 3, 4)
                spec = URLSpec(*spec)
            handlers.append(spec)
            if spec.name:
                if spec.name in self.named_handlers:
                    app_log.warning(
                        "Multiple handlers named %s; replacing previous value",
                        spec.name)
                self.named_handlers[spec.name] = spec

    def add_transform(self, transform_class):
        # Register an additional output transform (e.g. content encoding)
        # applied to every response.
        self.transforms.append(transform_class)

    def _get_host_handlers(self, request):
        """Return the URLSpecs whose host pattern matches this request."""
        # Strip any port and lower-case before matching host patterns.
        host = request.host.lower().split(':')[0]
        matches = []
        for pattern, handlers in self.handlers:
            if pattern.match(host):
                matches.extend(handlers)
        # Look for default host if not behind load balancer (for debugging)
        if not matches and "X-Real-Ip" not in request.headers:
            for pattern, handlers in self.handlers:
                if pattern.match(self.default_host):
                    matches.extend(handlers)
        return matches or None

    def _load_ui_methods(self, methods):
        """Recursively register template UI methods from a module, list,
        or dict of name -> callable."""
        if isinstance(methods, types.ModuleType):
            self._load_ui_methods(dict((n, getattr(methods, n))
                                       for n in dir(methods)))
        elif isinstance(methods, list):
            for m in methods:
                self._load_ui_methods(m)
        else:
            for name, fn in methods.items():
                # Skip private names, non-callables, and names that do not
                # begin with a lower-case letter.
                if not name.startswith("_") and hasattr(fn, "__call__") \
                        and name[0].lower() == name[0]:
                    self.ui_methods[name] = fn

    def _load_ui_modules(self, modules):
        """Recursively register UI modules from a module, list, or dict of
        name -> UIModule subclass."""
        if isinstance(modules, types.ModuleType):
            self._load_ui_modules(dict((n, getattr(modules, n))
                                       for n in dir(modules)))
        elif isinstance(modules, list):
            for m in modules:
                self._load_ui_modules(m)
        else:
            assert isinstance(modules, dict)
            for name, cls in modules.items():
                try:
                    if issubclass(cls, UIModule):
                        self.ui_modules[name] = cls
                except TypeError:
                    # issubclass raises TypeError for non-class values;
                    # silently skip them.
                    pass

    def start_request(self, connection):
        # Modern HTTPServer interface
        return _RequestDispatcher(self, connection)

    def __call__(self, request):
        # Legacy HTTPServer interface
        dispatcher = _RequestDispatcher(self, None)
        dispatcher.set_request(request)
        return dispatcher.execute()

    def reverse_url(self, name, *args):
        """Returns a URL path for handler named ``name``

        The handler must be added to the application as a named `URLSpec`.

        Args will be substituted for capturing groups in the `URLSpec` regex.
        They will be converted to strings if necessary, encoded as utf8,
        and url-escaped.
        """
        if name in self.named_handlers:
            return self.named_handlers[name].reverse(*args)
        raise KeyError("%s not found in named urls" % name)

    def log_request(self, handler):
        """Writes a completed HTTP request to the logs.

        By default writes to the python root logger. To change
        this behavior either subclass Application and override this method,
        or pass a function in the application settings dictionary as
        ``log_function``.
        """
        if "log_function" in self.settings:
            # A user-supplied logging hook takes over entirely.
            self.settings["log_function"](handler)
            return
        # Log severity scales with the response status class.
        if handler.get_status() < 400:
            log_method = access_log.info
        elif handler.get_status() < 500:
            log_method = access_log.warning
        else:
            log_method = access_log.error
        request_time = 1000.0 * handler.request.request_time()
        log_method("%d %s %.2fms", handler.get_status(),
                   handler._request_summary(), request_time)
class _RequestDispatcher(httputil.HTTPMessageDelegate):
    """Glue between the HTTP server and a single request's handler.

    One dispatcher exists per request; it selects the handler class from
    the `Application`'s routing table, buffers (or streams) the request
    body, and finally runs the handler.
    """
    def __init__(self, application, connection):
        self.application = application
        self.connection = connection
        self.request = None
        self.chunks = []          # buffered body pieces (non-streaming mode)
        self.handler_class = None
        self.handler_kwargs = None
        self.path_args = []       # positional groups captured by the URL regex
        self.path_kwargs = {}     # named groups captured by the URL regex

    def headers_received(self, start_line, headers):
        self.set_request(httputil.HTTPServerRequest(
            connection=self.connection, start_line=start_line, headers=headers))
        if self.stream_request_body:
            # In streaming mode, body is a Future resolved in finish();
            # the data itself goes to handler.data_received as it arrives.
            self.request.body = Future()
            return self.execute()

    def set_request(self, request):
        self.request = request
        self._find_handler()
        # Streaming depends on the chosen handler class's decoration.
        self.stream_request_body = _has_stream_request_body(self.handler_class)

    def _find_handler(self):
        # Identify the handler to use as soon as we have the request.
        # Save url path arguments for later.
        app = self.application
        handlers = app._get_host_handlers(self.request)
        if not handlers:
            # Unknown virtual host: redirect to the default host.
            self.handler_class = RedirectHandler
            self.handler_kwargs = dict(url="http://" + app.default_host + "/")
            return
        for spec in handlers:
            match = spec.regex.match(self.request.path)
            if match:
                self.handler_class = spec.handler_class
                self.handler_kwargs = spec.kwargs
                if spec.regex.groups:
                    # Pass matched groups to the handler. Since
                    # match.groups() includes both named and
                    # unnamed groups, we want to use either groups
                    # or groupdict but not both.
                    if spec.regex.groupindex:
                        self.path_kwargs = dict(
                            (str(k), _unquote_or_none(v))
                            for (k, v) in match.groupdict().items())
                    else:
                        self.path_args = [_unquote_or_none(s)
                                          for s in match.groups()]
                return
        if app.settings.get('default_handler_class'):
            self.handler_class = app.settings['default_handler_class']
            self.handler_kwargs = app.settings.get(
                'default_handler_args', {})
        else:
            # No route matched and no default configured: serve a 404.
            self.handler_class = ErrorHandler
            self.handler_kwargs = dict(status_code=404)

    def data_received(self, data):
        if self.stream_request_body:
            # Hand the chunk straight to the handler (may return a Future).
            return self.handler.data_received(data)
        else:
            # Buffer until the body is complete.
            self.chunks.append(data)

    def finish(self):
        if self.stream_request_body:
            # Resolve the body Future so RequestHandler._execute resumes.
            self.request.body.set_result(None)
        else:
            self.request.body = b''.join(self.chunks)
            self.request._parse_body()
            self.execute()

    def on_connection_close(self):
        if self.stream_request_body:
            self.handler.on_connection_close()
        else:
            # Drop buffered data; the response can never be delivered.
            self.chunks = None

    def execute(self):
        # If template cache is disabled (usually in the debug mode),
        # re-compile templates and reload static files on every
        # request so you don't need to restart to see changes
        if not self.application.settings.get("compiled_template_cache", True):
            with RequestHandler._template_loader_lock:
                for loader in RequestHandler._template_loaders.values():
                    loader.reset()
        if not self.application.settings.get('static_hash_cache', True):
            StaticFileHandler.reset()

        self.handler = self.handler_class(self.application, self.request,
                                          **self.handler_kwargs)
        transforms = [t(self.request) for t in self.application.transforms]

        if self.stream_request_body:
            self.handler._prepared_future = Future()
        # Note that if an exception escapes handler._execute it will be
        # trapped in the Future it returns (which we are ignoring here).
        # However, that shouldn't happen because _execute has a blanket
        # except handler, and we cannot easily access the IOLoop here to
        # call add_future.
        self.handler._execute(transforms, *self.path_args, **self.path_kwargs)
        # If we are streaming the request body, then execute() is finished
        # when the handler has prepared to receive the body. If not,
        # it doesn't matter when execute() finishes (so we return None)
        return self.handler._prepared_future
class HTTPError(Exception):
    """An exception that will turn into an HTTP error response.

    Raising an `HTTPError` is a convenient alternative to calling
    `RequestHandler.send_error` since it automatically ends the
    current function.

    :arg int status_code: HTTP status code. Must be listed in
        `httplib.responses <http.client.responses>` unless the ``reason``
        keyword argument is given.
    :arg string log_message: Message to be written to the log for this error
        (will not be shown to the user unless the `Application` is in debug
        mode). May contain ``%s``-style placeholders, which will be filled
        in with remaining positional parameters.
    :arg string reason: Keyword-only argument. The HTTP "reason" phrase
        to pass in the status line along with ``status_code``. Normally
        determined automatically from ``status_code``, but can be used
        to use a non-standard numeric code.
    """
    def __init__(self, status_code, log_message=None, *args, **kwargs):
        self.status_code = status_code
        self.log_message = log_message
        self.args = args
        self.reason = kwargs.get('reason', None)

    def __str__(self):
        # An explicit ``reason`` wins; otherwise use the standard phrase
        # for the status code, or 'Unknown' for non-standard codes.
        phrase = self.reason or httputil.responses.get(self.status_code,
                                                       'Unknown')
        message = "HTTP %d: %s" % (self.status_code, phrase)
        if not self.log_message:
            return message
        # log_message may contain %s-style placeholders filled from args.
        return "%s (%s)" % (message, self.log_message % self.args)


class MissingArgumentError(HTTPError):
    """Exception raised by `RequestHandler.get_argument`.

    This is a subclass of `HTTPError`, so if it is uncaught a 400 response
    code will be used instead of 500 (and a stack trace will not be logged).

    .. versionadded:: 3.1
    """
    def __init__(self, arg_name):
        message = 'Missing argument %s' % arg_name
        super(MissingArgumentError, self).__init__(400, message)
        self.arg_name = arg_name
class ErrorHandler(RequestHandler):
    """Generates an error response with ``status_code`` for all requests."""
    def initialize(self, status_code):
        # Record the code up front; ``prepare`` raises it for every verb.
        self.set_status(status_code)

    def prepare(self):
        raise HTTPError(self._status_code)

    def check_xsrf_cookie(self):
        # POSTs to an ErrorHandler don't actually have side effects,
        # so we don't need to check the xsrf token. This allows POSTs
        # to the wrong url to return a 404 instead of 403.
        pass
class RedirectHandler(RequestHandler):
    """Redirects the client to the given URL for all GET requests.

    You should provide the keyword argument ``url`` to the handler, e.g.::

        application = web.Application([
            (r"/oldpath", web.RedirectHandler, {"url": "/newpath"}),
        ])
    """
    def initialize(self, url, permanent=True):
        self._url = url
        self._permanent = permanent

    def get(self):
        # 301 (permanent, the default) or 302 (temporary) redirect.
        self.redirect(self._url, permanent=self._permanent)
class StaticFileHandler(RequestHandler):
    """A simple handler that can serve static content from a directory.
    A `StaticFileHandler` is configured automatically if you pass the
    ``static_path`` keyword argument to `Application`.  This handler
    can be customized with the ``static_url_prefix``, ``static_handler_class``,
    and ``static_handler_args`` settings.
    To map an additional path to this handler for a static data directory
    you would add a line to your application like::
        application = web.Application([
            (r"/content/(.*)", web.StaticFileHandler, {"path": "/var/www"}),
        ])
    The handler constructor requires a ``path`` argument, which specifies the
    local root directory of the content to be served.
    Note that a capture group in the regex is required to parse the value for
    the ``path`` argument to the get() method (different than the constructor
    argument above); see `URLSpec` for details.
    To maximize the effectiveness of browser caching, this class supports
    versioned urls (by default using the argument ``?v=``).  If a version
    is given, we instruct the browser to cache this file indefinitely.
    `make_static_url` (also available as `RequestHandler.static_url`) can
    be used to construct a versioned url.
    This handler is intended primarily for use in development and light-duty
    file serving; for heavy traffic it will be more efficient to use
    a dedicated static file server (such as nginx or Apache).  We support
    the HTTP ``Accept-Ranges`` mechanism to return partial content (because
    some browsers require this functionality to be present to seek in
    HTML5 audio or video), but this handler should not be used with
    files that are too large to fit comfortably in memory.
    **Subclassing notes**
    This class is designed to be extensible by subclassing, but because
    of the way static urls are generated with class methods rather than
    instance methods, the inheritance patterns are somewhat unusual.
    Be sure to use the ``@classmethod`` decorator when overriding a
    class method.  Instance methods may use the attributes ``self.path``
    ``self.absolute_path``, and ``self.modified``.
    Subclasses should only override methods discussed in this section;
    overriding other methods is error-prone.  Overriding
    ``StaticFileHandler.get`` is particularly problematic due to the
    tight coupling with ``compute_etag`` and other methods.
    To change the way static urls are generated (e.g. to match the behavior
    of another server or CDN), override `make_static_url`, `parse_url_path`,
    `get_cache_time`, and/or `get_version`.
    To replace all interaction with the filesystem (e.g. to serve
    static content from a database), override `get_content`,
    `get_content_size`, `get_modified_time`, `get_absolute_path`, and
    `validate_absolute_path`.
    .. versionchanged:: 3.1
       Many of the methods for subclasses were added in Tornado 3.1.
    """
    CACHE_MAX_AGE = 86400 * 365 * 10  # 10 years
    _static_hashes = {}  # maps absolute path -> content hash (or None)
    _lock = threading.Lock()  # protects _static_hashes
    def initialize(self, path, default_filename=None):
        # ``path`` is the local filesystem root configured for this handler.
        self.root = path
        # Served when the request resolves to a directory (e.g. index.html).
        self.default_filename = default_filename
    @classmethod
    def reset(cls):
        # Drop all cached content hashes (used when static_hash_cache is off).
        with cls._lock:
            cls._static_hashes = {}
    def head(self, path):
        # HEAD is GET without the body; headers are computed identically.
        return self.get(path, include_body=False)
    @gen.coroutine
    def get(self, path, include_body=True):
        # Set up our path instance variables.
        self.path = self.parse_url_path(path)
        del path  # make sure we don't refer to path instead of self.path again
        absolute_path = self.get_absolute_path(self.root, self.path)
        self.absolute_path = self.validate_absolute_path(
            self.root, absolute_path)
        if self.absolute_path is None:
            # validate_absolute_path already redirected or raised.
            return
        self.modified = self.get_modified_time()
        self.set_headers()
        if self.should_return_304():
            self.set_status(304)
            return
        request_range = None
        range_header = self.request.headers.get("Range")
        if range_header:
            # As per RFC 2616 14.16, if an invalid Range header is specified,
            # the request will be treated as if the header didn't exist.
            request_range = httputil._parse_request_range(range_header)
        size = self.get_content_size()
        if request_range:
            start, end = request_range
            if (start is not None and start >= size) or end == 0:
                # As per RFC 2616 14.35.1, a range is not satisfiable only: if
                # the first requested byte is equal to or greater than the
                # content, or when a suffix with length 0 is specified
                self.set_status(416)  # Range Not Satisfiable
                self.set_header("Content-Type", "text/plain")
                self.set_header("Content-Range", "bytes */%s" % (size, ))
                return
            if start is not None and start < 0:
                # Negative start means a suffix range (last -start bytes).
                start += size
            if end is not None and end > size:
                # Clients sometimes blindly use a large range to limit their
                # download size; cap the endpoint at the actual file size.
                end = size
            # Note: only return HTTP 206 if less than the entire range has been
            # requested. Not only is this semantically correct, but Chrome
            # refuses to play audio if it gets an HTTP 206 in response to
            # ``Range: bytes=0-``.
            if size != (end or size) - (start or 0):
                self.set_status(206)  # Partial Content
                self.set_header("Content-Range",
                                httputil._get_content_range(start, end, size))
        else:
            start = end = None
        if start is not None and end is not None:
            content_length = end - start
        elif end is not None:
            content_length = end
        elif start is not None:
            content_length = size - start
        else:
            content_length = size
        self.set_header("Content-Length", content_length)
        if include_body:
            content = self.get_content(self.absolute_path, start, end)
            if isinstance(content, bytes_type):
                # get_content may return a single byte string or an iterator.
                content = [content]
            for chunk in content:
                self.write(chunk)
                yield self.flush()
        else:
            assert self.request.method == "HEAD"
    def compute_etag(self):
        """Sets the ``Etag`` header based on static url version.
        This allows efficient ``If-None-Match`` checks against cached
        versions, and sends the correct ``Etag`` for a partial response
        (i.e. the same ``Etag`` as the full file).
        .. versionadded:: 3.1
        """
        version_hash = self._get_cached_version(self.absolute_path)
        if not version_hash:
            return None
        return '"%s"' % (version_hash, )
    def set_headers(self):
        """Sets the content and caching headers on the response.
        .. versionadded:: 3.1
        """
        self.set_header("Accept-Ranges", "bytes")
        self.set_etag_header()
        if self.modified is not None:
            self.set_header("Last-Modified", self.modified)
        content_type = self.get_content_type()
        if content_type:
            self.set_header("Content-Type", content_type)
        cache_time = self.get_cache_time(self.path, self.modified, content_type)
        if cache_time > 0:
            self.set_header("Expires", datetime.datetime.utcnow() +
                            datetime.timedelta(seconds=cache_time))
            self.set_header("Cache-Control", "max-age=" + str(cache_time))
        self.set_extra_headers(self.path)
    def should_return_304(self):
        """Returns True if the headers indicate that we should return 304.
        .. versionadded:: 3.1
        """
        if self.check_etag_header():
            return True
        # Check the If-Modified-Since, and don't send the result if the
        # content has not been modified
        ims_value = self.request.headers.get("If-Modified-Since")
        if ims_value is not None:
            date_tuple = email.utils.parsedate(ims_value)
            if date_tuple is not None:
                if_since = datetime.datetime(*date_tuple[:6])
                if if_since >= self.modified:
                    return True
        return False
    @classmethod
    def get_absolute_path(cls, root, path):
        """Returns the absolute location of ``path`` relative to ``root``.
        ``root`` is the path configured for this `StaticFileHandler`
        (in most cases the ``static_path`` `Application` setting).
        This class method may be overridden in subclasses.  By default
        it returns a filesystem path, but other strings may be used
        as long as they are unique and understood by the subclass's
        overridden `get_content`.
        .. versionadded:: 3.1
        """
        abspath = os.path.abspath(os.path.join(root, path))
        return abspath
    def validate_absolute_path(self, root, absolute_path):
        """Validate and return the absolute path.
        ``root`` is the configured path for the `StaticFileHandler`,
        and ``path`` is the result of `get_absolute_path`
        This is an instance method called during request processing,
        so it may raise `HTTPError` or use methods like
        `RequestHandler.redirect` (return None after redirecting to
        halt further processing).  This is where 404 errors for missing files
        are generated.
        This method may modify the path before returning it, but note that
        any such modifications will not be understood by `make_static_url`.
        In instance methods, this method's result is available as
        ``self.absolute_path``.
        .. versionadded:: 3.1
        """
        root = os.path.abspath(root)
        # os.path.abspath strips a trailing /
        # it needs to be temporarily added back for requests to root/
        if not (absolute_path + os.path.sep).startswith(root):
            raise HTTPError(403, "%s is not in root static directory",
                            self.path)
        if (os.path.isdir(absolute_path) and
                self.default_filename is not None):
            # need to look at the request.path here for when path is empty
            # but there is some prefix to the path that was already
            # trimmed by the routing
            if not self.request.path.endswith("/"):
                self.redirect(self.request.path + "/", permanent=True)
                return
            absolute_path = os.path.join(absolute_path, self.default_filename)
        if not os.path.exists(absolute_path):
            raise HTTPError(404)
        if not os.path.isfile(absolute_path):
            raise HTTPError(403, "%s is not a file", self.path)
        return absolute_path
    @classmethod
    def get_content(cls, abspath, start=None, end=None):
        """Retrieve the content of the requested resource which is located
        at the given absolute path.
        This class method may be overridden by subclasses.  Note that its
        signature is different from other overridable class methods
        (no ``settings`` argument); this is deliberate to ensure that
        ``abspath`` is able to stand on its own as a cache key.
        This method should either return a byte string or an iterator
        of byte strings.  The latter is preferred for large files
        as it helps reduce memory fragmentation.
        .. versionadded:: 3.1
        """
        with open(abspath, "rb") as file:
            if start is not None:
                file.seek(start)
            if end is not None:
                remaining = end - (start or 0)
            else:
                remaining = None
            while True:
                chunk_size = 64 * 1024
                if remaining is not None and remaining < chunk_size:
                    chunk_size = remaining
                chunk = file.read(chunk_size)
                if chunk:
                    if remaining is not None:
                        remaining -= len(chunk)
                    yield chunk
                else:
                    if remaining is not None:
                        # EOF before the requested range was exhausted would
                        # indicate a short read; remaining must be 0 here.
                        assert remaining == 0
                    return
    @classmethod
    def get_content_version(cls, abspath):
        """Returns a version string for the resource at the given path.
        This class method may be overridden by subclasses.  The
        default implementation is a hash of the file's contents.
        .. versionadded:: 3.1
        """
        data = cls.get_content(abspath)
        hasher = hashlib.md5()
        if isinstance(data, bytes_type):
            hasher.update(data)
        else:
            for chunk in data:
                hasher.update(chunk)
        return hasher.hexdigest()
    def _stat(self):
        # Cache the os.stat result for the duration of this request.
        if not hasattr(self, '_stat_result'):
            self._stat_result = os.stat(self.absolute_path)
        return self._stat_result
    def get_content_size(self):
        """Retrieve the total size of the resource at the given path.
        This method may be overridden by subclasses.
        .. versionadded:: 3.1
        .. versionchanged:: 3.3
           This method is now always called, instead of only when
           partial results are requested.
        """
        stat_result = self._stat()
        return stat_result[stat.ST_SIZE]
    def get_modified_time(self):
        """Returns the time that ``self.absolute_path`` was last modified.
        May be overridden in subclasses.  Should return a `~datetime.datetime`
        object or None.
        .. versionadded:: 3.1
        """
        stat_result = self._stat()
        modified = datetime.datetime.utcfromtimestamp(stat_result[stat.ST_MTIME])
        return modified
    def get_content_type(self):
        """Returns the ``Content-Type`` header to be used for this request.
        .. versionadded:: 3.1
        """
        mime_type, encoding = mimetypes.guess_type(self.absolute_path)
        return mime_type
    def set_extra_headers(self, path):
        """For subclass to add extra headers to the response"""
        pass
    def get_cache_time(self, path, modified, mime_type):
        """Override to customize cache control behavior.
        Return a positive number of seconds to make the result
        cacheable for that amount of time or 0 to mark resource as
        cacheable for an unspecified amount of time (subject to
        browser heuristics).
        By default returns cache expiry of 10 years for resources requested
        with ``v`` argument.
        """
        return self.CACHE_MAX_AGE if "v" in self.request.arguments else 0
    @classmethod
    def make_static_url(cls, settings, path, include_version=True):
        """Constructs a versioned url for the given path.
        This method may be overridden in subclasses (but note that it
        is a class method rather than an instance method).  Subclasses
        are only required to implement the signature
        ``make_static_url(cls, settings, path)``; other keyword
        arguments may be passed through `~RequestHandler.static_url`
        but are not standard.
        ``settings`` is the `Application.settings` dictionary.  ``path``
        is the static path being requested.  The url returned should be
        relative to the current host.
        ``include_version`` determines whether the generated URL should
        include the query string containing the version hash of the
        file corresponding to the given ``path``.
        """
        url = settings.get('static_url_prefix', '/static/') + path
        if not include_version:
            return url
        version_hash = cls.get_version(settings, path)
        if not version_hash:
            return url
        return '%s?v=%s' % (url, version_hash)
    def parse_url_path(self, url_path):
        """Converts a static URL path into a filesystem path.
        ``url_path`` is the path component of the URL with
        ``static_url_prefix`` removed.  The return value should be
        filesystem path relative to ``static_path``.
        This is the inverse of `make_static_url`.
        """
        if os.path.sep != "/":
            url_path = url_path.replace("/", os.path.sep)
        return url_path
    @classmethod
    def get_version(cls, settings, path):
        """Generate the version string to be used in static URLs.
        ``settings`` is the `Application.settings` dictionary and ``path``
        is the relative location of the requested asset on the filesystem.
        The returned value should be a string, or ``None`` if no version
        could be determined.
        .. versionchanged:: 3.1
           This method was previously recommended for subclasses to override;
           `get_content_version` is now preferred as it allows the base
           class to handle caching of the result.
        """
        abs_path = cls.get_absolute_path(settings['static_path'], path)
        return cls._get_cached_version(abs_path)
    @classmethod
    def _get_cached_version(cls, abs_path):
        # Compute-once cache of content hashes, shared across instances.
        with cls._lock:
            hashes = cls._static_hashes
            if abs_path not in hashes:
                try:
                    hashes[abs_path] = cls.get_content_version(abs_path)
                except Exception:
                    gen_log.error("Could not open static file %r", abs_path)
                    hashes[abs_path] = None
            hsh = hashes.get(abs_path)
            if hsh:
                return hsh
        return None
class FallbackHandler(RequestHandler):
    """A `RequestHandler` that wraps another HTTP server callback.

    The fallback is a callable object that accepts an
    `~.httputil.HTTPServerRequest`, such as an `Application` or
    `tornado.wsgi.WSGIContainer`.  This is most useful to use both
    Tornado ``RequestHandlers`` and WSGI in the same server.  Typical
    usage::

        wsgi_app = tornado.wsgi.WSGIContainer(
            django.core.handlers.wsgi.WSGIHandler())
        application = tornado.web.Application([
            (r"/foo", FooHandler),
            (r".*", FallbackHandler, dict(fallback=wsgi_app),
        ])
    """
    def initialize(self, fallback):
        self.fallback = fallback

    def prepare(self):
        # Delegate the whole request to the wrapped callback, then mark this
        # handler finished so Tornado itself produces no response.
        self.fallback(self.request)
        self._finished = True
class OutputTransform(object):
    """A transform modifies the result of an HTTP request (e.g., GZip encoding)

    A new transform instance is created for every request. See the
    GZipContentEncoding example below if you want to implement a
    new Transform.
    """
    def __init__(self, request):
        pass

    def transform_first_chunk(self, status_code, headers, chunk, finishing):
        # Default transform: pass everything through unchanged.
        return status_code, headers, chunk

    def transform_chunk(self, chunk, finishing):
        return chunk


class GZipContentEncoding(OutputTransform):
    """Applies the gzip content encoding to the response.

    See http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.11

    .. versionchanged:: 3.3
        Now compresses all mime types beginning with ``text/``, instead
        of just a whitelist. (the whitelist is still used for certain
        non-text mime types).
    """
    # Whitelist of compressible mime types (in addition to any types
    # beginning with "text/").
    CONTENT_TYPES = set(["application/javascript", "application/x-javascript",
                         "application/xml", "application/atom+xml",
                         "application/json", "application/xhtml+xml"])
    MIN_LENGTH = 5

    def __init__(self, request):
        self._gzipping = "gzip" in request.headers.get("Accept-Encoding", "")

    def _compressible_type(self, ctype):
        return ctype.startswith('text/') or ctype in self.CONTENT_TYPES

    def transform_first_chunk(self, status_code, headers, chunk, finishing):
        # BUG FIX: header values are native strings.  The previous code
        # appended the *bytes* literal b', Accept-Encoding', which raises
        # TypeError on Python 3 (str += bytes); use str values instead.
        if 'Vary' in headers:
            headers['Vary'] += ', Accept-Encoding'
        else:
            headers['Vary'] = 'Accept-Encoding'
        if self._gzipping:
            ctype = _unicode(headers.get("Content-Type", "")).split(";")[0]
            # Only compress when the type is compressible, the payload is big
            # enough to be worth it, and nothing upstream already encoded or
            # fixed the length of the body.
            self._gzipping = self._compressible_type(ctype) and \
                (not finishing or len(chunk) >= self.MIN_LENGTH) and \
                (finishing or "Content-Length" not in headers) and \
                ("Content-Encoding" not in headers)
        if self._gzipping:
            headers["Content-Encoding"] = "gzip"
            self._gzip_value = BytesIO()
            self._gzip_file = gzip.GzipFile(mode="w", fileobj=self._gzip_value)
            chunk = self.transform_chunk(chunk, finishing)
            if "Content-Length" in headers:
                headers["Content-Length"] = str(len(chunk))
        return status_code, headers, chunk

    def transform_chunk(self, chunk, finishing):
        if self._gzipping:
            self._gzip_file.write(chunk)
            if finishing:
                self._gzip_file.close()
            else:
                self._gzip_file.flush()
            chunk = self._gzip_value.getvalue()
            # Reset the buffer so each call returns only newly compressed
            # bytes rather than the whole stream so far.
            self._gzip_value.truncate(0)
            self._gzip_value.seek(0)
        return chunk
def authenticated(method):
    """Decorate methods with this to require that the user be logged in.

    If the user is not logged in, they will be redirected to the configured
    `login url <RequestHandler.get_login_url>`.

    If you configure a login url with a query parameter, Tornado will
    assume you know what you're doing and use it as-is.  If not, it
    will add a `next` parameter so the login page knows where to send
    you once you're logged in.
    """
    @functools.wraps(method)
    def wrapper(self, *args, **kwargs):
        # Logged-in users go straight through.
        if self.current_user:
            return method(self, *args, **kwargs)
        # Anonymous non-idempotent requests are rejected outright.
        if self.request.method not in ("GET", "HEAD"):
            raise HTTPError(403)
        url = self.get_login_url()
        if "?" not in url:
            # An absolute login url gets an absolute ``next``; a relative
            # one gets the request uri.
            if urlparse.urlsplit(url).scheme:
                next_url = self.request.full_url()
            else:
                next_url = self.request.uri
            url += "?" + urlencode(dict(next=next_url))
        self.redirect(url)
        return
    return wrapper
class UIModule(object):
    """A re-usable, modular UI unit on a page.

    UI modules often execute additional queries, and they can include
    additional CSS and JavaScript that will be included in the output
    page, which is automatically inserted on page render.
    """
    def __init__(self, handler):
        self.handler = handler
        self.request = handler.request
        self.ui = handler.ui
        self.locale = handler.locale

    @property
    def current_user(self):
        # Delegates to the owning handler's authenticated user.
        return self.handler.current_user

    def render(self, *args, **kwargs):
        """Overridden in subclasses to return this module's output."""
        raise NotImplementedError()

    # The hooks below all default to "no resources"; subclasses override
    # whichever ones they need.
    def embedded_javascript(self):
        """Returns a JavaScript string that will be embedded in the page."""
        return None

    def javascript_files(self):
        """Returns a list of JavaScript files required by this module."""
        return None

    def embedded_css(self):
        """Returns a CSS string that will be embedded in the page."""
        return None

    def css_files(self):
        """Returns a list of CSS files required by this module."""
        return None

    def html_head(self):
        """Returns a CSS string that will be put in the <head/> element"""
        return None

    def html_body(self):
        """Returns an HTML string that will be put in the <body/> element"""
        return None

    def render_string(self, path, **kwargs):
        """Renders a template and returns it as a string."""
        return self.handler.render_string(path, **kwargs)
class _linkify(UIModule):
    """UI module that renders *text* with URLs converted into HTML links."""
    def render(self, text, **kwargs):
        # Thin delegation to the escape helper; kwargs pass straight through.
        return escape.linkify(text, **kwargs)
class _xsrf_form_html(UIModule):
    """UI module that emits the hidden XSRF-token ``<input>`` for forms."""
    def render(self):
        # The handler owns the token; just ask it for the form field markup.
        return self.handler.xsrf_form_html()
class TemplateModule(UIModule):
    """UIModule that simply renders the given template.

    {% module Template("foo.html") %} is similar to {% include "foo.html" %},
    but the module version gets its own namespace (with kwargs passed to
    Template()) instead of inheriting the outer template's namespace.

    Templates rendered through this module also get access to UIModule's
    automatic javascript/css features.  Simply call set_resources
    inside the template and give it keyword arguments corresponding to
    the methods on UIModule: {{ set_resources(js_files=static_url("my.js")) }}
    Note that these resources are output once per template file, not once
    per instantiation of the template, so they must not depend on
    any arguments to the template.
    """
    def __init__(self, handler):
        super(TemplateModule, self).__init__(handler)
        # Resources are tracked both in insertion order (list) and keyed
        # by template path (dict) so duplicates can be detected.
        self._resource_list = []
        self._resource_dict = {}

    def render(self, path, **kwargs):
        def set_resources(**resources):
            if path in self._resource_dict:
                # A template's resources must be identical on every render.
                if self._resource_dict[path] != resources:
                    raise ValueError("set_resources called with different "
                                     "resources for the same template")
            else:
                self._resource_list.append(resources)
                self._resource_dict[path] = resources
            return ""
        return self.render_string(path, set_resources=set_resources,
                                  **kwargs)

    def _get_resources(self, key):
        # Yield the requested resource from each recorded template, in order.
        for entry in self._resource_list:
            if key in entry:
                yield entry[key]

    def _flatten_files(self, key):
        # A resource value may be a single path or a sequence of paths.
        files = []
        for item in self._get_resources(key):
            if isinstance(item, (unicode_type, bytes_type)):
                files.append(item)
            else:
                files.extend(item)
        return files

    def embedded_javascript(self):
        return "\n".join(self._get_resources("embedded_javascript"))

    def javascript_files(self):
        return self._flatten_files("javascript_files")

    def embedded_css(self):
        return "\n".join(self._get_resources("embedded_css"))

    def css_files(self):
        return self._flatten_files("css_files")

    def html_head(self):
        return "".join(self._get_resources("html_head"))

    def html_body(self):
        return "".join(self._get_resources("html_body"))
class _UIModuleNamespace(object):
"""Lazy namespace which creates UIModule proxies bound to a handler."""
def __init__(self, handler, ui_modules):
self.handler = handler
self.ui_modules = ui_modules
def __getitem__(self, key):
return self.handler._ui_module(key, self.ui_modules[key])
def __getattr__(self, key):
try:
return self[key]
except KeyError as e:
raise AttributeError(str(e))
class URLSpec(object):
    """Specifies mappings between URLs and handlers."""
    def __init__(self, pattern, handler, kwargs=None, name=None):
        """Parameters:

        * ``pattern``: Regular expression to be matched.  Any groups
          in the regex will be passed in to the handler's get/post/etc
          methods as arguments.

        * ``handler``: `RequestHandler` subclass to be invoked (or its
          fully qualified name as a string, imported on demand).

        * ``kwargs`` (optional): A dictionary of additional arguments
          to be passed to the handler's constructor.

        * ``name`` (optional): A name for this handler.  Used by
          `Application.reverse_url`.
        """
        if not pattern.endswith('$'):
            pattern += '$'
        self.regex = re.compile(pattern)
        assert len(self.regex.groupindex) in (0, self.regex.groups), \
            ("groups in url regexes must either be all named or all "
             "positional: %r" % self.regex.pattern)
        if isinstance(handler, str):
            # import the Module and instantiate the class
            # Must be a fully qualified name (module.ClassName)
            handler = import_object(handler)
        self.handler_class = handler
        self.kwargs = kwargs or {}
        self.name = name
        self._path, self._group_count = self._find_groups()

    def __repr__(self):
        return '%s(%r, %s, kwargs=%r, name=%r)' % \
            (self.__class__.__name__, self.regex.pattern,
             self.handler_class, self.kwargs, self.name)

    def _find_groups(self):
        """Returns a tuple (reverse string, group count) for a url.

        For example: Given the url pattern /([0-9]{4})/([a-z-]+)/, this method
        would return ('/%s/%s/', 2).
        """
        pattern = self.regex.pattern
        if pattern.startswith('^'):
            pattern = pattern[1:]
        if pattern.endswith('$'):
            pattern = pattern[:-1]
        if self.regex.groups != pattern.count('('):
            # The pattern is too complicated for our simplistic matching,
            # so we can't support reversing it.
            return (None, None)
        pieces = []
        for fragment in pattern.split('('):
            if ')' in fragment:
                # Replace the group body with a %s placeholder, keeping
                # whatever literal text follows the closing paren.
                # (``')' in fragment`` already guarantees index() succeeds,
                # so the old redundant ``paren_loc >= 0`` re-check is gone.)
                paren_loc = fragment.index(')')
                pieces.append('%s' + fragment[paren_loc + 1:])
            else:
                pieces.append(fragment)
        return (''.join(pieces), self.regex.groups)

    def reverse(self, *args):
        """Reconstructs a url for this spec from positional group values."""
        assert self._path is not None, \
            "Cannot reverse url regex " + self.regex.pattern
        assert len(args) == self._group_count, "required number of arguments "\
            "not found"
        if not len(args):
            return self._path
        converted_args = []
        for a in args:
            if not isinstance(a, (unicode_type, bytes_type)):
                a = str(a)
            converted_args.append(escape.url_escape(utf8(a), plus=False))
        return self._path % tuple(converted_args)
url = URLSpec  # short alias commonly used in Application route lists
if hasattr(hmac, 'compare_digest'):  # python 3.3
    _time_independent_equals = hmac.compare_digest
else:
    def _time_independent_equals(a, b):
        # Constant-time equality check used for cookie signatures: the loop
        # always scans the full input instead of returning at the first
        # mismatched byte, so comparison time leaks nothing about where the
        # inputs diverge.
        if len(a) != len(b):
            return False
        result = 0
        if isinstance(a[0], int):  # python3 byte strings
            for x, y in zip(a, b):
                result |= x ^ y
        else:  # python2
            for x, y in zip(a, b):
                result |= ord(x) ^ ord(y)
        return result == 0
def create_signed_value(secret, name, value, version=None, clock=None):
    """Signs and timestamps ``value`` so it can be stored in a cookie.

    ``version`` selects the on-the-wire format (1 or 2); ``clock`` may be
    supplied for testing and defaults to `time.time`.
    """
    if version is None:
        version = DEFAULT_SIGNED_VALUE_VERSION
    if clock is None:
        clock = time.time
    timestamp = utf8(str(int(clock())))
    value = base64.b64encode(utf8(value))
    if version == 1:
        signature = _create_signature_v1(secret, name, value, timestamp)
        return b"|".join([value, timestamp, signature])
    if version == 2:
        # The v2 format consists of a version number and a series of
        # length-prefixed fields "%d:%s", the last of which is a
        # signature, all separated by pipes.  All numbers are in
        # decimal format with no leading zeros.  The signature is an
        # HMAC-SHA256 of the whole string up to that point, including
        # the final pipe.
        #
        # The fields are:
        # - format version (i.e. 2; no length prefix)
        # - key version (currently 0; reserved for future key rotation features)
        # - timestamp (integer seconds since epoch)
        # - name (not encoded; assumed to be ~alphanumeric)
        # - value (base64-encoded)
        # - signature (hex-encoded; no length prefix)
        def format_field(s):
            return utf8("%d:" % len(s)) + utf8(s)
        to_sign = b"|".join([
            b"2|1:0",
            format_field(timestamp),
            format_field(name),
            format_field(value),
            b''])
        signature = _create_signature_v2(secret, to_sign)
        return to_sign + signature
    raise ValueError("Unsupported version %d" % version)
# A leading version number in decimal with no leading zeros, followed by a pipe.
_signed_value_version_re = re.compile(br"^([1-9][0-9]*)\|(.*)$")


def decode_signed_value(secret, name, value, max_age_days=31, clock=None, min_version=None):
    """Verifies a value produced by `create_signed_value` and returns the
    original payload, or ``None`` if the signature, age, or version check
    fails."""
    if clock is None:
        clock = time.time
    if min_version is None:
        min_version = DEFAULT_SIGNED_VALUE_MIN_VERSION
    if min_version > 2:
        raise ValueError("Unsupported min_version %d" % min_version)
    if not value:
        return None
    # Figure out what version this is.  Version 1 did not include an
    # explicit version field and started with arbitrary base64 data,
    # which makes this tricky.
    value = utf8(value)
    version = 1
    m = _signed_value_version_re.match(value)
    if m is not None:
        try:
            candidate = int(m.group(1))
        except ValueError:
            candidate = 1
        # Certain payloads from the version-less v1 format may be parsed
        # as valid integers.  Due to base64 padding restrictions, this can
        # only happen for numbers whose length is a multiple of 4, so we
        # can treat all numbers up to 999 as versions, and for the rest we
        # fall back to v1 format.
        if candidate <= 999:
            version = candidate
    if version < min_version:
        return None
    if version == 1:
        return _decode_signed_value_v1(secret, name, value, max_age_days, clock)
    if version == 2:
        return _decode_signed_value_v2(secret, name, value, max_age_days, clock)
    return None
def _decode_signed_value_v1(secret, name, value, max_age_days, clock):
    """Validate a v1 signed value (``payload|timestamp|signature``) and
    return the base64-decoded payload, or None on any validation failure.

    The checks are order-sensitive: the signature is verified first (in
    constant time), then the timestamp is range- and format-checked.
    """
    parts = utf8(value).split(b"|")
    if len(parts) != 3:
        return None
    # Recompute the HMAC over (name, payload, timestamp) and compare with a
    # timing-independent equality check to avoid timing side channels.
    signature = _create_signature_v1(secret, name, parts[0], parts[1])
    if not _time_independent_equals(parts[2], signature):
        gen_log.warning("Invalid cookie signature %r", value)
        return None
    timestamp = int(parts[1])
    if timestamp < clock() - max_age_days * 86400:
        gen_log.warning("Expired cookie %r", value)
        return None
    if timestamp > clock() + 31 * 86400:
        # _cookie_signature does not hash a delimiter between the
        # parts of the cookie, so an attacker could transfer trailing
        # digits from the payload to the timestamp without altering the
        # signature. For backwards compatibility, sanity-check timestamp
        # here instead of modifying _cookie_signature.
        gen_log.warning("Cookie timestamp in future; possible tampering %r", value)
        return None
    if parts[1].startswith(b"0"):
        # A leading zero is another way to shift digits between fields
        # without changing the signed bytes (see comment above).
        gen_log.warning("Tampered cookie %r", value)
        return None
    try:
        return base64.b64decode(parts[0])
    except Exception:
        # Signature was valid but the payload is not valid base64.
        return None
def _decode_signed_value_v2(secret, name, value, max_age_days, clock):
    """Validate a v2 signed value and return the base64-decoded payload.

    The v2 format is a series of length-prefixed, pipe-terminated fields
    (key version, timestamp, name, value) followed by a hex HMAC-SHA256
    signature over everything before it.  Returns None on any parse,
    signature, name, or expiry failure.
    """
    def _consume_field(s):
        # Parse one "<length>:<bytes>|" field; returns (field, remainder).
        length, _, rest = s.partition(b':')
        n = int(length)
        field_value = rest[:n]
        # In python 3, indexing bytes returns small integers; we must
        # use a slice to get a byte string as in python 2.
        if rest[n:n + 1] != b'|':
            raise ValueError("malformed v2 signed value field")
        rest = rest[n + 1:]
        return field_value, rest
    rest = value[2:]  # remove version number
    try:
        key_version, rest = _consume_field(rest)
        timestamp, rest = _consume_field(rest)
        name_field, rest = _consume_field(rest)
        value_field, rest = _consume_field(rest)
    except ValueError:
        return None
    # Whatever remains after the four fields is the hex signature; the
    # signed string is everything before it (including the trailing pipe).
    passed_sig = rest
    signed_string = value[:-len(passed_sig)]
    expected_sig = _create_signature_v2(secret, signed_string)
    if not _time_independent_equals(passed_sig, expected_sig):
        return None
    if name_field != utf8(name):
        return None
    timestamp = int(timestamp)
    if timestamp < clock() - max_age_days * 86400:
        # The signature has expired.
        return None
    try:
        return base64.b64decode(value_field)
    except Exception:
        return None
def _create_signature_v1(secret, *parts):
    """Return the hex-encoded HMAC-SHA1 of *parts* (v1 signed-value format).

    Each part is UTF-8 encoded before being fed to the MAC.  Note that no
    delimiter is hashed between parts; v1 consumers compensate for this
    (see _decode_signed_value_v1).  SHA-1 is kept only for backward
    compatibility with existing v1 values.
    """
    # Renamed from ``hash`` to avoid shadowing the builtin hash().
    mac = hmac.new(utf8(secret), digestmod=hashlib.sha1)
    for part in parts:
        mac.update(utf8(part))
    return utf8(mac.hexdigest())
def _create_signature_v2(secret, s):
    """Return the hex-encoded HMAC-SHA256 of *s* (v2 signed-value format)."""
    # Renamed from ``hash`` to avoid shadowing the builtin hash().
    mac = hmac.new(utf8(secret), digestmod=hashlib.sha256)
    mac.update(utf8(s))
    return utf8(mac.hexdigest())
def _unquote_or_none(s):
    """None-safe wrapper around url_unescape to handle unmatched optional
    groups correctly.

    Note that args are passed as bytes so the handler can decide what
    encoding to use.
    """
    return s if s is None else escape.url_unescape(s, encoding=None, plus=False)
| gpl-3.0 | 4,388,952,743,047,808,500 | 38.62533 | 109 | 0.599015 | false |
kierangraham/dotfiles | Sublime/Packages/SublimeCodeIntel/libs/inflector/Rules/Spanish.py | 8 | 7629 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2006 Bermi Ferrer Martinez
# info at bermi dot org
# See the end of this file for the free software, open source license
# (BSD-style).
import re
from .Base import Base
class Spanish (Base):
    """
    Inflector for pluralize and singularize Spanish nouns.

    Fixes over the previous revision:
      * irregular-noun lookups were built as ``"(?i)(u" + word + ")$"`` (a
        mangled ``u"..."`` literal), so irregular forms never matched;
      * ``singularize`` applied the singular->plural irregular mapping in
        the pluralizing direction (copy-paste from ``pluralize``);
      * the ``"(ces)$/i"`` rule carried a PCRE-style ``/i`` suffix and could
        never match (luces->luz now works);
      * a duplicate ``"régimen"`` key was removed from the irregular table.
    """

    def pluralize(self, word):
        """Pluralizes Spanish nouns.

        Uncountable and irregular nouns are handled first; everything else
        goes through an ordered regex rule list (first match wins).
        """
        rules = [
            ["(?i)([aeiou])x$", "\\1x"],
            # This could fail if the word is oxytone.
            ["(?i)([áéíóú])([ns])$", "|1\\2es"],
            ["(?i)(^[bcdfghjklmnñpqrstvwxyz]*)an$",
             "\\1anes"],  # clan->clanes
            ["(?i)([áéíóú])s$", "|1ses"],
            ["(?i)(^[bcdfghjklmnñpqrstvwxyz]*)([aeiou])([ns])$",
             "\\1\\2\\3es"],  # tren->trenes
            ["(?i)([aeiouáéó])$", "\\1s"],
            # casa->casas, padre->padres, papá->papás
            ["(?i)([aeiou])s$", "\\1s"],  # atlas->atlas, virus->virus, etc.
            ["(?i)([éí])(s)$", "|1\\2es"],  # inglés->ingleses
            ["(?i)z$", "ces"],  # luz->luces
            ["(?i)([íú])$", "\\1es"],  # ceutí->ceutíes, tabú->tabúes
            ["(?i)(ng|[wckgtp])$", "\\1s"],
            # Anglicismos como puenting, frac, crack, show (En que casos
            # podría fallar esto?)
            ["(?i)$", "es"]  # ELSE +es (v.g. árbol->árboles)
        ]
        # These words have no singular form: you cannot say "una gafa" or
        # "un vívere" -- they are plural-only, so they pass through as-is.
        uncountable_words = [
            "tijeras", "gafas", "vacaciones", "víveres", "déficit"]
        # Mapping of irregular singular -> plural forms.
        irregular_words = {
            "país": "países",
            "champú": "champús",
            "jersey": "jerséis",
            "carácter": "caracteres",
            "espécimen": "especímenes",
            "menú": "menús",
            "régimen": "regímenes",
            "curriculum": "currículos",
            "ultimátum": "ultimatos",
            "memorándum": "memorandos",
            "referéndum": "referendos"
        }
        lower_cased_word = word.lower()
        for uncountable_word in uncountable_words:
            if lower_cased_word.endswith(uncountable_word):
                return word
        for singular_form, plural_form in irregular_words.items():
            match = re.search("(?i)(" + singular_form + ")$", word)
            if match:
                # Keep the matched word's first letter so capitalisation is
                # preserved (País -> Países).
                return re.sub("(?i)" + singular_form + "$",
                              match.group(1)[0] + plural_form[1:], word)
        for pattern, replacement in rules:
            match = re.search(pattern, word, re.IGNORECASE)
            if match:
                groups = match.groups()
                # "|k" markers substitute group k with its accent stripped.
                if re.match(r"\|", replacement):
                    for k in range(1, len(groups)):
                        replacement = replacement.replace(
                            "|" + str(k),
                            self.string_replace(
                                groups[k - 1], "ÁÉÍÓÚáéíóú", "AEIOUaeiou"))
                result = re.sub(pattern, replacement, word)
                # Accentuate nouns that become esdrújulos when pluralized,
                # e.g. esmóquines, jóvenes...
                match = re.search("(?i)([aeiou]).{1,3}([aeiou])nes$", result)
                if match and len(match.groups()) > 1 and not re.search("(?i)[áéíóú]", word):
                    result = result.replace(match.group(0), self.string_replace(
                        match.group(1), "AEIOUaeiou", "ÁÉÍÓÚáéíóú") + match.group(0)[1:])
                return result
        return word

    def singularize(self, word):
        """Singularizes Spanish nouns."""
        rules = [
            ["(?i)^([bcdfghjklmnñpqrstvwxyz]*)([aeiou])([ns])es$",
             "\\1\\2\\3"],
            ["(?i)([aeiou])([ns])es$", "~1\\2"],
            ["(?i)oides$", "oide"],  # androides->androide
            ["(?i)ces$", "z"],  # luces->luz, veces->vez
            ["(?i)(sis|tis|xis)+$", "\\1"],  # crisis, apendicitis, praxis
            ["(?i)(é)s$", "\\1"],  # bebés->bebé
            ["(?i)([^e])s$", "\\1"],  # casas->casa
            ["(?i)([bcdfghjklmnñprstvwxyz]{2,}e)s$", "\\1"],  # cofres->cofre
            ["(?i)([ghñpv]e)s$", "\\1"],  # 24-01 llaves->llave
            ["(?i)es$", ""]  # ELSE remove _es_ monitores->monitor
        ]
        uncountable_words = [
            "paraguas", "tijeras", "gafas", "vacaciones", "víveres", "lunes",
            "martes", "miércoles", "jueves", "viernes", "cumpleaños", "virus", "atlas", "sms"]
        # Mapping of irregular singular -> plural forms (looked up by the
        # plural side here).
        irregular_words = {
            "jersey": "jerséis",
            "espécimen": "especímenes",
            "carácter": "caracteres",
            "régimen": "regímenes",
            "menú": "menús",
            "curriculum": "currículos",
            "ultimátum": "ultimatos",
            "memorándum": "memorandos",
            "referéndum": "referendos",
            "sándwich": "sándwiches"
        }
        lower_cased_word = word.lower()
        for uncountable_word in uncountable_words:
            if lower_cased_word.endswith(uncountable_word):
                return word
        for singular_form, plural_form in irregular_words.items():
            # Match the irregular *plural* at the end and replace it with
            # the singular, preserving the original first letter's case.
            match = re.search("(?i)(" + plural_form + ")$", word)
            if match:
                return re.sub("(?i)" + plural_form + "$",
                              match.group(1)[0] + singular_form[1:], word)
        for pattern, replacement in rules:
            match = re.search(pattern, word, re.IGNORECASE)
            if match:
                groups = match.groups()
                # "~k" markers substitute group k with its vowel accented.
                if re.match("~", replacement):
                    for k in range(1, len(groups)):
                        replacement = replacement.replace(
                            "~" + str(k),
                            self.string_replace(
                                groups[k - 1], "AEIOUaeiou", "ÁÉÍÓÚáéíóú"))
                result = re.sub(pattern, replacement, word)
                # Workaround for double accents: if the original word had
                # none, strip the ones the rules may have introduced.
                match = re.search("(?i)([áéíóú]).*([áéíóú])", result)
                if match and len(match.groups()) > 1 and not re.search("(?i)[áéíóú]", word):
                    result = self.string_replace(
                        result, "ÁÉÍÓÚáéíóú", "AEIOUaeiou")
                return result
        return word
# Copyright (c) 2006 Bermi Ferrer Martinez
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software to deal in this software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of this software, and to permit
# persons to whom this software is furnished to do so, subject to the following
# condition:
#
# THIS SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THIS SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THIS SOFTWARE.
| bsd-2-clause | 6,912,629,925,971,030,000 | 40.392265 | 118 | 0.520155 | false |
David-Amaro/bank-payment | account_banking_pain_base/models/banking_export_pain.py | 11 | 19673 | # -*- encoding: utf-8 -*-
##############################################################################
#
# PAIN Base module for Odoo
# Copyright (C) 2013-2015 Akretion (http://www.akretion.com)
# @author: Alexis de Lattre <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, api, _
from openerp.exceptions import Warning
from openerp.tools.safe_eval import safe_eval
from datetime import datetime
from lxml import etree
from openerp import tools
import logging
import base64
try:
from unidecode import unidecode
except ImportError:
unidecode = None
logger = logging.getLogger(__name__)
class BankingExportPain(models.AbstractModel):
    """Abstract helper model used to build SEPA PAIN XML files (credit
    transfers and direct debits) from payment orders.

    Concrete export wizards inherit from this model; it assumes the
    presence of fields such as ``payment_order_ids`` and ``batch_booking``
    on the inheriting model.  The ``_x_y``-numbered local variables below
    presumably mirror the element numbering of the SEPA implementation
    guidelines -- TODO confirm against the relevant guideline version.
    """
    _name = 'banking.export.pain'

    @api.model
    def _validate_iban(self, iban):
        """if IBAN is valid, returns IBAN (with spaces stripped)
        if IBAN is NOT valid, raises an error message"""
        if self.env['res.partner.bank'].is_iban_valid(iban):
            return iban.replace(' ', '')
        else:
            raise Warning(_("This IBAN is not valid : %s") % iban)

    @api.model
    def _prepare_field(self, field_name, field_value, eval_ctx,
                       max_size=0, gen_args=None):
        """This function is designed to be inherited !

        Evaluate *field_value* (a python expression string) against
        *eval_ctx*, optionally transliterate it to ASCII, check it is a
        non-empty string and truncate it to *max_size* characters.
        Raises Warning on evaluation failure or empty/non-string results.
        """
        if gen_args is None:
            gen_args = {}
        assert isinstance(eval_ctx, dict), 'eval_ctx must contain a dict'
        try:
            value = safe_eval(field_value, eval_ctx)
            # SEPA uses XML ; XML = UTF-8 ; UTF-8 = support for all characters
            # But we are dealing with banks...
            # and many banks don't want non-ASCCI characters !
            # cf section 1.4 "Character set" of the SEPA Credit Transfer
            # Scheme Customer-to-bank guidelines
            if gen_args.get('convert_to_ascii'):
                value = unidecode(value)
                unallowed_ascii_chars = [
                    '"', '#', '$', '%', '&', '*', ';', '<', '>', '=', '@',
                    '[', ']', '^', '_', '`', '{', '}', '|', '~', '\\', '!']
                for unallowed_ascii_char in unallowed_ascii_chars:
                    value = value.replace(unallowed_ascii_char, '-')
        except:
            # NOTE(review): bare except hides the real evaluation error;
            # consider catching Exception and chaining the message.
            line = eval_ctx.get('line')
            if line:
                raise Warning(
                    _("Cannot compute the '%s' of the Payment Line with "
                      "reference '%s'.")
                    % (field_name, line.name))
            else:
                raise Warning(
                    _("Cannot compute the '%s'.") % field_name)
        if not isinstance(value, (str, unicode)):
            raise Warning(
                _("The type of the field '%s' is %s. It should be a string "
                  "or unicode.")
                % (field_name, type(value)))
        if not value:
            raise Warning(
                _("The '%s' is empty or 0. It should have a non-null value.")
                % field_name)
        if max_size and len(value) > max_size:
            value = value[0:max_size]
        return value

    @api.model
    def _validate_xml(self, xml_string, gen_args):
        """Validate *xml_string* against the official PAIN XSD given in
        gen_args['pain_xsd_file']; raise Warning (after logging the full
        document and error) if it does not conform."""
        xsd_etree_obj = etree.parse(
            tools.file_open(gen_args['pain_xsd_file']))
        official_pain_schema = etree.XMLSchema(xsd_etree_obj)

        try:
            root_to_validate = etree.fromstring(xml_string)
            official_pain_schema.assertValid(root_to_validate)
        except Exception, e:
            logger.warning(
                "The XML file is invalid against the XML Schema Definition")
            logger.warning(xml_string)
            logger.warning(e)
            raise Warning(
                _("The generated XML file is not valid against the official "
                  "XML Schema Definition. The generated XML file and the "
                  "full error have been written in the server logs. Here "
                  "is the error, which may give you an idea on the cause "
                  "of the problem : %s")
                % unicode(e))
        return True

    @api.multi
    def finalize_sepa_file_creation(
            self, xml_root, total_amount, transactions_count, gen_args):
        """Serialize *xml_root*, validate it against the XSD, store the
        resulting file on the wizard record and return the action that
        displays it."""
        xml_string = etree.tostring(
            xml_root, pretty_print=True, encoding='UTF-8',
            xml_declaration=True)
        logger.debug(
            "Generated SEPA XML file in format %s below"
            % gen_args['pain_flavor'])
        logger.debug(xml_string)
        self._validate_xml(xml_string, gen_args)

        # Build the filename from the (slash-sanitized) order references.
        order_ref = []
        for order in self.payment_order_ids:
            if order.reference:
                order_ref.append(order.reference.replace('/', '-'))

        filename = '%s%s.xml' % (gen_args['file_prefix'], '-'.join(order_ref))
        self.write({
            'nb_transactions': transactions_count,
            'total_amount': total_amount,
            'filename': filename,
            'file': base64.encodestring(xml_string),
            'state': 'finish',
            })
        action = {
            'name': _('SEPA File'),
            'type': 'ir.actions.act_window',
            'view_type': 'form',
            'view_mode': 'form,tree',
            'res_model': self._name,
            'res_id': self.ids[0],
            'target': 'new',
            }
        return action

    @api.model
    def generate_group_header_block(self, parent_node, gen_args):
        """Append the <GrpHdr> block to *parent_node*.

        Returns (group_header, nb_of_transactions_node, control_sum_node);
        the two counter nodes are filled in later by the caller once all
        transactions are known.
        """
        group_header_1_0 = etree.SubElement(parent_node, 'GrpHdr')
        message_identification_1_1 = etree.SubElement(
            group_header_1_0, 'MsgId')
        message_identification_1_1.text = self._prepare_field(
            'Message Identification',
            'self.payment_order_ids[0].reference',
            {'self': self}, 35, gen_args=gen_args)
        creation_date_time_1_2 = etree.SubElement(group_header_1_0, 'CreDtTm')
        creation_date_time_1_2.text = datetime.strftime(
            datetime.today(), '%Y-%m-%dT%H:%M:%S')
        if gen_args.get('pain_flavor') == 'pain.001.001.02':
            # batch_booking is in "Group header" with pain.001.001.02
            # and in "Payment info" in pain.001.001.03/04
            batch_booking = etree.SubElement(group_header_1_0, 'BtchBookg')
            batch_booking.text = unicode(self.batch_booking).lower()
        nb_of_transactions_1_6 = etree.SubElement(
            group_header_1_0, 'NbOfTxs')
        control_sum_1_7 = etree.SubElement(group_header_1_0, 'CtrlSum')
        # Grpg removed in pain.001.001.03
        if gen_args.get('pain_flavor') == 'pain.001.001.02':
            grouping = etree.SubElement(group_header_1_0, 'Grpg')
            grouping.text = 'GRPD'
        self.generate_initiating_party_block(group_header_1_0, gen_args)
        return group_header_1_0, nb_of_transactions_1_6, control_sum_1_7

    @api.model
    def generate_start_payment_info_block(
            self, parent_node, payment_info_ident,
            priority, local_instrument, sequence_type, requested_date,
            eval_ctx, gen_args):
        """Append a <PmtInf> block (payment method, batch booking, service
        level, local instrument, sequence type, requested execution or
        collection date) to *parent_node*.

        Returns (payment_info, nb_of_transactions_node, control_sum_node);
        the counter nodes are False for pain.001.001.02, which only has
        them at group-header level.
        """
        payment_info_2_0 = etree.SubElement(parent_node, 'PmtInf')
        payment_info_identification_2_1 = etree.SubElement(
            payment_info_2_0, 'PmtInfId')
        payment_info_identification_2_1.text = self._prepare_field(
            'Payment Information Identification',
            payment_info_ident, eval_ctx, 35, gen_args=gen_args)
        payment_method_2_2 = etree.SubElement(payment_info_2_0, 'PmtMtd')
        payment_method_2_2.text = gen_args['payment_method']
        nb_of_transactions_2_4 = False
        control_sum_2_5 = False
        if gen_args.get('pain_flavor') != 'pain.001.001.02':
            batch_booking_2_3 = etree.SubElement(payment_info_2_0, 'BtchBookg')
            batch_booking_2_3.text = unicode(self.batch_booking).lower()
        # The "SEPA Customer-to-bank
        # Implementation guidelines" for SCT and SDD says that control sum
        # and nb_of_transactions should be present
        # at both "group header" level and "payment info" level
            nb_of_transactions_2_4 = etree.SubElement(
                payment_info_2_0, 'NbOfTxs')
            control_sum_2_5 = etree.SubElement(payment_info_2_0, 'CtrlSum')
        payment_type_info_2_6 = etree.SubElement(
            payment_info_2_0, 'PmtTpInf')
        if priority and gen_args['payment_method'] != 'DD':
            instruction_priority_2_7 = etree.SubElement(
                payment_type_info_2_6, 'InstrPrty')
            instruction_priority_2_7.text = priority
        service_level_2_8 = etree.SubElement(
            payment_type_info_2_6, 'SvcLvl')
        service_level_code_2_9 = etree.SubElement(service_level_2_8, 'Cd')
        service_level_code_2_9.text = 'SEPA'
        if local_instrument:
            local_instrument_2_11 = etree.SubElement(
                payment_type_info_2_6, 'LclInstrm')
            local_instr_code_2_12 = etree.SubElement(
                local_instrument_2_11, 'Cd')
            local_instr_code_2_12.text = local_instrument
        if sequence_type:
            sequence_type_2_14 = etree.SubElement(
                payment_type_info_2_6, 'SeqTp')
            sequence_type_2_14.text = sequence_type
        # Direct debits carry a requested collection date; credit
        # transfers a requested execution date.
        if gen_args['payment_method'] == 'DD':
            request_date_tag = 'ReqdColltnDt'
        else:
            request_date_tag = 'ReqdExctnDt'
        requested_date_2_17 = etree.SubElement(
            payment_info_2_0, request_date_tag)
        requested_date_2_17.text = requested_date
        return payment_info_2_0, nb_of_transactions_2_4, control_sum_2_5

    @api.model
    def _must_have_initiating_party(self, gen_args):
        '''This method is designed to be inherited in localization modules for
        countries in which the initiating party is required'''
        return False

    @api.model
    def generate_initiating_party_block(self, parent_node, gen_args):
        """Append the <InitgPty> block (company name plus optional
        identifier/issuer pair) to *parent_node*.

        Raises Warning when the identifier/issuer pair is incomplete and
        the localization requires it (_must_have_initiating_party).
        """
        my_company_name = self._prepare_field(
            'Company Name',
            'self.payment_order_ids[0].mode.bank_id.partner_id.name',
            {'self': self}, gen_args.get('name_maxsize'), gen_args=gen_args)
        initiating_party_1_8 = etree.SubElement(parent_node, 'InitgPty')
        initiating_party_name = etree.SubElement(initiating_party_1_8, 'Nm')
        initiating_party_name.text = my_company_name
        initiating_party_identifier =\
            self.payment_order_ids[0].company_id.\
            initiating_party_identifier
        initiating_party_issuer =\
            self.payment_order_ids[0].company_id.\
            initiating_party_issuer
        if initiating_party_identifier and initiating_party_issuer:
            iniparty_id = etree.SubElement(initiating_party_1_8, 'Id')
            iniparty_org_id = etree.SubElement(iniparty_id, 'OrgId')
            iniparty_org_other = etree.SubElement(iniparty_org_id, 'Othr')
            iniparty_org_other_id = etree.SubElement(iniparty_org_other, 'Id')
            iniparty_org_other_id.text = initiating_party_identifier
            iniparty_org_other_issuer = etree.SubElement(
                iniparty_org_other, 'Issr')
            iniparty_org_other_issuer.text = initiating_party_issuer
        elif self._must_have_initiating_party(gen_args):
            raise Warning(
                _("Missing 'Initiating Party Issuer' and/or "
                  "'Initiating Party Identifier' for the company '%s'. "
                  "Both fields must have a value.")
                % self.payment_order_ids[0].company_id.name)
        return True

    @api.model
    def generate_party_agent(
            self, parent_node, party_type, party_type_label,
            order, party_name, iban, bic, eval_ctx, gen_args):
        """Generate the piece of the XML file corresponding to BIC
        This code is mutualized between TRF and DD

        *order* is 'B' (debtor side) or 'C' (creditor side).  When the BIC
        cannot be computed, a NOTPROVIDED agent is emitted where the
        guidelines allow it, and a Warning is raised for cross-border
        creditor accounts that require one.
        """
        assert order in ('B', 'C'), "Order can be 'B' or 'C'"
        try:
            bic = self._prepare_field(
                '%s BIC' % party_type_label, bic, eval_ctx, gen_args=gen_args)
            party_agent = etree.SubElement(parent_node, '%sAgt' % party_type)
            party_agent_institution = etree.SubElement(
                party_agent, 'FinInstnId')
            party_agent_bic = etree.SubElement(
                party_agent_institution, gen_args.get('bic_xml_tag'))
            party_agent_bic.text = bic
        except Warning:
            # _prepare_field could not produce a BIC.
            if order == 'C':
                if iban[0:2] != gen_args['initiating_party_country_code']:
                    # NOTE(review): Warning is raised with two arguments
                    # here (unlike everywhere else in this class) -- the
                    # first one looks like a leftover title; verify the
                    # rendered message.
                    raise Warning(
                        _('Error:'),
                        _("The bank account with IBAN '%s' of partner '%s' "
                          "must have an associated BIC because it is a "
                          "cross-border SEPA operation.")
                        % (iban, party_name))
            if order == 'B' or (
                    order == 'C' and gen_args['payment_method'] == 'DD'):
                party_agent = etree.SubElement(
                    parent_node, '%sAgt' % party_type)
                party_agent_institution = etree.SubElement(
                    party_agent, 'FinInstnId')
                party_agent_other = etree.SubElement(
                    party_agent_institution, 'Othr')
                party_agent_other_identification = etree.SubElement(
                    party_agent_other, 'Id')
                party_agent_other_identification.text = 'NOTPROVIDED'
            # for Credit Transfers, in the 'C' block, if BIC is not provided,
            # we should not put the 'Creditor Agent' block at all,
            # as per the guidelines of the EPC
        return True

    @api.model
    def generate_party_block(
            self, parent_node, party_type, order, name, iban, bic,
            eval_ctx, gen_args):
        """Generate the piece of the XML file corresponding to Name+IBAN+BIC
        This code is mutualized between TRF and DD"""
        assert order in ('B', 'C'), "Order can be 'B' or 'C'"
        if party_type == 'Cdtr':
            party_type_label = 'Creditor'
        elif party_type == 'Dbtr':
            party_type_label = 'Debtor'
        party_name = self._prepare_field(
            '%s Name' % party_type_label, name, eval_ctx,
            gen_args.get('name_maxsize'), gen_args=gen_args)
        piban = self._prepare_field(
            '%s IBAN' % party_type_label, iban, eval_ctx, gen_args=gen_args)
        viban = self._validate_iban(piban)
        # At C level, the order is : BIC, Name, IBAN
        # At B level, the order is : Name, IBAN, BIC
        if order == 'B':
            # Remember the initiator's country to detect cross-border
            # operations later on (see generate_party_agent).
            gen_args['initiating_party_country_code'] = viban[0:2]
        elif order == 'C':
            self.generate_party_agent(
                parent_node, party_type, party_type_label,
                order, party_name, viban, bic, eval_ctx, gen_args)
        party = etree.SubElement(parent_node, party_type)
        party_nm = etree.SubElement(party, 'Nm')
        party_nm.text = party_name
        party_account = etree.SubElement(
            parent_node, '%sAcct' % party_type)
        party_account_id = etree.SubElement(party_account, 'Id')
        party_account_iban = etree.SubElement(
            party_account_id, 'IBAN')
        party_account_iban.text = viban
        if order == 'B':
            self.generate_party_agent(
                parent_node, party_type, party_type_label,
                order, party_name, viban, bic, eval_ctx, gen_args)
        return True

    @api.model
    def generate_remittance_info_block(self, parent_node, line, gen_args):
        """Append the <RmtInf> block for a payment *line*: unstructured
        (free text) when line.state == 'normal', structured (creditor
        reference) otherwise.  Raises Warning when the structured
        communication type is missing."""
        remittance_info_2_91 = etree.SubElement(
            parent_node, 'RmtInf')
        if line.state == 'normal':
            remittance_info_unstructured_2_99 = etree.SubElement(
                remittance_info_2_91, 'Ustrd')
            remittance_info_unstructured_2_99.text = \
                self._prepare_field(
                    'Remittance Unstructured Information',
                    'line.communication', {'line': line}, 140,
                    gen_args=gen_args)
        else:
            if not line.struct_communication_type:
                raise Warning(
                    _("Missing 'Structured Communication Type' on payment "
                      "line with reference '%s'.")
                    % line.name)
            remittance_info_structured_2_100 = etree.SubElement(
                remittance_info_2_91, 'Strd')
            creditor_ref_information_2_120 = etree.SubElement(
                remittance_info_structured_2_100, 'CdtrRefInf')
            # Tag names for the creditor reference differ between
            # pain.001.001.02 and the later flavors.
            if gen_args.get('pain_flavor') == 'pain.001.001.02':
                creditor_ref_info_type_2_121 = etree.SubElement(
                    creditor_ref_information_2_120, 'CdtrRefTp')
                creditor_ref_info_type_code_2_123 = etree.SubElement(
                    creditor_ref_info_type_2_121, 'Cd')
                creditor_ref_info_type_issuer_2_125 = etree.SubElement(
                    creditor_ref_info_type_2_121, 'Issr')
                creditor_reference_2_126 = etree.SubElement(
                    creditor_ref_information_2_120, 'CdtrRef')
            else:
                creditor_ref_info_type_2_121 = etree.SubElement(
                    creditor_ref_information_2_120, 'Tp')
                creditor_ref_info_type_or_2_122 = etree.SubElement(
                    creditor_ref_info_type_2_121, 'CdOrPrtry')
                creditor_ref_info_type_code_2_123 = etree.SubElement(
                    creditor_ref_info_type_or_2_122, 'Cd')
                creditor_ref_info_type_issuer_2_125 = etree.SubElement(
                    creditor_ref_info_type_2_121, 'Issr')
                creditor_reference_2_126 = etree.SubElement(
                    creditor_ref_information_2_120, 'Ref')
            creditor_ref_info_type_code_2_123.text = 'SCOR'
            creditor_ref_info_type_issuer_2_125.text = \
                line.struct_communication_type
            creditor_reference_2_126.text = \
                self._prepare_field(
                    'Creditor Structured Reference',
                    'line.communication', {'line': line}, 35,
                    gen_args=gen_args)
        return True

    @api.model
    def generate_creditor_scheme_identification(
            self, parent_node, identification, identification_label,
            eval_ctx, scheme_name_proprietary, gen_args):
        """Append the creditor scheme identification (Id/PrvtId/Othr)
        block used by SEPA direct debits to *parent_node*."""
        csi_id = etree.SubElement(parent_node, 'Id')
        csi_privateid = etree.SubElement(csi_id, 'PrvtId')
        csi_other = etree.SubElement(csi_privateid, 'Othr')
        csi_other_id = etree.SubElement(csi_other, 'Id')
        csi_other_id.text = self._prepare_field(
            identification_label, identification, eval_ctx, gen_args=gen_args)
        csi_scheme_name = etree.SubElement(csi_other, 'SchmeNm')
        csi_scheme_name_proprietary = etree.SubElement(
            csi_scheme_name, 'Prtry')
        csi_scheme_name_proprietary.text = scheme_name_proprietary
        return True
| agpl-3.0 | -2,970,348,099,814,226,000 | 45.289412 | 79 | 0.564073 | false |
BranislavBajuzik/1.12-to-1.13-by-TheAl_T | 1_12to1_13aMCEditPlugin.py | 1 | 4918 | # MCEdit Filter by TheAl_T
# planetminecraft.com/member/theal_t
# Block data value -> block state database by: Onnowhere youtube.com/onnowhere2
import json
from pymclevel import TAG_String
try:
converter = __import__("1_12to1_13")
except ImportError:
converter = False
# Name of this filter (presumably what MCEdit shows in its filter list --
# TODO confirm against the MCEdit filter API).
displayName = "1.12 to 1.13 by TheAl_T"

# Filter options; the "Warnings"/"Errors" booleans are read back through
# options[...] in perform(), the "label" rows are informational text only.
inputs = (
    ("Convert to 1.12 to 1.13:", "label"),
    ("", "label"),
    ("Important: Run this filter only once. If you run it on already converted commands, it wil fail, because 1.13 syntax is different than 1.12 (Because of the very reason you are using this filter)", "label"),
    ("The way this filter is intended to be used: Turn off Errors, filter, undo, look at the console, fix all errors, filter again and save.", "label"),
    ("", "label"),
    ("", "label"),
    ("Warnings will be printed to the console. They will NOT stop the execution of this script. If you want to do so, tick the next box.", "label"),
    ("Warnings", False),
    ("Errors will be printed to the console. They will stop the execution of this script. If you want to ignore them, un-tick the next box.", "label"),
    ("Errors", True)
)

# CommandStats sub-tag base names (1.12); converted to "execute store ..."
# prefixes in perform().
statTags = ("AffectedBlocks", "AffectedEntities", "AffectedItems", "QueryResult", "SuccessCount")
def perform(level, box, options):
    """Filter entry point: convert 1.12 commands (command blocks, command
    block minecarts, sign JSON) inside the selection *box* to 1.13 syntax.

    Depending on the "Warnings"/"Errors" options, conversion problems are
    either only printed to the console or raised (aborting the filter).
    """
    if not converter:
        raise ImportError("Unable to import main library (1_12to1_13.py). Please make sure this file is in the same directory as this filter")

    def validate(what, label):
        # Prepend "execute store ..." prefixes for any 1.12 CommandStats
        # found on the tile entity, enforce the 32 500 character limit and
        # write the converted text back into tag *label*.
        if "CommandStats" in e:
            s = ""
            tmp = {entry.name: entry.value for entry in e["CommandStats"].value}
            for stat in statTags:
                if stat+"Name" in tmp:
                    s += "execute store {} {} {} ".format("success" if stat == "SuccessCount" else "result", tmp[stat+"Name"], tmp[stat+"Objective"])
            what = s + what

        if len(what) > 32500:
            print("The command at [{}, {}, {}] is too long ({}) after conversion\n(more than 32 500 characters)\n".format(x, y, z, len(what)))
            if options["Errors"]:
                raise AssertionError("The command at [{}, {}, {}] is too long ({}) after conversion\n(more than 32 500 characters)".format(x, y, z, len(what)))

        if converter.Globals.flags["commentedOut"]:
            print("A command at [{}, {}, {}] was commented out because it has to be converted manually\n".format(x, y, z))
            if options["Warnings"]:
                raise AssertionError("A command at [{}, {}, {}] has to be converted manually".format(x, y, z))

        e[label] = TAG_String(what)
        chunk.dirty = True  # mark the chunk so MCEdit saves the change

    def fixCommand(entity=False):
        # Convert the "Command" tag in place; *entity* only affects the
        # error raised (minecart vs block wording).
        command = e["Command"].value.strip()
        if command:
            try:
                converter.Globals.flags["commentedOut"] = False
                command = unicode(converter.decide(command))
            except SyntaxError as ex:
                # NOTE(review): the printed message always says "block",
                # even when converting a minecart command.
                print(u"Error in block at [{}, {}, {}]:\n{}".format(x, y, z, ex))
                if options["Errors"]:
                    if entity:
                        raise SyntaxError(u"Error in minecart at [{}, {}, {}]:\n{}".format(x, y, z, ex.message))
                    else:
                        raise SyntaxError(u"Error in block at [{}, {}, {}]:\n{}".format(x, y, z, ex.message))
            validate(command, "Command")

    for chunk, _, _ in level.getChunkSlices(box):
        # Command block minecarts (entities).
        for e in chunk.Entities:
            x, y, z = map(lambda x: x.value, e["Pos"].value)
            if (x, y, z) in box and e["id"].value == "minecraft:commandblock_minecart":
                fixCommand(True)

        # Command blocks and signs (tile entities).
        for e in chunk.TileEntities:
            x, y, z = e["x"].value, e["y"].value, e["z"].value
            if (x, y, z) in box:
                if e["id"].value in ("minecraft:command_block", "minecraft:chain_command_block", "minecraft:repeating_command_block"):
                    fixCommand()

                if e["id"].value == "minecraft:sign":
                    # Each sign line is JSON text; convert any embedded
                    # click-event commands via converter.walk().
                    for label in ("Text1", "Text2", "Text3", "Text4"):
                        try:
                            converter.Globals.flags["commentedOut"] = False
                            s = json.JSONDecoder().decode(e[label].value.strip())
                            converter.walk(s)
                            s = json.JSONEncoder(separators=(',', ':')).encode(s)
                        except ValueError:
                            # Not valid JSON; leave this line untouched.
                            continue
                        except SyntaxError as ex:
                            print(u"Error in sign at [{}, {}, {}]:\n{}".format(x, y, z, ex))
                            if options["Errors"]:
                                raise SyntaxError(u"Error in sign at [{}, {}, {}]:\n{}".format(x, y, z, ex.message))
                            continue
                        validate(s, label)
| gpl-3.0 | -589,329,938,607,469,300 | 48.183673 | 211 | 0.53172 | false |
mikefitz888/owtf | framework/http/wafbypasser/wafbypasser.py | 3 | 20838 | #!/bin/python
from tornado.httputil import HTTPHeaders
from tornado.httpclient import HTTPRequest
from framework.http.wafbypasser.core.hpp_lib import asp_hpp, param_overwrite
from framework.http.wafbypasser.core.placeholder_length import find_length
from framework.http.wafbypasser.core.detection import *
from framework.http.wafbypasser.core.argument_parser import get_args
from framework.http.wafbypasser.core.fuzzer import Fuzzer
from framework.http.wafbypasser.core.helper import load_payload_file, Error
from framework.http.wafbypasser.core.http_helper import HTTPHelper
from framework.http.wafbypasser.core.param_source_detector import detect_accepted_sources
from framework.http.wafbypasser.core.response_analyzer import analyze_responses, print_request, \
print_response, analyze_chars, analyze_encoded_chars, \
analyze_accepted_sources
from framework.http.wafbypasser.core.placeholder_manager import PlaceholderManager
from framework.http.wafbypasser.core.obfuscation_lib import unicode_urlencode, urlencode, \
transformations_info
import string
class WAFBypasser:
def fuzz(self, args, requests):
if args["follow_cookies"] or args["delay"]:
delay = args["delay"] or 0
follow_cookies = args["follow_cookies"] or False
print "Synchronous Fuzzing: Started"
responses = self.fuzzer.sync_fuzz(requests, delay, follow_cookies)
else:
print "Requests number: " + str(len(requests))
print "Asynchronous Fuzzing: Started"
responses = self.fuzzer.async_fuzz(requests)
print "Fuzzing: Completed"
return responses
def is_detection_set(self, args):
det_functions = ["contains", "resp_code_det", "response_time"]
args_set = False
for name in det_functions:
if args[name] is not None:
return
Error(self.owtf, "You need to specify at least one Detection Function.")
def require(self, args, params):
param_missing = False
for param in params:
if args[param] is None:
param_missing = True
print "Specify: --" + param
if param_missing:
Error(self.owtf, "Missing Parameter(s).")
    def __init__(self, owtf):
        """Store the OWTF handle and set up the default request template
        and placeholder signatures used by the fuzzing engine."""
        # Default User-Agent for all fuzzing requests.
        self.ua = "Mozilla/5.0 (X11; Linux i686; rv:6.0) Gecko/20100101 /" \
            "Firefox/15.0"
        # Template tornado request reused (with per-run headers) for every
        # fuzzed request.
        self.init_request = HTTPRequest("",
                                        auth_username=None,
                                        auth_password=None,
                                        follow_redirects=True,
                                        max_redirects=10,
                                        allow_nonstandard_methods=True,
                                        headers=None,
                                        proxy_host=None,
                                        proxy_port=None,
                                        proxy_username=None,
                                        proxy_password=None,
                                        user_agent=self.ua,
                                        request_timeout=40.0)
        # Placeholder delimiter; may be overridden via the
        # "fuzzing_signature" argument in start().
        self.sig = "@@@"
        self.length_signature = self.sig + "length" + self.sig
        self.fsig = self.sig + "fuzzhere" + self.sig  # fuzzing signature
        # Template signature regular expression
        self.template_signature_re = self.sig + "[^" + self.sig + "]+"
        self.template_signature_re += self.sig
        # Detection functions registered by init_detection_struct().
        self.detection_struct = []
        self.pm = None  # PlaceholderManager
        self.http_helper = None
        self.fuzzer = None
        self.owtf = owtf
def init_methods(self, args):
methods = args["methods"] or []
# Setting Methods
if methods:
if self.sig + "all" + self.sig in methods:
methods.remove(self.sig + "all" + self.sig)
methods.extend(
load_payload_file("./payloads/HTTP/methods.txt"))
methods = list(set(methods)) # Removing doublesk
else:
if args["data"] is None:
methods.append("GET") # Autodetect Method
else:
methods.append("POST") # Autodetect Method
return methods
def init_headers(self, args):
headers = HTTPHeaders()
if args["headers"]:
for header in args["headers"]:
values = header.split(':', 1)
if len(values) == 2:
headers.add(*values)
else: # values == 1
headers.add(values[0], "")
# Setting Cookies
if args["cookie"]:
headers.add("Cookie", args["cookie"])
return headers
def init_detection_struct(self, args):
if args["contains"]:
detection_args = {}
detection_args["phrase"] = args["contains"][0] # detection string
detection_args["case_sensitive"] = "cs" in args["contains"][1:]
detection_args["reverse"] = False
if args["reverse"]:
detection_args["reverse"] = True
self.detection_struct.append({"method": contains,
"arguments": detection_args,
"info": "Contains"})
if args["resp_code_det"]:
detection_args = {}
detection_args["response_codes"] = args["resp_code_det"][0]
detection_args["reverse"] = False
if args["reverse"]:
detection_args["reverse"] = True
self.detection_struct.append({"method": resp_code_detection,
"arguments": detection_args,
"info": "Response code detection"})
if args["response_time"]:
detection_args = {}
detection_args["time"] = args["response_time"][0]
detection_args["reverse"] = False
if args["reverse"]:
detection_args["reverse"] = True
self.detection_struct.append({"method": resp_time_detection,
"arguments": detection_args,
"info": "Response time detection"})
    def start(self, args):
        """Entry point: run the WAF-bypass scan mode selected in *args*.

        Sets up the placeholder manager, HTTP helper and fuzzer, then
        dispatches on args["mode"]: length, detect_accepted_sources,
        content_type_tamper, asp_hpp / param_overwriting, detect_chars,
        fuzz, show_transform_functions, overchar.
        """
        # Initializations
        self.sig = args["fuzzing_signature"] or self.sig
        self.pm = PlaceholderManager(self.sig)
        target = args["target"]
        methods = self.init_methods(args)
        headers = self.init_headers(args)
        data = args["data"] or ""
        self.init_detection_struct(args)
        self.init_request.headers = headers
        self.http_helper = HTTPHelper(self.init_request)
        self.fuzzer = Fuzzer(self.http_helper)
        # Finding the length of a placeholder
        if args["mode"] == "length":
            self.require(args, ["accepted_value"])
            self.is_detection_set(args)
            if len(methods) > 1:
                Error(self.owtf, "Only you need to specify only one Method")
            print "Scanning mode: Length Detection"
            # Repeat the accepted character until detection flips.
            ch = args["accepted_value"][0]
            length = find_length(self.owtf,
                                 self.http_helper,
                                 self.length_signature,
                                 target,
                                 methods[0],
                                 self.detection_struct,
                                 ch,
                                 headers,
                                 data)
            print "Placeholder Allowed Length = " + str(length)
        # Detecting Allowed Sources
        elif args["mode"] == "detect_accepted_sources":
            self.is_detection_set(args)
            self.require(args, ["methods",
                                "param_name",
                                "accepted_value",
                                "param_source"])
            if len(methods) > 1:
                Error(self.owtf, "Only you need to specify only one Method")
            print "Scanning mode: Allowed Sources Detection"
            accepted_method = methods[0]
            param_name = args["param_name"]
            accepted_value = args["accepted_value"]
            param_source = args["param_source"]
            # Move the known-accepted parameter through every source
            # (GET/POST/cookie/...) and compare responses.
            requests = detect_accepted_sources(self.http_helper,
                                               target,
                                               data,
                                               headers,
                                               param_name,
                                               param_source,
                                               accepted_value,
                                               accepted_method)
            responses = self.fuzz(args, requests)
            analyze_accepted_sources(responses, self.detection_struct)
        elif args["mode"] == "content_type_tamper":
            print "Tampering Content-Type mode"
            # Substitute the fuzz signature into the Content-Type header
            # and replay with every known content type.
            cnt_types = load_payload_file("./payloads/HTTP/content_types.txt")
            new_headers = self.http_helper.add_header_param(
                headers,
                "Content-Type", self.fsig)
            self.pm = PlaceholderManager(self.sig)
            requests = self.pm.transformed_http_requests(
                self.http_helper,
                methods,
                target,
                cnt_types,  # Payloads
                new_headers,
                data)
            responses = self.fuzz(args, requests)
            for response in responses:
                print "[->]Request"
                print_request(response)
                print "[<-]Response"
                print_response(response)
                print
        # HPP modes
        elif args["mode"] == "asp_hpp" or args["mode"] == "param_overwriting":
            self.require(args, ["param_name", "param_source", "payloads"])
            param_name = args["param_name"]
            param_source = args["param_source"]
            self.is_detection_set(args)
            payloads = []
            for p_file in args["payloads"]:
                payloads += load_payload_file(p_file)
            if args["mode"] == "asp_hpp":
                # HTTP Parameter Pollution: ASP concatenates repeated
                # parameters, so payloads are split across repetitions.
                print "Scanning mode: ASP HPP Parameter Splitting"
                requests = asp_hpp(self.http_helper,
                                   methods,
                                   payloads,
                                   param_name,
                                   param_source,
                                   target,
                                   headers,
                                   data)
                responses = self.fuzz(args, requests)
            elif args["mode"] == "param_overwriting":
                requests = param_overwrite(self.http_helper,
                                           param_name,
                                           param_source,
                                           payloads[0],
                                           target,
                                           data,
                                           headers)
                responses = self.fuzz(args, requests)
            analyze_responses(responses,
                              self.http_helper,
                              self.detection_struct)
        elif args["mode"] == "detect_chars":
            self.is_detection_set(args)
            # Probe every single byte value 0..255 to split the charset
            # into detected (blocked) and undetected (allowed) chars.
            payloads = []
            for i in range(0, 256):
                payloads.append(chr(i))
            requests = self.pm.transformed_http_requests(self.http_helper,
                                                         methods,
                                                         target,
                                                         payloads,
                                                         headers,
                                                         data)
            responses = self.fuzz(args, requests)
            sent_payloads = analyze_chars(responses,
                                          self.http_helper,
                                          self.detection_struct)
            payloads = []
            if sent_payloads["detected"]:
                # urlencode blocked chars
                print
                print "URL encoding bad characters"
                for bad_char in sent_payloads["detected"]:
                    payloads.append(urlencode(bad_char))
                requests = self.pm.transformed_http_requests(self.http_helper,
                                                             methods,
                                                             target,
                                                             payloads,
                                                             headers,
                                                             data)
                responses = self.fuzz(args, requests)
                analyze_encoded_chars(responses,
                                      self.http_helper,
                                      self.detection_struct)
                print
                print "UnicodeURL encoding bad characters"
                payloads = []
                # unicode urlencode blocked chars
                for bad_char in sent_payloads["detected"]:
                    payloads.append(unicode_urlencode(bad_char))
                requests = self.pm.transformed_http_requests(self.http_helper,
                                                             methods,
                                                             target,
                                                             payloads,
                                                             headers,
                                                             data)
                responses = self.fuzz(args, requests)
                analyze_encoded_chars(responses,
                                      self.http_helper,
                                      self.detection_struct)
                # Finding a white-listed character
                # NOTE(review): these loops break on the first iteration,
                # so good_char is always the first letter ('a') whenever
                # string.letters is non-empty -- confirm this is intended.
                good_char = None
                for char in string.letters:
                    good_char = char
                    break
                if not good_char:
                    for char in string.digits:
                        good_char = char
                        break
                if not good_char:
                    good_char = sent_payloads["undetected"][0]
                print
                print "Sending a detected char followed by an undetected"
                payloads = []
                # add an accepted char before a blocked char
                for bad_char in sent_payloads["detected"]:
                    payloads.append(bad_char + good_char)
                requests = self.pm.transformed_http_requests(self.http_helper,
                                                             methods,
                                                             target,
                                                             payloads,
                                                             headers,
                                                             data)
                responses = self.fuzz(args, requests)
                analyze_encoded_chars(responses,
                                      self.http_helper,
                                      self.detection_struct)
                print
                print "Sending a detected char after an undetected"
                payloads = []
                for bad_char in sent_payloads["detected"]:
                    payloads.append(good_char + bad_char)
                requests = self.pm.transformed_http_requests(self.http_helper,
                                                             methods,
                                                             target,
                                                             payloads,
                                                             headers,
                                                             data)
                responses = self.fuzz(args, requests)
                analyze_encoded_chars(responses,
                                      self.http_helper,
                                      self.detection_struct)
                # NOTE(review): this pass builds bad_char + good_char,
                # identical to the "detected char followed by an
                # undetected" pass above -- looks like an accidental
                # duplicate; confirm against upstream intent.
                print "Sending an undetected char after a detected"
                payloads = []
                for bad_char in sent_payloads["detected"]:
                    payloads.append(bad_char + good_char)
                requests = self.pm.transformed_http_requests(self.http_helper,
                                                             methods,
                                                             target,
                                                             payloads,
                                                             headers,
                                                             data)
                responses = self.fuzz(args, requests)
                analyze_encoded_chars(responses,
                                      self.http_helper,
                                      self.detection_struct)
                print "Sending an detected char surrounded by undetected chars"
                payloads = []
                for bad_char in sent_payloads["detected"]:
                    payloads.append(good_char + bad_char + good_char)
                requests = self.pm.transformed_http_requests(self.http_helper,
                                                             methods,
                                                             target,
                                                             payloads,
                                                             headers,
                                                             data)
                responses = self.fuzz(args, requests)
                analyze_encoded_chars(responses,
                                      self.http_helper,
                                      self.detection_struct)
        # Fuzzing mode
        elif args["mode"] == "fuzz":
            # Exactly one fuzzing placeholder must appear in the args.
            fuzzing_placeholders = PlaceholderManager.get_placeholder_number(
                self.template_signature_re, str(args))
            if fuzzing_placeholders> 1:
                Error(self.owtf, "Multiple fuzzing placeholder signatures found. "
                      "Only one fuzzing placeholder is supported.")
            elif fuzzing_placeholders == 0:
                Error(self.owtf, "No fuzzing placeholder signatures found.")
            self.is_detection_set(args)
            payloads = []
            if args["payloads"]:
                for p_file in args["payloads"]:
                    payloads += load_payload_file(p_file)
            else:
                payloads.append("")
            print "Scanning mode: Fuzzing Using placeholders"
            requests = self.pm.transformed_http_requests(self.http_helper,
                                                         methods,
                                                         target,
                                                         payloads,
                                                         headers,
                                                         data)
            responses = self.fuzz(args, requests)
            analyze_responses(responses,
                              self.http_helper,
                              self.detection_struct)
        elif args["mode"] == "show_transform_functions":
            print transformations_info()
        elif args["mode"] == "overchar":
            # Left-pad every payload with the accepted character up to
            # the target length before sending.
            self.require(args, ["payloads", "accepted_value"])
            length = int(args["length"][0])
            accepted_value = args["accepted_value"][0]
            payloads = []
            for p_file in args["payloads"]:
                payloads += load_payload_file(p_file)
            payloads = [(length - len(payload)) * accepted_value + payload
                        for payload in payloads]
            requests = self.pm.transformed_http_requests(self.http_helper,
                                                         methods,
                                                         target,
                                                         payloads,
                                                         headers,
                                                         data)
            responses = self.fuzz(args, requests)
            analyze_responses(responses,
                              self.http_helper,
                              self.detection_struct)
        else:
            Error(self.owtf, "Unknown bypassing mode.")
if __name__ == "__main__":
    # NOTE(review): WAFBypasser.__init__ as defined above requires an
    # ``owtf`` argument, but none is passed here -- this would raise a
    # TypeError when run standalone.  Confirm whether a default /
    # standalone mode is expected.
    wafbypasser = WAFBypasser()
    arguments = get_args()
    wafbypasser.start(arguments)
gooddata/openstack-nova | nova/policies/security_groups.py | 3 | 2297 | # Copyright 2016 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_policy import policy
from nova.policies import base
# Single policy rule shared by all security-group related endpoints.
BASE_POLICY_NAME = 'os_compute_api:os-security-groups'
# Documented policy defaults consumed by list_rules(); the operations
# list enumerates every endpoint guarded by this rule.
security_groups_policies = [
    policy.DocumentedRuleDefault(
        BASE_POLICY_NAME,
        base.RULE_ADMIN_OR_OWNER,
        """List, show, add, or remove security groups.
APIs which are directly related to security groups resource are deprecated:
Lists, shows information for, creates, updates and deletes
security groups. Creates and deletes security group rules. All these
APIs are deprecated.
APIs which are related to server resource are not deprecated:
Lists Security Groups for a server. Add Security Group to a server
and remove security group from a server.""",
        [
            {
                'method': 'GET',
                'path': '/os-security-groups'
            },
            {
                'method': 'GET',
                'path': '/os-security-groups/{security_group_id}'
            },
            {
                'method': 'POST',
                'path': '/os-security-groups'
            },
            {
                'method': 'PUT',
                'path': '/os-security-groups/{security_group_id}'
            },
            {
                'method': 'DELETE',
                'path': '/os-security-groups/{security_group_id}'
            },
            {
                'method': 'GET',
                'path': '/servers/{server_id}/os-security-groups'
            },
            {
                'method': 'POST',
                'path': '/servers/{server_id}/action (addSecurityGroup)'
            },
            {
                'method': 'POST',
                'path': '/servers/{server_id}/action (removeSecurityGroup)'
            },
        ],
    ),
]
def list_rules():
    """Return the security-group policy rules defined in this module."""
    return security_groups_policies
| apache-2.0 | -8,075,100,577,388,602,000 | 28.831169 | 78 | 0.602525 | false |
chrys87/fenrir | src/fenrirscreenreader/commands/onCursorChange/25000-word_echo_type.py | 3 | 2102 | #!/bin/python
# -*- coding: utf-8 -*-
# Fenrir TTY screen reader
# By Chrys, Storm Dragon, and contributers.
from fenrirscreenreader.core import debug
from fenrirscreenreader.utils import word_utils
import string
class command():
    """Word echo: speak the word just completed when the cursor moves
    one cell past its end (typing navigation, not review)."""

    def __init__(self):
        pass

    def initialize(self, environment):
        # Keep a reference to the shared Fenrir environment.
        self.env = environment

    def shutdown(self):
        pass

    def getDescription(self):
        return 'No Description found'

    def run(self):
        runtime = self.env['runtime']
        # Feature toggle.
        if not runtime['settingsManager'].getSettingAsBool('keyboard', 'wordEcho'):
            return
        screen = self.env['screen']
        # Only react to a single-cell rightward cursor advance.
        if screen['newCursor']['x'] - screen['oldCursor']['x'] != 1:
            return
        # Word detection only makes sense for horizontal cursor motion.
        if not runtime['cursorManager'].isCursorHorizontalMove():
            return
        # For now: skip line changes.
        if runtime['cursorManager'].isCursorVerticalMove():
            return
        # Skip while the screen content is still changing (typing).
        if runtime['screenManager'].isDelta():
            return
        # Extract the word under/behind the cursor on the current line.
        line = screen['newContentText'].split('\n')[screen['newCursor']['y']]
        x, y, currWord, endOfScreen, lineBreak = \
            word_utils.getCurrentWord(screen['newCursor']['x'], 0, line)
        if currWord == '':
            return
        cursorX = screen['newCursor']['x']
        # Cursor must sit on whitespace just past the word's end.
        if not line[cursorX].isspace():
            return
        if (x + len(currWord) != cursorX) and \
           (x + len(currWord) != cursorX - 1):
            return
        runtime['outputManager'].presentText(currWord, interrupt=True, flush=False)

    def setCallback(self, callback):
        pass
haizawa/odenos | src/main/python/org/o3project/odenos/core/component/logic.py | 3 | 50794 | # -*- coding:utf-8 -*-
# Copyright 2015 NEC Corporation. #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
# You may obtain a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
import logging
import re
import copy
from inspect import stack
from org.o3project.odenos.core.component.component import Component
from org.o3project.odenos.core.component.conversion_table import ConversionTable
from org.o3project.odenos.core.util.network_interface import NetworkInterface
from org.o3project.odenos.core.util.system_manager_interface import (
SystemManagerInterface
)
from org.o3project.odenos.remoteobject.message.request import Request
from org.o3project.odenos.remoteobject.manager.system.event.component_connection_changed\
import ComponentConnectionChanged
from org.o3project.odenos.core.component.network.topology.node_changed import (
NodeChanged
)
from org.o3project.odenos.core.component.network.topology.port_changed import (
PortChanged
)
from org.o3project.odenos.core.component.network.topology.link_changed import (
LinkChanged
)
from org.o3project.odenos.core.component.network.flow.flow_changed import (
FlowChanged
)
from org.o3project.odenos.core.component.network.packet.in_packet_added import (
InPacketAdded
)
from org.o3project.odenos.core.component.network.packet.out_packet_added import (
OutPacketAdded
)
from org.o3project.odenos.core.component.network.topology.node import Node
from org.o3project.odenos.core.component.network.topology.port import Port
from org.o3project.odenos.core.component.network.topology.link import Link
from org.o3project.odenos.core.component.network.flow.flow import Flow
from org.o3project.odenos.core.component.network.packet.in_packet import InPacket
from org.o3project.odenos.core.component.network.packet.out_packet import OutPacket
class Logic(Component):
NETWORK_EVENT_TYPE_LIST = [NodeChanged.TYPE, PortChanged.TYPE,
LinkChanged.TYPE, FlowChanged.TYPE]
PACKET_EVENT_TYPE_LIST = [InPacketAdded.TYPE, OutPacketAdded.TYPE]
    def __init__(self, object_id, dispatcher):
        """Create a Logic component with empty conversion/subscription state.

        :param object_id: this component's remote-object ID.
        :param dispatcher: message dispatcher; may be None in tests, in
            which case no SystemManagerInterface is created.
        """
        super(Logic, self).__init__(object_id, dispatcher)
        # ConversionTable Object
        self._conversion_table = ConversionTable()
        # key:network_id value:NetworkInterface Object
        self._network_interfaces = {}
        # key:event_type + network_id value:attribute list
        self.__subscription_table = {}
        # SystemManager IF
        if self.dispatcher is None:
            return
        self._sys_manager_interface = SystemManagerInterface(dispatcher, object_id)
    def conversion_table(self):
        """Return the ConversionTable that maps IDs between networks."""
        return self._conversion_table
###################################
# Receive ComponentConnectionChanged
###################################
def _do_event_componentconnectionchanged(self, event):
msg = None
try:
msg = ComponentConnectionChanged.create_from_packed(event.body)
except KeyError, err:
logging.error("Receive Invalid ComponentConnectionChanged Message"
+ " KeyError: " + str(err))
return
logging.debug("Receive ComponentConnectionChanged action:" +
msg.action)
if msg.action == ComponentConnectionChanged.Action.ADD:
if (self._connection_changed_added_pre(msg)):
network_id = msg.curr.network_id
if network_id in self._network_interfaces:
return
self._network_interfaces[network_id] = \
NetworkInterface(self.dispatcher, network_id, self.object_id)
self._connection_changed_added(msg)
elif msg.action == ComponentConnectionChanged.Action.UPDATE:
if (self._connection_changed_update_pre(msg)):
self._connection_changed_update(msg)
elif msg.action == ComponentConnectionChanged.Action.DELETE:
if (self._connection_changed_delete_pre(msg)):
network_id = msg.prev.network_id
self._connection_changed_delete(msg)
del self._network_interfaces[network_id]
else:
return
    def _connection_changed_added_pre(self, msg):
        # Subclass hook: return False to veto handling of a connection ADD.
        logging.debug("%s", self.object_id)
        return True
    def _connection_changed_update_pre(self, msg):
        # Subclass hook: return False to veto handling of a connection UPDATE.
        logging.debug("%s", self.object_id)
        return True
    def _connection_changed_delete_pre(self, msg):
        # Subclass hook: return False to veto handling of a connection DELETE.
        logging.debug("%s", self.object_id)
        return True
    def _connection_changed_added(self, msg):
        # Subclass hook: called after a NetworkInterface was registered.
        logging.debug("%s", self.object_id)
        return
    def _connection_changed_update(self, msg):
        # Subclass hook: called when a connection UPDATE was accepted.
        logging.debug("%s", self.object_id)
        return
    def _connection_changed_delete(self, msg):
        # Subclass hook: called just before the NetworkInterface is removed.
        logging.debug("%s", self.object_id)
        return
    # Add Network Event Subscription
    # NOTE(review): the parameter name "neteork_id" is a misspelling of
    # "network_id"; kept as-is since callers may pass it by keyword.
    def _add_event_subscription(self, event_type, neteork_id):
        """Subscribe to *event_type* from the given network and record
        the subscription under the key "<type>::<network_id>"."""
        if event_type is None or neteork_id is None:
            return
        if event_type in self.NETWORK_EVENT_TYPE_LIST or\
           event_type in self.PACKET_EVENT_TYPE_LIST:
            logging.debug("add_event_subscription Type:" + event_type
                          + " NetworkID:" + neteork_id)
            self._event_subscription.add_filter(neteork_id, event_type)
            self.__subscription_table[event_type + "::" + neteork_id] = None
        return
    # Remove Network Event Subscription
    def _remove_event_subscription(self, event_type, neteork_id):
        """Unsubscribe from *event_type* and drop the matching table entry."""
        if event_type is None or neteork_id is None:
            return
        if event_type in self.NETWORK_EVENT_TYPE_LIST or\
           event_type in self.PACKET_EVENT_TYPE_LIST:
            logging.debug("remove_event_subscription Type:" + event_type
                          + " NetworkID:" + neteork_id)
            self._event_subscription.remove_filter(neteork_id,
                                                   event_type)
            del self.__subscription_table[event_type + "::" + neteork_id]
        return
    # Update Network Event Subscription
    def _update_event_subscription(self, event_type,
                                   neteork_id, attributes=[]):
        """Subscribe to UPDATE notifications for *event_type*, storing the
        attribute-name filter under "<type>::UPDATE::<network_id>".
        NOTE(review): the mutable default for *attributes* is shared
        across calls; it is only read here, but callers must not mutate it.
        """
        if event_type is None or neteork_id is None:
            return
        if event_type in self.NETWORK_EVENT_TYPE_LIST:
            self._event_subscription.add_filter(neteork_id, event_type)
            self.__subscription_table[event_type + "::UPDATE::"
                                      + neteork_id] = attributes
        return
###################################
# Receive NodeChanged
###################################
def _do_event_nodechanged(self, event):
msg = None
try:
msg = NodeChanged.create_from_packed(event.body)
except KeyError, err:
logging.error("Receive Invalid NodeChanged Message"
+ " KeyError: " + str(err))
return
logging.debug("Received NodeChanged from Network ID:" +
event.publisher_id + " action:" + msg.action)
if msg.action == NodeChanged.Action.ADD:
key = NodeChanged.TYPE + "::" + event.publisher_id
if key in self.__subscription_table:
self._on_node_added(event.publisher_id,
msg.curr)
elif msg.action == NodeChanged.Action.UPDATE:
key = NodeChanged.TYPE + "::UPDATE::" + event.publisher_id
if key in self.__subscription_table:
self._on_node_update(event.publisher_id,
msg.prev,
msg.curr,
self.__subscription_table[key])
elif msg.action == NodeChanged.Action.DELETE:
key = NodeChanged.TYPE + "::" + event.publisher_id
if key in self.__subscription_table:
self._on_node_delete(event.publisher_id,
msg.prev)
else:
logging.debug("invalid action")
return
###################################
# Receive PortChanged
###################################
def _do_event_portchanged(self, event):
msg = None
try:
msg = PortChanged.create_from_packed(event.body)
except KeyError, err:
logging.error("Receive Invalid PortChanged Message"
+ " KeyError: " + str(err))
return
logging.debug("Received PortChanged from Network ID:" +
event.publisher_id + " action:" + msg.action)
if msg.action == PortChanged.Action.ADD:
key = PortChanged.TYPE + "::" + event.publisher_id
if key in self.__subscription_table:
self._on_port_added(event.publisher_id,
msg.curr)
elif msg.action == PortChanged.Action.UPDATE:
key = PortChanged.TYPE + "::UPDATE::" + event.publisher_id
if key in self.__subscription_table:
self._on_port_update(event.publisher_id,
msg.prev,
msg.curr,
self.__subscription_table[key])
elif msg.action == PortChanged.Action.DELETE:
key = PortChanged.TYPE + "::" + event.publisher_id
if key in self.__subscription_table:
self._on_port_delete(event.publisher_id,
msg.prev)
else:
logging.debug("invalid action")
return
###################################
# Receive LinkChanged
###################################
def _do_event_linkchanged(self, event):
msg = None
try:
msg = LinkChanged.create_from_packed(event.body)
except KeyError, err:
logging.error("Receive Invalid LinkChanged Message"
+ " KeyError: " + str(err))
return
logging.debug("Received LinkChanged from Network ID:" +
event.publisher_id + " action:" + msg.action)
if msg.action == LinkChanged.Action.ADD:
key = LinkChanged.TYPE + "::" + event.publisher_id
if key in self.__subscription_table:
self._on_link_added(event.publisher_id,
msg.curr)
elif msg.action == LinkChanged.Action.UPDATE:
key = LinkChanged.TYPE + "::UPDATE::" + event.publisher_id
if key in self.__subscription_table:
self._on_link_update(event.publisher_id,
msg.prev,
msg.curr,
self.__subscription_table[key])
elif msg.action == LinkChanged.Action.DELETE:
key = LinkChanged.TYPE + "::" + event.publisher_id
if key in self.__subscription_table:
self._on_link_delete(event.publisher_id,
msg.prev)
else:
logging.debug("invalid action")
return
###################################
# Receive FlowChanged
###################################
def _do_event_flowchanged(self, event):
msg = None
try:
msg = FlowChanged.create_from_packed(event.body)
except KeyError, err:
logging.error("Receive Invalid FlowChanged Message"
+ " KeyError: " + str(err))
return
logging.debug("Received FlowChanged from Network ID:" +
event.publisher_id + " action:" + msg.action)
if msg.action == FlowChanged.Action.ADD:
key = FlowChanged.TYPE + "::" + event.publisher_id
if key in self.__subscription_table:
self._on_flow_added(event.publisher_id,
msg.curr)
elif msg.action == FlowChanged.Action.UPDATE:
key = FlowChanged.TYPE + "::UPDATE::" + event.publisher_id
if key in self.__subscription_table:
self._on_flow_update(event.publisher_id,
msg.prev,
msg.curr,
self.__subscription_table[key])
elif msg.action == FlowChanged.Action.DELETE:
key = FlowChanged.TYPE + "::" + event.publisher_id
if key in self.__subscription_table:
self._on_flow_delete(event.publisher_id,
msg.prev)
else:
logging.debug("invalid action")
return
###################################
# Receive InPacketAdded
###################################
def _do_event_inpacketadded(self, event):
msg = None
try:
msg = InPacketAdded.create_from_packed(event.body)
except KeyError, err:
logging.error("Receive Invalid InPacketAdded Message"
+ " KeyError: " + str(err))
return
logging.debug("Received InPacketAdded from Network ID:" +
event.publisher_id)
if (self._on_in_packet_added_pre(event.publisher_id, msg)):
resp_list = self._add_in_packet_conversion(event.publisher_id, msg)
self._on_in_packet_added_post(event.publisher_id, msg, resp_list)
return
    def _on_in_packet_added_pre(self, network_id, msg):
        # Subclass hook: return False to skip in-packet conversion.
        logging.debug("%s", self.object_id)
        return True
    def _on_in_packet_added_post(self, network_id, msg, resp_list):
        # Subclass hook: inspect conversion responses (network_id -> resp).
        logging.debug("%s", self.object_id)
        return
###################################
# Receive OutPacketAdded
###################################
def _do_event_outpacketadded(self, event):
msg = None
try:
msg = OutPacketAdded.create_from_packed(event.body)
except KeyError, err:
logging.error("Receive Invalid OutPacketAdded Message"
+ " KeyError: " + str(err))
return
logging.debug("Received OutPacketAdded from Network ID:" +
event.publisher_id)
if (self._on_out_packet_added_pre(event.publisher_id, msg)):
resp_list = self._add_out_packet_conversion(event.publisher_id,
msg)
self._on_out_packet_added_post(event.publisher_id, msg, resp_list)
return
    def _on_out_packet_added_pre(self, network_id, msg):
        # Subclass hook: return False to skip out-packet conversion.
        logging.debug("%s", self.object_id)
        return True
    def _on_out_packet_added_post(self, network_id, msg, resp_list):
        # Subclass hook: inspect conversion responses (network_id -> resp).
        logging.debug("%s", self.object_id)
        return
    # Add Node
    def _on_node_added(self, network_id, node_msg):
        # Template method: pre-hook -> replicate to federated nets -> post-hook.
        logging.debug("%s", self.object_id)
        if (self._on_node_added_pre(network_id, node_msg)):
            resp_list = self._add_node_conversion(network_id, node_msg)
            self._on_node_added_post(network_id, node_msg, resp_list)
        return
    def _on_node_added_pre(self, network_id, node_msg):
        # Subclass hook: return False to skip node replication.
        logging.debug("%s", self.object_id)
        return True
    def _on_node_added_post(self, network_id, node_msg, resp_list):
        # Subclass hook: inspect replication responses.
        logging.debug("%s", self.object_id)
        return
    # Add Port
    def _on_port_added(self, network_id, port_msg):
        # Template method: pre-hook -> replicate to federated nets -> post-hook.
        logging.debug("%s", self.object_id)
        if (self._on_port_added_pre(network_id, port_msg)):
            resp_list = self._add_port_conversion(network_id, port_msg)
            self._on_port_added_post(network_id, port_msg, resp_list)
        return
    def _on_port_added_pre(self, network_id, port_msg):
        # Subclass hook: return False to skip port replication.
        logging.debug("%s", self.object_id)
        return True
    def _on_port_added_post(self, network_id, port_msg, resp_list):
        # Subclass hook: inspect replication responses.
        logging.debug("%s", self.object_id)
        return
    # Add Link
    def _on_link_added(self, network_id, link_msg):
        # Template method: pre-hook -> replicate to federated nets -> post-hook.
        logging.debug("%s", self.object_id)
        if (self._on_link_added_pre(network_id, link_msg)):
            resp_list = self._add_link_conversion(network_id, link_msg)
            self._on_link_added_post(network_id, link_msg, resp_list)
        return
    def _on_link_added_pre(self, network_id, link_msg):
        # Subclass hook: return False to skip link replication.
        logging.debug("%s", self.object_id)
        return True
    def _on_link_added_post(self, network_id, link_msg, resp_list):
        # Subclass hook: inspect replication responses.
        logging.debug("%s", self.object_id)
        return
    # Add Flow
    def _on_flow_added(self, network_id, flow_msg):
        # Template method: pre-hook -> replicate to federated nets -> post-hook.
        logging.debug("%s", self.object_id)
        if (self._on_flow_added_pre(network_id, flow_msg)):
            resp_list = self._add_flow_conversion(network_id, flow_msg)
            self._on_flow_added_post(network_id, flow_msg, resp_list)
        return
    def _on_flow_added_pre(self, network_id, flow_msg):
        # Subclass hook: return False to skip flow replication.
        logging.debug("%s", self.object_id)
        return True
    def _on_flow_added_post(self, network_id, flow_msg, resp_list):
        # Subclass hook: inspect replication responses.
        logging.debug("%s", self.object_id)
        return
    # Update Node
    def _on_node_update(self, network_id, prev, curr, attributes):
        # Template method: pre-hook -> propagate update -> post-hook.
        # *attributes* is the subscribed attribute-name filter.
        logging.debug("%s", self.object_id)
        if (self._on_node_update_pre(network_id, prev, curr, attributes)):
            resp_list = self._update_node_conversion(network_id,
                                                     prev,
                                                     curr,
                                                     attributes)
            self._on_node_update_post(network_id,
                                      prev,
                                      curr,
                                      attributes,
                                      resp_list)
        return
    def _on_node_update_pre(self, network_id, prev, curr, attributes):
        # Subclass hook: return False to skip propagating the node update.
        logging.debug("%s", self.object_id)
        return True
    def _on_node_update_post(self, network_id,
                             prev, curr, attributes, resp_list):
        # Subclass hook: inspect propagation responses.
        logging.debug("%s", self.object_id)
        return
    # Update Port
    def _on_port_update(self, network_id, prev, curr, attributes):
        # Template method: pre-hook -> propagate update -> post-hook.
        logging.debug("%s", self.object_id)
        if (self._on_port_update_pre(network_id, prev, curr, attributes)):
            resp_list = self._update_port_conversion(network_id,
                                                     prev,
                                                     curr,
                                                     attributes)
            self._on_port_update_post(network_id,
                                      prev,
                                      curr,
                                      attributes,
                                      resp_list)
        return
    def _on_port_update_pre(self, network_id, prev, curr, attributes):
        # Subclass hook: return False to skip propagating the port update.
        logging.debug("%s", self.object_id)
        return True
    def _on_port_update_post(self, network_id,
                             prev, curr, attributes, resp_list):
        # Subclass hook: inspect propagation responses.
        logging.debug("%s", self.object_id)
        return
    # Update Link
    def _on_link_update(self, network_id, prev, curr, attributes):
        # Template method: pre-hook -> propagate update -> post-hook.
        logging.debug("%s", self.object_id)
        if (self._on_link_update_pre(network_id, prev, curr, attributes)):
            resp_list = self._update_link_conversion(network_id,
                                                     prev,
                                                     curr,
                                                     attributes)
            self._on_link_update_post(network_id,
                                      prev,
                                      curr,
                                      attributes,
                                      resp_list)
        return
    def _on_link_update_pre(self, network_id, prev, curr, attributes):
        # Subclass hook: return False to skip propagating the link update.
        logging.debug("%s", self.object_id)
        return True
    def _on_link_update_post(self, network_id,
                             prev, curr, attributes, resp_list):
        # Subclass hook: inspect propagation responses.
        logging.debug("%s", self.object_id)
        return
    # Update Flow
    def _on_flow_update(self, network_id, prev, curr, attributes):
        # Template method: pre-hook -> propagate update -> post-hook.
        logging.debug("%s", self.object_id)
        if (self._on_flow_update_pre(network_id, prev, curr, attributes)):
            resp_list = self._update_flow_conversion(network_id,
                                                     prev,
                                                     curr,
                                                     attributes)
            self._on_flow_update_post(network_id,
                                      prev,
                                      curr,
                                      attributes,
                                      resp_list)
        return
    def _on_flow_update_pre(self, network_id, prev, curr, attributes):
        # Subclass hook: return False to skip propagating the flow update.
        logging.debug("%s", self.object_id)
        return True
    def _on_flow_update_post(self, network_id,
                             prev, curr, attributes, resp_list):
        # Subclass hook: inspect propagation responses.
        logging.debug("%s", self.object_id)
        return
    # Delete Node
    def _on_node_delete(self, network_id, node_msg):
        # Template method: pre-hook -> propagate delete -> post-hook.
        logging.debug("%s", self.object_id)
        if (self._on_node_delete_pre(network_id, node_msg)):
            resp_list = self._delete_node_conversion(network_id, node_msg)
            self._on_node_delete_post(network_id, node_msg, resp_list)
        return
    def _on_node_delete_pre(self, network_id, node_msg):
        # Subclass hook: return False to skip propagating the node delete.
        logging.debug("%s", self.object_id)
        return True
    def _on_node_delete_post(self, network_id, node_msg, resp_list):
        # Subclass hook: inspect deletion responses.
        logging.debug("%s", self.object_id)
        return
    # Delete Port
    def _on_port_delete(self, network_id, port_msg):
        # Template method: pre-hook -> propagate delete -> post-hook.
        logging.debug("%s", self.object_id)
        if (self._on_port_delete_pre(network_id, port_msg)):
            resp_list = self._delete_port_conversion(network_id, port_msg)
            self._on_port_delete_post(network_id, port_msg, resp_list)
        return
    def _on_port_delete_pre(self, network_id, port_msg):
        # Subclass hook: return False to skip propagating the port delete.
        logging.debug("%s", self.object_id)
        return True
    def _on_port_delete_post(self, network_id, port_msg, resp_list):
        # Subclass hook: inspect deletion responses.
        logging.debug("%s", self.object_id)
        return
    # Delete Link
    def _on_link_delete(self, network_id, link_msg):
        # Template method: pre-hook -> propagate delete -> post-hook.
        logging.debug("%s", self.object_id)
        if (self._on_link_delete_pre(network_id, link_msg)):
            resp_list = self._delete_link_conversion(network_id, link_msg)
            self._on_link_delete_post(network_id, link_msg, resp_list)
        return
    def _on_link_delete_pre(self, network_id, link_msg):
        # Subclass hook: return False to skip propagating the link delete.
        logging.debug("%s", self.object_id)
        return True
    def _on_link_delete_post(self, network_id, link_msg, resp_list):
        # Subclass hook: inspect deletion responses.
        logging.debug("%s", self.object_id)
        return
    # Delete Flow
    def _on_flow_delete(self, network_id, flow_msg):
        # Template method: pre-hook -> propagate delete -> post-hook.
        logging.debug("%s", self.object_id)
        if (self._on_flow_delete_pre(network_id, flow_msg)):
            resp_list = self._delete_flow_conversion(network_id, flow_msg)
            self._on_flow_delete_post(network_id, flow_msg, resp_list)
        return
    def _on_flow_delete_pre(self, network_id, flow_msg):
        # Subclass hook: return False to skip propagating the flow delete.
        logging.debug("%s", self.object_id)
        return True
    def _on_flow_delete_post(self, network_id, flow_msg, resp_list):
        # Subclass hook: inspect deletion responses.
        logging.debug("%s", self.object_id)
        return
###################################
# Add Conversion
###################################
# Add Node Conversion
def _add_node_conversion(self, network_id, node):
logging.debug("%s", self.object_id)
resp_list = {}
for nw_id in self._conversion_table.get_network(network_id):
if nw_id not in self._network_interfaces:
continue
network_if = self._network_interfaces[nw_id]
resp = network_if.put_node(node)
resp_list[nw_id] = resp
try:
resp_node = Node.create_from_packed(resp.body)
self._conversion_table.add_entry_node(network_id,
node.node_id,
nw_id,
resp_node.node_id)
except KeyError, err:
logging.error("PUT Node Invalid Response Message"
+ " KeyError: " + str(err))
return resp_list
# Add Port Conversion
def _add_port_conversion(self, network_id, port):
logging.debug("%s", self.object_id)
resp_list = {}
for nw_id in self._conversion_table.get_network(network_id):
if nw_id not in self._network_interfaces:
continue
network_if = self._network_interfaces[nw_id]
resp = network_if.put_port(port)
resp_list[nw_id] = resp
try:
resp_port = Port.create_from_packed(resp.body)
self._conversion_table.add_entry_port(network_id,
port.node_id,
port.port_id,
nw_id,
resp_port.node_id,
resp_port.port_id)
except KeyError, err:
logging.error("PUT Port Invalid Response Message"
+ " KeyError: " + str(err))
return resp_list
# Add Link Conversion
def _add_link_conversion(self, network_id, link):
    """Replicate *link* into every network mapped to *network_id*.

    PUTs the link to each mapped network interface and records the
    (src, dst) link pair in the conversion table.

    Returns:
        dict mapping destination network id -> PUT response.
    """
    logging.debug("%s", self.object_id)
    resp_list = {}
    for nw_id in self._conversion_table.get_network(network_id):
        if nw_id not in self._network_interfaces:
            continue
        network_if = self._network_interfaces[nw_id]
        resp = network_if.put_link(link)
        resp_list[nw_id] = resp
        try:
            resp_link = Link.create_from_packed(resp.body)
            self._conversion_table.add_entry_link(network_id,
                                                  link.link_id,
                                                  nw_id,
                                                  resp_link.link_id)
        except KeyError as err:  # was py2-only "except KeyError, err"
            logging.error("PUT Link Invalid Response Message"
                          + " KeyError: " + str(err))
    return resp_list
# Add Flow Conversion
def _add_flow_conversion(self, network_id, flow):
    """Replicate *flow* into every network mapped to *network_id*.

    PUTs the flow to each mapped network interface and records the
    (src, dst) flow pair in the conversion table.

    Returns:
        dict mapping destination network id -> PUT response.
    """
    logging.debug("%s", self.object_id)
    resp_list = {}
    for nw_id in self._conversion_table.get_network(network_id):
        if nw_id not in self._network_interfaces:
            continue
        network_if = self._network_interfaces[nw_id]
        resp = network_if.put_flow(flow)
        resp_list[nw_id] = resp
        try:
            resp_flow = Flow.create_from_packed(resp.body)
            self._conversion_table.add_entry_flow(network_id,
                                                 flow.flow_id,
                                                 nw_id,
                                                 resp_flow.flow_id)
        except KeyError as err:  # was py2-only "except KeyError, err"
            logging.error("PUT Flow Invalid Response Message"
                          + " KeyError: " + str(err))
    return resp_list
# Add InPacket Conversion
def _add_in_packet_conversion(self, network_id, in_packet):
    """Forward an InPacket from *network_id* to its mapped network.

    Deletes the packet from the source network, rewrites its node, port
    and header endpoints through the conversion table, and POSTs it to
    the destination network.

    Returns:
        dict {destination network id: POST response}; empty if any
        lookup/conversion step fails.
    """
    logging.debug("%s", self.object_id)
    resp_list = {}
    if network_id not in self._network_interfaces:
        return resp_list
    network_if = self._network_interfaces[network_id]
    # Remove the packet from the source network first.
    del_in_packet = self._del_in_packet(network_if, in_packet.id)
    if del_in_packet is None:
        logging.error("invalid DELETE Packet.")
        return resp_list
    # convert in_node.
    if del_in_packet.node is None:
        return resp_list
    pre_node_id = del_in_packet.node
    convert_in_node_id_list = self._conversion_table.get_node(network_id,
                                                              pre_node_id)
    if len(convert_in_node_id_list) == 0:
        return resp_list
    # Mapping entries are "<network>::<node>[::<port>]" strings.
    n_list = convert_in_node_id_list[0].split('::')
    del_in_packet.node = n_list[1]
    # convert in_port.
    if del_in_packet.port is None:
        return resp_list
    convert_in_port_id_list = \
        self._conversion_table.get_port(network_id,
                                        pre_node_id,
                                        del_in_packet.port)
    if len(convert_in_port_id_list) == 0:
        return resp_list
    p_list = convert_in_port_id_list[0].split('::')
    del_in_packet.port = p_list[2]
    # convert header.
    convert_port_id_list = \
        self._conversion_table.get_port(network_id,
                                        del_in_packet.header.in_node,
                                        del_in_packet.header.in_port)
    if len(convert_port_id_list) == 0:
        return resp_list
    attr_list = convert_port_id_list[0].split('::')
    if attr_list[0] not in self._network_interfaces:
        return resp_list
    network_if = self._network_interfaces[attr_list[0]]
    del_in_packet.header.in_node = attr_list[1]
    del_in_packet.header.in_port = attr_list[2]
    resp_list[network_if.network_id] = \
        network_if.post_in_packet(del_in_packet)
    return resp_list
# Add OutPacket Conversion
def _add_out_packet_conversion(self, network_id, out_packet):
    """Forward an OutPacket from *network_id* to its mapped network.

    Deletes the packet from the source network, rewrites its header,
    node and port lists through the conversion table, and POSTs it to
    the destination network (the one hosting the converted header port).

    Returns:
        dict {destination network id: POST response}; empty if any
        lookup/conversion step fails.
    """
    logging.debug("%s", self.object_id)
    resp_list = {}
    if network_id not in self._network_interfaces:
        return resp_list
    network_if = self._network_interfaces[network_id]
    del_out_packet = self._del_out_packet(network_if, out_packet.id)
    if del_out_packet is None:
        logging.error("invalid DELETE Packet.")
        return resp_list
    # convert header.
    convert_port_id_list = \
        self._conversion_table.get_port(network_id,
                                        del_out_packet.header.in_node,
                                        del_out_packet.header.in_port)
    if len(convert_port_id_list) == 0:
        return resp_list
    # Mapping entries are "<network>::<node>::<port>" strings.
    port_list = convert_port_id_list[0].split('::')
    del_out_packet.header.in_node = port_list[1]
    del_out_packet.header.in_port = port_list[2]
    # convert node.
    if del_out_packet.node is None:
        return resp_list
    pre_node_id = del_out_packet.node
    convert_node_id_list = \
        self._conversion_table.get_node(network_id,
                                        pre_node_id)
    if len(convert_node_id_list) == 0:
        return resp_list
    n_list = convert_node_id_list[0].split('::')
    del_out_packet.node = n_list[1]
    # convert ports, ports-except (the two lists are mutually exclusive).
    ports = del_out_packet.ports
    convert_ports = []
    except_ports = del_out_packet.ports_except
    convert_except_ports = []
    if ports is not None and len(ports) > 0:
        for port_id in ports:
            convert_port_id = self._conversion_table.get_port(network_id,
                                                              pre_node_id,
                                                              port_id)
            if len(convert_port_id) == 0:
                return resp_list
            p_list = convert_port_id[0].split('::')
            convert_ports.append(p_list[2])
    elif except_ports is not None and len(except_ports) > 0:
        for port_id in except_ports:
            convert_port_id = self._conversion_table.get_port(network_id,
                                                              pre_node_id,
                                                              port_id)
            if len(convert_port_id) == 0:
                return resp_list
            p_list = convert_port_id[0].split('::')
            convert_except_ports.append(p_list[2])
    if len(convert_ports) > 0:
        del_out_packet.ports = convert_ports
    elif len(convert_except_ports) > 0:
        del_out_packet.ports_except = convert_except_ports
    # Destination network is the one the converted header port lives in.
    network_if = self._network_interfaces[port_list[0]]
    resp_list[network_if.network_id] = \
        network_if.post_out_packet(del_out_packet)
    return resp_list
###################################
# Update Conversion
###################################
# Fixed (non-attribute) keys of each topology object type; used together
# with __get_ignore_keys() to decide which fields an update may touch.
keys_node = ["type", "version", "node_id"]
keys_port = ["type", "version", "node_id", "port_id",
"out_link", "in_link"]
keys_link = ["type", "version", "link_id", "src_node", "src_port",
"dst_node", "dst_port"]
keys_flow = ["type", "version", "flow_id", "owner",
"enabled", "priority", "status"]
# Mutable attribute names of each topology object type; entries listed in
# an update's "attributes" argument are *not* ignored when copying.
attributes_node = ["oper_status", "physical_id", "vendor"]
attributes_port = ["oper_status", "physical_id", "vendor",
"max_bandwidth", "unreserved_bandwidth", "is_boundary"]
attributes_link = ["oper_status", "cost", "latency", "req_latency",
"max_bandwidth", "unreserved_bandwidth",
"req_bandwidth", "establishment_status"]
attributes_flow = ["bandwidth", "req_bandwidth", "latency", "req_latency"]
# Update Node Conversion
def _update_node_conversion(self, network_id,
                            node_prev, node_curr, attributes):
    """Propagate changed attributes of a node to its converted copies.

    Only attributes named in *attributes* (i.e. not in the computed
    ignore list) are copied from *node_curr*; a PUT is issued only when
    something actually changed.

    Returns:
        dict {dst "<network>::<node>" key: updated Node}.
    """
    logging.debug("%s", self.object_id)
    resp_list = {}
    if network_id is None or node_curr is None:
        return resp_list
    attributes_list = [] if attributes is None else attributes
    # get ignore list
    node_ignore_attributes = self.__get_ignore_keys(self.attributes_node,
                                                    attributes_list)
    for dst_node in self._conversion_table.get_node(network_id,
                                                    node_curr.node_id):
        node_id = dst_node.split("::")
        if node_id[0] not in self._network_interfaces:
            continue
        network_if = self._network_interfaces[node_id[0]]
        # get node
        node = network_if.get_node(node_id[1])
        if node is None:
            continue
        # attr copy (curr -> body); skip ignored and unchanged attributes.
        updated = False
        curr_attr = node_curr.attributes
        for attr_key in curr_attr:
            if (attr_key in node_ignore_attributes or
                    (attr_key in node.attributes and
                     node.attributes[attr_key] == curr_attr[attr_key])):
                continue
            updated = True
            node.attributes[attr_key] = node_curr.attributes[attr_key]
        # put node
        if updated:
            resp = network_if.put_node(node)
            try:
                resp_node = Node.create_from_packed(resp.body)
                resp_list[dst_node] = resp_node
            except KeyError as err:  # was py2-only "except KeyError, err"
                logging.error("PUT Node Invalid Response Message"
                              + " KeyError: " + str(err))
    return resp_list
# Update Port Conversion
def _update_port_conversion(self, network_id,
                            port_prev, port_curr, attributes):
    """Propagate changed attributes of a port to its converted copies.

    Only attributes named in *attributes* (i.e. not in the computed
    ignore list) are copied from *port_curr*; a PUT is issued only when
    something actually changed.

    Returns:
        dict {dst "<network>::<node>::<port>" key: updated Port}.
    """
    logging.debug("%s", self.object_id)
    resp_list = {}
    if network_id is None or port_curr is None:
        return resp_list
    attributes_list = [] if attributes is None else attributes
    # get ignore list
    port_ignore_attributes = self.__get_ignore_keys(self.attributes_port,
                                                    attributes_list)
    for dst_port in self._conversion_table.get_port(network_id,
                                                    port_curr.node_id,
                                                    port_curr.port_id):
        port_id = dst_port.split("::")
        if port_id[0] not in self._network_interfaces:
            continue
        network_if = self._network_interfaces[port_id[0]]
        # get port
        port = network_if.get_port(port_id[1], port_id[2])
        if port is None:
            continue
        # attr copy (curr -> body); skip ignored and unchanged attributes.
        updated = False
        curr_attr = port_curr.attributes
        for attr_key in curr_attr:
            if (attr_key in port_ignore_attributes or
                    (attr_key in port.attributes and
                     port.attributes[attr_key] == curr_attr[attr_key])):
                continue
            updated = True
            port.attributes[attr_key] = port_curr.attributes[attr_key]
        # put port
        if updated:
            resp = network_if.put_port(port)
            try:
                resp_port = Port.create_from_packed(resp.body)
                resp_list[dst_port] = resp_port
            except KeyError as err:  # was py2-only "except KeyError, err"
                logging.error("PUT Port Invalid Response Message"
                              + " KeyError: " + str(err))
    return resp_list
# Update Link Conversion
def _update_link_conversion(self, network_id,
                            link_prev, link_curr, attributes):
    """Propagate changed attributes of a link to its converted copies.

    Only attributes named in *attributes* (i.e. not in the computed
    ignore list) are copied from *link_curr*; a PUT is issued only when
    something actually changed.

    Returns:
        dict {dst "<network>::<link>" key: updated Link}.
    """
    logging.debug("%s", self.object_id)
    resp_list = {}
    if network_id is None or link_curr is None:
        return resp_list
    attributes_list = [] if attributes is None else attributes
    # get ignore list
    link_ignore_attributes = self.__get_ignore_keys(self.attributes_link,
                                                    attributes_list)
    for dst_link in self._conversion_table.get_link(network_id,
                                                    link_curr.link_id):
        link_id = dst_link.split("::")
        if link_id[0] not in self._network_interfaces:
            continue
        network_if = self._network_interfaces[link_id[0]]
        # get link
        link = network_if.get_link(link_id[1])
        if link is None:
            continue
        # attr copy (curr -> body); skip ignored and unchanged attributes.
        updated = False
        curr_attr = link_curr.attributes
        for attr_key in curr_attr:
            if (attr_key in link_ignore_attributes or
                    (attr_key in link.attributes and
                     link.attributes[attr_key] == curr_attr[attr_key])):
                continue
            updated = True
            link.attributes[attr_key] = link_curr.attributes[attr_key]
        # put link
        if updated:
            resp = network_if.put_link(link)
            try:
                resp_link = Link.create_from_packed(resp.body)
                resp_list[dst_link] = resp_link
            except KeyError as err:  # was py2-only "except KeyError, err"
                logging.error("PUT Link Invalid Response Message"
                              + " KeyError: " + str(err))
    return resp_list
# Update Flow Conversion
def _update_flow_conversion(self, network_id,
                            flow_prev, flow_curr, attributes):
    """Propagate changed keys/attributes of a flow to its converted copies.

    The fixed keys enabled/priority/status and the mutable attributes are
    copied from *flow_curr* unless ignored; a PUT is issued only when
    something actually changed.

    Returns:
        dict {dst "<network>::<flow>" key: updated Flow}.
    """
    logging.debug("%s", self.object_id)
    resp_list = {}
    if network_id is None or flow_curr is None:
        return resp_list
    attributes_list = [] if attributes is None else attributes
    # get ignore lists.  NOTE: pass attributes_list (not the raw
    # `attributes`) so a None argument cannot crash __get_ignore_keys.
    flow_ignore_keys = self.__get_ignore_keys(self.keys_flow,
                                              attributes_list)
    flow_ignore_attributes = self.__get_ignore_keys(self.attributes_flow,
                                                    attributes_list)
    for dst_flow in self._conversion_table.get_flow(network_id,
                                                    flow_curr.flow_id):
        flow_id = dst_flow.split("::")
        if flow_id[0] not in self._network_interfaces:
            continue
        network_if = self._network_interfaces[flow_id[0]]
        # get flow
        flow = network_if.get_flow(flow_id[1])
        if flow is None:
            continue
        # key copy (curr -> body)
        updated = False
        if ("enabled" not in flow_ignore_keys
                and flow.enabled != flow_curr.enabled):
            updated = True
            flow.enabled = flow_curr.enabled
        if ("priority" not in flow_ignore_keys
                and flow.priority != flow_curr.priority):
            updated = True
            flow.priority = flow_curr.priority
        if ("status" not in flow_ignore_keys
                and flow.status != flow_curr.status):
            updated = True
            flow.status = flow_curr.status
        # attr copy (curr -> body); skip ignored and unchanged attributes.
        curr_attr = flow_curr.attributes
        for attr_key in curr_attr:
            if (attr_key in flow_ignore_attributes or
                    (attr_key in flow.attributes and
                     flow.attributes[attr_key] == curr_attr[attr_key])):
                continue
            updated = True
            flow.attributes[attr_key] = flow_curr.attributes[attr_key]
        # put flow
        if updated:
            resp = network_if.put_flow(flow)
            try:
                resp_flow = Flow.create_from_packed(resp.body)
                resp_list[dst_flow] = resp_flow
            except KeyError as err:  # was py2-only "except KeyError, err"
                logging.error("PUT Flow Invalid Response Message"
                              + " KeyError: " + str(err))
    return resp_list
###################################
# Delete Conversion
###################################
# Delete Node Conversion
def _delete_node_conversion(self, network_id, node):
    """Delete the converted copies of *node* and drop its table entries.

    Returns:
        dict {dst "<network>::<node>" key: deleted Node or None}, or
        None if a DELETE response body could not be parsed.
    """
    logging.debug("%s", self.object_id)
    resp_list = {}
    if network_id is None or node is None:
        return resp_list
    dst_nodes = self._conversion_table.get_node(network_id,
                                                node.node_id)
    for dst_node in dst_nodes:
        node_id = dst_node.split("::")
        if node_id[0] not in self._network_interfaces:
            continue
        network_if = self._network_interfaces[node_id[0]]
        resp = network_if.del_node(node_id[1])
        resp_node = None
        if resp.body is not None:
            try:
                resp_node = Node.create_from_packed(resp.body)
            except KeyError as err:  # was py2-only "except KeyError, err"
                logging.error("DELETE Node Invalid Response Message"
                              + " KeyError: " + str(err))
                return None
        resp_list[dst_node] = resp_node
    self._conversion_table.del_entry_node(network_id, node.node_id)
    return resp_list
# Delete Port Conversion
def _delete_port_conversion(self, network_id, port):
    """Delete the converted copies of *port* and drop its table entries.

    Returns:
        dict {dst "<network>::<node>::<port>" key: deleted Port or None},
        or None if a DELETE response body could not be parsed.
    """
    logging.debug("%s", self.object_id)
    resp_list = {}
    if network_id is None or port is None:
        return resp_list
    dst_ports = self._conversion_table.get_port(network_id,
                                                port.node_id,
                                                port.port_id)
    for dst_port in dst_ports:
        port_id = dst_port.split("::")
        if port_id[0] not in self._network_interfaces:
            continue
        network_if = self._network_interfaces[port_id[0]]
        resp = network_if.del_port(port_id[1], port_id[2])
        resp_port = None
        if resp.body is not None:
            try:
                resp_port = Port.create_from_packed(resp.body)
            except KeyError as err:  # was py2-only "except KeyError, err"
                logging.error("DELETE Port Invalid Response Message"
                              + " KeyError: " + str(err))
                return None
        resp_list[dst_port] = resp_port
    self._conversion_table.del_entry_port(network_id,
                                          port.node_id,
                                          port.port_id)
    return resp_list
# Delete Link Conversion
def _delete_link_conversion(self, network_id, link):
    """Delete the converted copies of *link* and drop its table entries.

    Returns:
        dict {dst "<network>::<link>" key: deleted Link or None}, or
        None if a DELETE response body could not be parsed.
    """
    logging.debug("%s", self.object_id)
    resp_list = {}
    if network_id is None or link is None:
        return resp_list
    dst_links = self._conversion_table.get_link(network_id,
                                                link.link_id)
    for dst_link in dst_links:
        link_id = dst_link.split("::")
        if link_id[0] not in self._network_interfaces:
            continue
        network_if = self._network_interfaces[link_id[0]]
        resp = network_if.del_link(link_id[1])
        resp_link = None
        if resp.body is not None:
            try:
                resp_link = Link.create_from_packed(resp.body)
            except KeyError as err:  # was py2-only "except KeyError, err"
                logging.error("DELETE Link Invalid Response Message"
                              + " KeyError: " + str(err))
                return None
        resp_list[dst_link] = resp_link
    self._conversion_table.del_entry_link(network_id, link.link_id)
    return resp_list
# Delete Flow Conversion
def _delete_flow_conversion(self, network_id, flow):
    """Delete the converted copies of *flow*, tear down the source flow,
    and drop its table entries.

    After deleting the copies, the source flow's status is cycled
    TEARDOWN -> NONE via two PUTs to signal removal.

    Returns:
        dict {dst "<network>::<flow>" key: deleted Flow or None}, or
        None if a DELETE response body could not be parsed.
    """
    logging.debug("%s", self.object_id)
    resp_list = {}
    if network_id is None or flow is None:
        return resp_list
    dst_flows = self._conversion_table.get_flow(network_id,
                                                flow.flow_id)
    for dst_flow in dst_flows:
        flow_id = dst_flow.split("::")
        if flow_id[0] not in self._network_interfaces:
            continue
        network_if = self._network_interfaces[flow_id[0]]
        resp = network_if.del_flow(flow_id[1])
        resp_flow = None
        if resp.body is not None:
            try:
                resp_flow = Flow.create_from_packed(resp.body)
            except KeyError as err:  # was py2-only "except KeyError, err"
                logging.error("DELETE Flow Invalid Response Message"
                              + " KeyError: " + str(err))
                return None
        resp_list[dst_flow] = resp_flow
    # Cycle the source flow through TEARDOWN and NONE states.
    network_if = self._network_interfaces[network_id]
    src_flow = network_if.get_flow(flow.flow_id)
    if src_flow is not None:
        src_flow.status = Flow.Status.TEARDOWN
        network_if.put_flow(src_flow)
        src_flow = network_if.get_flow(flow.flow_id)
        src_flow.status = Flow.Status.NONE
        network_if.put_flow(src_flow)
    self._conversion_table.del_entry_flow(network_id, flow.flow_id)
    return resp_list
###################################
# common method
###################################
def _del_in_packet(self, nw_if, packet_id):
    """DELETE an InPacket from *nw_if* and return the parsed packet.

    Returns:
        InPacket on success, None if the DELETE failed or the response
        body could not be parsed.
    """
    logging.debug("%s", self.object_id)
    resp = nw_if.del_in_packet(packet_id)
    if resp.is_error(Request.Method.DELETE):
        logging.error("invalid DELETE InPacket:" + resp.status_code)
        return None
    try:
        resp_in_packet = InPacket.create_from_packed(resp.body)
    except KeyError as err:  # was py2-only "except KeyError, err"
        logging.error("DELETE InPacket Invalid Response Message"
                      + " KeyError: " + str(err))
        return None
    return resp_in_packet
def _del_out_packet(self, nw_if, packet_id):
    """DELETE an OutPacket from *nw_if* and return the parsed packet.

    Returns:
        OutPacket on success, None if the DELETE failed or the response
        body could not be parsed.
    """
    logging.debug("%s", self.object_id)
    resp = nw_if.del_out_packet(packet_id)
    if resp.is_error(Request.Method.DELETE):
        logging.error("invalid DELETE OutPacket:" + resp.status_code)
        return None
    try:
        resp_out_packet = OutPacket.create_from_packed(resp.body)
    except KeyError as err:  # was py2-only "except KeyError, err"
        logging.error("DELETE OutPacket Invalid Response Message"
                      + " KeyError: " + str(err))
        return None
    return resp_out_packet
###################################
# private method
###################################
def __get_ignore_keys(self, all_keys, update_keys):
    """Return the subset of *all_keys* NOT named in *update_keys*.

    Entries of *update_keys* may be plain key names or
    "attributes::<name>" paths; both forms unmask the matching key.
    *all_keys* is deep-copied so the class-level lists are never mutated.
    """
    ignore_keys = copy.deepcopy(all_keys)
    reg_attr = re.compile("^attributes::.*")
    for update_key in update_keys:
        if reg_attr.match(update_key) is not None:
            attr_key = update_key.split("::")
            if attr_key[1] in ignore_keys:
                ignore_keys.remove(attr_key[1])
        else:
            if update_key in ignore_keys:
                ignore_keys.remove(update_key)
    logging.debug("ignore key_list:: " + str(ignore_keys))
    return ignore_keys
| apache-2.0 | 5,288,327,957,740,424,000 | 37.714939 | 89 | 0.508013 | false |
xoddark/libretro-arxlibertatis | scripts/cpplint.py | 3 | 137622 | #!/usr/bin/env python
#
# Note: this file has been adjusted to fit the Arx Libertatis project:
# - adjusted include guard style
# - hacked so that build/include doesn't complain about #include "Configure.h" lines
# - Allow lines that are only whitespace.
# - Remove 80-char line limit, keep 100 char limit.
# - No space after if et al.
# - Warn if a tab follows a non-tab character.
# - Don't require two spaces between code and comments
# - Warn if spaces are used for identation.
# - Allow //! and //!< comments
# - Allow struct name { typedef a type; }; one-liners
# - Allow #ifdef BOOST_PP_IS_ITERATING + #endif in place of header guards
# - C++ source files are named .cpp, not .cc
#
# Copyright (c) 2011-2013 Arx Libertatis Team (see the AUTHORS file)
# Copyright (c) 2009 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Here are some issues that I've had people identify in my code during reviews,
# that I think are possible to flag automatically in a lint tool. If these were
# caught by lint, it would save time both for myself and that of my reviewers.
# Most likely, some of these are beyond the scope of the current lint framework,
# but I think it is valuable to retain these wish-list items even if they cannot
# be immediately implemented.
#
# Suggestions
# -----------
# - Check for no 'explicit' for multi-arg ctor
# - Check for boolean assign RHS in parens
# - Check for ctor initializer-list colon position and spacing
# - Check that if there's a ctor, there should be a dtor
# - Check accessors that return non-pointer member variables are
# declared const
# - Check accessors that return non-const pointer member vars are
# *not* declared const
# - Check for using public includes for testing
# - Check for spaces between brackets in one-line inline method
# - Check for no assert()
# - Check for spaces surrounding operators
# - Check for 0 in pointer context (should be NULL)
# - Check for 0 in char context (should be '\0')
# - Check for camel-case method name conventions for methods
# that are not simple inline getters and setters
# - Check that base classes have virtual destructors
# put " // namespace" after } that closes a namespace, with
# namespace's name after 'namespace' if it is named.
# - Do not indent namespace contents
# - Avoid inlining non-trivial constructors in header files
# include base/basictypes.h if DISALLOW_EVIL_CONSTRUCTORS is used
# - Check for old-school (void) cast for call-sites of functions
# ignored return value
# - Check gUnit usage of anonymous namespace
# - Check for class declaration order (typedefs, consts, enums,
# ctor(s?), dtor, friend declarations, methods, member vars)
#
"""Does google-lint on c++ files.
The goal of this script is to identify places in the code that *may*
be in non-compliance with google style. It does not attempt to fix
up these problems -- the point is to educate. It does also not
attempt to find all problems, or to ensure that everything it does
find is legitimately a problem.
In particular, we can get very confused by /* and // inside strings!
We do a small hack, which is to ignore //'s with "'s after them on the
same line, but it is far from perfect (in either direction).
"""
import codecs
import getopt
import math # for log
import os
import re
import sre_compile
import string
import sys
import unicodedata
# Header extensions are the tail of the full lint-able extension list.
HEADER_EXTENSIONS = ['h', 'hpp', 'hxx', 'h++']

# All file extensions the linter will process (sources first, then headers).
EXTENSIONS = ['c', 'cc', 'cpp', 'cxx', 'c++'] + HEADER_EXTENSIONS
# Command-line help text; the trailing %s is filled with EXTENSIONS.
_USAGE = """
Syntax: cpplint.py [--verbose=#] [--output=vs7] [--filter=-x,+y,...]
[--counting=total|toplevel|detailed]
[--project=name]
<file> [file] ...
The style guidelines this tries to follow are those in
http://google-styleguide.googlecode.com/svn/trunk/cppguide.xml
Every problem is given a confidence score from 1-5, with 5 meaning we are
certain of the problem, and 1 meaning it could be a legitimate construct.
This will miss some errors, and is not a substitute for a code review.
To suppress false-positive errors of a certain category, add a
'NOLINT(category)' comment to the line. NOLINT or NOLINT(*)
suppresses errors of all categories on that line.
The files passed in will be linted; at least one file must be provided.
Linted extensions are %s. Other file types will be ignored.
Flags:
output=vs7
By default, the output is formatted to ease emacs parsing. Visual Studio
compatible output (vs7) may also be used. Other formats are unsupported.
project=name
Project name to use as include guard prefix.
verbose=#
Specify a number 0-5 to restrict errors to certain verbosity levels.
filter=-x,+y,...
Specify a comma-separated list of category-filters to apply: only
error messages whose category names pass the filters will be printed.
(Category names are printed with the message and look like
"[whitespace/indent]".) Filters are evaluated left to right.
"-FOO" and "FOO" means "do not print categories that start with FOO".
"+FOO" means "do print categories that start with FOO".
Examples: --filter=-whitespace,+whitespace/braces
--filter=whitespace,runtime/printf,+runtime/printf_format
--filter=-,+build/include_what_you_use
To see a list of all the categories used in cpplint, pass no arg:
--filter=
counting=total|toplevel|detailed
The total number of errors found is always printed. If
'toplevel' is provided, then the count of errors in each of
the top-level categories like 'build' and 'whitespace' will
also be printed. If 'detailed' is provided, then a count
is provided for each category like 'build/class'.
""" % (EXTENSIONS)
# We categorize each error message we print. Here are the categories.
# We want an explicit list so we can list them all in cpplint --filter=.
# If you add a new error message with a new category, add it to the list
# here! cpplint_unittest.py should tell you if you forget to do this.
# \ used for clearer layout -- pylint: disable-msg=C6013
_ERROR_CATEGORIES = [
'build/class',
'build/deprecated',
'build/endif_comment',
'build/explicit_make_pair',
'build/forward_decl',
'build/header_guard',
'build/include',
'build/include_alpha',
'build/include_order',
'build/include_what_you_use',
'build/namespaces',
'build/printf_format',
'build/storage_class',
'legal/copyright',
'readability/braces',
'readability/casting',
'readability/check',
'readability/constructors',
'readability/fn_size',
'readability/function',
'readability/multiline_comment',
'readability/multiline_string',
'readability/nolint',
'readability/streams',
'readability/todo',
'readability/utf8',
'runtime/arrays',
'runtime/casting',
'runtime/explicit',
'runtime/int',
'runtime/init',
'runtime/invalid_increment',
'runtime/member_string_references',
'runtime/memset',
'runtime/operator',
'runtime/printf',
'runtime/printf_format',
'runtime/references',
'runtime/rtti',
'runtime/sizeof',
'runtime/string',
'runtime/threadsafe_fn',
'runtime/virtual',
# BUG FIX: a missing comma here used to fuse the next two entries into
# the single bogus category 'whitespace/align_tabwhitespace/blank_line'.
'whitespace/align_tab',
'whitespace/blank_line',
'whitespace/braces',
'whitespace/carriage-return',
'whitespace/comma',
'whitespace/comments',
'whitespace/end_of_line',
'whitespace/ending_newline',
'whitespace/indent',
'whitespace/ident_space',
'whitespace/labels',
'whitespace/line_length',
'whitespace/newline',
'whitespace/operators',
'whitespace/templates',
'whitespace/parens',
'whitespace/semicolon',
'whitespace/tab',
'whitespace/todo'
]
# The default state of the category filter. This is overridden by the --filter=
# flag. By default all errors are on, so only add here categories that should be
# off by default (i.e., categories that must be enabled by the --filter= flags).
# All entries here should start with a '-' or '+', as in the --filter= flag.
_DEFAULT_FILTERS = ['-build/include_alpha']
# We used to check for high-bit characters, but after much discussion we
# decided those were OK, as long as they were in UTF-8 and didn't represent
# hard-coded international strings, which belong in a separate i18n file.
# Headers that we consider STL headers.
# (frozenset for cheap membership tests against bare header names.)
_STL_HEADERS = frozenset([
'algobase.h', 'algorithm', 'alloc.h', 'bitset', 'deque', 'exception',
'function.h', 'functional', 'hash_map', 'hash_map.h', 'hash_set',
'hash_set.h', 'iterator', 'list', 'list.h', 'map', 'memory', 'new',
'pair.h', 'pthread_alloc', 'queue', 'set', 'set.h', 'sstream', 'stack',
'stl_alloc.h', 'stl_relops.h', 'type_traits.h',
'utility', 'vector', 'vector.h',
])
# Non-STL C++ system headers.
# (frozenset for cheap membership tests against bare header names.)
_CPP_HEADERS = frozenset([
'algo.h', 'builtinbuf.h', 'bvector.h', 'cassert', 'cctype',
'cerrno', 'cfloat', 'ciso646', 'climits', 'clocale', 'cmath',
'complex', 'complex.h', 'csetjmp', 'csignal', 'cstdarg', 'cstddef',
'cstdio', 'cstdlib', 'cstring', 'ctime', 'cwchar', 'cwctype',
'defalloc.h', 'deque.h', 'editbuf.h', 'exception', 'fstream',
'fstream.h', 'hashtable.h', 'heap.h', 'indstream.h', 'iomanip',
'iomanip.h', 'ios', 'iosfwd', 'iostream', 'iostream.h', 'istream',
'istream.h', 'iterator.h', 'limits', 'map.h', 'multimap.h', 'multiset.h',
'numeric', 'ostream', 'ostream.h', 'parsestream.h', 'pfstream.h',
'PlotFile.h', 'procbuf.h', 'pthread_alloc.h', 'rope', 'rope.h',
'ropeimpl.h', 'SFile.h', 'slist', 'slist.h', 'stack.h', 'stdexcept',
'stdiostream.h', 'streambuf.h', 'stream.h', 'strfile.h', 'string',
'strstream', 'strstream.h', 'tempbuf.h', 'tree.h', 'typeinfo', 'valarray',
])
# Assertion macros. These are defined in base/logging.h and
# testing/base/gunit.h. Note that the _M versions need to come first
# for substring matching to work.
_CHECK_MACROS = [
'DCHECK', 'CHECK',
'EXPECT_TRUE_M', 'EXPECT_TRUE',
'ASSERT_TRUE_M', 'ASSERT_TRUE',
'EXPECT_FALSE_M', 'EXPECT_FALSE',
'ASSERT_FALSE_M', 'ASSERT_FALSE',
]

# Maps each check macro to {operator: replacement macro}, e.g.
# _CHECK_REPLACEMENT['CHECK']['=='] == 'CHECK_EQ'.
_CHECK_REPLACEMENT = dict((macro, {}) for macro in _CHECK_MACROS)

# Macros asserting truth take the operator's direct replacement...
for op, repl in [('==', 'EQ'), ('!=', 'NE'),
                 ('>=', 'GE'), ('>', 'GT'),
                 ('<=', 'LE'), ('<', 'LT')]:
  _CHECK_REPLACEMENT['DCHECK'][op] = 'DCHECK_' + repl
  _CHECK_REPLACEMENT['CHECK'][op] = 'CHECK_' + repl
  _CHECK_REPLACEMENT['EXPECT_TRUE'][op] = 'EXPECT_' + repl
  _CHECK_REPLACEMENT['ASSERT_TRUE'][op] = 'ASSERT_' + repl
  _CHECK_REPLACEMENT['EXPECT_TRUE_M'][op] = 'EXPECT_' + repl + '_M'
  _CHECK_REPLACEMENT['ASSERT_TRUE_M'][op] = 'ASSERT_' + repl + '_M'

# ...while macros asserting falsehood take the inverted operator.
for op, repl in [('==', 'NE'), ('!=', 'EQ'),
                 ('>=', 'LT'), ('>', 'LE'),
                 ('<=', 'GT'), ('<', 'GE')]:
  _CHECK_REPLACEMENT['EXPECT_FALSE'][op] = 'EXPECT_' + repl
  _CHECK_REPLACEMENT['ASSERT_FALSE'][op] = 'ASSERT_' + repl
  _CHECK_REPLACEMENT['EXPECT_FALSE_M'][op] = 'EXPECT_' + repl + '_M'
  _CHECK_REPLACEMENT['ASSERT_FALSE_M'][op] = 'ASSERT_' + repl + '_M'
# These constants define types of headers for use with
# _IncludeState.CheckNextIncludeOrder().
_C_SYS_HEADER = 1
_CPP_SYS_HEADER = 2
_LIKELY_MY_HEADER = 3
_POSSIBLE_MY_HEADER = 4
_OTHER_HEADER = 5
# Cache of compiled regexps, filled lazily by Match() and Search() below.
_regexp_compile_cache = {}
# Finds occurrences of NOLINT or NOLINT(...).
_RE_SUPPRESSION = re.compile(r'\bNOLINT\b(\([^)]*\))?')
# {str, set(int)}: a map from error categories to sets of linenumbers
# on which those errors are expected and should be suppressed.
_error_suppressions = {}
# Python 2/3 compatibility shims: u() normalizes literals to text,
# TEXT_TYPE is the native text type, itervalues/iteritems are lazy
# dict iterators on both major versions.
if sys.version_info >= (3,):
  def u(x):
    return x
  TEXT_TYPE = str
  # BINARY_TYPE = bytes
  itervalues = dict.values
  iteritems = dict.items
else:
  def u(x):
    return codecs.unicode_escape_decode(x)[0]
  TEXT_TYPE = unicode
  # BINARY_TYPE = str
  range = xrange
  itervalues = dict.itervalues
  iteritems = dict.iteritems
def ParseNolintSuppressions(filename, raw_line, linenum, error):
  """Updates the global list of error-suppressions.

  Scans raw_line for a NOLINT comment and records the suppressed
  category (or all categories) for linenum in _error_suppressions.
  Reports an error through *error* for unknown category names.

  Args:
    filename: str, the name of the input file.
    raw_line: str, the line of input text, with comments.
    linenum: int, the number of the current line.
    error: function, an error handler.
  """
  # FIXME(adonovan): "NOLINT(" is misparsed as NOLINT(*).
  matched = _RE_SUPPRESSION.search(raw_line)
  if not matched:
    return
  category = matched.group(1)
  if category in (None, '(*)'):  # => "suppress all"
    _error_suppressions.setdefault(None, set()).add(linenum)
    return
  if category.startswith('(') and category.endswith(')'):
    category = category[1:-1]
    if category in _ERROR_CATEGORIES:
      _error_suppressions.setdefault(category, set()).add(linenum)
    else:
      error(filename, linenum, 'readability/nolint', 5,
            'Unknown NOLINT error category: %s' % category)
def ResetNolintSuppressions():
  """Resets the set of NOLINT suppressions to empty (e.g. between files)."""
  _error_suppressions.clear()
def IsErrorSuppressedByNolint(category, linenum):
  """Returns true if the specified error category is suppressed on this line.

  Consults the global _error_suppressions map populated by
  ParseNolintSuppressions/ResetNolintSuppressions.

  Args:
    category: str, the category of the error.
    linenum: int, the current line number.
  Returns:
    bool, True iff the error should be suppressed due to a NOLINT comment.
  """
  # A line is suppressed either for this specific category...
  if linenum in _error_suppressions.get(category, set()):
    return True
  # ...or by a blanket NOLINT / NOLINT(*) recorded under the None key.
  return linenum in _error_suppressions.get(None, set())
def Match(pattern, s):
  """Matches the string with the pattern, caching the compiled regexp."""
  # The regexp compilation caching is inlined in both Match and Search for
  # performance reasons; factoring it out into a separate function turns out
  # to be noticeably expensive.
  try:
    regexp = _regexp_compile_cache[pattern]
  except KeyError:
    regexp = _regexp_compile_cache[pattern] = sre_compile.compile(pattern)
  return regexp.match(s)
def Search(pattern, s):
  """Searches the string for the pattern, caching the compiled regexp."""
  try:
    regexp = _regexp_compile_cache[pattern]
  except KeyError:
    regexp = _regexp_compile_cache[pattern] = sre_compile.compile(pattern)
  return regexp.search(s)
class _IncludeState(dict):
  """Tracks line numbers for includes, and the order in which includes appear.

  As a dict, an _IncludeState object serves as a mapping between include
  filename and line number on which that file was included.

  Call CheckNextIncludeOrder() once for each header in the file, passing
  in the type constants defined above. Calls in an illegal order will
  raise an _IncludeError with an appropriate error message.
  """
  # self._section will move monotonically through this set. If it ever
  # needs to move backwards, CheckNextIncludeOrder will raise an error.
  _INITIAL_SECTION = 0
  _MY_H_SECTION = 1
  _C_SECTION = 2
  _CPP_SECTION = 3
  _OTHER_H_SECTION = 4

  # Human-readable names for the _XXX_HEADER type constants (declared
  # earlier in this module), used when building error messages.
  _TYPE_NAMES = {
      _C_SYS_HEADER: 'C system header',
      _CPP_SYS_HEADER: 'C++ system header',
      _LIKELY_MY_HEADER: 'header this file implements',
      _POSSIBLE_MY_HEADER: 'header this file may implement',
      _OTHER_HEADER: 'other header',
      }
  # Human-readable names for each section, used in error messages.
  _SECTION_NAMES = {
      _INITIAL_SECTION: "... nothing. (This can't be an error.)",
      _MY_H_SECTION: 'a header this file implements',
      _C_SECTION: 'C system header',
      _CPP_SECTION: 'C++ system header',
      _OTHER_H_SECTION: 'other header',
      }

  def __init__(self):
    dict.__init__(self)
    # The name of the current section.
    self._section = self._INITIAL_SECTION
    # The path of last found header.
    self._last_header = ''

  def CanonicalizeAlphabeticalOrder(self, header_path):
    """Returns a path canonicalized for alphabetical comparison.

    - replaces "-" with "_" so they both cmp the same.
    - removes '-inl' since we don't require them to be after the main header.
    - lowercase everything, just in case.

    Args:
      header_path: Path to be canonicalized.

    Returns:
      Canonicalized path.
    """
    return header_path.replace('-inl.h', '.h').replace('-', '_').lower()

  def IsInAlphabeticalOrder(self, header_path):
    """Check if a header is in alphabetical order with the previous header.

    Args:
      header_path: Header to be checked.

    Returns:
      Returns true if the header is in alphabetical order.
    """
    canonical_header = self.CanonicalizeAlphabeticalOrder(header_path)
    if self._last_header > canonical_header:
      return False
    self._last_header = canonical_header
    return True

  def CheckNextIncludeOrder(self, header_type):
    """Returns a non-empty error message if the next header is out of order.

    This function also updates the internal state to be ready to check
    the next include.

    Args:
      header_type: One of the _XXX_HEADER constants defined above.

    Returns:
      The empty string if the header is in the right order, or an
      error message describing what's wrong.
    """
    # Build the message now, while self._section still names the section we
    # were in *before* seeing this header.
    error_message = ('Found %s after %s' %
                     (self._TYPE_NAMES[header_type],
                      self._SECTION_NAMES[self._section]))
    last_section = self._section
    if header_type == _C_SYS_HEADER:
      if self._section <= self._C_SECTION:
        self._section = self._C_SECTION
      else:
        # Section order moved backwards: this header is out of order.
        self._last_header = ''
        return error_message
    elif header_type == _CPP_SYS_HEADER:
      if self._section <= self._CPP_SECTION:
        self._section = self._CPP_SECTION
      else:
        self._last_header = ''
        return error_message
    elif header_type == _LIKELY_MY_HEADER:
      if self._section <= self._MY_H_SECTION:
        self._section = self._MY_H_SECTION
      else:
        self._section = self._OTHER_H_SECTION
    elif header_type == _POSSIBLE_MY_HEADER:
      if self._section <= self._MY_H_SECTION:
        self._section = self._MY_H_SECTION
      else:
        # This will always be the fallback because we're not sure
        # enough that the header is associated with this file.
        self._section = self._OTHER_H_SECTION
    else:
      assert header_type == _OTHER_HEADER
      self._section = self._OTHER_H_SECTION
    # Alphabetical-order tracking restarts whenever we enter a new section.
    if last_section != self._section:
      self._last_header = ''
    return ''
class _CppLintState(object):
"""Maintains module-wide state.."""
def __init__(self):
self.project_name = 'src'
self.verbose_level = 1 # global setting.
self.error_count = 0 # global count of reported errors
# filters to apply when emitting error messages
self.filters = _DEFAULT_FILTERS[:]
self.counting = 'total' # In what way are we counting errors?
self.errors_by_category = {} # string to int dict storing error counts
# output format:
# "emacs" - format that emacs can parse (default)
# "vs7" - format that Microsoft Visual Studio 7 can parse
self.output_format = 'emacs'
def SetOutputFormat(self, output_format):
"""Sets the output format for errors."""
self.output_format = output_format
def SetVerboseLevel(self, level):
"""Sets the module's verbosity, and returns the previous setting."""
last_verbose_level = self.verbose_level
self.verbose_level = level
return last_verbose_level
def SetProjectName(self, name):
"""Sets the module's verbosity, and returns the previous setting."""
last_project_name = self.project_name
self.project_name = name
return last_project_name
def SetCountingStyle(self, counting_style):
"""Sets the module's counting options."""
self.counting = counting_style
def SetFilters(self, filters):
"""Sets the error-message filters.
These filters are applied when deciding whether to emit a given
error message.
Args:
filters: A string of comma-separated filters (eg "+whitespace/indent").
Each filter should start with + or -; else we die.
Raises:
ValueError: The comma-separated filters did not all start with '+' or '-'.
E.g. "-,+whitespace,-whitespace/indent,whitespace/badfilter"
"""
# Default filters always have less priority than the flag ones.
self.filters = _DEFAULT_FILTERS[:]
for filt in filters.split(','):
clean_filt = filt.strip()
if clean_filt:
self.filters.append(clean_filt)
for filt in self.filters:
if not (filt.startswith('+') or filt.startswith('-')):
raise ValueError('Every filter in --filters must start with + or -'
' (%s does not)' % filt)
def ResetErrorCounts(self):
"""Sets the module's error statistic back to zero."""
self.error_count = 0
self.errors_by_category = {}
def IncrementErrorCount(self, category):
"""Bumps the module's error statistic."""
self.error_count += 1
if self.counting in ('toplevel', 'detailed'):
if self.counting != 'detailed':
category = category.split('/')[0]
if category not in self.errors_by_category:
self.errors_by_category[category] = 0
self.errors_by_category[category] += 1
def PrintErrorCounts(self):
"""Print a summary of errors by category, and the total."""
for category, count in iteritems(self.errors_by_category):
sys.stderr.write('Category \'%s\' errors found: %d\n' %
(category, count))
sys.stderr.write('Total errors found: %d\n' % self.error_count)
# The single shared module-wide lint state instance, accessed via the
# module-level wrapper functions below.
_cpplint_state = _CppLintState()
# Module-level convenience wrappers that read or mutate the shared
# _cpplint_state instance.

def _OutputFormat():
  """Gets the module's output format."""
  return _cpplint_state.output_format


def _SetOutputFormat(output_format):
  """Sets the module's output format."""
  _cpplint_state.SetOutputFormat(output_format)


def _VerboseLevel():
  """Returns the module's verbosity setting."""
  return _cpplint_state.verbose_level


def _SetVerboseLevel(level):
  """Sets the module's verbosity, and returns the previous setting."""
  return _cpplint_state.SetVerboseLevel(level)


def _ProjectName():
  """Returns the module's project name setting."""
  return _cpplint_state.project_name


def _SetProjectName(name):
  """Sets the module's project name, and returns the previous setting."""
  return _cpplint_state.SetProjectName(name)


def _SetCountingStyle(level):
  """Sets the module's counting options."""
  _cpplint_state.SetCountingStyle(level)


def _Filters():
  """Returns the module's list of output filters, as a list."""
  return _cpplint_state.filters


def _SetFilters(filters):
  """Sets the module's error-message filters.

  These filters are applied when deciding whether to emit a given
  error message.

  Args:
    filters: A string of comma-separated filters (eg "whitespace/indent").
             Each filter should start with + or -; else we die.
  """
  _cpplint_state.SetFilters(filters)
class _FunctionState(object):
"""Tracks current function name and the number of lines in its body."""
_NORMAL_TRIGGER = 250 # for --v=0, 500 for --v=1, etc.
_TEST_TRIGGER = 400 # about 50% more than _NORMAL_TRIGGER.
def __init__(self):
self.in_a_function = False
self.lines_in_function = 0
self.current_function = ''
def Begin(self, function_name):
"""Start analyzing function body.
Args:
function_name: The name of the function being tracked.
"""
self.in_a_function = True
self.lines_in_function = 0
self.current_function = function_name
def Count(self):
"""Count line in current function body."""
if self.in_a_function:
self.lines_in_function += 1
def Check(self, error, filename, linenum):
"""Report if too many lines in function body.
Args:
error: The function to call with any errors found.
filename: The name of the current file.
linenum: The number of the line to check.
"""
if Match(r'T(EST|est)', self.current_function):
base_trigger = self._TEST_TRIGGER
else:
base_trigger = self._NORMAL_TRIGGER
trigger = base_trigger * 2**_VerboseLevel()
if self.lines_in_function > trigger:
error_level = int(math.log(self.lines_in_function / base_trigger, 2))
# 50 => 0, 100 => 1, 200 => 2, 400 => 3, 800 => 4, 1600 => 5, ...
if error_level > 5:
error_level = 5
error(filename, linenum, 'readability/fn_size', error_level,
'Small and focused functions are preferred:'
' %s has %d non-comment lines'
' (error triggered by exceeding %d lines).' % (
self.current_function, self.lines_in_function, trigger))
def End(self):
"""Stop analyzing function body."""
self.in_a_function = False
class _IncludeError(Exception):
  """Indicates a problem with the include order in a file."""
  # NOTE(review): _IncludeState.CheckNextIncludeOrder's docstring says it may
  # raise this, though its visible implementation returns error strings
  # instead -- confirm whether any caller still raises it.
  pass
class FileInfo:
  """Provides utility functions for filenames.

  FileInfo provides easy access to the components of a file's path
  relative to the project root.
  """

  def __init__(self, filename):
    self._filename = filename

  def FullName(self):
    """Make Windows paths like Unix."""
    return os.path.abspath(self._filename).replace('\\', '/')

  def RepositoryName(self):
    """FullName after removing the local path to the repository.

    If we have a real absolute path name here we can try to do something smart:
    detecting the root of the checkout and truncating /path/to/checkout from
    the name so that we get header guards that don't include things like
    "C:\Documents and Settings\..." or "/home/username/..." in them and thus
    people on different computers who have checked the source out to different
    locations won't see bogus errors.
    """
    fullname = self.FullName()
    if os.path.exists(fullname):
      project_dir = os.path.dirname(fullname)
      if os.path.exists(os.path.join(project_dir, ".svn")):
        # If there's a .svn file in the current directory, we recursively look
        # up the directory tree for the top of the SVN checkout
        root_dir = project_dir
        one_up_dir = os.path.dirname(root_dir)
        # Walk upward while the parent directory is still inside the checkout.
        while os.path.exists(os.path.join(one_up_dir, ".svn")):
          root_dir = os.path.dirname(root_dir)
          one_up_dir = os.path.dirname(one_up_dir)
        prefix = os.path.commonprefix([root_dir, project_dir])
        return fullname[len(prefix) + 1:]
      # Not SVN <= 1.6? Try to find a git, hg, or svn top level directory by
      # searching up from the current path.
      root_dir = os.path.dirname(fullname)
      # Stop at the filesystem root (dirname becomes a fixed point), at a
      # directory literally named "src", or at a VCS top-level marker.
      while (root_dir != os.path.dirname(root_dir) and
             os.path.basename(root_dir) != "src" and
             not os.path.exists(os.path.join(root_dir, ".git")) and
             not os.path.exists(os.path.join(root_dir, ".hg")) and
             not os.path.exists(os.path.join(root_dir, ".svn"))):
        root_dir = os.path.dirname(root_dir)
      if (os.path.basename(root_dir) == "src" or
          os.path.exists(os.path.join(root_dir, ".git")) or
          os.path.exists(os.path.join(root_dir, ".hg")) or
          os.path.exists(os.path.join(root_dir, ".svn"))):
        prefix = os.path.commonprefix([root_dir, project_dir])
        return fullname[len(prefix) + 1:]
    # Don't know what to do; header guard warnings may be wrong...
    return fullname

  def Split(self):
    """Splits the file into the directory, basename, and extension.

    For 'chrome/browser/browser.cpp', Split() would
    return ('chrome/browser', 'browser', '.cpp')

    Returns:
      A tuple of (directory, basename, extension).
    """
    googlename = self.RepositoryName()
    project, rest = os.path.split(googlename)
    return (project,) + os.path.splitext(rest)

  def BaseName(self):
    """File base name - text after the final slash, before the final period."""
    return self.Split()[1]

  def Extension(self):
    """File extension - text following the final period."""
    return self.Split()[2]

  def NoExtension(self):
    """File has no source file extension."""
    return '/'.join(self.Split()[0:2])

  def IsSource(self):
    """File has a source file extension."""
    return self.Extension()[1:] in EXTENSIONS
def _ShouldPrintError(category, confidence, linenum):
  """If confidence >= verbose, category passes filter and is not suppressed."""
  # There are three ways we might decide not to print an error message:
  # a "NOLINT(category)" comment appears in the source,
  # the verbosity level isn't high enough, or the filters filter it out.
  if IsErrorSuppressedByNolint(category, linenum):
    return False
  if confidence < _cpplint_state.verbose_level:
    return False

  # Walk the filter list in order; a later filter overrides an earlier one.
  suppressed = False
  for one_filter in _Filters():
    matches = category.startswith(one_filter[1:])
    if one_filter.startswith('-'):
      if matches:
        suppressed = True
    elif one_filter.startswith('+'):
      if matches:
        suppressed = False
    else:
      assert False  # should have been checked for in SetFilter.
  return not suppressed
def Error(filename, linenum, category, confidence, message):
  """Logs the fact we've found a lint error.

  We log where the error was found, and also our confidence in the error,
  that is, how certain we are this is a legitimate style regression, and
  not a misidentification or a use that's sometimes justified.

  False positives can be suppressed by the use of
  "cpplint(category)" comments on the offending line. These are
  parsed into _error_suppressions.

  Args:
    filename: The name of the file containing the error.
    linenum: The number of the line containing the error.
    category: A string used to describe the "category" this bug
      falls under: "whitespace", say, or "runtime". Categories
      may have a hierarchy separated by slashes: "whitespace/indent".
    confidence: A number from 1-5 representing a confidence score for
      the error, with 5 meaning that we are certain of the problem,
      and 1 meaning that it could be a legitimate construct.
    message: The error message.
  """
  if not _ShouldPrintError(category, confidence, linenum):
    return
  _cpplint_state.IncrementErrorCount(category)
  if _cpplint_state.output_format == 'vs7':
    # Visual Studio style: "file(line): message".
    sys.stderr.write('%s(%s): %s [%s] [%d]\n' % (
        filename, linenum, message, category, confidence))
  else:
    # Emacs style: "file:line: message".
    sys.stderr.write('%s:%s: %s [%s] [%d]\n' % (
        filename, linenum, message, category, confidence))
# Matches standard C++ escape esequences per 2.13.2.3 of the C++ standard.
_RE_PATTERN_CLEANSE_LINE_ESCAPES = re.compile(
r'\\([abfnrtv?"\\\']|\d+|x[0-9a-fA-F]+)')
# Matches strings. Escape codes should already be removed by ESCAPES.
_RE_PATTERN_CLEANSE_LINE_DOUBLE_QUOTES = re.compile(r'"[^"]*"')
# Matches characters. Escape codes should already be removed by ESCAPES.
_RE_PATTERN_CLEANSE_LINE_SINGLE_QUOTES = re.compile(r"'.'")
# Matches multi-line C++ comments.
# This RE is a little bit more complicated than one might expect, because we
# have to take care of space removals tools so we can handle comments inside
# statements better.
# The current rule is: We only clear spaces from both sides when we're at the
# end of the line. Otherwise, we try to remove spaces from the right side,
# if this doesn't work we try on left side but only if there's a non-character
# on the right.
_RE_PATTERN_CLEANSE_LINE_C_COMMENTS = re.compile(
r"""(\s*/\*.*\*/\s*$|
/\*.*\*/\s+|
\s+/\*.*\*/(?=\W)|
/\*.*\*/)""", re.VERBOSE)
def IsCppString(line):
  """Does line terminate so, that the next symbol is in string constant.

  This function does not consider single-line nor multi-line comments.

  Args:
    line: is a partial line of code starting from the 0..n.

  Returns:
    True, if next character appended to 'line' is inside a
    string constant.
  """
  # Neutralize escaped backslashes first; after this, \\" cannot be
  # mistaken for an escaped quote \".
  line = line.replace(r'\\', 'XX')
  # Quotes that are escaped (\") or are themselves char literals ('"')
  # do not open or close a string.
  unescaped_quotes = (line.count('"') - line.count(r'\"')
                      - line.count("'\"'"))
  return (unescaped_quotes & 1) == 1
def FindNextMultiLineCommentStart(lines, lineix):
  """Find the beginning marker for a multiline comment."""
  for ix in range(lineix, len(lines)):
    stripped = lines[ix].strip()
    # Only return this marker if the comment goes beyond this line.
    if stripped.startswith('/*') and stripped.find('*/', 2) < 0:
      return ix
  return len(lines)
def FindNextMultiLineCommentEnd(lines, lineix):
  """We are inside a comment, find the end marker."""
  for ix in range(lineix, len(lines)):
    if lines[ix].strip().endswith('*/'):
      return ix
  return len(lines)
def RemoveMultiLineCommentsFromRange(lines, begin, end):
  """Clears a range of lines for multi-line comments."""
  # Having // dummy comments makes the lines non-empty, so we will not get
  # unnecessary blank line warnings later in the code.  Leading tabs are
  # preserved so indentation checks still see the original depth.
  for i in range(begin, end):
    indent = re.search(r'^\t*', lines[i]).group(0)
    lines[i] = indent + '// dummy'
def RemoveMultiLineComments(filename, lines, error):
  """Removes multiline (c-style) comments from lines."""
  ix = 0
  while ix < len(lines):
    begin = FindNextMultiLineCommentStart(lines, ix)
    if begin >= len(lines):
      # No further comment openings; nothing left to do.
      return
    end = FindNextMultiLineCommentEnd(lines, begin)
    if end >= len(lines):
      error(filename, begin + 1, 'readability/multiline_comment', 5,
            'Could not find end of multi-line comment')
      return
    RemoveMultiLineCommentsFromRange(lines, begin, end + 1)
    ix = end + 1
def CleanseComments(line):
  """Removes //-comments and single-line C-style /* */ comments.

  Args:
    line: A line of C++ source.

  Returns:
    The line with single-line comments removed.
  """
  pos = line.find('//')
  # Only treat '//' as a comment marker when it is not inside a string.
  if pos != -1 and not IsCppString(line[:pos]):
    line = line[:pos].rstrip()
  # get rid of /* ... */
  return _RE_PATTERN_CLEANSE_LINE_C_COMMENTS.sub('', line)
class CleansedLines(object):
  """Holds 3 copies of all lines with different preprocessing applied to them.

  1) elided member contains lines without strings and comments,
  2) lines member contains lines without comments, and
  3) raw member contains all the lines without processing.
  All these three members are of <type 'list'>, and of the same length.
  """

  def __init__(self, lines):
    self.raw_lines = lines
    self.num_lines = len(lines)
    self.elided = []
    self.lines = []
    for raw in lines:
      self.lines.append(CleanseComments(raw))
      self.elided.append(CleanseComments(self._CollapseStrings(raw)))

  def NumLines(self):
    """Returns the number of lines represented."""
    return self.num_lines

  @staticmethod
  def _CollapseStrings(elided):
    """Collapses strings and chars on a line to simple "" or '' blocks.

    We nix strings first so we're not fooled by text like '"http://"'

    Args:
      elided: The line being processed.

    Returns:
      The line with collapsed strings.
    """
    if _RE_PATTERN_INCLUDE.match(elided):
      # #include lines keep their quoted/bracketed filename intact.
      return elided
    # Remove escaped characters first to make quote/single quote collapsing
    # basic. Things that look like escaped characters shouldn't occur
    # outside of strings and chars.
    collapsed = _RE_PATTERN_CLEANSE_LINE_ESCAPES.sub('', elided)
    collapsed = _RE_PATTERN_CLEANSE_LINE_SINGLE_QUOTES.sub("''", collapsed)
    return _RE_PATTERN_CLEANSE_LINE_DOUBLE_QUOTES.sub('""', collapsed)
def CloseExpression(clean_lines, linenum, pos):
  """If input points to ( or { or [, finds the position that closes it.

  If lines[linenum][pos] points to a '(' or '{' or '[', finds the
  linenum/pos that correspond to the closing of the expression.

  Args:
    clean_lines: A CleansedLines instance containing the file.
    linenum: The number of the line to check.
    pos: A position on the line.

  Returns:
    A tuple (line, linenum, pos) pointer *past* the closing brace, or
    (line, len(lines), -1) if we never find a close. Note we ignore
    strings and comments when matching; and the line we return is the
    'cleansed' line at linenum.
  """
  line = clean_lines.elided[linenum]
  startchar = line[pos]
  if startchar not in '({[':
    return (line, clean_lines.NumLines(), -1)
  # Map the opening character to its matching closer.
  endchar = {'(': ')', '[': ']', '{': '}'}[startchar]

  # Net number of unclosed openers on the current line.
  num_open = line.count(startchar) - line.count(endchar)
  # Advance line by line until the expression is balanced.  Stop before
  # running off the end of the file (guard fixed: the old '< NumLines()'
  # bound allowed an out-of-range index on unbalanced files).
  while num_open > 0 and linenum < clean_lines.NumLines() - 1:
    linenum += 1
    line = clean_lines.elided[linenum]
    num_open += line.count(startchar) - line.count(endchar)

  # OK, now find the endchar that actually got us back to even.
  # BUG FIX: this scan previously used a hard-coded ')' regardless of the
  # opening character, returning wrong positions for '{' and '[' expressions.
  endpos = len(line)
  while num_open >= 0:
    endpos = line.rfind(endchar, 0, endpos)
    num_open -= 1    # chopped off another closer
  return (line, linenum, endpos + 1)
def CheckForCopyright(filename, lines, error):
  """Logs an error if no Copyright message appears at the top of the file."""
  # We'll say it should occur by line 10. Don't forget there's a
  # dummy line at the front.
  found = False
  for ix in range(1, min(len(lines), 11)):
    if re.search(r'Copyright', lines[ix], re.I):
      found = True
      break
  if not found:
    error(filename, 0, 'legal/copyright', 5,
          'No copyright message found. '
          'You should have a line: "Copyright [year] <Copyright Owner>"')
def GetHeaderGuardCPPVariable(filename):
  """Returns the CPP variable that should be used as a header guard.

  Args:
    filename: The name of a C++ header file.

  Returns:
    The CPP variable that should be used as a header guard in the
    named file.
  """
  # Restores original filename in case that cpplint is invoked from Emacs's
  # flymake.
  filename = re.sub(r'_flymake\.h$', '.h', filename)

  repo_path = FileInfo(filename).RepositoryName()
  guard = _ProjectName() + '_' + re.sub(r'[-./\s]', '_', repo_path)
  return guard.upper()
def CheckForHeaderGuard(filename, lines, error):
  """Checks that the file contains a header guard.

  Logs an error if no #ifndef header guard is present. For other
  headers, checks that the full pathname is used.

  Args:
    filename: The name of the C++ header file.
    lines: An array of strings, each representing a line of the file.
    error: The function to call with any errors found.
  """
  cppvar = GetHeaderGuardCPPVariable(filename)

  ifndef = None
  ifndef_linenum = 0
  define = None
  endif = None
  endif_linenum = 0
  # Set when the file is guarded by '#ifdef BOOST_PP_IS_ITERATING' (a
  # Boost.Preprocessor self-iterating header); such files are exempt from
  # the conventional #ifndef/#define guard checks below.
  boostppiterating = None
  for linenum, line in enumerate(lines):
    linesplit = line.split()
    if len(linesplit) >= 2:
      if not ifndef and linesplit[0] == '#ifdef' and linesplit[1] == 'BOOST_PP_IS_ITERATING':
        boostppiterating = linesplit[1]
      # find the first occurrence of #ifndef and #define, save arg
      if not boostppiterating and not ifndef and linesplit[0] == '#ifndef':
        # set ifndef to the header guard presented on the #ifndef line.
        ifndef = linesplit[1]
        ifndef_linenum = linenum
      if not boostppiterating and not define and linesplit[0] == '#define':
        define = linesplit[1]
    # find the last occurrence of #endif, save entire line
    if line.startswith('#endif'):
      endif = line
      endif_linenum = linenum

  if not boostppiterating and not ifndef:
    error(filename, 0, 'build/header_guard', 5,
          'No #ifndef header guard found, suggested CPP variable is: %s' %
          cppvar)
    return

  if not boostppiterating and not define:
    error(filename, 0, 'build/header_guard', 5,
          'No #define header guard found, suggested CPP variable is: %s' %
          cppvar)
    return

  # The guard should be PATH_FILE_H_, but we also allow PATH_FILE_H__
  # for backward compatibility.
  if not boostppiterating and ifndef != cppvar:
    # Confidence 0 for the legacy trailing-underscore variant, 5 otherwise.
    error_level = 0
    if ifndef != cppvar + '_':
      error_level = 5

    ParseNolintSuppressions(filename, lines[ifndef_linenum], ifndef_linenum,
                            error)
    error(filename, ifndef_linenum, 'build/header_guard', error_level,
          '#ifndef header guard has wrong style, please use: %s' % cppvar)

  if not boostppiterating and define != ifndef:
    error(filename, 0, 'build/header_guard', 5,
          '#ifndef and #define don\'t match, suggested CPP variable is: %s' %
          cppvar)
    return

  if not boostppiterating and endif != ('#endif // %s' % cppvar):
    error_level = 0
    if endif != ('#endif // %s' % (cppvar + '_')):
      error_level = 5

    ParseNolintSuppressions(filename, lines[endif_linenum], endif_linenum,
                            error)
    error(filename, endif_linenum, 'build/header_guard', error_level,
          '#endif line should be "#endif // %s"' % cppvar)

  # For BOOST_PP_IS_ITERATING headers, the trailing #endif comment should
  # name the guard macro instead of the computed header-guard variable.
  if boostppiterating and endif != ('#endif // %s' % boostppiterating):
    error_level = 5

    ParseNolintSuppressions(filename, lines[endif_linenum], endif_linenum,
                            error)
    error(filename, endif_linenum, 'build/header_guard', error_level,
          '#endif line should be "#endif // %s"' % boostppiterating)
def CheckForUnicodeReplacementCharacters(filename, lines, error):
  """Logs an error for each line containing Unicode replacement characters.

  These indicate that either the file contained invalid UTF-8 (likely)
  or Unicode replacement characters (which it shouldn't). Note that
  it's possible for this to throw off line numbering if the invalid
  UTF-8 occurred adjacent to a newline.

  Args:
    filename: The name of the current file.
    lines: An array of strings, each representing a line of the file.
    error: The function to call with any errors found.
  """
  for linenum, line in enumerate(lines):
    # u() is presumably a Python 2/3 text-compat helper defined elsewhere in
    # this module -- confirm.  U+FFFD is the Unicode replacement character.
    if u('\ufffd') in line:
      error(filename, linenum, 'readability/utf8', 5,
            'Line contains invalid UTF-8 (or Unicode replacement character).')
def CheckForNewlineAtEOF(filename, lines, error):
  """Logs an error if there is no newline char at the end of the file.

  Args:
    filename: The name of the current file.
    lines: An array of strings, each representing a line of the file.
    error: The function to call with any errors found.
  """
  # The array lines() was created by adding two newlines to the
  # original file (go figure), then splitting on \n.
  # To verify that the file ends in \n, we just have to make sure the
  # last-but-two element of lines() exists and is empty.
  ends_with_newline = len(lines) >= 3 and not lines[-2]
  if not ends_with_newline:
    error(filename, len(lines) - 2, 'whitespace/ending_newline', 5,
          'Could not find a newline character at the end of the file.')
def CheckForMultilineCommentsAndStrings(filename, clean_lines, linenum, error):
  """Logs an error if we see /* ... */ or "..." that extend past one line.

  /* ... */ comments are legit inside macros, for one line.
  Otherwise, we prefer // comments, so it's ok to warn about the
  other. Likewise, it's ok for strings to extend across multiple
  lines, as long as a line continuation character (backslash)
  terminates each line. Although not currently prohibited by the C++
  style guide, it's ugly and unnecessary. We don't do well with either
  in this lint program, so we warn about both.

  Args:
    filename: The name of the current file.
    clean_lines: A CleansedLines instance containing the file.
    linenum: The number of the line to check.
    error: The function to call with any errors found.
  """
  line = clean_lines.elided[linenum]

  # Remove all \\ (escaped backslashes) from the line. They are OK, and the
  # second (escaped) slash may trigger later \" detection erroneously.
  line = line.replace('\\\\', '')

  unbalanced_comment = line.count('/*') > line.count('*/')
  if unbalanced_comment:
    error(filename, linenum, 'readability/multiline_comment', 5,
          'Complex multi-line /*...*/-style comment found. '
          'Lint may give bogus warnings. '
          'Consider replacing these with //-style comments, '
          'with #if 0...#endif, '
          'or with more clearly structured multi-line comments.')

  unbalanced_quotes = (line.count('"') - line.count('\\"')) % 2
  if unbalanced_quotes:
    error(filename, linenum, 'readability/multiline_string', 5,
          'Multi-line string ("...") found. This lint script doesn\'t '
          'do well with such strings, and may give bogus warnings. They\'re '
          'ugly and unnecessary, and you should use concatenation instead".')
# Pairs of (thread-unsafe function, its reentrant _r replacement).
threading_list = (
    ('asctime(', 'asctime_r('),
    ('ctime(', 'ctime_r('),
    ('getgrgid(', 'getgrgid_r('),
    ('getgrnam(', 'getgrnam_r('),
    ('getlogin(', 'getlogin_r('),
    ('getpwnam(', 'getpwnam_r('),
    ('getpwuid(', 'getpwuid_r('),
    ('gmtime(', 'gmtime_r('),
    ('localtime(', 'localtime_r('),
    ('rand(', 'rand_r('),
    ('readdir(', 'readdir_r('),
    ('strtok(', 'strtok_r('),
    ('ttyname(', 'ttyname_r('),
    )


def CheckPosixThreading(filename, clean_lines, linenum, error):
  """Checks for calls to thread-unsafe functions.

  Much code has been originally written without consideration of
  multi-threading. Also, engineers are relying on their old experience;
  they have learned posix before threading extensions were added. These
  tests guide the engineers to use thread-safe functions (when using
  posix directly).

  Args:
    filename: The name of the current file.
    clean_lines: A CleansedLines instance containing the file.
    linenum: The number of the line to check.
    error: The function to call with any errors found.
  """
  # An explicit '/* not thread-safe */' annotation silences this check.
  if '/* not thread-safe */' in clean_lines.raw_lines[linenum]:
    return
  line = clean_lines.elided[linenum]
  for unsafe_fn, safe_fn in threading_list:
    ix = line.find(unsafe_fn)
    if ix < 0:
      continue
    # Require the preceding character (if any) to be a non-identifier that
    # is not '_', '.' or '>', so members and methods are not flagged.
    # Comparisons made explicit for clarity -- pylint: disable-msg=C6403
    if ix == 0 or (not line[ix - 1].isalnum() and
                   line[ix - 1] not in ('_', '.', '>')):
      error(filename, linenum, 'runtime/threadsafe_fn', 2,
            'Consider using ' + safe_fn +
            '...) instead of ' + unsafe_fn +
            '...) for improved thread safety.')
# Matches invalid increment: *count++, which moves pointer instead of
# incrementing a value.
_RE_PATTERN_INVALID_INCREMENT = re.compile(
    r'^\s*\*\w+(\+\+|--);')


def CheckInvalidIncrement(filename, clean_lines, linenum, error):
  """Checks for invalid increment *count++.

  For example following function:
  void increment_counter(int* count) {
    *count++;
  }
  is invalid, because it effectively does count++, moving pointer, and should
  be replaced with ++*count, (*count)++ or *count += 1.

  Args:
    filename: The name of the current file.
    clean_lines: A CleansedLines instance containing the file.
    linenum: The number of the line to check.
    error: The function to call with any errors found.
  """
  if _RE_PATTERN_INVALID_INCREMENT.match(clean_lines.elided[linenum]):
    error(filename, linenum, 'runtime/invalid_increment', 5,
          'Changing pointer instead of value (or unused value of operator*).')
class _ClassInfo(object):
"""Stores information about a class."""
def __init__(self, name, clean_lines, linenum):
self.name = name
self.linenum = linenum
self.seen_open_brace = False
self.is_derived = False
self.virtual_method_linenumber = None
self.has_virtual_destructor = False
self.brace_depth = 0
# Try to find the end of the class. This will be confused by things like:
# class A {
# } *x = { ...
#
# But it's still good enough for CheckSectionSpacing.
self.last_line = 0
depth = 0
for i in range(linenum, clean_lines.NumLines()):
line = clean_lines.lines[i]
depth += line.count('{') - line.count('}')
if not depth:
self.last_line = i
break
class _ClassState(object):
"""Holds the current state of the parse relating to class declarations.
It maintains a stack of _ClassInfos representing the parser's guess
as to the current nesting of class declarations. The innermost class
is at the top (back) of the stack. Typically, the stack will either
be empty or have exactly one entry.
"""
def __init__(self):
self.classinfo_stack = []
def CheckFinished(self, filename, error):
"""Checks that all classes have been completely parsed.
Call this when all lines in a file have been processed.
Args:
filename: The name of the current file.
error: The function to call with any errors found.
"""
if self.classinfo_stack:
# Note: This test can result in false positives if #ifdef constructs
# get in the way of brace matching. See the testBuildClass test in
# cpplint_unittest.py for an example of this.
error(filename, self.classinfo_stack[0].linenum, 'build/class', 5,
'Failed to find complete declaration of class %s' %
self.classinfo_stack[0].name)
def CheckForNonStandardConstructs(filename, clean_lines, linenum,
                                  class_state, error):
  r"""Logs an error if we see certain non-ANSI constructs ignored by gcc-2.

  Complain about several constructs which gcc-2 accepts, but which are
  not standard C++.  Warning about these in lint is one way to ease the
  transition to new compilers.
  - put storage class first (e.g. "static const" instead of "const static").
  - "%lld" instead of %qd" in printf-type functions.
  - "%1$d" is non-standard in printf-type functions.
  - "\%" is an undefined character escape sequence.
  - text after #endif is not allowed.
  - invalid inner-style forward declaration.
  - >? and <? operators, and their >?= and <?= cousins.
  - classes with virtual methods need virtual destructors (compiler warning
    available, but not turned on yet.)

  Additionally, check for constructor/destructor style violations and
  reference members, as it is very convenient to do so while checking for
  gcc-2 compliance.

  Args:
    filename: The name of the current file.
    clean_lines: A CleansedLines instance containing the file.
    linenum: The number of the line to check.
    class_state: A _ClassState instance which maintains information about
        the current stack of nested class declarations being parsed.
    error: A callable to which errors are reported, which takes 4 arguments:
        filename, line number, error level, and message
  """
  # Remove comments from the line, but leave in strings for now.
  line = clean_lines.lines[linenum]

  if Search(r'printf\s*\(.*".*%[-+ ]?\d*q', line):
    error(filename, linenum, 'runtime/printf_format', 3,
          '%q in format strings is deprecated. Use %ll instead.')

  if Search(r'printf\s*\(.*".*%\d+\$', line):
    error(filename, linenum, 'runtime/printf_format', 2,
          '%N$ formats are unconventional. Try rewriting to avoid them.')

  # Remove escaped backslashes before looking for undefined escapes.
  line = line.replace('\\\\', '')

  if Search(r'("|\').*\\(%|\[|\(|{)', line):
    error(filename, linenum, 'build/printf_format', 3,
          '%, [, (, and { are undefined character escapes. Unescape them.')

  # For the rest, work with both comments and strings removed.
  line = clean_lines.elided[linenum]

  if Search(r'\b(const|volatile|void|char|short|int|long'
            r'|float|double|signed|unsigned'
            r'|schar|u?int8|u?int16|u?int32|u?int64)'
            r'\s+(auto|register|static|extern|typedef)\b',
            line):
    error(filename, linenum, 'build/storage_class', 5,
          'Storage class (static, extern, typedef, etc) should be first.')

  if Match(r'\s*#\s*endif\s*[^/\s]+', line):
    error(filename, linenum, 'build/endif_comment', 5,
          'Uncommented text after #endif is non-standard. Use a comment.')

  if Match(r'\s*class\s+(\w+\s*::\s*)+\w+\s*;', line):
    error(filename, linenum, 'build/forward_decl', 5,
          'Inner-style forward declarations are invalid. Remove this line.')

  if Search(r'(\w+|[+-]?\d+(\.\d*)?)\s*(<|>)\?=?\s*(\w+|[+-]?\d+)(\.\d*)?',
            line):
    error(filename, linenum, 'build/deprecated', 3,
          '>? and <? (max and min) operators are non-standard and deprecated.')

  if Search(r'^\s*const\s*string\s*&\s*\w+\s*;', line):
    # TODO(unknown): Could it be expanded safely to arbitrary references,
    # without triggering too many false positives? The first
    # attempt triggered 5 warnings for mostly benign code in the regtest,
    # hence the restriction.
    # Here's the original regexp, for the reference:
    # type_name = r'\w+((\s*::\s*\w+)|(\s*<\s*\w+?\s*>))?'
    # r'\s*const\s*' + type_name + '\s*&\s*\w+\s*;'
    error(filename, linenum, 'runtime/member_string_references', 2,
          'const string& members are dangerous. It is much better to use '
          'alternatives, such as pointers or simple constants.')

  # Track class entry and exit, and attempt to find cases within the
  # class declaration that don't meet the C++ style
  # guidelines. Tracking is very dependent on the code matching Google
  # style guidelines, but it seems to perform well enough in testing
  # to be a worthwhile addition to the checks.
  classinfo_stack = class_state.classinfo_stack
  # Look for a class declaration. The regexp accounts for decorated classes
  # such as in:
  #   class LOCKABLE API Object {
  #   };
  # NOTE: the second regex fragment was previously a non-raw string whose
  # \s and \w sequences are invalid string escapes (SyntaxWarning on
  # Python >= 3.12, scheduled to become an error); it is now a raw string,
  # which produces the identical regular expression.
  class_decl_match = Match(
      r'\s*(template\s*<[\w\s<>,:]*>\s*)?'
      r'(class|struct)\s+([A-Z_]+\s+)*(\w+(::\w+)*)', line)
  if class_decl_match:
    classinfo_stack.append(_ClassInfo(
        class_decl_match.group(4), clean_lines, linenum))

  # Everything else in this function uses the top of the stack if it's
  # not empty.
  if not classinfo_stack:
    return

  classinfo = classinfo_stack[-1]

  # If the opening brace hasn't been seen look for it and also
  # parent class declarations.
  if not classinfo.seen_open_brace:
    # If the line has a ';' in it, assume it's a forward declaration or
    # a single-line class declaration, which we won't process.
    if line.find(';') != -1:
      classinfo_stack.pop()
      return
    classinfo.seen_open_brace = (line.find('{') != -1)
    # Look for a bare ':' (a base-specifier list marks a derived class).
    if Search('(^|[^:]):($|[^:])', line):
      classinfo.is_derived = True
    if not classinfo.seen_open_brace:
      return  # Everything else in this function is for after open brace

  # The class may have been declared with namespace or classname qualifiers.
  # The constructor and destructor will not have those qualifiers.
  base_classname = classinfo.name.split('::')[-1]

  # Look for single-argument constructors that aren't marked explicit.
  # Technically a valid construct, but against style.
  args = Match(r'\s+(?:inline\s+)?%s\s*\(([^,()]+)\)'
               % re.escape(base_classname),
               line)
  if (args and
      args.group(1) != 'void' and
      clean_lines.raw_lines[linenum].find('/* implicit */') < 0 and
      not Match(r'(const\s+)?%s\s*(?:<\w+>\s*)?&' % re.escape(base_classname),
                args.group(1).strip())):
    error(filename, linenum, 'runtime/explicit', 5,
          'Single-argument constructors should be marked explicit.')

  # Look for methods declared virtual.
  if Search(r'\bvirtual\b', line):
    classinfo.virtual_method_linenumber = linenum
    # Only look for a destructor declaration on the same line. It would
    # be extremely unlikely for the destructor declaration to occupy
    # more than one line.
    if Search(r'~%s\s*\(' % base_classname, line):
      classinfo.has_virtual_destructor = True

  # Look for class end.
  brace_depth = classinfo.brace_depth
  brace_depth = brace_depth + line.count('{') - line.count('}')
  if brace_depth <= 0:
    classinfo = classinfo_stack.pop()
    # Try to detect missing virtual destructor declarations.
    # For now, only warn if a non-derived class with virtual methods lacks
    # a virtual destructor. This is to make it less likely that people will
    # declare derived virtual destructors without declaring the base
    # destructor virtual.
    if ((classinfo.virtual_method_linenumber is not None) and
        (not classinfo.has_virtual_destructor) and
        (not classinfo.is_derived)):  # Only warn for base classes
      error(filename, classinfo.linenum, 'runtime/virtual', 4,
            'The class %s probably needs a virtual destructor due to '
            'having virtual method(s), one declared at line %d.'
            % (classinfo.name, classinfo.virtual_method_linenumber))
  else:
    classinfo.brace_depth = brace_depth
def CheckSpacingForFunctionCall(filename, line, linenum, error):
  """Checks for the correctness of various spacing around function calls.

  Args:
    filename: The name of the current file.
    line: The text of the line to check.
    linenum: The number of the line to check.
    error: The function to call with any errors found.
  """
  # if/for/while/switch headers have their own, more liberal spacing
  # conventions, so when the line contains one of those constructs we
  # restrict the stricter function-call checks to the text inside its
  # parentheses.
  call_text = line  # with no control-flow construct, examine the whole line
  control_patterns = (r'\bif\s*\((.*)\)\s*{',
                      r'\bfor\s*\((.*)\)\s*{',
                      r'\bwhile\s*\((.*)\)\s*[{;]',
                      r'\bswitch\s*\((.*)\)\s*{')
  for control_pattern in control_patterns:
    control_match = Search(control_pattern, line)
    if control_match:
      call_text = control_match.group(1)  # look inside the parens
      break

  # Except in if/for/while/switch, there should never be space immediately
  # inside parens (eg "f( 3, 4 )"), and never a space before a ( that
  # begins an argument list.  A ( is assumed to start an argument list when
  # the preceding character is legal in a function name (alnum + _) and we
  # are not starting a macro.  Pointers and references to arrays and
  # functions are too tricky to analyze here, so they are recognized by
  # simple patterns like:
  #   " (something)(maybe-something)" or
  #   " (something)(maybe-something," or
  #   " (something)[something]"
  # (contents of [] are assumed short enough that they never wrap).
  if (  # Ignore control structures.
      not Search(r'\b(if|for|while|switch|return|delete)\b', call_text) and
      # Ignore pointers/references to functions.
      not Search(r' \([^)]+\)\([^)]*(\)|,$)', call_text) and
      # Ignore pointers/references to arrays.
      not Search(r' \([^)]+\)\[[^\]]+\]', call_text)):
    if Search(r'\w\s*\(\s(?!\s*\\$)', call_text):  # a ( used for a fn call
      error(filename, linenum, 'whitespace/parens', 4,
            'Extra space after ( in function call')
    elif Search(r'\(\s+(?!(\s*\\)|\()', call_text):
      error(filename, linenum, 'whitespace/parens', 2,
            'Extra space after (')
    if (Search(r'\w\s+\(', call_text) and
        not Search(r'#\s*define|typedef', call_text)):
      error(filename, linenum, 'whitespace/parens', 4,
            'Extra space before ( in function call')
    # If the ) is followed only by a newline or a { + newline, assume it's
    # part of a control statement (if/while/etc), and don't complain
    if Search(r'[^)]\s+\)\s*[^{\s]', call_text):
      if Search(r'^\s+\)', call_text):
        # The closing parenthesis is preceded only by whitespace; give a
        # more descriptive message for that case.
        error(filename, linenum, 'whitespace/parens', 2,
              'Closing ) should be moved to the previous line')
      else:
        error(filename, linenum, 'whitespace/parens', 2,
              'Extra space before )')
def IsBlankLine(line):
  """Returns true if the given line is blank.

  A line counts as blank when it is empty or consists solely of
  whitespace characters.

  Args:
    line: A line of a string.

  Returns:
    True, if the given line is blank.
  """
  return line.strip() == ''
def CheckForFunctionLengths(filename, clean_lines, linenum,
                            function_state, error):
  """Reports for long function bodies.

  For an overview why this is done, see:
  http://google-styleguide.googlecode.com/svn/trunk/cppguide.xml#Write_Short_Functions

  Uses a simplistic algorithm assuming other style guidelines
  (especially spacing) are followed.
  Only checks unindented functions, so class members are unchecked.
  Trivial bodies are unchecked, so constructors with huge initializer lists
  may be missed.
  Blank/comment lines are not counted so as to avoid encouraging the removal
  of vertical space and comments just to get through a lint check.
  NOLINT *on the last line of a function* disables this check.

  Args:
    filename: The name of the current file.
    clean_lines: A CleansedLines instance containing the file.
    linenum: The number of the line to check.
    function_state: Current function name and lines in body so far.
    error: The function to call with any errors found.
  """
  lines = clean_lines.lines
  line = lines[linenum]
  raw = clean_lines.raw_lines
  # NOTE(review): raw_line appears to be unused in this function.
  raw_line = raw[linenum]
  joined_line = ''

  starting_func = False
  regexp = r'(\w(\w|::|\*|\&|\s)*)\('  # decls * & space::name( ...
  match_result = Match(regexp, line)
  if match_result:
    # If the name is all caps and underscores, figure it's a macro and
    # ignore it, unless it's TEST or TEST_F.
    function_name = match_result.group(1).split()[-1]
    if function_name == 'TEST' or function_name == 'TEST_F' or (
        not Match(r'[A-Z_]+$', function_name)):
      starting_func = True

  if starting_func:
    # Scan forward for the opening brace (start of the body) or for
    # evidence that this is a declaration/trivial function instead.
    body_found = False
    for start_linenum in range(linenum, clean_lines.NumLines()):
      start_line = lines[start_linenum]
      joined_line += ' ' + start_line.lstrip()
      if Search(r'(;|})', start_line):  # Declarations and trivial functions
        body_found = True
        break                           # ... ignore
      elif Search(r'{', start_line):
        body_found = True
        function = Search(r'((\w|:)*)\(', line).group(1)
        if Match(r'TEST', function):    # Handle TEST... macros
          parameter_regexp = Search(r'(\(.*\))', joined_line)
          if parameter_regexp:          # Ignore bad syntax
            function += parameter_regexp.group(1)
        else:
          function += '()'
        function_state.Begin(function)
        break
    if not body_found:
      # No body for the function (or evidence of a non-function) was found.
      error(filename, linenum, 'readability/fn_size', 5,
            'Lint failed to find start of function body.')
  elif Match(r'^\}\s*$', line):  # function end
    function_state.Check(error, filename, linenum)
    function_state.End()
  elif not Match(r'^\s*$', line):
    function_state.Count()  # Count non-blank/non-comment lines.
# Matches "// TODO(name):" style comments.  The capture groups are used by
# CheckComment below to validate leading spacing, the presence of a
# username, and the spacing after the colon.
_RE_PATTERN_TODO = re.compile(r'^//(\s*)TODO(\(.+?\))?:?(\s|$)?')


def CheckComment(comment, filename, linenum, error):
  """Checks for common mistakes in TODO comments.

  Args:
    comment: The text of the comment from the line in question.
    filename: The name of the current file.
    linenum: The number of the line to check.
    error: The function to call with any errors found.
  """
  todo_match = _RE_PATTERN_TODO.match(comment)
  if not todo_match:
    return

  # One whitespace is correct; zero whitespace is handled elsewhere.
  if len(todo_match.group(1)) > 1:
    error(filename, linenum, 'whitespace/todo', 2,
          'Too many spaces before TODO')

  if not todo_match.group(2):
    error(filename, linenum, 'readability/todo', 2,
          'Missing username in TODO; it should look like '
          '"// TODO(my_username): Stuff."')

  # Comparisons made explicit for correctness -- pylint: disable-msg=C6403
  trailer = todo_match.group(3)
  if trailer != ' ' and trailer != '':
    error(filename, linenum, 'whitespace/todo', 2,
          'TODO(my_username) should be followed by a space')
def CheckSpacing(filename, clean_lines, linenum, error):
  """Checks for the correctness of various spacing issues in the code.

  Things we check for: spaces around operators, spaces after
  if/for/while/switch, no spaces around parens in function calls, two
  spaces between code and comment, don't start a block with a blank
  line, don't end a function with a blank line, don't add a blank line
  after public/protected/private, don't have too many blank lines in a row.

  Args:
    filename: The name of the current file.
    clean_lines: A CleansedLines instance containing the file.
    linenum: The number of the line to check.
    error: The function to call with any errors found.
  """
  raw = clean_lines.raw_lines
  line = raw[linenum]

  # Before nixing comments, check if the line is blank for no good
  # reason.  This includes the first line after a block is opened, and
  # blank lines at the end of a function (ie, right before a line like '}'
  if IsBlankLine(line):
    elided = clean_lines.elided
    prev_line = elided[linenum - 1]
    prevbrace = prev_line.rfind('{')
    # TODO(unknown): Don't complain if line before blank line, and line
    # after, both start with alnums and are indented the same amount.
    # This ignores whitespace at the start of a namespace block
    # because those are not usually indented.
    if (prevbrace != -1 and prev_line[prevbrace:].find('}') == -1
        and prev_line[:prevbrace].find('namespace') == -1):
      # OK, we have a blank line at the start of a code block. Before we
      # complain, we check if it is an exception to the rule: The previous
      # non-empty line has the parameters of a function header that are
      # indented 4 spaces (because they did not fit in a 80 column line
      # when placed on the same line as the function name). We also check
      # for the case where the previous line is indented 6 spaces, which
      # may happen when the initializers of a constructor do not fit into
      # a 80 column line.
      exception = False
      if Match(r' {6}\w', prev_line):  # Initializer list?
        # We are looking for the opening column of initializer list, which
        # should be indented 4 spaces to cause 6 space indentation
        # afterwards.
        search_position = linenum-2
        while (search_position >= 0
               and Match(r' {6}\w', elided[search_position])):
          search_position -= 1
        # A 5-character prefix of four spaces followed by ':' marks the
        # opening line of the initializer list.
        exception = (search_position >= 0
                     and elided[search_position][:5] == '    :')
      else:
        # Search for the function arguments or an initializer list. We use
        # a simple heuristic here: If the line is indented 4 spaces; and we
        # have a closing paren, without the opening paren, followed by an
        # opening brace or colon (for initializer lists) we assume that it
        # is the last line of a function header. If we have a colon
        # indented 4 spaces, it is an initializer list.
        exception = (Match(r' {4}\w[^\(]*\)\s*(const\s*)?(\{\s*$|:)',
                           prev_line)
                     or Match(r' {4}:', prev_line))

      if not exception:
        error(filename, linenum, 'whitespace/blank_line', 2,
              'Blank line at the start of a code block. Is this needed?')

    # This doesn't ignore whitespace at the end of a namespace block
    # because that is too hard without pairing open/close braces;
    # however, a special exception is made for namespace closing
    # brackets which have a comment containing "namespace".
    #
    # Also, ignore blank lines at the end of a block in a long if-else
    # chain, like this:
    #   if (condition1) {
    #     // Something followed by a blank line
    #
    #   } else if (condition2) {
    #     // Something else
    #   }
    if linenum + 1 < clean_lines.NumLines():
      next_line = raw[linenum + 1]
      if (next_line
          and Match(r'\s*}', next_line)
          and next_line.find('namespace') == -1
          and next_line.find('} else ') == -1):
        error(filename, linenum, 'whitespace/blank_line', 3,
              'Blank line at the end of a code block. Is this needed?')

    matched = Match(r'\s*(public|protected|private):', prev_line)
    if matched:
      error(filename, linenum, 'whitespace/blank_line', 3,
            'Do not leave a blank line after "%s:"' % matched.group(1))

  # Next, we complain if there's a comment too near the text
  commentpos = line.find('//')
  if commentpos != -1:
    # Check if the // may be in quotes. If so, ignore it
    # Comparisons made explicit for clarity -- pylint: disable-msg=C6403
    if (line.count('"', 0, commentpos) -
        line.count('\\"', 0, commentpos)) % 2 == 0:  # not in quotes
      # Allow one space for new scopes, two spaces otherwise:
      #if (not Match(r'^\s*{ //', line) and
      #    ((commentpos >= 1 and
      #      line[commentpos-1] not in string.whitespace) or
      #     (commentpos >= 2 and
      #      line[commentpos-2] not in string.whitespace))):
      #  error(filename, linenum, 'whitespace/comments', 2,
      #        'At least two spaces is best between code and comments')
      # There should always be a space between the // and the comment
      commentend = commentpos + 2
      if commentend < len(line) and not line[commentend] == ' ':
        # but some lines are exceptions -- e.g. if they're big
        # comment delimiters like:
        # //----------------------------------------------------------
        # or are an empty C++ style Doxygen comment, like:
        # ///
        # or they begin with multiple slashes followed by a space:
        # //////// Header comment
        match = (Search(r'[=/-]{4,}\s*$', line[commentend:]) or
                 Search(r'^/$', line[commentend:]) or
                 Search(r'^! ', line[commentend:]) or
                 Search(r'^!< ', line[commentend:]) or
                 Search(r'^/+ ', line[commentend:]))
        if not match:
          error(filename, linenum, 'whitespace/comments', 4,
                'Should have a space between // and comment')
      CheckComment(line[commentpos:], filename, linenum, error)

  line = clean_lines.elided[linenum]  # get rid of comments and strings

  # Don't try to do spacing checks for operator methods.
  # NOTE: the replacement must insert the two characters backslash + '(' so
  # the mangled token no longer looks like a function call.  The previous
  # template, 'operator\(', relied on an invalid string escape AND an
  # invalid re template escape (both warned about on Python >= 3.12);
  # r'operator\\(' produces the identical replacement text.
  line = re.sub(r'operator(==|!=|<|<<|<=|>=|>>|>)\(', r'operator\\(', line)

  if Search(r'template\<', line):
    error(filename, linenum, 'whitespace/templates', 4,
          'Missing space between template and <')

  # We allow no-spaces around = within an if: "if ( (a=Foo()) == 0 )".
  # Otherwise not. Note we only check for non-spaces on *both* sides;
  # sometimes people put non-spaces on one side when aligning ='s among
  # many lines (not that this is behavior that I approve of...)
  if Search(r'[\w.]=[\w.]', line) and not Search(r'\b(if|while) ', line):
    error(filename, linenum, 'whitespace/operators', 4,
          'Missing spaces around =')

  # It's ok not to have spaces around binary operators like + - * /, but if
  # there's too little whitespace, we get concerned. It's hard to tell,
  # though, so we punt on this one for now. TODO.

  # You should always have whitespace around binary operators.
  # Alas, we can't test < or > because they're legitimately used sans spaces
  # (a->b, vector<int> a). The only time we can tell is a < with no >, and
  # only if it's not template params list spilling into the next line.
  match = Search(r'[^<>=!\s](==|!=|<=|>=)[^<>=!\s]', line)
  if not match:
    # Note that while it seems that the '<[^<]*' term in the following
    # regexp could be simplified to '<.*', which would indeed match
    # the same class of strings, the [^<] means that searching for the
    # regexp takes linear rather than quadratic time.
    if not Search(r'<[^<]*,\s*$', line):  # template params spill
      match = Search(r'[^<>=!\s](<)[^<>=!\s]([^>]|->)*$', line)
  if match:
    error(filename, linenum, 'whitespace/operators', 3,
          'Missing spaces around %s' % match.group(1))

  # We allow no-spaces around << and >> when used like this: 10<<20, but
  # not otherwise (particularly, not when used as streams)
  match = Search(r'[^0-9\s](<<|>>)[^0-9\s]', line)
  if match:
    error(filename, linenum, 'whitespace/operators', 3,
          'Missing spaces around %s' % match.group(1))

  # There shouldn't be space around unary operators
  match = Search(r'(!\s|~\s|[\s]--[\s;]|[\s]\+\+[\s;])', line)
  if match:
    error(filename, linenum, 'whitespace/operators', 4,
          'Extra space for operator %s' % match.group(1))

  # A pet peeve of mine: no spaces after an if, while, switch, or for
  match = Search(r' (if\s\(|for\s\(|while\s\(|switch\s\()', line)
  if match:
    error(filename, linenum, 'whitespace/parens', 5,
          'Extra space before ( in %s' % match.group(1))

  # For if/for/while/switch, the left and right parens should be
  # consistent about how many spaces are inside the parens, and
  # there should either be zero or one spaces inside the parens.
  # We don't want: "if ( foo)" or "if ( foo )".
  # Exception: "for ( ; foo; bar)" and "for (foo; bar; )" are allowed.
  match = Search(r'\b(if|for|while|switch)\s*'
                 r'\(([ ]*)(.).*[^ ]+([ ]*)\)\s*{\s*$',
                 line)
  if match:
    if len(match.group(2)) != len(match.group(4)):
      if not (match.group(3) == ';' and
              len(match.group(2)) == 1 + len(match.group(4)) or
              not match.group(2) and Search(r'\bfor\s*\(.*; \)', line)):
        error(filename, linenum, 'whitespace/parens', 5,
              'Mismatching spaces inside () in %s' % match.group(1))
    if not len(match.group(2)) in [0, 1]:
      error(filename, linenum, 'whitespace/parens', 5,
            'Should have zero or one spaces inside ( and ) in %s' %
            match.group(1))

  # You should always have a space after a comma (either as fn arg or
  # operator)
  if Search(r',[^\s]', line):
    error(filename, linenum, 'whitespace/comma', 3,
          'Missing space after ,')

  # You should always have a space after a semicolon
  # except for few corner cases
  # TODO(unknown): clarify if 'if (1) { return 1;}' is requires one more
  # space after ;
  if Search(r';[^\s};\\)/]', line):
    error(filename, linenum, 'whitespace/semicolon', 3,
          'Missing space after ;')

  # Next we will look for issues with function calls.
  CheckSpacingForFunctionCall(filename, line, linenum, error)

  # Except after an opening paren, or after another opening brace (in case
  # of an initializer list, for instance), you should have spaces before
  # your braces. And since you should never have braces at the beginning of
  # a line, this is an easy test.
  if Search(r'[^ (\t{]{', line):
    error(filename, linenum, 'whitespace/braces', 5,
          'Missing space before {')

  # Make sure '} else {' has spaces.
  if Search(r'}else', line):
    error(filename, linenum, 'whitespace/braces', 5,
          'Missing space before else')

  # You shouldn't have spaces before your brackets, except maybe after
  # 'delete []' or 'new char * []'.
  if Search(r'\w\s+\[', line) and not Search(r'delete\s+\[', line):
    error(filename, linenum, 'whitespace/braces', 5,
          'Extra space before [')

  # You shouldn't have a space before a semicolon at the end of the line.
  # There's a special case for "for" since the style guide allows space
  # before the semicolon there.
  if Search(r':\s*;\s*$', line):
    error(filename, linenum, 'whitespace/semicolon', 5,
          'Semicolon defining empty statement. Use { } instead.')
  elif Search(r'^\s*;\s*$', line):
    error(filename, linenum, 'whitespace/semicolon', 5,
          'Line contains only semicolon. If this should be an empty '
          'statement, use { } instead.')
  elif (Search(r'\s+;\s*$', line) and
        not Search(r'\bfor\b', line)):
    error(filename, linenum, 'whitespace/semicolon', 5,
          'Extra space before last semicolon. If this should be an empty '
          'statement, use { } instead.')
def CheckSectionSpacing(filename, clean_lines, class_info, linenum, error):
  """Checks for additional blank line issues related to sections.

  Currently the only thing checked here is blank line before
  protected/private.

  Args:
    filename: The name of the current file.
    clean_lines: A CleansedLines instance containing the file.
    class_info: A _ClassInfo objects.
    linenum: The number of the line to check.
    error: The function to call with any errors found.
  """
  # Skip small classes: 25 lines or less is roughly one terminal screen,
  # and anything that fits on one screen can be considered "small".  Also
  # skip the declaration line itself, which handles one-line classes like
  #   class Foo { public: ... };
  # A class whose end was never found has last_line == 0 and is skipped by
  # the first test as well.
  if (class_info.last_line - class_info.linenum <= 24 or
      linenum <= class_info.linenum):
    return

  section_match = Match(r'\s*(public|protected|private):',
                        clean_lines.lines[linenum])
  if not section_match:
    return

  # Warn only when the line before public/protected/private is neither
  # blank nor a "class"/"struct" line.  The class/struct exemption covers
  # two cases: we are at the beginning of the class, or we are
  # forward-declaring an inner class that is semantically private but
  # needed to be public for implementation reasons.
  preceding = clean_lines.lines[linenum - 1]
  if IsBlankLine(preceding) or Search(r'\b(class|struct)\b', preceding):
    return

  # Try a bit harder to find the beginning of the class, to account for
  # multi-line base-specifier lists such as:
  #   class Derived
  #       : public Base {
  end_class_head = class_info.linenum
  for candidate in range(class_info.linenum, linenum):
    if Search(r'\{\s*$', clean_lines.lines[candidate]):
      end_class_head = candidate
      break

  if end_class_head < linenum - 1:
    error(filename, linenum, 'whitespace/blank_line', 3,
          '"%s:" should be preceded by a blank line' %
          section_match.group(1))
def GetPreviousNonBlankLine(clean_lines, linenum):
  """Return the most recent non-blank line and its line number.

  Args:
    clean_lines: A CleansedLines instance containing the file contents.
    linenum: The number of the line to check.

  Returns:
    A tuple with two elements.  The first element is the contents of the
    last non-blank line before the current line, or the empty string if
    this is the first non-blank line.  The second is the line number of
    that line, or -1 if this is the first non-blank line.
  """
  # Walk backwards from the line just above linenum to the top of the file.
  for candidate in range(linenum - 1, -1, -1):
    text = clean_lines.elided[candidate]
    if not IsBlankLine(text):
      return (text, candidate)
  return ('', -1)
def CheckBraces(filename, clean_lines, linenum, error):
  """Looks for misplaced braces (e.g. at the end of line).

  Args:
    filename: The name of the current file.
    clean_lines: A CleansedLines instance containing the file.
    linenum: The number of the line to check.
    error: The function to call with any errors found.
  """
  line = clean_lines.elided[linenum]  # get rid of comments and strings

  if Match(r'\s*{\s*$', line):
    # We allow an open brace to start a line in the case where someone
    # is using braces in a block to explicitly create a new scope,
    # which is commonly used to control the lifetime of
    # stack-allocated variables. We don't detect this perfectly: we
    # just don't complain if the last non-whitespace character on the
    # previous non-blank line is ';', ':', '{', or '}'.
    prevline = GetPreviousNonBlankLine(clean_lines, linenum)[0]
    if not Search(r'[;:}{]\s*$', prevline):
      error(filename, linenum, 'whitespace/braces', 4,
            '{ should almost always be at the end of the previous line')

  # An else clause should be on the same line as the preceding closing
  # brace.
  if Match(r'\s*else\s*', line):
    prevline = GetPreviousNonBlankLine(clean_lines, linenum)[0]
    if Match(r'\s*}\s*$', prevline):
      error(filename, linenum, 'whitespace/newline', 4,
            'An else should appear on the same line as the preceding }')

  # If braces come on one side of an else, they should be on both.
  # However, we have to worry about "else if" that spans multiple lines!
  if Search(r'}\s*else[^{]*$', line) or Match(r'[^}]*else\s*{', line):
    if Search(r'}\s*else if([^{]*)$', line):  # could be multi-line if
      # find the ( after the if
      pos = line.find('else if')
      pos = line.find('(', pos)
      if pos > 0:
        (endline, _, endpos) = CloseExpression(clean_lines, linenum, pos)
        if endline[endpos:].find('{') == -1:  # must be brace after if
          error(filename, linenum, 'readability/braces', 5,
                'If an else has a brace on one side, it should have it '
                'on both')
    else:  # common case: else not followed by a multi-line if
      error(filename, linenum, 'readability/braces', 5,
            'If an else has a brace on one side, it should have it on both')

  # Likewise, an else should never have the else clause on the same line
  if Search(r'\belse [^\s{]', line) and not Search(r'\belse if\b', line):
    error(filename, linenum, 'whitespace/newline', 4,
          'Else clause should never be on same line as else (use 2 lines)')

  # In the same way, a do/while should never be on one line
  if Match(r'\s*do [^\s{]', line):
    error(filename, linenum, 'whitespace/newline', 4,
          'do/while clauses should not be on a single line')

  # Braces shouldn't be followed by a ; unless they're defining a struct
  # or initializing an array.
  # We can't tell in general, but we can for some common cases.
  prevlinenum = linenum
  while True:
    (prevline, prevlinenum) = GetPreviousNonBlankLine(clean_lines,
                                                      prevlinenum)
    # BUG FIX: once the top of the file is reached,
    # GetPreviousNonBlankLine keeps returning ('', -1); previously, if the
    # accumulated line still matched below (and '' contains no ';'), the
    # no-op concatenation '' + line made this loop spin forever.  Stop as
    # soon as there is no earlier line left to prepend.
    if prevlinenum < 0:
      break
    if Match(r'\s+{.*}\s*;', line) and not prevline.count(';'):
      line = prevline + line
    else:
      break
  if (Search(r'{.*}\s*;', line) and
      line.count('{') == line.count('}') and
      not Search(r'struct|class|enum|\s*=\s*{', line)):
    error(filename, linenum, 'readability/braces', 4,
          "You don't need a ; after a }")
def ReplaceableCheck(operator, macro, line):
  """Determine whether a basic CHECK can be replaced with a more specific one.

  For example suggest using CHECK_EQ instead of CHECK(a == b) and
  similarly for CHECK_GE, CHECK_GT, CHECK_LE, CHECK_LT, CHECK_NE.

  Args:
    operator: The C++ operator used in the CHECK.
    macro: The CHECK or EXPECT macro being called.
    line: The current source line.

  Returns:
    True if the CHECK can be replaced with a more specific one.
  """
  # This matches decimal and hex integers, strings, and chars (in that
  # order).
  literal = r'([-+]?(\d+|0[xX][0-9a-fA-F]+)[lLuU]{0,3}|".*"|\'.*\')'

  # Require something literal-looking on at least one side of the operator,
  # since CHECK(x == iterator) won't compile as a CHECK_EQ.  This means we
  # can't catch every replaceable CHECK, but it keeps spurious warnings
  # down.
  pattern = (r'\s*' + macro + r'\((\s*' +
             literal + r'\s*' + operator + r'[^<>].*|'
             r'.*[^<>]' + operator + r'\s*' + literal +
             r'\s*\))')

  # Don't complain about CHECK(x == NULL) or similar because
  # CHECK_EQ(x, NULL) won't compile (requires a cast).
  # Also, don't complain about more complex boolean expressions
  # involving && or || such as CHECK(a == b || c == d).
  return Match(pattern, line) and not Search(r'NULL|&&|\|\|', line)
def CheckCheck(filename, clean_lines, linenum, error):
  """Checks the use of CHECK and EXPECT macros.

  Args:
    filename: The name of the current file.
    clean_lines: A CleansedLines instance containing the file.
    linenum: The number of the line to check.
    error: The function to call with any errors found.
  """
  # Decide the set of replacement macros that should be suggested
  raw_line = clean_lines.raw_lines[linenum]
  current_macro = ''
  for candidate_macro in _CHECK_MACROS:
    if raw_line.find(candidate_macro) >= 0:
      current_macro = candidate_macro
      break
  if not current_macro:
    # Don't waste time here if line doesn't contain 'CHECK' or 'EXPECT'
    return

  line = clean_lines.elided[linenum]  # get rid of comments and strings

  # Encourage replacing plain CHECKs with CHECK_EQ/CHECK_NE/etc.
  for operator in ['==', '!=', '>=', '>', '<=', '<']:
    if ReplaceableCheck(operator, current_macro, line):
      error(filename, linenum, 'readability/check', 2,
            'Consider using %s instead of %s(a %s b)' % (
                _CHECK_REPLACEMENT[current_macro][operator],
                current_macro, operator))
      break
def GetLineWidth(line):
  """Determines the width of the line in column positions.

  Args:
    line: A string, which may be a Unicode string.

  Returns:
    The width of the line in column positions, accounting for Unicode
    combining characters and wide characters.
  """
  if not isinstance(line, TEXT_TYPE):
    # Byte strings: one column per byte.
    return len(line)
  # Normalize to composed form, then count East Asian wide/fullwidth
  # characters as two columns and combining marks as zero.
  total = 0
  for char in unicodedata.normalize('NFC', line):
    if unicodedata.east_asian_width(char) in ('W', 'F'):
      total += 2
    elif not unicodedata.combining(char):
      total += 1
  return total
def CheckStyle(filename, clean_lines, linenum, file_extension, class_state,
               error):
  """Checks rules from the 'C++ style rules' section of cppguide.html.

  Most of these rules are hard to test (naming, comment style), but we
  do what we can.  In particular we check for 2-space indents, line lengths,
  tab usage, spaces inside code, etc.

  Args:
    filename: The name of the current file.
    clean_lines: A CleansedLines instance containing the file.
    linenum: The number of the line to check.
    file_extension: The extension (without the dot) of the filename.
    class_state: Tracks the stack of nested class declarations; only its
                 classinfo_stack attribute is read here (for section spacing).
    error: The function to call with any errors found.
  """
  # Don't use "elided" lines here, otherwise we can't check commented lines.
  raw_lines = clean_lines.raw_lines
  line = raw_lines[linenum]
  if line.find('\t') != -1:
    error(filename, linenum, 'whitespace/tab', 1,
          'Tab found; better to use spaces')
  # Compare this line's leading whitespace against the previous line's run
  # of leading tabs, to flag space-indentation and tab-alignment mixups.
  if linenum > 0:
    last_line = raw_lines[linenum - 1]
    # Count the previous line's leading tabs.
    lasttabs = 0
    while lasttabs < len(last_line) and last_line[lasttabs] == '\t':
      lasttabs += 1
    # Walk this line's leading whitespace, consuming one previous-line tab
    # per leading tab here.  When lasttabs reaches -1, this line is indented
    # deeper (in tabs) than the previous line was.
    for char in line:
      if not char.isspace():
        break
      if lasttabs == 0 and char != '\t':
        break
      if lasttabs == -1:
        if char == '\t' and last_line != '':
          error(filename, linenum, 'whitespace/align_tab', 4,
                'Too much indentation or tab used as alignment.')
        break
      if lasttabs > 0 and char != '\t':
        error(filename, linenum, 'whitespace/ident_space', 4,
              'Space used for identation, use tabs instead.')
        break
      lasttabs -= 1
    # Any tab appearing after the first non-tab character is alignment,
    # not indentation.
    foundntab = 0
    for char in line:
      if char != '\t':
        foundntab = 1
      if foundntab and char == '\t':
        error(filename, linenum, 'whitespace/align_tab', 4,
              'Tab used for alignment, use spaces instead.')
        break
  # One or three blank spaces at the beginning of the line is weird; it's
  # hard to reconcile that with 2-space indents.
  # NOTE: here are the conditions rob pike used for his tests.  Mine aren't
  # as sophisticated, but it may be worth becoming so:  RLENGTH==initial_spaces
  # if(RLENGTH > 20) complain = 0;
  # if(match($0, " +(error|private|public|protected):")) complain = 0;
  # if(match(prev, "&& *$")) complain = 0;
  # if(match(prev, "\\|\\| *$")) complain = 0;
  # if(match(prev, "[\",=><] *$")) complain = 0;
  # if(match($0, " <<")) complain = 0;
  # if(match(prev, " +for \\(")) complain = 0;
  # if(prevodd && match(prevprev, " +for \\(")) complain = 0;
  cleansed_line = clean_lines.elided[linenum]
  if line and line[-1].isspace() and not line.isspace():
    error(filename, linenum, 'whitespace/end_of_line', 4,
          'Line ends in whitespace.  Consider deleting these extra spaces.')
  # Check if the line is a header guard.
  is_header_guard = False
  if file_extension in HEADER_EXTENSIONS:
    cppvar = GetHeaderGuardCPPVariable(filename)
    if (line.startswith('#ifndef %s' % cppvar) or
        line.startswith('#define %s' % cppvar) or
        line.startswith('#endif  // %s' % cppvar)):
      is_header_guard = True
  # #include lines and header guards can be long, since there's no clean way to
  # split them.
  #
  # URLs can be long too.  It's possible to split these, but it makes them
  # harder to cut&paste.
  #
  # The "$Id:...$" comment may also get very long without it being the
  # developers fault.
  if (not line.startswith('#include') and not is_header_guard and
      not Match(r'^\s*//.*http(s?)://\S*$', line) and
      not Match(r'^// \$Id:.*#[0-9]+ \$$', line)):
    line_width = GetLineWidth(line)
    if line_width > 100:
      error(filename, linenum, 'whitespace/line_length', 4,
            'Lines should very rarely be longer than 100 characters')
  if (cleansed_line.count(';') > 1 and
      # allow one-line definitions for small structs or classes
      not ((cleansed_line.find('struct ') != -1 or
            cleansed_line.find('class ') != -1) and
           cleansed_line.find('};') != -1) and
      # for loops are allowed two ;'s (and may run over two lines).
      cleansed_line.find('for') == -1 and
      (GetPreviousNonBlankLine(clean_lines, linenum)[0].find('for') == -1 or
       GetPreviousNonBlankLine(clean_lines, linenum)[0].find(';') != -1) and
      # It's ok to have many commands in a switch case that fits in 1 line
      not ((cleansed_line.find('case ') != -1 or
            cleansed_line.find('default:') != -1) and
           cleansed_line.find('break;') != -1)):
    error(filename, linenum, 'whitespace/newline', 4,
          'More than one command on the same line')
  # Some more style checks
  CheckBraces(filename, clean_lines, linenum, error)
  CheckSpacing(filename, clean_lines, linenum, error)
  CheckCheck(filename, clean_lines, linenum, error)
  if class_state and class_state.classinfo_stack:
    CheckSectionSpacing(filename, clean_lines,
                        class_state.classinfo_stack[-1], linenum, error)
# Matches an inline "// duplicate-include" marker, which suppresses the
# duplicate-#include warning on that line.
_RE_PATTERN_INCLUDE_DUPLICATE = re.compile('// +duplicate-include')
# Matches quoted includes naming a bare .h file with no directory component.
_RE_PATTERN_INCLUDE_NEW_STYLE = re.compile(r'#include +"[^/]+\.h"')
# Matches Qt-generated headers (uic 'ui_*.h' / moc 'moc_*.h'), which are
# exempt from the include-the-directory rule.
_RE_PATTERN_INCLUDE_QT = re.compile(r'#include +"(ui_|moc_)[^/]+\.h"')
# Matches any #include directive; group(1) is the opening delimiter
# (< or ") and group(2) is the included path.
_RE_PATTERN_INCLUDE = re.compile(r'^\s*#\s*include\s*([<"])([^>"]*)[>"].*$')
# Matches the first component of a filename delimited by -s and _s. That is:
#   _RE_FIRST_COMPONENT.match('foo').group(0) == 'foo'
#   _RE_FIRST_COMPONENT.match('foo.cpp').group(0) == 'foo'
#   _RE_FIRST_COMPONENT.match('foo-bar_baz.cpp').group(0) == 'foo'
#   _RE_FIRST_COMPONENT.match('foo_bar-baz.cpp').group(0) == 'foo'
_RE_FIRST_COMPONENT = re.compile(r'^[^-_.]+')
def _DropCommonSuffixes(filename):
"""Drops common suffixes like _test.cpp or -inl.h from filename.
For example:
>>> _DropCommonSuffixes('foo/foo-inl.h')
'foo/foo'
>>> _DropCommonSuffixes('foo/bar/foo.cpp')
'foo/bar/foo'
>>> _DropCommonSuffixes('foo/foo_internal.h')
'foo/foo'
>>> _DropCommonSuffixes('foo/foo_unusualinternal.h')
'foo/foo_unusualinternal'
Args:
filename: The input filename.
Returns:
The filename with the common suffix removed.
"""
for suffix in ('test.cpp', 'regtest.cpp', 'unittest.cpp',
'inl.h', 'impl.h', 'internal.h'):
if (filename.endswith(suffix) and len(filename) > len(suffix) and
filename[-len(suffix) - 1] in ('-', '_')):
return filename[:-len(suffix) - 1]
return os.path.splitext(filename)[0]
def _IsTestFilename(filename):
"""Determines if the given filename has a suffix that identifies it as a test.
Args:
filename: The input filename.
Returns:
True if 'filename' looks like a test, False otherwise.
"""
if (filename.endswith('_test.cpp') or
filename.endswith('_unittest.cpp') or
filename.endswith('_regtest.cpp')):
return True
else:
return False
def _ClassifyInclude(fileinfo, include, is_system):
  """Classifies an #include into one of the _XXX_HEADER constants.

  Args:
    fileinfo: The current file cpplint is running over. A FileInfo instance.
    include: The path to a #included file.
    is_system: True if the #include used <> rather than "".

  Returns:
    One of the _XXX_HEADER constants.

  For example:
    >>> _ClassifyInclude(FileInfo('foo/foo.cpp'), 'stdio.h', True)
    _C_SYS_HEADER
    >>> _ClassifyInclude(FileInfo('foo/foo.cpp'), 'string', True)
    _CPP_SYS_HEADER
    >>> _ClassifyInclude(FileInfo('foo/foo.cpp'), 'foo/foo.h', False)
    _LIKELY_MY_HEADER
    >>> _ClassifyInclude(FileInfo('foo/foo_unknown_extension.cpp'),
    ...                  'bar/foo_other_ext.h', False)
    _POSSIBLE_MY_HEADER
    >>> _ClassifyInclude(FileInfo('foo/foo.cpp'), 'foo/bar.h', False)
    _OTHER_HEADER
  """
  # System headers: distinguish standard C++ headers (STL and otherwise)
  # from plain C system headers.
  is_cpp_header = include in _STL_HEADERS or include in _CPP_HEADERS
  if is_system:
    if is_cpp_header:
      return _CPP_SYS_HEADER
    return _C_SYS_HEADER

  # A quoted include whose basename matches ours (after dropping common
  # suffixes) and which lives in our directory, or in the adjacent public/
  # directory, is very likely owned by the target file.
  target_dir, target_base = os.path.split(
      _DropCommonSuffixes(fileinfo.RepositoryName()))
  include_dir, include_base = os.path.split(_DropCommonSuffixes(include))
  if target_base == include_base and (
      include_dir == target_dir or
      include_dir == os.path.normpath(target_dir + '/../public')):
    return _LIKELY_MY_HEADER

  # Sharing only the first -/_-delimited name component is a weaker hint:
  # the target may be implementing the include, so it is allowed to come
  # first, but we never complain when it doesn't.
  target_first = _RE_FIRST_COMPONENT.match(target_base)
  include_first = _RE_FIRST_COMPONENT.match(include_base)
  if (target_first and include_first and
      target_first.group(0) == include_first.group(0)):
    return _POSSIBLE_MY_HEADER

  return _OTHER_HEADER
def CheckIncludeLine(filename, clean_lines, linenum, include_state, error):
  """Check rules that are applicable to #include lines.

  Strings on #include lines are NOT removed from elided line, to make
  certain tasks easier. However, to prevent false positives, checks
  applicable to #include lines in CheckLanguage must be put here.

  Args:
    filename: The name of the current file.
    clean_lines: A CleansedLines instance containing the file.
    linenum: The number of the line to check.
    include_state: An _IncludeState instance in which the headers are inserted.
    error: The function to call with any errors found.
  """
  fileinfo = FileInfo(filename)
  line = clean_lines.lines[linenum]
  # "include" should use the new style "foo/bar.h" instead of just "bar.h".
  # "Configure.h" and Qt-generated (ui_*/moc_*) headers are exempted.
  if _RE_PATTERN_INCLUDE_NEW_STYLE.search(line) and line != "#include \"Configure.h\"" and not _RE_PATTERN_INCLUDE_QT.search(line):
    error(filename, linenum, 'build/include', 4,
          'Include the directory when naming .h files')
  # we shouldn't include a file more than once. actually, there are a
  # handful of instances where doing so is okay, but in general it's
  # not.  A trailing "// duplicate-include" comment suppresses this check.
  match = _RE_PATTERN_INCLUDE.search(line)
  if match:
    include = match.group(2)
    is_system = (match.group(1) == '<')
    if include in include_state and not _RE_PATTERN_INCLUDE_DUPLICATE.search(clean_lines.raw_lines[linenum]):
      error(filename, linenum, 'build/include', 4,
            '"%s" already included at %s:%s' %
            (include, filename, include_state[include]))
    else:
      include_state[include] = linenum
      # We want to ensure that headers appear in the right order:
      # 1) for foo.cpp, foo.h  (preferred location)
      # 2) c system files
      # 3) cpp system files
      # 4) for foo.cpp, foo.h  (deprecated location)
      # 5) other google headers
      #
      # We classify each include statement as one of those 5 types
      # using a number of techniques. The include_state object keeps
      # track of the highest type seen, and complains if we see a
      # lower type after that.
      error_message = include_state.CheckNextIncludeOrder(
          _ClassifyInclude(fileinfo, include, is_system))
      if error_message:
        error(filename, linenum, 'build/include_order', 4,
              '%s. Should be: %s.h, c system, c++ system, other.' %
              (error_message, fileinfo.BaseName()))
      if not include_state.IsInAlphabeticalOrder(include):
        error(filename, linenum, 'build/include_alpha', 4,
              'Include "%s" not in alphabetical order' % include)
  # Look for any of the stream classes that are part of standard C++.
  match = _RE_PATTERN_INCLUDE.match(line)
  if match:
    include = match.group(2)
    if Match(r'(f|ind|io|i|o|parse|pf|stdio|str|)?stream$', include):
      # Many unit tests use cout, so we exempt them.
      if not _IsTestFilename(filename):
        error(filename, linenum, 'readability/streams', 3,
              'Streams are highly discouraged.')
def _GetTextInside(text, start_pattern):
"""Retrieves all the text between matching open and close parentheses.
Given a string of lines and a regular expression string, retrieve all the text
following the expression and between opening punctuation symbols like
(, [, or {, and the matching close-punctuation symbol. This properly nested
occurrences of the punctuations, so for the text like
printf(a(), b(c()));
a call to _GetTextInside(text, r'printf\(') will return 'a(), b(c())'.
start_pattern must match string having an open punctuation symbol at the end.
Args:
text: The lines to extract text. Its comments and strings must be elided.
It can be single line and can span multiple lines.
start_pattern: The regexp string indicating where to start extracting
the text.
Returns:
The extracted text.
None if either the opening string or ending punctuation could not be found.
"""
# TODO(sugawarayu): Audit cpplint.py to see what places could be profitably
# rewritten to use _GetTextInside (and use inferior regexp matching today).
# Give opening punctuations to get the matching close-punctuations.
matching_punctuation = {'(': ')', '{': '}', '[': ']'}
closing_punctuation = set(itervalues(matching_punctuation))
# Find the position to start extracting text.
match = re.search(start_pattern, text, re.M)
if not match: # start_pattern not found in text.
return None
start_position = match.end(0)
assert start_position > 0, (
'start_pattern must ends with an opening punctuation.')
assert text[start_position - 1] in matching_punctuation, (
'start_pattern must ends with an opening punctuation.')
# Stack of closing punctuations we expect to have in text after position.
punctuation_stack = [matching_punctuation[text[start_position - 1]]]
position = start_position
while punctuation_stack and position < len(text):
if text[position] == punctuation_stack[-1]:
punctuation_stack.pop()
elif text[position] in closing_punctuation:
# A closing punctuation without matching opening punctuations.
return None
elif text[position] in matching_punctuation:
punctuation_stack.append(matching_punctuation[text[position]])
position += 1
if punctuation_stack:
# Opening punctuations left without matching close-punctuations.
return None
# punctuations match.
return text[start_position:position - 1]
def CheckLanguage(filename, clean_lines, linenum, file_extension, include_state,
                  error):
  """Checks rules from the 'C++ language rules' section of cppguide.html.

  Some of these rules are hard to test (function overloading, using
  uint32 inappropriately), but we do the best we can.

  Args:
    filename: The name of the current file.
    clean_lines: A CleansedLines instance containing the file.
    linenum: The number of the line to check.
    file_extension: The extension (without the dot) of the filename.
    include_state: An _IncludeState instance in which the headers are inserted.
    error: The function to call with any errors found.
  """
  # If the line is empty or consists of entirely a comment, no need to
  # check it.
  line = clean_lines.elided[linenum]
  if not line:
    return
  # #include lines get their own dedicated checks and nothing else applies.
  match = _RE_PATTERN_INCLUDE.search(line)
  if match:
    CheckIncludeLine(filename, clean_lines, linenum, include_state, error)
    return
  # Create an extended_line, which is the concatenation of the current and
  # next lines, for more effective checking of code that may span more than one
  # line.
  # NOTE(review): extended_line is computed but never referenced below —
  # possibly left over from a removed check; confirm before deleting.
  if linenum + 1 < clean_lines.NumLines():
    extended_line = line + clean_lines.elided[linenum + 1]
  else:
    extended_line = line
  # Make Windows paths like Unix.
  # NOTE(review): fullname is likewise unused in this function.
  fullname = os.path.abspath(filename).replace('\\', '/')
  # TODO(unknown): figure out if they're using default arguments in fn proto.
  # Check for non-const references in functions. This is tricky because &
  # is also used to take the address of something. We allow <> for templates,
  # (ignoring whatever is between the braces) and : for classes.
  # These are complicated re's. They try to capture the following:
  # paren (for fn-prototype start), typename, &, varname. For the const
  # version, we're willing for const to be before typename or after
  # Don't check the implementation on same line.
  fnline = line.split('{', 1)[0]
  # The check fires when there are more reference parameters than
  # const-qualified reference parameters (const before or after the type).
  if (len(re.findall(r'\([^()]*\b(?:[\w:]|<[^()]*>)+(\s?&|&\s?)\w+', fnline)) >
      len(re.findall(r'\([^()]*\bconst\s+(?:typename\s+)?(?:struct\s+)?'
                     r'(?:[\w:]|<[^()]*>)+(\s?&|&\s?)\w+', fnline)) +
      len(re.findall(r'\([^()]*\b(?:[\w:]|<[^()]*>)+\s+const(\s?&|&\s?)[\w]+',
                     fnline))):
    # We allow non-const references in a few standard places, like functions
    # called "swap()" or iostream operators like "<<" or ">>".
    if not Search(
        r'(swap|Swap|operator[<>][<>])\s*\(\s*(?:[\w:]|<.*>)+\s*&',
        fnline):
      error(filename, linenum, 'runtime/references', 2,
            'Is this a non-const reference? '
            'If so, make const or use a pointer.')
  # Check to see if they're using an conversion function cast.
  # I just try to capture the most common basic types, though there are more.
  # Parameterless conversion functions, such as bool(), are allowed as they are
  # probably a member operator declaration or default constructor.
  match = Search(
      r'(\bnew\s+)?\b'  # Grab 'new' operator, if it's there
      r'(int|float|double|bool|char|int32|uint32|int64|uint64)\([^)]', line)
  if match:
    # gMock methods are defined using some variant of MOCK_METHODx(name, type)
    # where type may be float(), int(string), etc. Without context they are
    # virtually indistinguishable from int(x) casts. Likewise, gMock's
    # MockCallback takes a template parameter of the form return_type(arg_type),
    # which looks much like the cast we're trying to detect.
    if (match.group(1) is None and  # If new operator, then this isn't a cast
        not (Match(r'^\s*MOCK_(CONST_)?METHOD\d+(_T)?\(', line) or
             Match(r'^\s*MockCallback<.*>', line))):
      error(filename, linenum, 'readability/casting', 4,
            'Using deprecated casting style. '
            'Use static_cast<%s>(...) instead' %
            match.group(2))
  CheckCStyleCast(filename, linenum, line, clean_lines.raw_lines[linenum],
                  'static_cast',
                  r'\((int|float|double|bool|char|u?int(16|32|64))\)', error)
  # This doesn't catch all cases. Consider (const char * const)"hello".
  #
  # (char *) "foo" should always be a const_cast (reinterpret_cast won't
  # compile).
  if CheckCStyleCast(filename, linenum, line, clean_lines.raw_lines[linenum],
                     'const_cast', r'\((char\s?\*+\s?)\)\s*"', error):
    pass
  else:
    # Check pointer casts for other than string constants
    CheckCStyleCast(filename, linenum, line, clean_lines.raw_lines[linenum],
                    'reinterpret_cast', r'\((\w+\s?\*+\s?)\)', error)
  # In addition, we look for people taking the address of a cast. This
  # is dangerous -- casts can assign to temporaries, so the pointer doesn't
  # point where you think.
  if Search(
      r'(&\([^)]+\)[\w(])|(&(static|dynamic|reinterpret)_cast\b)', line):
    error(filename, linenum, 'runtime/casting', 4,
          ('Are you taking an address of a cast? '
           'This is dangerous: could be a temp var. '
           'Take the address before doing the cast, rather than after'))
  # Check for people declaring static/global STL strings at the top level.
  # This is dangerous because the C++ language does not guarantee that
  # globals with constructors are initialized before the first access.
  match = Match(
      r'((?:|static +)(?:|const +))string +([a-zA-Z0-9_:]+)\b(.*)',
      line)
  # Make sure it's not a function.
  # Function template specialization looks like: "string foo<Type>(...".
  # Class template definitions look like: "string Foo<Type>::Method(...".
  if match and not Match(r'\s*(<.*>)?(::[a-zA-Z0-9_]+)?\s*\(([^"]|$)',
                         match.group(3)):
    error(filename, linenum, 'runtime/string', 4,
          'For a static/global string constant, use a C style string instead: '
          '"%schar %s[]".' %
          (match.group(1), match.group(2)))
  # Check that we're not using RTTI outside of testing code.
  if Search(r'\bdynamic_cast<', line) and not _IsTestFilename(filename):
    error(filename, linenum, 'runtime/rtti', 5,
          'Do not use dynamic_cast<>. If you need to cast within a class '
          "hierarchy, use static_cast<> to upcast. Google doesn't support "
          'RTTI.')
  # Member initializer like "x_(x_)" initializes a member with itself.
  if Search(r'\b([A-Za-z0-9_]*_)\(\1\)', line):
    error(filename, linenum, 'runtime/init', 4,
          'You seem to be initializing a member variable with itself.')
  if file_extension in HEADER_EXTENSIONS:
    # TODO(unknown): check that 1-arg constructors are explicit.
    #                How to tell it's a constructor?
    #                (handled in CheckForNonStandardConstructs for now)
    # TODO(unknown): check that classes have DISALLOW_EVIL_CONSTRUCTORS
    #                (level 1 error)
    pass
  # Check if people are using the verboten C basic types. The only exception
  # we regularly allow is "unsigned short port" for port.
  if Search(r'\bshort port\b', line):
    if not Search(r'\bunsigned short port\b', line):
      error(filename, linenum, 'runtime/int', 4,
            'Use "unsigned short" for ports, not "short"')
  else:
    match = Search(r'\b(short|long(?! +double)|long long)\b', line)
    if match:
      error(filename, linenum, 'runtime/int', 4,
            'Use int16/int64/etc, rather than the C type %s' % match.group(1))
  # When snprintf is used, the second argument shouldn't be a literal.
  match = Search(r'snprintf\s*\(([^,]*),\s*([0-9]*)\s*,', line)
  if match and match.group(2) != '0':
    # If 2nd arg is zero, snprintf is used to calculate size.
    error(filename, linenum, 'runtime/printf', 3,
          'If you can, use sizeof(%s) instead of %s as the 2nd arg '
          'to snprintf.' % (match.group(1), match.group(2)))
  # Check if some verboten C functions are being used.
  if Search(r'\bsprintf\b', line):
    error(filename, linenum, 'runtime/printf', 5,
          'Never use sprintf. Use snprintf instead.')
  match = Search(r'\b(strcpy|strcat)\b', line)
  if match:
    error(filename, linenum, 'runtime/printf', 4,
          'Almost always, snprintf is better than %s' % match.group(1))
  if Search(r'\bsscanf\b', line):
    error(filename, linenum, 'runtime/printf', 1,
          'sscanf can be ok, but is slow and can overflow buffers.')
  # Check if some verboten operator overloading is going on
  # TODO(unknown): catch out-of-line unary operator&:
  #   class X {};
  #   int operator&(const X& x) { return 42; }  // unary operator&
  # The trick is it's hard to tell apart from binary operator&:
  #   class Y { int operator&(const Y& x) { return 23; } };  // binary operator&
  if Search(r'\boperator\s*&\s*\(\s*\)', line):
    error(filename, linenum, 'runtime/operator', 4,
          'Unary operator& is dangerous. Do not use it.')
  # Check for suspicious usage of "if" like
  # } if (a == b) {
  if Search(r'\}\s*if\s*\(', line):
    error(filename, linenum, 'readability/braces', 4,
          'Did you mean "else if"? If not, start a new line for "if".')
  # Check for potential format string bugs like printf(foo).
  # We constrain the pattern not to pick things like DocidForPrintf(foo).
  # Not perfect but it can catch printf(foo.c_str()) and printf(foo->c_str())
  # TODO(sugawarayu): Catch the following case. Need to change the calling
  # convention of the whole function to process multiple line to handle it.
  #   printf(
  #       boy_this_is_a_really_long_variable_that_cannot_fit_on_the_prev_line);
  printf_args = _GetTextInside(line, r'(?i)\b(string)?printf\s*\(')
  if printf_args:
    match = Match(r'([\w.\->()]+)$', printf_args)
    if match:
      function_name = re.search(r'\b((?:string)?printf)\s*\(',
                                line, re.I).group(1)
      error(filename, linenum, 'runtime/printf', 4,
            'Potential format string bug. Do %s("%%s", %s) instead.'
            % (function_name, match.group(1)))
  # Check for potential memset bugs like memset(buf, sizeof(buf), 0).
  match = Search(r'memset\s*\(([^,]*),\s*([^,]*),\s*0\s*\)', line)
  # NOTE(review): in this regex the '^' binds only to the first alternative
  # and '$' only to the last, so e.g. multi-digit hex literals are not fully
  # anchored — looks unintended; confirm before tightening.
  if match and not Match(r"^''|-?[0-9]+|0x[0-9A-Fa-f]$", match.group(2)):
    error(filename, linenum, 'runtime/memset', 4,
          'Did you mean "memset(%s, 0, %s)"?'
          % (match.group(1), match.group(2)))
  if Search(r'\busing namespace\b', line):
    error(filename, linenum, 'build/namespaces', 5,
          'Do not use namespace using-directives. '
          'Use using-declarations instead.')
  # Detect variable-length arrays.
  match = Match(r'\s*(.+::)?(\w+) [a-z]\w*\[(.+)];', line)
  if (match and match.group(2) != 'return' and match.group(2) != 'delete' and
      match.group(3).find(']') == -1):
    # Split the size using space and arithmetic operators as delimiters.
    # If any of the resulting tokens are not compile time constants then
    # report the error.
    # NOTE(review): the trailing ']' in this split pattern looks like a typo
    # (it makes the last alternative '>>]' rather than '>>'); confirm.
    tokens = re.split(r'\s|\+|\-|\*|\/|<<|>>]', match.group(3))
    is_const = True
    skip_next = False
    for tok in tokens:
      if skip_next:
        skip_next = False
        continue
      if Search(r'sizeof\(.+\)', tok): continue
      if Search(r'arraysize\(\w+\)', tok): continue
      tok = tok.lstrip('(')
      tok = tok.rstrip(')')
      if not tok: continue
      if Match(r'\d+', tok): continue
      if Match(r'0[xX][0-9a-fA-F]+', tok): continue
      if Match(r'k[A-Z0-9]\w*', tok): continue
      if Match(r'(.+::)?k[A-Z0-9]\w*', tok): continue
      if Match(r'(.+::)?[A-Z][A-Z0-9_]*', tok): continue
      # A catch all for tricky sizeof cases, including 'sizeof expression',
      # 'sizeof(*type)', 'sizeof(const type)', 'sizeof(struct StructName)'
      # requires skipping the next token because we split on ' ' and '*'.
      if tok.startswith('sizeof'):
        skip_next = True
        continue
      is_const = False
      break
    if not is_const:
      error(filename, linenum, 'runtime/arrays', 1,
            'Do not use variable-length arrays. Use an appropriately named '
            "('k' followed by CamelCase) compile-time constant for the size.")
  # If DISALLOW_EVIL_CONSTRUCTORS, DISALLOW_COPY_AND_ASSIGN, or
  # DISALLOW_IMPLICIT_CONSTRUCTORS is present, then it should be the last thing
  # in the class declaration.
  match = Match(
      (r'\s*'
       r'(DISALLOW_(EVIL_CONSTRUCTORS|COPY_AND_ASSIGN|IMPLICIT_CONSTRUCTORS))'
       r'\(.*\);$'),
      line)
  if match and linenum + 1 < clean_lines.NumLines():
    next_line = clean_lines.elided[linenum + 1]
    # We allow some, but not all, declarations of variables to be present
    # in the statement that defines the class. The [\w\*,\s]* fragment of
    # the regular expression below allows users to declare instances of
    # the class or pointers to instances, but not less common types such
    # as function pointers or arrays. It's a tradeoff between allowing
    # reasonable code and avoiding trying to parse more C++ using regexps.
    if not Search(r'^\s*}[\w\*,\s]*;', next_line):
      error(filename, linenum, 'readability/constructors', 3,
            match.group(1) + ' should be the last thing in the class')
  # Check for use of unnamed namespaces in header files. Registration
  # macros are typically OK, so we allow use of "namespace {" on lines
  # that end with backslashes.
  if (file_extension in HEADER_EXTENSIONS
      and Search(r'\bnamespace\s*{', line)
      and line[-1] != '\\'):
    error(filename, linenum, 'build/namespaces', 4,
          'Do not use unnamed namespaces in header files. See '
          'http://google-styleguide.googlecode.com/svn/trunk/cppguide.xml#Namespaces'
          ' for more information.')
def CheckCStyleCast(filename, linenum, line, raw_line, cast_type, pattern,
                    error):
  """Flags C-style casts matching 'pattern', plus related sizeof(type) usage.

  Args:
    filename: The name of the current file.
    linenum: The number of the line to check.
    line: The line of code to check.
    raw_line: The raw line of code to check, with comments.
    cast_type: The string for the C++ cast to recommend. This is either
      reinterpret_cast, static_cast, or const_cast, depending.
    pattern: The regular expression used to find C-style casts.
    error: The function to call with any errors found.

  Returns:
    True if an error was emitted.
    False otherwise.
  """
  cast_match = Search(pattern, line)
  if not cast_match:
    return False

  # A match immediately preceded by 'sizeof' is sizeof(type), not a cast.
  if Match(r'.*sizeof\s*$', line[0:cast_match.start(1) - 1]):
    error(filename, linenum, 'runtime/sizeof', 1,
          'Using sizeof(type). Use sizeof(varname) instead if possible')
    return True

  trailing = line[cast_match.end(0):]

  # What follows the match decides whether this is really a function
  # declaration or function-pointer type with a single unnamed parameter:
  #   ')'        -- function pointer argument: void foo(void (*bar)(int));
  #   '='        -- function pointer assignment: void *(*foo)(int) = ...
  #   ';' / '{' / 'throw()' -- plain declaration: void foo(int); / const;
  #   '>'        -- MockCallback<...>
  # Only a single unnamed argument is recognized; multiple arguments with
  # some unnamed are not caught.
  decl_match = Match(r'\s*(\)|=|(const)?\s*(;|\{|throw\(\)|>))', trailing)
  if decl_match:
    terminator = decl_match.group(3)
    unnamed_param = (not terminator or terminator == ';' or
                     ('MockCallback<' not in raw_line and
                      '/*' not in raw_line))
    if unnamed_param and 'SIGNAL' not in raw_line and 'SLOT' not in raw_line:
      error(filename, linenum, 'readability/function', 3,
            'All parameters should be named in a function')
    return True

  # Whatever remains really is a C-style cast.
  error(filename, linenum, 'readability/casting', 4,
        'Using C-style cast. Use %s<%s>(...) instead' %
        (cast_type, cast_match.group(1)))
  return True
# Maps each standard C++ header to the template names it declares.  Used to
# suggest which header to #include when one of these templates appears in
# the code (see the include-what-you-use check).
_HEADERS_CONTAINING_TEMPLATES = (
    ('<deque>', ('deque',)),
    ('<functional>', ('unary_function', 'binary_function',
                      'plus', 'minus', 'multiplies', 'divides', 'modulus',
                      'negate',
                      'equal_to', 'not_equal_to', 'greater', 'less',
                      'greater_equal', 'less_equal',
                      'logical_and', 'logical_or', 'logical_not',
                      'unary_negate', 'not1', 'binary_negate', 'not2',
                      'bind1st', 'bind2nd',
                      'pointer_to_unary_function',
                      'pointer_to_binary_function',
                      'ptr_fun',
                      'mem_fun_t', 'mem_fun', 'mem_fun1_t', 'mem_fun1_ref_t',
                      'mem_fun_ref_t',
                      'const_mem_fun_t', 'const_mem_fun1_t',
                      'const_mem_fun_ref_t', 'const_mem_fun1_ref_t',
                      'mem_fun_ref',
                      )),
    ('<limits>', ('numeric_limits',)),
    ('<list>', ('list',)),
    ('<map>', ('map', 'multimap',)),
    ('<memory>', ('allocator',)),
    ('<queue>', ('queue', 'priority_queue',)),
    ('<set>', ('set', 'multiset',)),
    ('<stack>', ('stack',)),
    ('<string>', ('char_traits', 'basic_string',)),
    ('<utility>', ('pair',)),
    ('<vector>', ('vector',)),
    # gcc extensions.
    # Note: std::hash is their hash, ::hash is our hash
    ('<hash_map>', ('hash_map', 'hash_multimap',)),
    ('<hash_set>', ('hash_set', 'hash_multiset',)),
    ('<slist>', ('slist',)),
    )
# Matches uses of the (non-templatized) STL 'string' type.
_RE_PATTERN_STRING = re.compile(r'\bstring\b')

# (compiled pattern, display name, required header) triples for the
# <algorithm> functions we can detect.
_re_pattern_algorithm_header = []
for _template in ('copy', 'max', 'min', 'min_element', 'sort', 'swap',
                  'transform'):
  # Match max<type>(..., ...), max(..., ...), but not foo->max, foo.max or
  # type::max().
  _re_pattern_algorithm_header.append(
      (re.compile(r'[^>.]\b' + _template + r'(<.*?>)?\([^\)]'),
       _template,
       '<algorithm>'))

# Same triple format for every template listed in
# _HEADERS_CONTAINING_TEMPLATES; the display name carries a '<>' suffix
# (e.g. 'vector<>').
_re_pattern_templates = []
for _header, _templates in _HEADERS_CONTAINING_TEMPLATES:
  for _template in _templates:
    _re_pattern_templates.append(
        (re.compile(r'(\<|\b)' + _template + r'\s*\<'),
         _template + '<>',
         _header))
def FilesBelongToSameModule(filename_cc, filename_h):
  """Reports whether a .cpp file and a header form one module.

  foo.h, foo-inl.h, foo.cpp, foo_test.cpp and foo_unittest.cpp belong to the
  same 'module' if they are in the same directory, and some/path/public/xyzzy
  pairs with some/path/internal/xyzzy.  When filename_cc carries a longer
  path than filename_h (e.g. '/absolute/path/to/base/sysinfo.cpp' including
  'base/sysinfo.h'), the extra leading path is returned so the caller can
  locate and open the header more robustly; real include paths are not
  available here, so this is guesswork.

  Known bugs: tools/base/bar.cpp and base/bar.h are considered the same
  module by this implementation, producing occasional false positives.

  Args:
    filename_cc: is the path for the .cpp file
    filename_h: is the path for the header path

  Returns:
    Tuple with a bool and a string:
    bool: True if filename_cc and filename_h belong to the same module.
    string: the additional prefix needed to open the header file.
  """
  if not filename_cc.endswith('.cpp'):
    return (False, '')
  stem_cc = filename_cc[:-len('.cpp')]
  # Strip a test suffix, if present ('_unittest' is checked before '_test').
  for test_suffix in ('_unittest', '_test'):
    if stem_cc.endswith(test_suffix):
      stem_cc = stem_cc[:-len(test_suffix)]
      break
  stem_cc = stem_cc.replace('/public/', '/').replace('/internal/', '/')

  if not filename_h.endswith('.h'):
    return (False, '')
  stem_h = filename_h[:-len('.h')]
  if stem_h.endswith('-inl'):
    stem_h = stem_h[:-len('-inl')]
  stem_h = stem_h.replace('/public/', '/').replace('/internal/', '/')

  if stem_cc.endswith(stem_h):
    # Whatever precedes the header's stem is the prefix the caller needs.
    return (True, stem_cc[:-len(stem_h)])
  return (False, '')
def UpdateIncludeState(filename, include_state, io=codecs):
  """Fill up the include_state with new includes found from the file.

  Args:
    filename: the name of the header to read.
    include_state: an _IncludeState instance in which the headers are inserted.
    io: The io factory to use to read the file. Provided for testability.

  Returns:
    True if the header was successfully read (even if it contained no
    includes). False if the file could not be opened.
  """
  try:
    headerfile = io.open(filename, 'r', 'utf8', 'replace')
  except IOError:
    return False
  try:
    linenum = 0
    for line in headerfile:
      linenum += 1
      clean_line = CleanseComments(line)
      match = _RE_PATTERN_INCLUDE.search(clean_line)
      if match:
        include = match.group(2)
        # The value formatting is cute, but not really used right now.
        # What matters here is that the key is in include_state.
        include_state.setdefault(include, '%s:%d' % (filename, linenum))
  finally:
    # Bug fix: the original leaked the open file handle. Close it even if
    # CleanseComments or the regex search raises.
    headerfile.close()
  return True
def CheckForIncludeWhatYouUse(filename, clean_lines, include_state, error,
                              io=codecs):
  """Reports for missing stl includes.

  This function will output warnings to make sure you are including the headers
  necessary for the stl containers and functions that you use. We only give one
  reason to include a header. For example, if you use both equal_to<> and
  less<> in a .h file, only one (the latter in the file) of these will be
  reported as a reason to include the <functional>.

  Args:
    filename: The name of the current file.
    clean_lines: A CleansedLines instance containing the file.
    include_state: An _IncludeState instance.
    error: The function to call with any errors found.
    io: The IO factory to use to read the header file. Provided for unittest
        injection.
  """
  required = {}  # A map of header name to linenumber and the template entity.
  # Example of required: { '<functional>': (1219, 'less<>') }

  # Pass 1: scan every (comment-stripped) line and record which STL headers
  # this file appears to need, remembering only the *last* reason per header.
  for linenum in range(clean_lines.NumLines()):
    line = clean_lines.elided[linenum]
    if not line or line[0] == '#':
      continue

    # String is special -- it is a non-templatized type in STL.
    matched = _RE_PATTERN_STRING.search(line)
    if matched:
      # Don't warn about strings in non-STL namespaces:
      # (We check only the first match per line; good enough.)
      prefix = line[:matched.start()]
      if prefix.endswith('std::') or not prefix.endswith('::'):
        required['<string>'] = (linenum, 'string')

    for pattern, template, header in _re_pattern_algorithm_header:
      if pattern.search(line):
        required[header] = (linenum, template)

    # The following function is just a speed up, no semantics are changed.
    if not '<' in line:  # Reduces the cpu time usage by skipping lines.
      continue

    for pattern, template, header in _re_pattern_templates:
      if pattern.search(line):
        required[header] = (linenum, template)

  # The policy is that if you #include something in foo.h you don't need to
  # include it again in foo.cpp. Here, we will look at possible includes.
  # Let's copy the include_state so it is only messed up within this function.
  include_state = include_state.copy()

  # Did we find the header for this file (if any) and successfully load it?
  header_found = False

  # Use the absolute path so that matching works properly.
  abs_filename = FileInfo(filename).FullName()

  # For Emacs's flymake.
  # If cpplint is invoked from Emacs's flymake, a temporary file is generated
  # by flymake and that file name might end with '_flymake.cpp'. In that case,
  # restore original file name here so that the corresponding header file can be
  # found.
  # e.g. If the file name is 'foo_flymake.cpp', we should search for 'foo.h'
  # instead of 'foo_flymake.h'
  abs_filename = re.sub(r'_flymake\.cpp$', '.cpp', abs_filename)

  # Pass 2: fold the includes of this file's own header(s) into include_state,
  # so that headers already pulled in transitively are not reported.
  # include_state is modified during iteration, so we iterate over a copy of
  # the keys.
  header_keys = list(include_state.keys())
  for header in header_keys:
    (same_module, common_path) = FilesBelongToSameModule(abs_filename, header)
    fullpath = common_path + header
    if same_module and UpdateIncludeState(fullpath, include_state, io):
      header_found = True

  # If we can't find the header file for a .cpp, assume it's because we don't
  # know where to look. In that case we'll give up as we're not sure they
  # didn't include it in the .h file.
  # TODO(unknown): Do a better job of finding .h files so we are confident that
  # not having the .h file means there isn't one.
  if filename.endswith('.cpp') and not header_found:
    return

  # All the lines have been processed, report the errors found.
  for required_header_unstripped in required:
    template = required[required_header_unstripped][1]
    if required_header_unstripped.strip('<>"') not in include_state:
      error(filename, required[required_header_unstripped][0],
            'build/include_what_you_use', 4,
            'Add #include ' + required_header_unstripped + ' for ' + template)
# Matches an explicit template instantiation of make_pair, e.g.
# "make_pair<int, int>(...)"; a deduced call "make_pair(...)" does not match.
_RE_PATTERN_EXPLICIT_MAKEPAIR = re.compile(r'\bmake_pair\s*<')


def CheckMakePairUsesDeduction(filename, clean_lines, linenum, error):
  """Check that make_pair's template arguments are deduced.

  G++ 4.6 in C++0x mode fails badly if make_pair's template arguments are
  specified explicitly, and such use isn't intended in any case.

  Args:
    filename: The name of the current file.
    clean_lines: A CleansedLines instance containing the file.
    linenum: The number of the line to check.
    error: The function to call with any errors found.
  """
  # Scan the raw (uncleansed) text of the line.
  line = clean_lines.raw_lines[linenum]
  if _RE_PATTERN_EXPLICIT_MAKEPAIR.search(line) is None:
    return
  error(filename, linenum, 'build/explicit_make_pair',
        4,  # 4 = high confidence
        'Omit template arguments from make_pair OR use pair directly OR'
        ' if appropriate, construct a pair directly')
def ProcessLine(filename, file_extension,
                clean_lines, line, include_state, function_state,
                class_state, error, extra_check_functions=[]):
  """Processes a single line in the file.

  Args:
    filename: Filename of the file that is being processed.
    file_extension: The extension (dot not included) of the file.
    clean_lines: An array of strings, each representing a line of the file,
                 with comments stripped.
    line: Number of line being processed.
    include_state: An _IncludeState instance in which the headers are inserted.
    function_state: A _FunctionState instance which counts function lines, etc.
    class_state: A _ClassState instance which maintains information about
                 the current stack of nested class declarations being parsed.
    error: A callable to which errors are reported, which takes 4 arguments:
           filename, line number, error level, and message
    extra_check_functions: An array of additional check functions that will be
                           run on each source line. Each function takes 4
                           arguments: filename, clean_lines, line, error
  """
  raw_lines = clean_lines.raw_lines
  # Parse any NOLINT suppressions on the raw line before running the checks.
  ParseNolintSuppressions(filename, raw_lines[line], line, error)
  CheckForFunctionLengths(filename, clean_lines, line, function_state, error)
  CheckForMultilineCommentsAndStrings(filename, clean_lines, line, error)
  CheckStyle(filename, clean_lines, line, file_extension, class_state, error)
  CheckLanguage(filename, clean_lines, line, file_extension, include_state,
                error)
  CheckForNonStandardConstructs(filename, clean_lines, line,
                                class_state, error)
  CheckPosixThreading(filename, clean_lines, line, error)
  CheckInvalidIncrement(filename, clean_lines, line, error)
  CheckMakePairUsesDeduction(filename, clean_lines, line, error)
  # Caller-supplied checks run after all built-in checks.
  for check_fn in extra_check_functions:
    check_fn(filename, clean_lines, line, error)
def ProcessFileData(filename, file_extension, lines, error,
                    extra_check_functions=[]):
  """Performs lint checks and reports any errors to the given error function.

  Args:
    filename: Filename of the file that is being processed.
    file_extension: The extension (dot not included) of the file.
    lines: An array of strings, each representing a line of the file, with the
           last element being empty if the file is terminated with a newline.
    error: A callable to which errors are reported, which takes 4 arguments:
           filename, line number, error level, and message
    extra_check_functions: An array of additional check functions that will be
                           run on each source line. Each function takes 4
                           arguments: filename, clean_lines, line, error
  """
  # Pad with sentinel lines so line numbers and list indices both start at 1
  # and the end of file is well defined.
  lines = (['// marker so line numbers and indices both start at 1'] + lines +
           ['// marker so line numbers end in a known way'])

  include_state = _IncludeState()
  function_state = _FunctionState()
  class_state = _ClassState()

  # Suppressions are per-file; clear any state left from the previous file.
  ResetNolintSuppressions()

  CheckForCopyright(filename, lines, error)

  if file_extension in HEADER_EXTENSIONS:
    CheckForHeaderGuard(filename, lines, error)

  RemoveMultiLineComments(filename, lines, error)
  clean_lines = CleansedLines(lines)
  for line in range(clean_lines.NumLines()):
    ProcessLine(filename, file_extension, clean_lines, line,
                include_state, function_state, class_state, error,
                extra_check_functions)
  # Report classes whose declarations were still open at end of file.
  class_state.CheckFinished(filename, error)

  CheckForIncludeWhatYouUse(filename, clean_lines, include_state, error)

  # We check here rather than inside ProcessLine so that we see raw
  # lines rather than "cleaned" lines.
  CheckForUnicodeReplacementCharacters(filename, lines, error)

  CheckForNewlineAtEOF(filename, lines, error)
def ProcessFile(filename, vlevel, extra_check_functions=None):
  """Does google-lint on a single file.

  Args:
    filename: The name of the file to parse, or '-' for stdin.
    vlevel: The level of errors to report. Every error of confidence
            >= verbose_level will be reported. 0 is a good default.
    extra_check_functions: An array of additional check functions that will be
                           run on each source line. Each function takes 4
                           arguments: filename, clean_lines, line, error
  """
  # Fix: avoid the mutable-default-argument pitfall; substitute a fresh list.
  if extra_check_functions is None:
    extra_check_functions = []

  _SetVerboseLevel(vlevel)

  try:
    # Support the UNIX convention of using "-" for stdin. Note that
    # we are not opening the file with universal newline support
    # (which codecs doesn't support anyway), so the resulting lines do
    # contain trailing '\r' characters if we are reading a file that
    # has CRLF endings.
    # If after the split a trailing '\r' is present, it is removed
    # below and a warning is issued if this file is processed.
    if filename == '-':
      lines = codecs.StreamReaderWriter(sys.stdin,
                                        codecs.getreader('utf8'),
                                        codecs.getwriter('utf8'),
                                        'replace').read().split('\n')
    else:
      lines = codecs.open(filename, 'r', 'utf8', 'replace').read().split('\n')

    carriage_return_found = False
    # Remove trailing '\r'.
    for linenum in range(len(lines)):
      if lines[linenum].endswith('\r'):
        lines[linenum] = lines[linenum].rstrip('\r')
        carriage_return_found = True

  except IOError:
    sys.stderr.write(
        "Skipping input '%s': Can't open for reading\n" % filename)
    return

  # Note, if no dot is found, this will give the entire filename as the ext.
  file_extension = filename[filename.rfind('.') + 1:]

  # When reading from stdin, the extension is unknown, so no cpplint tests
  # should rely on the extension.
  if (filename != '-' and file_extension not in EXTENSIONS):
    sys.stderr.write('Ignoring %s; extension not in %s\n' % (filename, EXTENSIONS))
  else:
    ProcessFileData(filename, file_extension, lines, Error,
                    extra_check_functions)
    if carriage_return_found:
      # Use 0 for linenum since outputting only one error for potentially
      # several lines.
      # Fix: the original string concatenation was missing a space, producing
      # "found;better".
      Error(filename, 0, 'whitespace/carriage-return', 1,
            'One or more unexpected \\r (^M) found; '
            'better to use only a \\n')

  sys.stderr.write('Done processing %s\n' % filename)
def PrintUsage(message):
  """Prints a brief usage string and exits, optionally with an error message.

  Args:
    message: The optional error message.
  """
  sys.stderr.write(_USAGE)
  # sys.exit(<str>) prints the string to stderr and exits with status 1,
  # matching the plain sys.exit(1) of the no-message case.
  exit_arg = ('\nFATAL ERROR: ' + message) if message else 1
  sys.exit(exit_arg)
def PrintCategories():
  """Prints a list of all the error-categories used by error messages.

  These are the categories used to filter messages via --filter.
  """
  for category in _ERROR_CATEGORIES:
    sys.stderr.write('  %s\n' % category)
  sys.exit(0)
def ParseArguments(args):
  """Parses the command line arguments.

  Recognized long options: --help, --output=, --verbose=, --project=,
  --counting= and --filter=. Output format, verbosity, project name, filters
  and counting style are stored as module-wide state (side-effects); the
  positional arguments are returned for linting.

  Args:
    args: The command line arguments:

  Returns:
    The list of filenames to lint.
  """
  try:
    (opts, filenames) = getopt.getopt(args, '', ['help', 'output=', 'verbose=',
                                                 'project=',
                                                 'counting=',
                                                 'filter='])
  except getopt.GetoptError:
    PrintUsage('Invalid arguments.')  # exits

  # Defaults come from the current global state; options override them.
  project_name = _ProjectName()
  verbosity = _VerboseLevel()
  output_format = _OutputFormat()
  filters = ''
  counting_style = ''

  for (opt, value) in opts:
    if opt == '--help':
      PrintUsage(None)
    elif opt == '--output':
      if value not in ('emacs', 'vs7'):
        PrintUsage('The only allowed output formats are emacs and vs7.')
      output_format = value
    elif opt == '--project':
      project_name = value
    elif opt == '--verbose':
      verbosity = int(value)
    elif opt == '--filter':
      filters = value
      if not filters:
        # An empty --filter is a request to list the available categories.
        PrintCategories()  # exits
    elif opt == '--counting':
      if value not in ('total', 'toplevel', 'detailed'):
        PrintUsage('Valid counting options are total, toplevel, and detailed')
      counting_style = value

  if not filenames:
    PrintUsage('No files were specified.')

  _SetOutputFormat(output_format)
  _SetProjectName(project_name)
  _SetVerboseLevel(verbosity)
  _SetFilters(filters)
  _SetCountingStyle(counting_style)

  return filenames
def main():
  """Entry point: lint every file named on the command line.

  Exits with status 1 if any error was reported, 0 otherwise.
  """
  filenames = ParseArguments(sys.argv[1:])

  saved_stderr = sys.stderr
  try:
    # Replace stderr with a 'replace'-errors codec wrapper so that printing
    # diagnostics containing non-ASCII characters cannot raise.
    sys.stderr = codecs.StreamReader(sys.stderr,
                                     'replace')

    _cpplint_state.ResetErrorCounts()
    for path in filenames:
      ProcessFile(path, _cpplint_state.verbose_level)
    _cpplint_state.PrintErrorCounts()
  finally:
    sys.stderr = saved_stderr

  sys.exit(_cpplint_state.error_count > 0)


if __name__ == '__main__':
  main()
| gpl-3.0 | -4,753,485,601,099,081,000 | 38.70629 | 132 | 0.650187 | false |
hfinucane/ansible | lib/ansible/plugins/connection/ssh.py | 5 | 28512 | # (c) 2012, Michael DeHaan <[email protected]>
# Copyright 2015 Abhijit Menon-Sen <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import fcntl
import os
import pipes
import pty
import select
import subprocess
import time
from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleConnectionFailure, AnsibleFileNotFound
from ansible.plugins.connection import ConnectionBase
from ansible.utils.path import unfrackpath, makedirs_safe
from ansible.utils.unicode import to_bytes, to_unicode, to_str
from ansible.compat.six import text_type, binary_type
# When run from the ansible CLI a shared Display instance is exported via
# __main__; fall back to a private instance when this module is imported
# outside the CLI (e.g. by tests).
try:
    from __main__ import display
except ImportError:
    from ansible.utils.display import Display
    display = Display()

# Tri-state module-level cache for Connection._sshpass_available():
# None = not yet probed, True/False = cached result of the one-time probe.
SSHPASS_AVAILABLE = None
class Connection(ConnectionBase):
    ''' ssh based connections '''

    # Name identifying this connection plugin.
    transport = 'ssh'
    # Module payloads can be piped over stdin (see _exec_command/_run).
    has_pipelining = True
    # NOTE(review): 'runas' is excluded from the supported become methods —
    # presumably it cannot work over ssh; confirm against the become docs.
    become_methods = frozenset(C.BECOME_METHODS).difference(['runas'])
    def __init__(self, *args, **kwargs):
        super(Connection, self).__init__(*args, **kwargs)

        # Target address; used as the destination host for ssh/scp/sftp.
        self.host = self._play_context.remote_addr
# The connection is created by running ssh/scp/sftp from the exec_command,
# put_file, and fetch_file methods, so we don't need to do any connection
# management here.
    def _connect(self):
        """No-op: the ssh/scp/sftp processes are spawned lazily by
        exec_command, put_file and fetch_file, so there is no persistent
        connection to establish here; simply return self."""
        return self
@staticmethod
def _sshpass_available():
global SSHPASS_AVAILABLE
# We test once if sshpass is available, and remember the result. It
# would be nice to use distutils.spawn.find_executable for this, but
# distutils isn't always available; shutils.which() is Python3-only.
if SSHPASS_AVAILABLE is None:
try:
p = subprocess.Popen(["sshpass"], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
p.communicate()
SSHPASS_AVAILABLE = True
except OSError:
SSHPASS_AVAILABLE = False
return SSHPASS_AVAILABLE
@staticmethod
def _persistence_controls(command):
'''
Takes a command array and scans it for ControlPersist and ControlPath
settings and returns two booleans indicating whether either was found.
This could be smarter, e.g. returning false if ControlPersist is 'no',
but for now we do it simple way.
'''
controlpersist = False
controlpath = False
for arg in command:
if 'controlpersist' in arg.lower():
controlpersist = True
elif 'controlpath' in arg.lower():
controlpath = True
return controlpersist, controlpath
    def _add_args(self, explanation, args):
        """
        Adds the given args to self._command and displays a caller-supplied
        explanation of why they were added.
        """
        self._command += args
        # Logged only at the highest verbosity (-vvvvv): each argument group
        # plus the reason it was appended.
        display.vvvvv('SSH: ' + explanation + ': (%s)' % ')('.join(args), host=self._play_context.remote_addr)
    def _build_command(self, binary, *other_args):
        '''
        Takes a binary (ssh, scp, sftp) and optional extra arguments and returns
        a command line as an array that can be passed to subprocess.Popen.

        Side effects: (re)initializes self._command; may create
        self.sshpass_pipe (password auth) and set self._persistent
        (ControlPersist in use).
        '''

        self._command = []

        ## First, the command name.

        # If we want to use password authentication, we have to set up a pipe to
        # write the password to sshpass.

        if self._play_context.password:
            if not self._sshpass_available():
                raise AnsibleError("to use the 'ssh' connection type with passwords, you must install the sshpass program")

            self.sshpass_pipe = os.pipe()
            # sshpass -d<fd> reads the password from the inherited pipe fd.
            self._command += ['sshpass', '-d{0}'.format(self.sshpass_pipe[0])]

        self._command += [binary]

        ## Next, additional arguments based on the configuration.

        # sftp batch mode allows us to correctly catch failed transfers, but can
        # be disabled if the client side doesn't support the option.
        if binary == 'sftp' and C.DEFAULT_SFTP_BATCH_MODE:
            self._command += ['-b', '-']

        self._command += ['-C']

        if self._play_context.verbosity > 3:
            self._command += ['-vvv']
        elif binary == 'ssh':
            # Older versions of ssh (e.g. in RHEL 6) don't accept sftp -q.
            self._command += ['-q']

        # Next, we add [ssh_connection]ssh_args from ansible.cfg.

        if self._play_context.ssh_args:
            args = self._split_ssh_args(self._play_context.ssh_args)
            self._add_args("ansible.cfg set ssh_args", args)

        # Now we add various arguments controlled by configuration file settings
        # (e.g. host_key_checking) or inventory variables (ansible_ssh_port) or
        # a combination thereof.

        if not C.HOST_KEY_CHECKING:
            self._add_args(
                "ANSIBLE_HOST_KEY_CHECKING/host_key_checking disabled",
                ("-o", "StrictHostKeyChecking=no")
            )

        if self._play_context.port is not None:
            self._add_args(
                "ANSIBLE_REMOTE_PORT/remote_port/ansible_port set",
                ("-o", "Port={0}".format(self._play_context.port))
            )

        key = self._play_context.private_key_file
        if key:
            self._add_args(
                "ANSIBLE_PRIVATE_KEY_FILE/private_key_file/ansible_ssh_private_key_file set",
                ("-o", "IdentityFile=\"{0}\"".format(os.path.expanduser(key)))
            )

        if not self._play_context.password:
            # No password available: forbid password-based auth methods so ssh
            # fails fast instead of prompting interactively.
            self._add_args(
                "ansible_password/ansible_ssh_pass not set", (
                    "-o", "KbdInteractiveAuthentication=no",
                    "-o", "PreferredAuthentications=gssapi-with-mic,gssapi-keyex,hostbased,publickey",
                    "-o", "PasswordAuthentication=no"
                )
            )

        user = self._play_context.remote_user
        if user:
            self._add_args(
                "ANSIBLE_REMOTE_USER/remote_user/ansible_user/user/-u set",
                ("-o", "User={0}".format(to_bytes(self._play_context.remote_user)))
            )

        self._add_args(
            "ANSIBLE_TIMEOUT/timeout set",
            ("-o", "ConnectTimeout={0}".format(self._play_context.timeout))
        )

        # Add in any common or binary-specific arguments from the PlayContext
        # (i.e. inventory or task settings or overrides on the command line).

        for opt in ['ssh_common_args', binary + '_extra_args']:
            attr = getattr(self._play_context, opt, None)
            if attr is not None:
                args = self._split_ssh_args(attr)
                self._add_args("PlayContext set %s" % opt, args)

        # Check if ControlPersist is enabled and add a ControlPath if one hasn't
        # already been set.

        controlpersist, controlpath = self._persistence_controls(self._command)

        if controlpersist:
            self._persistent = True

            if not controlpath:
                cpdir = unfrackpath('$HOME/.ansible/cp')

                # The directory must exist and be writable.
                makedirs_safe(cpdir, 0o700)
                if not os.access(cpdir, os.W_OK):
                    raise AnsibleError("Cannot write to ControlPath %s" % cpdir)

                args = ("-o", "ControlPath={0}".format(
                    to_bytes(C.ANSIBLE_SSH_CONTROL_PATH % dict(directory=cpdir)))
                )
                self._add_args("found only ControlPersist; added ControlPath", args)

        ## Finally, we add any caller-supplied extras.

        if other_args:
            self._command += other_args

        return self._command
    def _send_initial_data(self, fh, in_data):
        '''
        Writes initial data to the stdin filehandle of the subprocess and closes
        it. (The handle must be closed; otherwise, for example, "sftp -b -" will
        just hang forever waiting for more commands.)
        '''

        display.debug('Sending initial data')

        try:
            fh.write(in_data)
            fh.close()
        except (OSError, IOError):
            # Surface any write/close failure (e.g. broken pipe) as a
            # connection failure rather than a raw OSError.
            raise AnsibleConnectionFailure('SSH Error: data could not be sent to the remote host. Make sure this host can be reached over ssh')

        display.debug('Sent initial data (%d bytes)' % len(in_data))
    # Used by _run() to kill processes on failures
    @staticmethod
    def _terminate_process(p):
        """ Terminate a process, ignoring errors """
        try:
            p.terminate()
        except (OSError, IOError):
            # The process may already have exited; nothing more to do.
            pass
    # This is separate from _run() because we need to do the same thing for stdout
    # and stderr.
    def _examine_output(self, source, state, chunk, sudoable):
        '''
        Takes a string, extracts complete lines from it, tests to see if they
        are a prompt, error message, etc., and sets appropriate flags in self.
        Prompt and success lines are removed.

        Returns the processed (i.e. possibly-edited) output and the unprocessed
        remainder (to be processed with the next chunk) as strings.
        '''
        output = []
        for l in chunk.splitlines(True):
            suppress_output = False

            # Only prompt and success lines are suppressed from the output;
            # error lines are flagged but left in place for the user to see.
            #display.debug("Examining line (source=%s, state=%s): '%s'" % (source, state, l.rstrip('\r\n')))
            if self._play_context.prompt and self.check_password_prompt(l):
                display.debug("become_prompt: (source=%s, state=%s): '%s'" % (source, state, l.rstrip('\r\n')))
                self._flags['become_prompt'] = True
                suppress_output = True
            elif self._play_context.success_key and self.check_become_success(l):
                display.debug("become_success: (source=%s, state=%s): '%s'" % (source, state, l.rstrip('\r\n')))
                self._flags['become_success'] = True
                suppress_output = True
            elif sudoable and self.check_incorrect_password(l):
                display.debug("become_error: (source=%s, state=%s): '%s'" % (source, state, l.rstrip('\r\n')))
                self._flags['become_error'] = True
            elif sudoable and self.check_missing_password(l):
                display.debug("become_nopasswd_error: (source=%s, state=%s): '%s'" % (source, state, l.rstrip('\r\n')))
                self._flags['become_nopasswd_error'] = True

            if not suppress_output:
                output.append(l)

        # The chunk we read was most likely a series of complete lines, but just
        # in case the last line was incomplete (and not a prompt, which we would
        # have removed from the output), we retain it to be processed with the
        # next chunk.

        remainder = ''
        if output and not output[-1].endswith('\n'):
            remainder = output[-1]
            output = output[:-1]

        return ''.join(output), remainder
    def _run(self, cmd, in_data, sudoable=True):
        '''
        Starts the command and communicates with it until it ends.

        Spawns cmd (preferring a pty for stdin when not pipelining), feeds it
        the sshpass password and/or the privilege escalation password as
        required, optionally writes in_data to its stdin, and accumulates
        output until the process exits. Returns (returncode, stdout, stderr).
        '''

        display_cmd = map(to_unicode, map(pipes.quote, cmd))
        display.vvv(u'SSH: EXEC {0}'.format(u' '.join(display_cmd)), host=self.host)

        # Start the given command. If we don't need to pipeline data, we can try
        # to use a pseudo-tty (ssh will have been invoked with -tt). If we are
        # pipelining data, or can't create a pty, we fall back to using plain
        # old pipes.

        p = None
        if isinstance(cmd, (text_type, binary_type)):
            cmd = to_bytes(cmd)
        else:
            cmd = list(map(to_bytes, cmd))

        if not in_data:
            try:
                # Make sure stdin is a proper pty to avoid tcgetattr errors
                master, slave = pty.openpty()
                p = subprocess.Popen(cmd, stdin=slave, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
                stdin = os.fdopen(master, 'w', 0)
                os.close(slave)
            except (OSError, IOError):
                p = None

        if not p:
            p = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            stdin = p.stdin

        # If we are using SSH password authentication, write the password into
        # the pipe we opened in _build_command.

        if self._play_context.password:
            os.close(self.sshpass_pipe[0])
            os.write(self.sshpass_pipe[1], "{0}\n".format(to_bytes(self._play_context.password)))
            os.close(self.sshpass_pipe[1])

        ## SSH state machine
        #
        # Now we read and accumulate output from the running process until it
        # exits. Depending on the circumstances, we may also need to write an
        # escalation password and/or pipelined input to the process.

        states = [
            'awaiting_prompt', 'awaiting_escalation', 'ready_to_send', 'awaiting_exit'
        ]

        # Are we requesting privilege escalation? Right now, we may be invoked
        # to execute sftp/scp with sudoable=True, but we can request escalation
        # only when using ssh. Otherwise we can send initial data straightaway.

        state = states.index('ready_to_send')
        if b'ssh' in cmd:
            if self._play_context.prompt:
                # We're requesting escalation with a password, so we have to
                # wait for a password prompt.
                state = states.index('awaiting_prompt')
                display.debug('Initial state: %s: %s' % (states[state], self._play_context.prompt))
            elif self._play_context.become and self._play_context.success_key:
                # We're requesting escalation without a password, so we have to
                # detect success/failure before sending any initial data.
                state = states.index('awaiting_escalation')
                display.debug('Initial state: %s: %s' % (states[state], self._play_context.success_key))

        # We store accumulated stdout and stderr output from the process here,
        # but strip any privilege escalation prompt/confirmation lines first.
        # Output is accumulated into tmp_*, complete lines are extracted into
        # an array, then checked and removed or copied to stdout or stderr. We
        # set any flags based on examining the output in self._flags.

        stdout = stderr = ''
        tmp_stdout = tmp_stderr = ''

        self._flags = dict(
            become_prompt=False, become_success=False,
            become_error=False, become_nopasswd_error=False
        )

        # select timeout should be longer than the connect timeout, otherwise
        # they will race each other when we can't connect, and the connect
        # timeout usually fails
        timeout = 2 + self._play_context.timeout
        rpipes = [p.stdout, p.stderr]
        # Non-blocking reads: we drain whatever is available after select().
        for fd in rpipes:
            fcntl.fcntl(fd, fcntl.F_SETFL, fcntl.fcntl(fd, fcntl.F_GETFL) | os.O_NONBLOCK)

        # If we can send initial data without waiting for anything, we do so
        # before we call select.

        if states[state] == 'ready_to_send' and in_data:
            self._send_initial_data(stdin, in_data)
            state += 1

        while True:
            rfd, wfd, efd = select.select(rpipes, [], [], timeout)

            # We pay attention to timeouts only while negotiating a prompt.

            if not rfd:
                if state <= states.index('awaiting_escalation'):
                    # If the process has already exited, then it's not really a
                    # timeout; we'll let the normal error handling deal with it.
                    if p.poll() is not None:
                        break
                    self._terminate_process(p)
                    raise AnsibleError('Timeout (%ds) waiting for privilege escalation prompt: %s' % (timeout, stdout))

            # Read whatever output is available on stdout and stderr, and stop
            # listening to the pipe if it's been closed.

            if p.stdout in rfd:
                chunk = p.stdout.read()
                if chunk == '':
                    # Empty read on a readable fd means EOF on that pipe.
                    rpipes.remove(p.stdout)
                tmp_stdout += chunk
                display.debug("stdout chunk (state=%s):\n>>>%s<<<\n" % (state, chunk))

            if p.stderr in rfd:
                chunk = p.stderr.read()
                if chunk == '':
                    rpipes.remove(p.stderr)
                tmp_stderr += chunk
                display.debug("stderr chunk (state=%s):\n>>>%s<<<\n" % (state, chunk))

            # We examine the output line-by-line until we have negotiated any
            # privilege escalation prompt and subsequent success/error message.
            # Afterwards, we can accumulate output without looking at it.

            if state < states.index('ready_to_send'):
                if tmp_stdout:
                    output, unprocessed = self._examine_output('stdout', states[state], tmp_stdout, sudoable)
                    stdout += output
                    tmp_stdout = unprocessed

                if tmp_stderr:
                    output, unprocessed = self._examine_output('stderr', states[state], tmp_stderr, sudoable)
                    stderr += output
                    tmp_stderr = unprocessed
            else:
                stdout += tmp_stdout
                stderr += tmp_stderr
                tmp_stdout = tmp_stderr = ''

            # If we see a privilege escalation prompt, we send the password.
            # (If we're expecting a prompt but the escalation succeeds, we
            # didn't need the password and can carry on regardless.)

            if states[state] == 'awaiting_prompt':
                if self._flags['become_prompt']:
                    display.debug('Sending become_pass in response to prompt')
                    stdin.write('{0}\n'.format(to_bytes(self._play_context.become_pass)))
                    self._flags['become_prompt'] = False
                    state += 1
                elif self._flags['become_success']:
                    state += 1

            # We've requested escalation (with or without a password), now we
            # wait for an error message or a successful escalation.

            if states[state] == 'awaiting_escalation':
                if self._flags['become_success']:
                    display.debug('Escalation succeeded')
                    self._flags['become_success'] = False
                    state += 1
                elif self._flags['become_error']:
                    display.debug('Escalation failed')
                    self._terminate_process(p)
                    self._flags['become_error'] = False
                    raise AnsibleError('Incorrect %s password' % self._play_context.become_method)
                elif self._flags['become_nopasswd_error']:
                    display.debug('Escalation requires password')
                    self._terminate_process(p)
                    self._flags['become_nopasswd_error'] = False
                    raise AnsibleError('Missing %s password' % self._play_context.become_method)
                elif self._flags['become_prompt']:
                    # This shouldn't happen, because we should see the "Sorry,
                    # try again" message first.
                    display.debug('Escalation prompt repeated')
                    self._terminate_process(p)
                    self._flags['become_prompt'] = False
                    raise AnsibleError('Incorrect %s password' % self._play_context.become_method)

            # Once we're sure that the privilege escalation prompt, if any, has
            # been dealt with, we can send any initial data and start waiting
            # for output.

            if states[state] == 'ready_to_send':
                if in_data:
                    self._send_initial_data(stdin, in_data)
                state += 1

            # Now we're awaiting_exit: has the child process exited? If it has,
            # and we've read all available output from it, we're done.

            if p.poll() is not None:
                if not rpipes or not rfd:
                    break

                # When ssh has ControlMaster (+ControlPath/Persist) enabled, the
                # first connection goes into the background and we never see EOF
                # on stderr. If we see EOF on stdout and the process has exited,
                # we're probably done. We call select again with a zero timeout,
                # just to make certain we don't miss anything that may have been
                # written to stderr between the time we called select() and when
                # we learned that the process had finished.

                if p.stdout not in rpipes:
                    timeout = 0
                    continue

            # If the process has not yet exited, but we've already read EOF from
            # its stdout and stderr (and thus removed both from rpipes), we can
            # just wait for it to exit.

            elif not rpipes:
                p.wait()
                break

            # Otherwise there may still be outstanding data to read.

        # close stdin after process is terminated and stdout/stderr are read
        # completely (see also issue #848)
        stdin.close()

        if C.HOST_KEY_CHECKING:
            # sshpass exits with 6 on host-key verification failure.
            if cmd[0] == b"sshpass" and p.returncode == 6:
                raise AnsibleError('Using a SSH password instead of a key is not possible because Host Key checking is enabled and sshpass does not support this. Please add this host\'s fingerprint to your known_hosts file to manage this host.')

        controlpersisterror = 'Bad configuration option: ControlPersist' in stderr or 'unknown configuration option: ControlPersist' in stderr
        if p.returncode != 0 and controlpersisterror:
            raise AnsibleError('using -c ssh on certain older ssh versions may not support ControlPersist, set ANSIBLE_SSH_ARGS="" (or ssh_args in [ssh_connection] section of the config file) before running again')

        if p.returncode == 255 and in_data:
            raise AnsibleConnectionFailure('SSH Error: data could not be sent to the remote host. Make sure this host can be reached over ssh')

        return (p.returncode, stdout, stderr)
def _exec_command(self, cmd, in_data=None, sudoable=True):
''' run a command on the remote host '''
super(Connection, self).exec_command(cmd, in_data=in_data, sudoable=sudoable)
display.vvv(u"ESTABLISH SSH CONNECTION FOR USER: {0}".format(self._play_context.remote_user), host=self._play_context.remote_addr)
# we can only use tty when we are not pipelining the modules. piping
# data into /usr/bin/python inside a tty automatically invokes the
# python interactive-mode but the modules are not compatible with the
# interactive-mode ("unexpected indent" mainly because of empty lines)
if in_data:
cmd = self._build_command('ssh', self.host, cmd)
else:
cmd = self._build_command('ssh', '-tt', self.host, cmd)
(returncode, stdout, stderr) = self._run(cmd, in_data, sudoable=sudoable)
return (returncode, stdout, stderr)
#
# Main public methods
#
def exec_command(self, *args, **kwargs):
"""
Wrapper around _exec_command to retry in the case of an ssh failure
Will retry if:
* an exception is caught
* ssh returns 255
Will not retry if
* remaining_tries is <2
* retries limit reached
"""
remaining_tries = int(C.ANSIBLE_SSH_RETRIES) + 1
cmd_summary = "%s..." % args[0]
for attempt in range(remaining_tries):
try:
return_tuple = self._exec_command(*args, **kwargs)
# 0 = success
# 1-254 = remote command return code
# 255 = failure from the ssh command itself
if return_tuple[0] != 255:
break
else:
raise AnsibleConnectionFailure("Failed to connect to the host via ssh.")
except (AnsibleConnectionFailure, Exception) as e:
if attempt == remaining_tries - 1:
raise
else:
pause = 2 ** attempt - 1
if pause > 30:
pause = 30
if isinstance(e, AnsibleConnectionFailure):
msg = "ssh_retry: attempt: %d, ssh return code is 255. cmd (%s), pausing for %d seconds" % (attempt, cmd_summary, pause)
else:
msg = "ssh_retry: attempt: %d, caught exception(%s) from cmd (%s), pausing for %d seconds" % (attempt, e, cmd_summary, pause)
display.vv(msg)
time.sleep(pause)
continue
return return_tuple
    def put_file(self, in_path, out_path):
        ''' transfer a file from local to remote '''
        super(Connection, self).put_file(in_path, out_path)
        display.vvv(u"PUT {0} TO {1}".format(in_path, out_path), host=self.host)
        # Fail early with a clear error if the local file is missing.
        if not os.path.exists(to_bytes(in_path, errors='strict')):
            raise AnsibleFileNotFound("file or module does not exist: {0}".format(to_str(in_path)))
        # scp and sftp require square brackets for IPv6 addresses, but
        # accept them for hostnames and IPv4 addresses too.
        host = '[%s]' % self.host
        # scp takes the whole transfer on the command line; sftp reads the
        # "put" command from stdin (passed to _run as in_data).
        if C.DEFAULT_SCP_IF_SSH:
            cmd = self._build_command('scp', in_path, u'{0}:{1}'.format(host, pipes.quote(out_path)))
            in_data = None
        else:
            # NOTE(review): host is byte-encoded here but passed as text in
            # fetch_file's sftp branch -- confirm which form is intended.
            cmd = self._build_command('sftp', to_bytes(host))
            in_data = u"put {0} {1}\n".format(pipes.quote(in_path), pipes.quote(out_path))
        # nonstring='passthru' leaves the None of the scp branch untouched.
        in_data = to_bytes(in_data, nonstring='passthru')
        (returncode, stdout, stderr) = self._run(cmd, in_data)
        if returncode != 0:
            raise AnsibleError("failed to transfer file to {0}:\n{1}\n{2}".format(to_str(out_path), to_str(stdout), to_str(stderr)))
def fetch_file(self, in_path, out_path):
''' fetch a file from remote to local '''
super(Connection, self).fetch_file(in_path, out_path)
display.vvv(u"FETCH {0} TO {1}".format(in_path, out_path), host=self.host)
# scp and sftp require square brackets for IPv6 addresses, but
# accept them for hostnames and IPv4 addresses too.
host = '[%s]' % self.host
if C.DEFAULT_SCP_IF_SSH:
cmd = self._build_command('scp', u'{0}:{1}'.format(host, pipes.quote(in_path)), out_path)
in_data = None
else:
cmd = self._build_command('sftp', host)
in_data = u"get {0} {1}\n".format(pipes.quote(in_path), pipes.quote(out_path))
in_data = to_bytes(in_data, nonstring='passthru')
(returncode, stdout, stderr) = self._run(cmd, in_data)
if returncode != 0:
raise AnsibleError("failed to transfer file from {0}:\n{1}\n{2}".format(in_path, stdout, stderr))
    def close(self):
        # If we have a persistent ssh connection (ControlPersist), we can ask it
        # to stop listening. Otherwise, there's nothing to do here.
        # TODO: reenable once winrm issues are fixed
        # temporarily disabled as we are forced to currently close connections after every task because of winrm
        # if self._connected and self._persistent:
        #     cmd = self._build_command('ssh', '-O', 'stop', self.host)
        #
        #     cmd = map(to_bytes, cmd)
        #     p = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        #     stdout, stderr = p.communicate()
        # Only the connected flag is cleared; any ControlPersist master
        # process is deliberately left running (see disabled code above).
        self._connected = False
| gpl-3.0 | -6,112,751,134,697,203,000 | 40.867841 | 246 | 0.58435 | false |
jmacmahon/invenio | modules/bibauthorid/lib/bibauthorid_scheduler.py | 3 | 5558 | # -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2011, 2012 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
#############################################################################################
#This has been temporarily deprecated, please use schedule_workes from general utils instead#
#############################################################################################
import re
import os
import sys
from itertools import dropwhile, chain
from invenio.bibauthorid_general_utils import print_tortoise_memory_log
from invenio import bibauthorid_config as bconfig
from invenio.bibauthorid_general_utils import is_eq, update_status, update_status_final
#python2.4 compatibility
from invenio.bibauthorid_general_utils import bai_all as all
def to_number(stry):
    """Extract the integer embedded in a string such as '12345 kB'.

    Strips every non-digit character and parses what remains as an int.
    """
    # r"\D" avoids the invalid "\D" escape sequence warning raised by
    # modern Pythons for non-raw regex strings.
    return int(re.sub(r"\D", "", stry))
def dict_by_file(fpath):
    """Parse a file of "key:value" lines (e.g. /proc/meminfo) into a dict.

    The text after the final newline is discarded, matching the original
    split("\n")[:-1] behaviour.
    """
    fp = open(fpath)
    try:
        content = fp.read()
    finally:
        # try/finally (not `with`) keeps the stated python2.4 compatibility
        # while guaranteeing the handle is closed even if read() raises.
        fp.close()
    return dict(x.split(':') for x in content.split("\n")[:-1])
def get_free_memory():
    """Reclaimable memory in kB: MemFree + Buffers + Cached from /proc/meminfo."""
    meminfo = dict_by_file("/proc/meminfo")
    return (to_number(meminfo['MemFree'])
            + to_number(meminfo['Buffers'])
            + to_number(meminfo['Cached']))
def get_total_memory():
    """Total physical memory in kB as reported by /proc/meminfo."""
    meminfo = dict_by_file("/proc/meminfo")
    return to_number(meminfo['MemTotal'])
def get_peak_mem():
    """Peak virtual size and peak resident set (VmPeak, VmHWM) of this process.

    Returned as a two-element list of kB values read from /proc/<pid>/status.
    """
    status = dict_by_file("/proc/%d/status" % os.getpid())
    return [to_number(status["VmPeak"]), to_number(status["VmHWM"])]
#matrix_coefs = [1133088., 0., 1.5]
#wedge_coefs = [800000., 0., 2.]
# Polynomial coefficients (constant, linear, quadratic term) for memory
# estimation of the matrix and wedge phases.  NOTE(review): presumably fed
# to the `estimator` callable passed to schedule() -- confirm at call sites.
matrix_coefs = [1000., 500., 0.01]
wedge_coefs = [1000., 500., 0.02]
def get_biggest_job_below(lim, arr):
    # Index of the element just before the first one in `arr` whose value is
    # >= lim, or -1 when even the first element is too big.  `lim` itself is
    # chained on as a sentinel so dropwhile always yields something.
    # NOTE(review): "biggest" is only accurate if `arr` is sorted ascending
    # -- confirm at call sites.  (.next() is Python 2 only; this module is
    # explicitly deprecated, see the banner at the top of the file.)
    return dropwhile(lambda x: x[1] < lim, enumerate(chain(arr, [lim]))).next()[0] - 1
def get_cores_count():
    """Number of logical CPUs on this machine."""
    from multiprocessing import cpu_count
    return cpu_count()
def schedule(jobs, sizs, estimator, memfile_path=None):
    # Memory-aware, fork-based job scheduler (deprecated -- see module banner).
    # Each job runs in a forked child; a job is admitted only while its
    # estimated memory footprint fits in the remaining budget `free`.
    #   jobs         -- list of zero-argument callables
    #   sizs         -- per-job raw sizes, mapped through `estimator` to get
    #                   memory estimates (raw values are kept as `bibs`)
    #   estimator    -- turns a raw size into an estimated memory footprint
    #   memfile_path -- optional log file for per-child peak-memory records
    # Returns the list of os.wait() status codes, one per job.
    if bconfig.DEBUG_PROCESS_PEAK_MEMORY and memfile_path:
        def register_memory_usage():
            # Runs inside the forked child just before it exits.
            # NOTE(review): `idx` and `bibs` are resolved from the enclosing
            # schedule() scope at call time, not bound per job -- confirm the
            # logged entry matches the job that actually ran.
            pid = os.getpid()
            peak = get_peak_mem()
            fp = open(memfile_path, 'a')
            print_tortoise_memory_log(
                {'pid' : pid,
                 'peak1': peak[0],
                 'peak2': peak[1],
                 'est' : sizs[idx],
                 'bibs' : bibs[idx]
                },
                fp
            )
            fp.close()
    else:
        def register_memory_usage():
            pass
    def run_job(idx):
        # Child-process body: run the job, log memory, then hard-exit so the
        # child never returns into the parent's control flow.
        try:
            sys.stdout = output_killer
            jobs[idx]()
            register_memory_usage()
            os._exit(os.EX_OK)
        except Exception, e:
            f = open('/tmp/exception-%s' % str(os.getpid()), "w")
            f.write(str(e) + '\n')
            f.close()
            os._exit(os.EX_SOFTWARE)
    max_workers = get_cores_count()
    pid_2_idx = {}
    #free = get_free_memory()
    # Budget is total RAM, not currently-free RAM (see commented line above).
    initial = get_total_memory()
    free = initial
    output_killer = open(os.devnull, 'w')
    ret_status = [None] * len(jobs)
    # Keep the raw sizes around for logging before overwriting with estimates.
    bibs = sizs
    sizs = map(estimator, sizs)
    free_idxs = range(len(jobs))
    assert len(jobs) == len(sizs) == len(ret_status) == len(bibs) == len(free_idxs)
    done = 0.
    total = sum(sizs)
    biggest = max(sizs)
    update_status(0., "0 / %d" % len(jobs))
    # Jobs whose estimate exceeds the whole budget run alone, serially.
    too_big = [idx for idx in free_idxs if sizs[idx] > free]
    for idx in too_big:
        pid = os.fork()
        if pid == 0: # child
            run_job(idx)
        else: # parent
            done += sizs[idx]
            # NOTE(review): `del free_idxs[idx]` deletes by position while
            # `idx` is a job index; after the first deletion positions shift,
            # so later deletions may drop the wrong entry -- confirm.
            del free_idxs[idx]
            cpid, status = os.wait()
            update_status(done / total, "%d / %d" % (len(jobs) - len(free_idxs), len(jobs)))
            ret_status[idx] = status
            assert cpid == pid
    # Main loop: keep up to max_workers children alive, always picking the
    # biggest pending job that still fits into the remaining budget.
    while free_idxs or pid_2_idx:
        while len(pid_2_idx) < max_workers:
            idx = get_biggest_job_below(free, (sizs[idx] for idx in free_idxs))
            if idx != -1:
                job_idx = free_idxs[idx]
                pid = os.fork()
                if pid == 0: # child
                    # Deprioritize big jobs so small ones finish first.
                    os.nice(int((float(sizs[idx]) * 20.0 / biggest)))
                    run_job(job_idx)
                else: # parent
                    pid_2_idx[pid] = job_idx
                    assert free > sizs[job_idx]
                    free -= sizs[job_idx]
                    del free_idxs[idx]
            else:
                break
        # Block until some child exits, then release its memory budget.
        pid, status = os.wait()
        assert pid in pid_2_idx
        idx = pid_2_idx[pid]
        freed = sizs[idx]
        done += freed
        ret_status[idx] = status
        free += freed
        del pid_2_idx[pid]
        update_status(done / total, "%d / %d" % (len(jobs) - len(free_idxs) - len(pid_2_idx), len(jobs)))
    update_status_final("%d / %d" % (len(jobs), len(jobs)))
    assert is_eq(free, initial)
    assert not pid_2_idx
    assert not free_idxs
    assert len(jobs) == len(sizs) == len(ret_status) == len(bibs)
    assert all(stat != None for stat in ret_status)
    return ret_status
| gpl-2.0 | 5,470,678,222,827,662,000 | 31.502924 | 105 | 0.550558 | false |
dilgerma/compose | compose/service.py | 4 | 29748 | from __future__ import unicode_literals
from __future__ import absolute_import
from collections import namedtuple
import logging
import re
import os
import sys
from operator import attrgetter
import six
from docker.errors import APIError
from docker.utils import create_host_config, LogConfig
from docker.utils.ports import build_port_bindings, split_port
from . import __version__
from .config import DOCKER_CONFIG_KEYS, merge_environment
from .const import (
DEFAULT_TIMEOUT,
LABEL_CONTAINER_NUMBER,
LABEL_ONE_OFF,
LABEL_PROJECT,
LABEL_SERVICE,
LABEL_VERSION,
LABEL_CONFIG_HASH,
)
from .container import Container
from .legacy import check_for_legacy_containers
from .progress_stream import stream_output, StreamOutputError
from .utils import json_hash, parallel_execute
from .config.validation import VALID_NAME_CHARS
log = logging.getLogger(__name__)
# Option keys that belong to the container's host config ("start"-time
# options); Service._get_container_create_options() pops these out of the
# create options before calling Container.create().
DOCKER_START_KEYS = [
    'cap_add',
    'cap_drop',
    'devices',
    'dns',
    'dns_search',
    'env_file',
    'extra_hosts',
    'read_only',
    'net',
    'log_driver',
    'log_opt',
    'mem_limit',
    'memswap_limit',
    'pid',
    'privileged',
    'restart',
    'volumes_from',
    'security_opt',
]
class BuildError(Exception):
    # Raised when building a service's image fails; carries the service
    # and the underlying reason (stream error or build event).
    def __init__(self, service, reason):
        self.service = service
        self.reason = reason
class ConfigError(ValueError):
    # Invalid service configuration (project name, volume spec, restart
    # spec, extra_hosts, ...).
    pass
class NeedsBuildError(Exception):
    # Raised when a buildable image is missing but building was disallowed.
    def __init__(self, service):
        self.service = service
class NoSuchImageError(Exception):
    # Raised when the Docker daemon reports the service's image as missing.
    pass
# Parsed "external:internal:mode" volume specification.
VolumeSpec = namedtuple('VolumeSpec', 'external internal mode')
# Fully-qualified identity of one numbered service container.
ServiceName = namedtuple('ServiceName', 'project service number')
# An action ('create'/'recreate'/'start'/'noop') plus the containers it
# applies to, as produced by Service.convergence_plan().
ConvergencePlan = namedtuple('ConvergencePlan', 'action containers')
class Service(object):
    """One service from the Compose file.

    Knows how to create, start, scale, converge and remove the Docker
    containers that implement the service, and how to derive the docker
    create/start options from the service's configuration.
    """
    def __init__(self, name, client=None, project='default', links=None, external_links=None, volumes_from=None, net=None, **options):
        if not re.match('^%s+$' % VALID_NAME_CHARS, project):
            raise ConfigError('Invalid project name "%s" - only %s are allowed' % (project, VALID_NAME_CHARS))
        self.name = name
        self.client = client
        self.project = project
        self.links = links or []
        self.external_links = external_links or []
        self.volumes_from = volumes_from or []
        self.net = net or None
        self.options = options
    def containers(self, stopped=False, one_off=False):
        # Find this service's containers via the compose-managed labels;
        # fall back to a check for pre-label ("legacy") containers.
        containers = filter(None, [
            Container.from_ps(self.client, container)
            for container in self.client.containers(
                all=stopped,
                filters={'label': self.labels(one_off=one_off)})])
        if not containers:
            check_for_legacy_containers(
                self.client,
                self.project,
                [self.name],
            )
        return containers
    def get_container(self, number=1):
        """Return a :class:`compose.container.Container` for this service. The
        container must be active, and match `number`.
        """
        labels = self.labels() + ['{0}={1}'.format(LABEL_CONTAINER_NUMBER, number)]
        for container in self.client.containers(filters={'label': labels}):
            return Container.from_ps(self.client, container)
        raise ValueError("No container found for %s_%s" % (self.name, number))
    def start(self, **options):
        for c in self.containers(stopped=True):
            self.start_container_if_stopped(c, **options)
    # TODO: remove these functions, project takes care of starting/stopping,
    def stop(self, **options):
        for c in self.containers():
            log.info("Stopping %s..." % c.name)
            c.stop(**options)
    def kill(self, **options):
        for c in self.containers():
            log.info("Killing %s..." % c.name)
            c.kill(**options)
    def restart(self, **options):
        for c in self.containers():
            log.info("Restarting %s..." % c.name)
            c.restart(**options)
    # end TODO
    def scale(self, desired_num, timeout=DEFAULT_TIMEOUT):
        """
        Adjusts the number of containers to the specified number and ensures
        they are running.
        - creates containers until there are at least `desired_num`
        - stops containers until there are at most `desired_num` running
        - starts containers until there are at least `desired_num` running
        - removes all stopped containers
        """
        if self.custom_container_name() and desired_num > 1:
            log.warn('The "%s" service is using the custom container name "%s". '
                     'Docker requires each container to have a unique name. '
                     'Remove the custom name to scale the service.'
                     % (self.name, self.custom_container_name()))
        if self.specifies_host_port():
            log.warn('The "%s" service specifies a port on the host. If multiple containers '
                     'for this service are created on a single host, the port will clash.'
                     % self.name)
        def create_and_start(service, number):
            # Helper used when scaling up past the existing containers.
            container = service.create_container(number=number, quiet=True)
            container.start()
            return container
        running_containers = self.containers(stopped=False)
        num_running = len(running_containers)
        if desired_num == num_running:
            # do nothing as we already have the desired number
            log.info('Desired container number already achieved')
            return
        if desired_num > num_running:
            # we need to start/create until we have desired_num
            all_containers = self.containers(stopped=True)
            if num_running != len(all_containers):
                # we have some stopped containers, let's start them up again
                stopped_containers = sorted([c for c in all_containers if not c.is_running], key=attrgetter('number'))
                num_stopped = len(stopped_containers)
                if num_stopped + num_running > desired_num:
                    num_to_start = desired_num - num_running
                    containers_to_start = stopped_containers[:num_to_start]
                else:
                    containers_to_start = stopped_containers
                parallel_execute(
                    objects=containers_to_start,
                    obj_callable=lambda c: c.start(),
                    msg_index=lambda c: c.name,
                    msg="Starting"
                )
                num_running += len(containers_to_start)
            num_to_create = desired_num - num_running
            next_number = self._next_container_number()
            container_numbers = [
                number for number in range(
                    next_number, next_number + num_to_create
                )
            ]
            parallel_execute(
                objects=container_numbers,
                obj_callable=lambda n: create_and_start(service=self, number=n),
                msg_index=lambda n: n,
                msg="Creating and starting"
            )
        if desired_num < num_running:
            # Stop the highest-numbered containers first.
            num_to_stop = num_running - desired_num
            sorted_running_containers = sorted(running_containers, key=attrgetter('number'))
            containers_to_stop = sorted_running_containers[-num_to_stop:]
            parallel_execute(
                objects=containers_to_stop,
                obj_callable=lambda c: c.stop(timeout=timeout),
                msg_index=lambda c: c.name,
                msg="Stopping"
            )
        self.remove_stopped()
    def remove_stopped(self, **options):
        """Remove all of this service's non-running containers."""
        containers = [c for c in self.containers(stopped=True) if not c.is_running]
        parallel_execute(
            objects=containers,
            obj_callable=lambda c: c.remove(**options),
            msg_index=lambda c: c.name,
            msg="Removing"
        )
    def create_container(self,
                         one_off=False,
                         do_build=True,
                         previous_container=None,
                         number=None,
                         quiet=False,
                         **override_options):
        """
        Create a container for this service. If the image doesn't exist, attempt to pull
        it.
        """
        self.ensure_image_exists(
            do_build=do_build,
        )
        container_options = self._get_container_create_options(
            override_options,
            number or self._next_container_number(one_off=one_off),
            one_off=one_off,
            previous_container=previous_container,
        )
        if 'name' in container_options and not quiet:
            log.info("Creating %s..." % container_options['name'])
        return Container.create(self.client, **container_options)
    def ensure_image_exists(self,
                            do_build=True):
        """Make the service's image available: no-op, build, or pull."""
        try:
            self.image()
            return
        except NoSuchImageError:
            pass
        if self.can_be_built():
            if do_build:
                self.build()
            else:
                raise NeedsBuildError(self)
        else:
            self.pull()
    def image(self):
        """Inspect the service's image; raise NoSuchImageError on a 404."""
        try:
            return self.client.inspect_image(self.image_name)
        except APIError as e:
            if e.response.status_code == 404 and e.explanation and 'No such image' in str(e.explanation):
                raise NoSuchImageError("Image '{}' not found".format(self.image_name))
            else:
                raise
    @property
    def image_name(self):
        # Built images are tagged project_service; otherwise use the
        # configured image reference.
        if self.can_be_built():
            return self.full_name
        else:
            return self.options['image']
    def convergence_plan(self,
                         allow_recreate=True,
                         force_recreate=False):
        # Decide which action brings this service up to date:
        # create / recreate / start / noop.
        if force_recreate and not allow_recreate:
            raise ValueError("force_recreate and allow_recreate are in conflict")
        containers = self.containers(stopped=True)
        if not containers:
            return ConvergencePlan('create', [])
        if not allow_recreate:
            return ConvergencePlan('start', containers)
        if force_recreate or self._containers_have_diverged(containers):
            return ConvergencePlan('recreate', containers)
        stopped = [c for c in containers if not c.is_running]
        if stopped:
            return ConvergencePlan('start', stopped)
        return ConvergencePlan('noop', containers)
    def _containers_have_diverged(self, containers):
        # A container has diverged when its recorded config-hash label no
        # longer matches the hash of the current service configuration.
        config_hash = None
        try:
            config_hash = self.config_hash()
        except NoSuchImageError as e:
            log.debug(
                'Service %s has diverged: %s',
                self.name, six.text_type(e),
            )
            return True
        has_diverged = False
        for c in containers:
            container_config_hash = c.labels.get(LABEL_CONFIG_HASH, None)
            if container_config_hash != config_hash:
                log.debug(
                    '%s has diverged: %s != %s',
                    c.name, container_config_hash, config_hash,
                )
                has_diverged = True
        return has_diverged
    def execute_convergence_plan(self,
                                 plan,
                                 do_build=True,
                                 timeout=DEFAULT_TIMEOUT):
        """Carry out a plan from convergence_plan(); return the containers."""
        (action, containers) = plan
        if action == 'create':
            container = self.create_container(
                do_build=do_build,
            )
            self.start_container(container)
            return [container]
        elif action == 'recreate':
            return [
                self.recreate_container(
                    c,
                    timeout=timeout
                )
                for c in containers
            ]
        elif action == 'start':
            for c in containers:
                self.start_container_if_stopped(c)
            return containers
        elif action == 'noop':
            for c in containers:
                log.info("%s is up-to-date" % c.name)
            return containers
        else:
            raise Exception("Invalid action: {}".format(action))
    def recreate_container(self,
                           container,
                           timeout=DEFAULT_TIMEOUT):
        """Recreate a container.
        The original container is renamed to a temporary name so that data
        volumes can be copied to the new container, before the original
        container is removed.
        """
        log.info("Recreating %s..." % container.name)
        try:
            container.stop(timeout=timeout)
        except APIError as e:
            # A 500 "no such process" means the container is already gone
            # from the daemon's point of view -- safe to ignore.
            if (e.response.status_code == 500
                    and e.explanation
                    and 'no such process' in str(e.explanation)):
                pass
            else:
                raise
        # Use a hopefully unique container name by prepending the short id
        self.client.rename(
            container.id,
            '%s_%s' % (container.short_id, container.name))
        new_container = self.create_container(
            do_build=False,
            previous_container=container,
            number=container.labels.get(LABEL_CONTAINER_NUMBER),
            quiet=True,
        )
        self.start_container(new_container)
        container.remove()
        return new_container
    def start_container_if_stopped(self, container):
        if container.is_running:
            return container
        else:
            log.info("Starting %s..." % container.name)
            return self.start_container(container)
    def start_container(self, container):
        container.start()
        return container
    def remove_duplicate_containers(self, timeout=DEFAULT_TIMEOUT):
        """Stop and remove containers that share a number with an older one."""
        for c in self.duplicate_containers():
            log.info('Removing %s...' % c.name)
            c.stop(timeout=timeout)
            c.remove()
    def duplicate_containers(self):
        # Yield every container whose number was already seen on an
        # earlier-created container (creation order decides the keeper).
        containers = sorted(
            self.containers(stopped=True),
            key=lambda c: c.get('Created'),
        )
        numbers = set()
        for c in containers:
            if c.number in numbers:
                yield c
            else:
                numbers.add(c.number)
    def config_hash(self):
        return json_hash(self.config_dict())
    def config_dict(self):
        # The hash input: service options plus the resolved image id, so an
        # image rebuild also counts as a config change.
        return {
            'options': self.options,
            'image_id': self.image()['Id'],
        }
    def get_dependency_names(self):
        """Names of services this one depends on (links, volumes_from, net)."""
        net_name = self.get_net_name()
        return (self.get_linked_names() +
                self.get_volumes_from_names() +
                ([net_name] if net_name else []))
    def get_linked_names(self):
        return [s.name for (s, _) in self.links]
    def get_volumes_from_names(self):
        return [s.name for s in self.volumes_from if isinstance(s, Service)]
    def get_net_name(self):
        if isinstance(self.net, Service):
            return self.net.name
        else:
            return
    def get_container_name(self, number, one_off=False):
        # TODO: Implement issue #652 here
        return build_container_name(self.project, self.name, number, one_off)
    # TODO: this would benefit from github.com/docker/docker/pull/11943
    # to remove the need to inspect every container
    def _next_container_number(self, one_off=False):
        # 1 + the highest existing container number (including stopped ones).
        containers = filter(None, [
            Container.from_ps(self.client, container)
            for container in self.client.containers(
                all=True,
                filters={'label': self.labels(one_off=one_off)})
        ])
        numbers = [c.number for c in containers]
        return 1 if not numbers else max(numbers) + 1
    def _get_links(self, link_to_self):
        # Build (container_name, alias) pairs for every linked container;
        # each link gets three aliases: the link name, the full container
        # name and the project-less container name.
        links = []
        for service, link_name in self.links:
            for container in service.containers():
                links.append((container.name, link_name or service.name))
                links.append((container.name, container.name))
                links.append((container.name, container.name_without_project))
        if link_to_self:
            for container in self.containers():
                links.append((container.name, self.name))
                links.append((container.name, container.name))
                links.append((container.name, container.name_without_project))
        for external_link in self.external_links:
            if ':' not in external_link:
                link_name = external_link
            else:
                external_link, link_name = external_link.split(':')
            links.append((external_link, link_name))
        return links
    def _get_volumes_from(self):
        # Resolve volumes_from entries (Service or Container) to container
        # ids, creating a container for a service that has none yet.
        volumes_from = []
        for volume_source in self.volumes_from:
            if isinstance(volume_source, Service):
                containers = volume_source.containers(stopped=True)
                if not containers:
                    volumes_from.append(volume_source.create_container().id)
                else:
                    volumes_from.extend(map(attrgetter('id'), containers))
            elif isinstance(volume_source, Container):
                volumes_from.append(volume_source.id)
        return volumes_from
    def _get_net(self):
        # Translate the configured net into a docker network_mode string;
        # a Service/Container net becomes "container:<id>".
        if not self.net:
            return None
        if isinstance(self.net, Service):
            containers = self.net.containers()
            if len(containers) > 0:
                net = 'container:' + containers[0].id
            else:
                log.warning("Warning: Service %s is trying to use reuse the network stack "
                            "of another service that is not running." % (self.net.name))
                net = None
        elif isinstance(self.net, Container):
            net = 'container:' + self.net.id
        else:
            net = self.net
        return net
    def _get_container_create_options(
            self,
            override_options,
            number,
            one_off=False,
            previous_container=None):
        # Assemble the keyword arguments for Container.create() from the
        # service options, the overrides and the previous container (if any).
        add_config_hash = (not one_off and not override_options)
        container_options = dict(
            (k, self.options[k])
            for k in DOCKER_CONFIG_KEYS if k in self.options)
        container_options.update(override_options)
        if self.custom_container_name() and not one_off:
            container_options['name'] = self.custom_container_name()
        else:
            container_options['name'] = self.get_container_name(number, one_off)
        if add_config_hash:
            config_hash = self.config_hash()
            if 'labels' not in container_options:
                container_options['labels'] = {}
            container_options['labels'][LABEL_CONFIG_HASH] = config_hash
            log.debug("Added config hash: %s" % config_hash)
        if 'detach' not in container_options:
            container_options['detach'] = True
        # If a qualified hostname was given, split it into an
        # unqualified hostname and a domainname unless domainname
        # was also given explicitly. This matches the behavior of
        # the official Docker CLI in that scenario.
        if ('hostname' in container_options
                and 'domainname' not in container_options
                and '.' in container_options['hostname']):
            parts = container_options['hostname'].partition('.')
            container_options['hostname'] = parts[0]
            container_options['domainname'] = parts[2]
        if 'ports' in container_options or 'expose' in self.options:
            ports = []
            all_ports = container_options.get('ports', []) + self.options.get('expose', [])
            for port_range in all_ports:
                internal_range, _ = split_port(port_range)
                for port in internal_range:
                    port = str(port)
                    if '/' in port:
                        # "port/proto" becomes a (port, proto) tuple.
                        port = tuple(port.split('/'))
                    ports.append(port)
            container_options['ports'] = ports
        override_options['binds'] = merge_volume_bindings(
            container_options.get('volumes') or [],
            previous_container)
        if 'volumes' in container_options:
            container_options['volumes'] = dict(
                (parse_volume_spec(v).internal, {})
                for v in container_options['volumes'])
        container_options['environment'] = merge_environment(
            self.options.get('environment'),
            override_options.get('environment'))
        if previous_container:
            # Swarm affinity hint: schedule next to the old container.
            container_options['environment']['affinity:container'] = ('=' + previous_container.id)
        container_options['image'] = self.image_name
        container_options['labels'] = build_container_labels(
            container_options.get('labels', {}),
            self.labels(one_off=one_off),
            number)
        # Delete options which are only used when starting
        for key in DOCKER_START_KEYS:
            container_options.pop(key, None)
        container_options['host_config'] = self._get_container_host_config(
            override_options,
            one_off=one_off)
        return container_options
    def _get_container_host_config(self, override_options, one_off=False):
        """Build the docker host_config dict from service + override options."""
        options = dict(self.options, **override_options)
        port_bindings = build_port_bindings(options.get('ports') or [])
        privileged = options.get('privileged', False)
        cap_add = options.get('cap_add', None)
        cap_drop = options.get('cap_drop', None)
        log_config = LogConfig(
            type=options.get('log_driver', 'json-file'),
            config=options.get('log_opt', None)
        )
        pid = options.get('pid', None)
        security_opt = options.get('security_opt', None)
        dns = options.get('dns', None)
        if isinstance(dns, six.string_types):
            dns = [dns]
        dns_search = options.get('dns_search', None)
        if isinstance(dns_search, six.string_types):
            dns_search = [dns_search]
        restart = parse_restart_spec(options.get('restart', None))
        extra_hosts = build_extra_hosts(options.get('extra_hosts', None))
        read_only = options.get('read_only', None)
        devices = options.get('devices', None)
        return create_host_config(
            links=self._get_links(link_to_self=one_off),
            port_bindings=port_bindings,
            binds=options.get('binds'),
            volumes_from=self._get_volumes_from(),
            privileged=privileged,
            network_mode=self._get_net(),
            devices=devices,
            dns=dns,
            dns_search=dns_search,
            restart_policy=restart,
            cap_add=cap_add,
            cap_drop=cap_drop,
            mem_limit=options.get('mem_limit'),
            memswap_limit=options.get('memswap_limit'),
            log_config=log_config,
            extra_hosts=extra_hosts,
            read_only=read_only,
            pid_mode=pid,
            security_opt=security_opt
        )
    def build(self, no_cache=False):
        """Build the service's image; return the built image id."""
        log.info('Building %s...' % self.name)
        path = six.binary_type(self.options['build'])
        build_output = self.client.build(
            path=path,
            tag=self.image_name,
            stream=True,
            rm=True,
            pull=False,
            nocache=no_cache,
            dockerfile=self.options.get('dockerfile', None),
        )
        try:
            all_events = stream_output(build_output, sys.stdout)
        except StreamOutputError as e:
            raise BuildError(self, unicode(e))
        # Ensure the HTTP connection is not reused for another
        # streaming command, as the Docker daemon can sometimes
        # complain about it
        self.client.close()
        image_id = None
        for event in all_events:
            if 'stream' in event:
                match = re.search(r'Successfully built ([0-9a-f]+)', event.get('stream', ''))
                if match:
                    image_id = match.group(1)
        if image_id is None:
            raise BuildError(self, event if all_events else 'Unknown')
        return image_id
    def can_be_built(self):
        return 'build' in self.options
    @property
    def full_name(self):
        """
        The tag to give to images built for this service.
        """
        return '%s_%s' % (self.project, self.name)
    def labels(self, one_off=False):
        # The label filters/values that identify this service's containers.
        return [
            '{0}={1}'.format(LABEL_PROJECT, self.project),
            '{0}={1}'.format(LABEL_SERVICE, self.name),
            '{0}={1}'.format(LABEL_ONE_OFF, "True" if one_off else "False")
        ]
    def custom_container_name(self):
        return self.options.get('container_name')
    def specifies_host_port(self):
        for port in self.options.get('ports', []):
            if ':' in str(port):
                return True
        return False
    def pull(self):
        """Pull the configured image (default tag 'latest'); no-op if built."""
        if 'image' not in self.options:
            return
        repo, tag = parse_repository_tag(self.options['image'])
        tag = tag or 'latest'
        log.info('Pulling %s (%s:%s)...' % (self.name, repo, tag))
        output = self.client.pull(
            repo,
            tag=tag,
            stream=True,
        )
        stream_output(output, sys.stdout)
# Names
def build_container_name(project, service, number, one_off=False):
    """Return the canonical container name: project_service[_run]_number."""
    parts = [project, service]
    if one_off:
        parts.append('run')
    parts.append(str(number))
    return '_'.join(parts)
# Images
def parse_repository_tag(s):
    """Split an image reference into (repository, tag).

    A colon only counts as the tag separator when the part after it
    contains no '/' (otherwise it is a registry port, e.g. host:5000/img).
    Returns (s, "") when no tag is present.
    """
    candidate_repo, sep, candidate_tag = s.rpartition(":")
    if not sep or "/" in candidate_tag:
        return s, ""
    return candidate_repo, candidate_tag
# Volumes
def merge_volume_bindings(volumes_option, previous_container):
    """Return a list of volume bindings for a container. Container data volumes
    are replaced by those from the previous container.
    """
    bindings = {}
    # Only "external:internal" entries produce host bindings; bare container
    # paths (no ':') are plain data volumes and are skipped here.
    for volume in (volumes_option or []):
        if ':' not in volume:
            continue
        internal, binding = build_volume_binding(parse_volume_spec(volume))
        bindings[internal] = binding
    if previous_container:
        # Data volumes carried over from the old container win.
        bindings.update(
            get_container_data_volumes(previous_container, volumes_option))
    return bindings.values()
def get_container_data_volumes(container, volumes_option):
    """Find the container data volumes that are in `volumes_option`, and return
    a mapping of volume bindings for those volumes.
    """
    volumes = []
    volumes_option = volumes_option or []
    container_volumes = container.get('Volumes') or {}
    image_volumes = container.image_config['ContainerConfig'].get('Volumes') or {}
    # NOTE: Python 2 construct -- dict.keys() (a list) is concatenated to a
    # list here; Python 3 would need list(image_volumes) instead.
    for volume in set(volumes_option + image_volumes.keys()):
        volume = parse_volume_spec(volume)
        # No need to preserve host volumes
        if volume.external:
            continue
        volume_path = container_volumes.get(volume.internal)
        # New volume, doesn't exist in the old container
        if not volume_path:
            continue
        # Copy existing volume from old container
        volume = volume._replace(external=volume_path)
        volumes.append(build_volume_binding(volume))
    return dict(volumes)
def build_volume_binding(volume_spec):
    """Turn a VolumeSpec into a (container_path, "external:internal:mode") pair."""
    external, internal, mode = volume_spec
    return internal, "{0}:{1}:{2}".format(external, internal, mode)
def parse_volume_spec(volume_config):
    """Parse "external:internal[:mode]" (or a bare container path) into a
    VolumeSpec; paths are normalized, mode defaults to 'rw'.
    """
    parts = volume_config.split(':')
    if len(parts) > 3:
        raise ConfigError("Volume %s has incorrect format, should be "
                          "external:internal[:mode]" % volume_config)
    if len(parts) == 1:
        external = None
        internal = os.path.normpath(parts[0])
    else:
        external = os.path.normpath(parts[0])
        internal = os.path.normpath(parts[1])
    mode = 'rw' if len(parts) < 3 else parts[2]
    return VolumeSpec(external, internal, mode)
# Labels
def build_container_labels(label_options, service_labels, number, one_off=False):
    """Combine user labels, service identity labels, the container number and
    the compose version into one label dict.

    `service_labels` entries are "key=value" strings.  `one_off` is kept for
    interface compatibility; it is not used here.
    """
    # Copy instead of mutating the caller's dict: the original code updated
    # `label_options` in place, leaking the generated labels back to callers.
    labels = dict(label_options or {})
    labels.update(label.split('=', 1) for label in service_labels)
    labels[LABEL_CONTAINER_NUMBER] = str(number)
    labels[LABEL_VERSION] = __version__
    return labels
# Restart policy
def parse_restart_spec(restart_config):
    """Parse "mode[:max_retry]" into a docker RestartPolicy dict, or None
    for an empty/missing config.
    """
    if not restart_config:
        return None
    parts = restart_config.split(':')
    if len(parts) > 2:
        raise ConfigError("Restart %s has incorrect format, should be "
                          "mode[:max_retry]" % restart_config)
    name = parts[0]
    max_retry_count = parts[1] if len(parts) == 2 else 0
    return {'Name': name, 'MaximumRetryCount': int(max_retry_count)}
# Extra hosts
def build_extra_hosts(extra_hosts_config):
    """Normalize extra_hosts config (list of "host:ip" strings or a dict)
    into a host->ip dict; empty/None input yields {}.
    """
    if not extra_hosts_config:
        return {}
    # A value cannot be both dict and list, so checking dict first is
    # equivalent to the list-then-dict order.
    if isinstance(extra_hosts_config, dict):
        return extra_hosts_config
    if isinstance(extra_hosts_config, list):
        mapping = {}
        for line in extra_hosts_config:
            if not isinstance(line, six.string_types):
                raise ConfigError(
                    "extra_hosts_config \"%s\" must be either a list of strings or a string->string mapping," %
                    extra_hosts_config
                )
            host, ip = line.split(':')
            mapping[host.strip()] = ip.strip()
        return mapping
    raise ConfigError(
        "extra_hosts_config \"%s\" must be either a list of strings or a string->string mapping," %
        extra_hosts_config
    )
| apache-2.0 | -3,282,623,253,966,092,000 | 31.475983 | 134 | 0.568105 | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.