code (stringlengths 2-1.05M) | repo_name (stringlengths 5-104) | path (stringlengths 4-251) | language (stringclasses, 1 value) | license (stringclasses, 15 values) | size (int32, 2-1.05M)
---|---|---|---|---|---
#!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: rds
version_added: "1.3"
short_description: create, delete, or modify an Amazon RDS instance
description:
- Creates, deletes, or modifies RDS instances. When creating an instance it can be either a new instance or a read-only replica of an existing instance. This module has a dependency on python-boto >= 2.5. The 'promote' command requires boto >= 2.18.0. Certain features such as tags rely on boto.rds2 (boto >= 2.26.0).
options:
command:
description:
- Specifies the action to take.
required: true
choices: [ 'create', 'replicate', 'delete', 'facts', 'modify' , 'promote', 'snapshot', 'reboot', 'restore' ]
instance_name:
description:
- Database instance identifier. Required except when using command=facts or command=delete on just a snapshot
required: false
default: null
source_instance:
description:
- Name of the database to replicate. Used only when command=replicate.
required: false
default: null
db_engine:
description:
- The type of database. Used only when command=create.
required: false
default: null
choices: [ 'MySQL', 'oracle-se1', 'oracle-se', 'oracle-ee', 'sqlserver-ee', 'sqlserver-se', 'sqlserver-ex', 'sqlserver-web', 'postgres']
size:
description:
- Size in gigabytes of the initial storage for the DB instance. Used only when command=create or command=modify.
required: false
default: null
instance_type:
description:
- The instance type of the database. Must be specified when command=create. Optional when command=replicate, command=modify or command=restore. If not specified then the replica inherits the same instance type as the source instance.
required: false
default: null
username:
description:
- Master database username. Used only when command=create.
required: false
default: null
password:
description:
- Password for the master database username. Used only when command=create or command=modify.
required: false
default: null
region:
description:
- The AWS region to use. If not specified then the value of the EC2_REGION environment variable, if any, is used.
required: true
aliases: [ 'aws_region', 'ec2_region' ]
db_name:
description:
- Name of a database to create within the instance. If not specified then no database is created. Used only when command=create.
required: false
default: null
engine_version:
description:
- Version number of the database engine to use. Used only when command=create. If not specified then the current Amazon RDS default engine version is used.
required: false
default: null
parameter_group:
description:
- Name of the DB parameter group to associate with this instance. If omitted then the RDS default DBParameterGroup will be used. Used only when command=create or command=modify.
required: false
default: null
license_model:
description:
- The license model for this DB instance. Used only when command=create or command=restore.
required: false
default: null
choices: [ 'license-included', 'bring-your-own-license', 'general-public-license', 'postgresql-license' ]
multi_zone:
description:
- Specifies if this is a Multi-availability-zone deployment. Can not be used in conjunction with zone parameter. Used only when command=create or command=modify.
choices: [ "yes", "no" ]
required: false
default: null
iops:
description:
- Specifies the number of IOPS for the instance. Used only when command=create or command=modify. Must be an integer greater than 1000.
required: false
default: null
security_groups:
description:
- Comma separated list of one or more security groups. Used only when command=create or command=modify.
required: false
default: null
vpc_security_groups:
description:
- Comma separated list of one or more vpc security group ids. Also requires `subnet` to be specified. Used only when command=create or command=modify.
required: false
default: null
port:
description:
- Port number that the DB instance uses for connections. Defaults to 3306 for mysql. Must be changed to 1521 for Oracle, 1433 for SQL Server, 5432 for PostgreSQL. Used only when command=create or command=replicate.
required: false
default: null
upgrade:
description:
- Indicates that minor version upgrades should be applied automatically. Used only when command=create or command=replicate.
required: false
default: no
choices: [ "yes", "no" ]
option_group:
description:
- The name of the option group to use. If not specified then the default option group is used. Used only when command=create.
required: false
default: null
maint_window:
description:
- "Maintenance window in format of ddd:hh24:mi-ddd:hh24:mi. (Example: Mon:22:00-Mon:23:15) If not specified then a random maintenance window is assigned. Used only when command=create or command=modify."
required: false
default: null
backup_window:
description:
- Backup window in format of hh24:mi-hh24:mi. If not specified then a random backup window is assigned. Used only when command=create or command=modify.
required: false
default: null
backup_retention:
description:
- "Number of days backups are retained. Set to 0 to disable backups. Default is 1 day. Valid range: 0-35. Used only when command=create or command=modify."
required: false
default: null
zone:
description:
- availability zone in which to launch the instance. Used only when command=create, command=replicate or command=restore.
required: false
default: null
aliases: ['aws_zone', 'ec2_zone']
subnet:
description:
- VPC subnet group. If specified then a VPC instance is created. Used only when command=create.
required: false
default: null
snapshot:
description:
- Name of snapshot to take. When command=delete, if no snapshot name is provided then no snapshot is taken. If used with command=delete with no instance_name, the snapshot is deleted. Used with command=facts, command=delete or command=snapshot.
required: false
default: null
aws_secret_key:
description:
- AWS secret key. If not set then the value of the AWS_SECRET_KEY environment variable is used.
required: false
aliases: [ 'ec2_secret_key', 'secret_key' ]
aws_access_key:
description:
- AWS access key. If not set then the value of the AWS_ACCESS_KEY environment variable is used.
required: false
default: null
aliases: [ 'ec2_access_key', 'access_key' ]
wait:
description:
- When command=create, replicate, modify or restore then wait for the database to enter the 'available' state. When command=delete wait for the database to be terminated.
required: false
default: "no"
choices: [ "yes", "no" ]
wait_timeout:
description:
- how long before wait gives up, in seconds
default: 300
apply_immediately:
description:
- Used only when command=modify. If enabled, the modifications will be applied as soon as possible rather than waiting for the next preferred maintenance window.
default: no
choices: [ "yes", "no" ]
force_failover:
description:
- Used only when command=reboot. If enabled, the reboot is done using a MultiAZ failover.
required: false
default: "no"
choices: [ "yes", "no" ]
version_added: "2.0"
new_instance_name:
description:
- Name to rename an instance to. Used only when command=modify.
required: false
default: null
version_added: "1.5"
character_set_name:
description:
- Associate the DB instance with a specified character set. Used with command=create.
required: false
default: null
version_added: "1.9"
publicly_accessible:
description:
- explicitly set whether the resource should be publicly accessible or not. Used with command=create, command=replicate. Requires boto >= 2.26.0
required: false
default: null
version_added: "1.9"
tags:
description:
- tags dict to apply to a resource. Used with command=create, command=replicate, command=restore. Requires boto >= 2.26.0
required: false
default: null
version_added: "1.9"
requirements:
- "python >= 2.6"
- "boto"
author:
- "Bruce Pennypacker (@bpennypacker)"
- "Will Thames (@willthames)"
'''
# FIXME: the command stuff needs a 'state' like alias to make things consistent -- MPD
EXAMPLES = '''
# Basic mysql provisioning example
- rds:
command: create
instance_name: new-database
db_engine: MySQL
size: 10
instance_type: db.m1.small
username: mysql_admin
password: 1nsecure
tags:
Environment: testing
Application: cms
# Create a read-only replica and wait for it to become available
- rds:
command: replicate
instance_name: new-database-replica
source_instance: new-database
wait: yes
wait_timeout: 600
# Delete an instance, but create a snapshot before doing so
- rds:
command: delete
instance_name: new-database
snapshot: new_database_snapshot
# Get facts about an instance
- rds:
command: facts
instance_name: new-database
register: new_database_facts
# Rename an instance and wait for the change to take effect
- rds:
command: modify
instance_name: new-database
new_instance_name: renamed-database
wait: yes
# Reboot an instance and wait for it to become available again
- rds:
command: reboot
instance_name: database
wait: yes
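# Two further illustrative tasks, sketched from the option documentation above
# (instance and snapshot names here are placeholders, not values used elsewhere in this file)
# Restore a new instance from an existing snapshot and wait for it to become available
- rds:
command: restore
instance_name: restored-database
snapshot: new-database-snapshot
instance_type: db.m1.small
wait: yes
# Promote a read replica to a standalone instance
- rds:
command: promote
instance_name: new-database-replica
wait: yes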
'''
import sys
import time
try:
import boto.rds
HAS_BOTO = True
except ImportError:
HAS_BOTO = False
try:
import boto.rds2
has_rds2 = True
except ImportError:
has_rds2 = False
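# RDSException normalizes the different error shapes raised by boto.rds and boto.rds2
# into a message/code pair that the command handlers below report via fail_json.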
class RDSException(Exception):
def __init__(self, exc):
if hasattr(exc, 'error_message') and exc.error_message:
self.message = exc.error_message
self.code = exc.error_code
elif hasattr(exc, 'body') and 'Error' in exc.body:
self.message = exc.body['Error']['Message']
self.code = exc.body['Error']['Code']
else:
self.message = str(exc)
self.code = 'Unknown Error'
class RDSConnection:
def __init__(self, module, region, **aws_connect_params):
try:
self.connection = connect_to_aws(boto.rds, region, **aws_connect_params)
except boto.exception.BotoServerError, e:
module.fail_json(msg=e.error_message)
def get_db_instance(self, instancename):
try:
return RDSDBInstance(self.connection.get_all_dbinstances(instancename)[0])
except boto.exception.BotoServerError, e:
return None
def get_db_snapshot(self, snapshotid):
try:
return RDSSnapshot(self.connection.get_all_dbsnapshots(snapshot_id=snapshotid)[0])
except boto.exception.BotoServerError, e:
return None
def create_db_instance(self, instance_name, size, instance_class, db_engine,
username, password, **params):
params['engine'] = db_engine
try:
result = self.connection.create_dbinstance(instance_name, size, instance_class,
username, password, **params)
return RDSDBInstance(result)
except boto.exception.BotoServerError, e:
raise RDSException(e)
def create_db_instance_read_replica(self, instance_name, source_instance, **params):
try:
result = self.connection.create_dbinstance_read_replica(instance_name, source_instance, **params)
return RDSDBInstance(result)
except boto.exception.BotoServerError, e:
raise RDSException(e)
def delete_db_instance(self, instance_name, **params):
try:
result = self.connection.delete_dbinstance(instance_name, **params)
return RDSDBInstance(result)
except boto.exception.BotoServerError, e:
raise RDSException(e)
def delete_db_snapshot(self, snapshot):
try:
result = self.connection.delete_dbsnapshot(snapshot)
return RDSSnapshot(result)
except boto.exception.BotoServerError, e:
raise RDSException(e)
def modify_db_instance(self, instance_name, **params):
try:
result = self.connection.modify_dbinstance(instance_name, **params)
return RDSDBInstance(result)
except boto.exception.BotoServerError, e:
raise RDSException(e)
def reboot_db_instance(self, instance_name, **params):
try:
result = self.connection.reboot_dbinstance(instance_name)
return RDSDBInstance(result)
except boto.exception.BotoServerError, e:
raise RDSException(e)
def restore_db_instance_from_db_snapshot(self, instance_name, snapshot, instance_type, **params):
try:
result = self.connection.restore_dbinstance_from_dbsnapshot(snapshot, instance_name, instance_type, **params)
return RDSDBInstance(result)
except boto.exception.BotoServerError, e:
raise RDSException(e)
def create_db_snapshot(self, snapshot, instance_name, **params):
try:
result = self.connection.create_dbsnapshot(snapshot, instance_name)
return RDSSnapshot(result)
except boto.exception.BotoServerError, e:
raise RDSException(e)
def promote_read_replica(self, instance_name, **params):
try:
result = self.connection.promote_read_replica(instance_name, **params)
return RDSDBInstance(result)
except boto.exception.BotoServerError, e:
raise RDSException(e)
class RDS2Connection:
def __init__(self, module, region, **aws_connect_params):
try:
self.connection = connect_to_aws(boto.rds2, region, **aws_connect_params)
except boto.exception.BotoServerError, e:
module.fail_json(msg=e.error_message)
def get_db_instance(self, instancename):
try:
dbinstances = self.connection.describe_db_instances(db_instance_identifier=instancename)['DescribeDBInstancesResponse']['DescribeDBInstancesResult']['DBInstances']
result = RDS2DBInstance(dbinstances[0])
return result
except boto.rds2.exceptions.DBInstanceNotFound, e:
return None
except Exception, e:
raise e
def get_db_snapshot(self, snapshotid):
try:
snapshots = self.connection.describe_db_snapshots(db_snapshot_identifier=snapshotid, snapshot_type='manual')['DescribeDBSnapshotsResponse']['DescribeDBSnapshotsResult']['DBSnapshots']
result = RDS2Snapshot(snapshots[0])
return result
except boto.rds2.exceptions.DBSnapshotNotFound, e:
return None
def create_db_instance(self, instance_name, size, instance_class, db_engine,
username, password, **params):
try:
result = self.connection.create_db_instance(instance_name, size, instance_class,
db_engine, username, password, **params)['CreateDBInstanceResponse']['CreateDBInstanceResult']['DBInstance']
return RDS2DBInstance(result)
except boto.exception.BotoServerError, e:
raise RDSException(e)
def create_db_instance_read_replica(self, instance_name, source_instance, **params):
try:
result = self.connection.create_db_instance_read_replica(instance_name, source_instance, **params)['CreateDBInstanceReadReplicaResponse']['CreateDBInstanceReadReplicaResult']['DBInstance']
return RDS2DBInstance(result)
except boto.exception.BotoServerError, e:
raise RDSException(e)
def delete_db_instance(self, instance_name, **params):
try:
result = self.connection.delete_db_instance(instance_name, **params)['DeleteDBInstanceResponse']['DeleteDBInstanceResult']['DBInstance']
return RDS2DBInstance(result)
except boto.exception.BotoServerError, e:
raise RDSException(e)
def delete_db_snapshot(self, snapshot):
try:
result = self.connection.delete_db_snapshot(snapshot)['DeleteDBSnapshotResponse']['DeleteDBSnapshotResult']['DBSnapshot']
return RDS2Snapshot(result)
except boto.exception.BotoServerError, e:
raise RDSException(e)
def modify_db_instance(self, instance_name, **params):
try:
result = self.connection.modify_db_instance(instance_name, **params)['ModifyDBInstanceResponse']['ModifyDBInstanceResult']['DBInstance']
return RDS2DBInstance(result)
except boto.exception.BotoServerError, e:
raise RDSException(e)
def reboot_db_instance(self, instance_name, **params):
try:
result = self.connection.reboot_db_instance(instance_name, **params)['RebootDBInstanceResponse']['RebootDBInstanceResult']['DBInstance']
return RDS2DBInstance(result)
except boto.exception.BotoServerError, e:
raise RDSException(e)
def restore_db_instance_from_db_snapshot(self, instance_name, snapshot, instance_type, **params):
try:
result = self.connection.restore_db_instance_from_db_snapshot(instance_name, snapshot, **params)['RestoreDBInstanceFromDBSnapshotResponse']['RestoreDBInstanceFromDBSnapshotResult']['DBInstance']
return RDS2DBInstance(result)
except boto.exception.BotoServerError, e:
raise RDSException(e)
def create_db_snapshot(self, snapshot, instance_name, **params):
try:
result = self.connection.create_db_snapshot(snapshot, instance_name, **params)['CreateDBSnapshotResponse']['CreateDBSnapshotResult']['DBSnapshot']
return RDS2Snapshot(result)
except boto.exception.BotoServerError, e:
raise RDSException(e)
def promote_read_replica(self, instance_name, **params):
try:
result = self.connection.promote_read_replica(instance_name, **params)['PromoteReadReplicaResponse']['PromoteReadReplicaResult']['DBInstance']
return RDS2DBInstance(result)
except boto.exception.BotoServerError, e:
raise RDSException(e)
class RDSDBInstance:
def __init__(self, dbinstance):
self.instance = dbinstance
self.name = dbinstance.id
self.status = dbinstance.status
def get_data(self):
d = {
'id' : self.name,
'create_time' : self.instance.create_time,
'status' : self.status,
'availability_zone' : self.instance.availability_zone,
'backup_retention' : self.instance.backup_retention_period,
'backup_window' : self.instance.preferred_backup_window,
'maintenance_window' : self.instance.preferred_maintenance_window,
'multi_zone' : self.instance.multi_az,
'instance_type' : self.instance.instance_class,
'username' : self.instance.master_username,
'iops' : self.instance.iops
}
# Endpoint exists only if the instance is available
if self.status == 'available':
d["endpoint"] = self.instance.endpoint[0]
d["port"] = self.instance.endpoint[1]
if self.instance.vpc_security_groups is not None:
d["vpc_security_groups"] = ','.join(x.vpc_group for x in self.instance.vpc_security_groups)
else:
d["vpc_security_groups"] = None
else:
d["endpoint"] = None
d["port"] = None
d["vpc_security_groups"] = None
# ReadReplicaSourceDBInstanceIdentifier may or may not exist
try:
d["replication_source"] = self.instance.ReadReplicaSourceDBInstanceIdentifier
except Exception, e:
d["replication_source"] = None
return d
class RDS2DBInstance:
def __init__(self, dbinstance):
self.instance = dbinstance
if 'DBInstanceIdentifier' not in dbinstance:
self.name = None
else:
self.name = self.instance.get('DBInstanceIdentifier')
self.status = self.instance.get('DBInstanceStatus')
def get_data(self):
d = {
'id': self.name,
'create_time': self.instance['InstanceCreateTime'],
'status': self.status,
'availability_zone': self.instance['AvailabilityZone'],
'backup_retention': self.instance['BackupRetentionPeriod'],
'maintenance_window': self.instance['PreferredMaintenanceWindow'],
'multi_zone': self.instance['MultiAZ'],
'instance_type': self.instance['DBInstanceClass'],
'username': self.instance['MasterUsername'],
'iops': self.instance['Iops'],
'replication_source': self.instance['ReadReplicaSourceDBInstanceIdentifier']
}
if self.instance["VpcSecurityGroups"] is not None:
d['vpc_security_groups'] = ','.join(x['VpcSecurityGroupId'] for x in self.instance['VpcSecurityGroups'])
if self.status == 'available':
d['endpoint'] = self.instance["Endpoint"]["Address"]
d['port'] = self.instance["Endpoint"]["Port"]
else:
d['endpoint'] = None
d['port'] = None
return d
class RDSSnapshot:
def __init__(self, snapshot):
self.snapshot = snapshot
self.name = snapshot.id
self.status = snapshot.status
def get_data(self):
d = {
'id' : self.name,
'create_time' : self.snapshot.snapshot_create_time,
'status' : self.status,
'availability_zone' : self.snapshot.availability_zone,
'instance_id' : self.snapshot.instance_id,
'instance_created' : self.snapshot.instance_create_time,
}
# needs boto >= 2.21.0
if hasattr(self.snapshot, 'snapshot_type'):
d["snapshot_type"] = self.snapshot.snapshot_type
if hasattr(self.snapshot, 'iops'):
d["iops"] = self.snapshot.iops
return d
class RDS2Snapshot:
def __init__(self, snapshot):
if 'DeleteDBSnapshotResponse' in snapshot:
self.snapshot = snapshot['DeleteDBSnapshotResponse']['DeleteDBSnapshotResult']['DBSnapshot']
else:
self.snapshot = snapshot
self.name = self.snapshot.get('DBSnapshotIdentifier')
self.status = self.snapshot.get('Status')
def get_data(self):
d = {
'id' : self.name,
'create_time' : self.snapshot['SnapshotCreateTime'],
'status' : self.status,
'availability_zone' : self.snapshot['AvailabilityZone'],
'instance_id' : self.snapshot['DBInstanceIdentifier'],
'instance_created' : self.snapshot['InstanceCreateTime'],
'snapshot_type' : self.snapshot['SnapshotType'],
'iops' : self.snapshot['Iops'],
}
return d
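# Helper: poll the given RDS resource until it reaches the requested status or the
# module's wait_timeout (seconds) expires, failing the module run on timeout.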
def await_resource(conn, resource, status, module):
wait_timeout = module.params.get('wait_timeout') + time.time()
while wait_timeout > time.time() and resource.status != status:
time.sleep(5)
if wait_timeout <= time.time():
module.fail_json(msg="Timeout waiting for RDS resource %s" % resource.name)
if module.params.get('command') == 'snapshot':
# Temporary until all the rds2 commands have their responses parsed
if resource.name is None:
module.fail_json(msg="There was a problem waiting for RDS snapshot %s" % resource.snapshot)
resource = conn.get_db_snapshot(resource.name)
else:
# Temporary until all the rds2 commands have their responses parsed
if resource.name is None:
module.fail_json(msg="There was a problem waiting for RDS instance %s" % resource.instance)
resource = conn.get_db_instance(resource.name)
if resource is None:
break
return resource
def create_db_instance(module, conn):
subnet = module.params.get('subnet')
required_vars = ['instance_name', 'db_engine', 'size', 'instance_type', 'username', 'password']
valid_vars = ['backup_retention', 'backup_window',
'character_set_name', 'db_name', 'engine_version',
'instance_type', 'iops', 'license_model', 'maint_window',
'multi_zone', 'option_group', 'parameter_group','port',
'subnet', 'upgrade', 'zone']
if module.params.get('subnet'):
valid_vars.append('vpc_security_groups')
else:
valid_vars.append('security_groups')
if has_rds2:
valid_vars.extend(['publicly_accessible', 'tags'])
params = validate_parameters(required_vars, valid_vars, module)
instance_name = module.params.get('instance_name')
result = conn.get_db_instance(instance_name)
if result:
changed = False
else:
try:
result = conn.create_db_instance(instance_name, module.params.get('size'),
module.params.get('instance_type'), module.params.get('db_engine'),
module.params.get('username'), module.params.get('password'), **params)
changed = True
except RDSException, e:
module.fail_json(msg="Failed to create instance: %s" % e.message)
if module.params.get('wait'):
resource = await_resource(conn, result, 'available', module)
else:
resource = conn.get_db_instance(instance_name)
module.exit_json(changed=changed, instance=resource.get_data())
def replicate_db_instance(module, conn):
required_vars = ['instance_name', 'source_instance']
valid_vars = ['instance_type', 'port', 'upgrade', 'zone']
if has_rds2:
valid_vars.extend(['iops', 'option_group', 'publicly_accessible', 'tags'])
params = validate_parameters(required_vars, valid_vars, module)
instance_name = module.params.get('instance_name')
source_instance = module.params.get('source_instance')
result = conn.get_db_instance(instance_name)
if result:
changed = False
else:
try:
result = conn.create_db_instance_read_replica(instance_name, source_instance, **params)
changed = True
except RDSException, e:
module.fail_json(msg="Failed to create replica instance: %s " % e.message)
if module.params.get('wait'):
resource = await_resource(conn, result, 'available', module)
else:
resource = conn.get_db_instance(instance_name)
module.exit_json(changed=changed, instance=resource.get_data())
def delete_db_instance_or_snapshot(module, conn):
required_vars = []
valid_vars = ['instance_name', 'snapshot', 'skip_final_snapshot']
params = validate_parameters(required_vars, valid_vars, module)
instance_name = module.params.get('instance_name')
snapshot = module.params.get('snapshot')
if not instance_name:
result = conn.get_db_snapshot(snapshot)
else:
result = conn.get_db_instance(instance_name)
if not result:
module.exit_json(changed=False)
if result.status == 'deleting':
module.exit_json(changed=False)
try:
if instance_name:
if snapshot:
params["skip_final_snapshot"] = False
if has_rds2:
params["final_db_snapshot_identifier"] = snapshot
else:
params["final_snapshot_id"] = snapshot
else:
params["skip_final_snapshot"] = True
result = conn.delete_db_instance(instance_name, **params)
else:
result = conn.delete_db_snapshot(snapshot)
except RDSException, e:
module.fail_json(msg="Failed to delete instance: %s" % e.message)
# If we're not waiting for a delete to complete then we're all done
# so just return
if not module.params.get('wait'):
module.exit_json(changed=True)
try:
resource = await_resource(conn, result, 'deleted', module)
module.exit_json(changed=True)
except RDSException, e:
if e.code == 'DBInstanceNotFound':
module.exit_json(changed=True)
else:
module.fail_json(msg=e.message)
except Exception, e:
module.fail_json(msg=str(e))
def facts_db_instance_or_snapshot(module, conn):
required_vars = []
valid_vars = ['instance_name', 'snapshot']
params = validate_parameters(required_vars, valid_vars, module)
instance_name = module.params.get('instance_name')
snapshot = module.params.get('snapshot')
if instance_name and snapshot:
module.fail_json(msg="Facts must be called with either instance_name or snapshot, not both")
if instance_name:
resource = conn.get_db_instance(instance_name)
if not resource:
module.fail_json(msg="DB instance %s does not exist" % instance_name)
if snapshot:
resource = conn.get_db_snapshot(snapshot)
if not resource:
module.fail_json(msg="DB snapshot %s does not exist" % snapshot)
module.exit_json(changed=False, instance=resource.get_data())
def modify_db_instance(module, conn):
required_vars = ['instance_name']
valid_vars = ['apply_immediately', 'backup_retention', 'backup_window',
'db_name', 'engine_version', 'instance_type', 'iops', 'license_model',
'maint_window', 'multi_zone', 'new_instance_name',
'option_group', 'parameter_group', 'password', 'size', 'upgrade']
params = validate_parameters(required_vars, valid_vars, module)
instance_name = module.params.get('instance_name')
new_instance_name = module.params.get('new_instance_name')
try:
result = conn.modify_db_instance(instance_name, **params)
except RDSException, e:
module.fail_json(msg=e.message)
if params.get('apply_immediately'):
if new_instance_name:
# Wait until the new instance name is valid
new_instance = None
while not new_instance:
new_instance = conn.get_db_instance(new_instance_name)
time.sleep(5)
# Found instance but it briefly flicks to available
# before rebooting so let's wait until we see it rebooting
# before we check whether to 'wait'
result = await_resource(conn, new_instance, 'rebooting', module)
if module.params.get('wait'):
resource = await_resource(conn, result, 'available', module)
else:
resource = conn.get_db_instance(instance_name)
# guess that this changed the DB, need a way to check
module.exit_json(changed=True, instance=resource.get_data())
def promote_db_instance(module, conn):
required_vars = ['instance_name']
valid_vars = ['backup_retention', 'backup_window']
params = validate_parameters(required_vars, valid_vars, module)
instance_name = module.params.get('instance_name')
result = conn.get_db_instance(instance_name)
if result.get_data().get('replication_source'):
changed = False
else:
try:
result = conn.promote_read_replica(instance_name, **params)
changed = True
except RDSException, e:
module.fail_json(msg=e.message)
if module.params.get('wait'):
resource = await_resource(conn, result, 'available', module)
else:
resource = conn.get_db_instance(instance_name)
module.exit_json(changed=changed, instance=resource.get_data())
def snapshot_db_instance(module, conn):
required_vars = ['instance_name', 'snapshot']
valid_vars = ['tags']
params = validate_parameters(required_vars, valid_vars, module)
instance_name = module.params.get('instance_name')
snapshot = module.params.get('snapshot')
changed = False
result = conn.get_db_snapshot(snapshot)
if not result:
try:
result = conn.create_db_snapshot(snapshot, instance_name, **params)
changed = True
except RDSException, e:
module.fail_json(msg=e.message)
if module.params.get('wait'):
resource = await_resource(conn, result, 'available', module)
else:
resource = conn.get_db_snapshot(snapshot)
module.exit_json(changed=changed, snapshot=resource.get_data())
def reboot_db_instance(module, conn):
required_vars = ['instance_name']
valid_vars = []
if has_rds2:
valid_vars.append('force_failover')
params = validate_parameters(required_vars, valid_vars, module)
instance_name = module.params.get('instance_name')
result = conn.get_db_instance(instance_name)
changed = False
try:
result = conn.reboot_db_instance(instance_name, **params)
changed = True
except RDSException, e:
module.fail_json(msg=e.message)
if module.params.get('wait'):
resource = await_resource(conn, result, 'available', module)
else:
resource = conn.get_db_instance(instance_name)
module.exit_json(changed=changed, instance=resource.get_data())
def restore_db_instance(module, conn):
required_vars = ['instance_name', 'snapshot']
valid_vars = ['db_name', 'iops', 'license_model', 'multi_zone',
'option_group', 'port', 'publicly_accessible',
'subnet', 'tags', 'upgrade', 'zone']
if has_rds2:
valid_vars.append('instance_type')
else:
required_vars.append('instance_type')
params = validate_parameters(required_vars, valid_vars, module)
instance_name = module.params.get('instance_name')
instance_type = module.params.get('instance_type')
snapshot = module.params.get('snapshot')
changed = False
result = conn.get_db_instance(instance_name)
if not result:
try:
result = conn.restore_db_instance_from_db_snapshot(instance_name, snapshot, instance_type, **params)
changed = True
except RDSException, e:
module.fail_json(msg=e.message)
if module.params.get('wait'):
resource = await_resource(conn, result, 'available', module)
else:
resource = conn.get_db_instance(instance_name)
module.exit_json(changed=changed, instance=resource.get_data())
def validate_parameters(required_vars, valid_vars, module):
command = module.params.get('command')
for v in required_vars:
if not module.params.get(v):
module.fail_json(msg="Parameter %s required for %s command" % (v, command))
# map to convert rds module options to boto rds and rds2 options
optional_params = {
'port': 'port',
'db_name': 'db_name',
'zone': 'availability_zone',
'maint_window': 'preferred_maintenance_window',
'backup_window': 'preferred_backup_window',
'backup_retention': 'backup_retention_period',
'multi_zone': 'multi_az',
'engine_version': 'engine_version',
'upgrade': 'auto_minor_version_upgrade',
'subnet': 'db_subnet_group_name',
'license_model': 'license_model',
'option_group': 'option_group_name',
'iops': 'iops',
'new_instance_name': 'new_instance_id',
'apply_immediately': 'apply_immediately',
}
# map to convert rds module options to boto rds options
optional_params_rds = {
'db_engine': 'engine',
'password': 'master_password',
'parameter_group': 'param_group',
'instance_type': 'instance_class',
}
# map to convert rds module options to boto rds2 options
optional_params_rds2 = {
'tags': 'tags',
'publicly_accessible': 'publicly_accessible',
'parameter_group': 'db_parameter_group_name',
'character_set_name': 'character_set_name',
'instance_type': 'db_instance_class',
'password': 'master_user_password',
'new_instance_name': 'new_db_instance_identifier',
'force_failover': 'force_failover',
}
if has_rds2:
optional_params.update(optional_params_rds2)
sec_group = 'db_security_groups'
else:
optional_params.update(optional_params_rds)
sec_group = 'security_groups'
# Check for options only supported with rds2
for k in set(optional_params_rds2.keys()) - set(optional_params_rds.keys()):
if module.params.get(k):
module.fail_json(msg="Parameter %s requires boto.rds (boto >= 2.26.0)" % k)
params = {}
for (k, v) in optional_params.items():
if module.params.get(k) and k not in required_vars:
if k in valid_vars:
params[v] = module.params[k]
else:
module.fail_json(msg="Parameter %s is not valid for %s command" % (k, command))
if module.params.get('security_groups'):
params[sec_group] = module.params.get('security_groups').split(',')
vpc_groups = module.params.get('vpc_security_groups')
if vpc_groups:
if has_rds2:
params['vpc_security_group_ids'] = vpc_groups
else:
groups_list = []
for x in vpc_groups:
groups_list.append(boto.rds.VPCSecurityGroupMembership(vpc_group=x))
params['vpc_security_groups'] = groups_list
# Convert tags dict to list of tuples that rds2 expects
if 'tags' in params:
params['tags'] = module.params['tags'].items()
return params
def main():
argument_spec = ec2_argument_spec()
argument_spec.update(dict(
command = dict(choices=['create', 'replicate', 'delete', 'facts', 'modify', 'promote', 'snapshot', 'reboot', 'restore'], required=True),
instance_name = dict(required=False),
source_instance = dict(required=False),
db_engine = dict(choices=['MySQL', 'oracle-se1', 'oracle-se', 'oracle-ee', 'sqlserver-ee', 'sqlserver-se', 'sqlserver-ex', 'sqlserver-web', 'postgres'], required=False),
size = dict(required=False),
instance_type = dict(aliases=['type'], required=False),
username = dict(required=False),
password = dict(no_log=True, required=False),
db_name = dict(required=False),
engine_version = dict(required=False),
parameter_group = dict(required=False),
license_model = dict(choices=['license-included', 'bring-your-own-license', 'general-public-license', 'postgresql-license'], required=False),
multi_zone = dict(type='bool', default=False),
iops = dict(required=False),
security_groups = dict(required=False),
vpc_security_groups = dict(type='list', required=False),
port = dict(required=False),
upgrade = dict(type='bool', default=False),
option_group = dict(required=False),
maint_window = dict(required=False),
backup_window = dict(required=False),
backup_retention = dict(required=False),
zone = dict(aliases=['aws_zone', 'ec2_zone'], required=False),
subnet = dict(required=False),
wait = dict(type='bool', default=False),
wait_timeout = dict(type='int', default=300),
snapshot = dict(required=False),
apply_immediately = dict(type='bool', default=False),
new_instance_name = dict(required=False),
tags = dict(type='dict', required=False),
publicly_accessible = dict(required=False),
character_set_name = dict(required=False),
force_failover = dict(type='bool', required=False, default=False)
)
)
module = AnsibleModule(
argument_spec=argument_spec,
)
if not HAS_BOTO:
module.fail_json(msg='boto required for this module')
invocations = {
'create': create_db_instance,
'replicate': replicate_db_instance,
'delete': delete_db_instance_or_snapshot,
'facts': facts_db_instance_or_snapshot,
'modify': modify_db_instance,
'promote': promote_db_instance,
'snapshot': snapshot_db_instance,
'reboot': reboot_db_instance,
'restore': restore_db_instance,
}
region, ec2_url, aws_connect_params = get_aws_connection_info(module)
if not region:
module.fail_json(msg="Region not specified. Unable to determine region from EC2_REGION.")
# connect to the rds endpoint
if has_rds2:
conn = RDS2Connection(module, region, **aws_connect_params)
else:
conn = RDSConnection(module, region, **aws_connect_params)
invocations[module.params.get('command')](module, conn)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.ec2 import *
main()
| evax/ansible-modules-core | cloud/amazon/rds.py | Python | gpl-3.0 | 42,330 |
#!/usr/bin/env python
""" cdecl.py - parse c declarations
(c) 2002, 2003, 2004, 2005 Simon Burton <[email protected]>
Released under GNU LGPL license.
version 0.xx
"""
import string
class Node(list):
" A node in a parse tree "
def __init__(self,*items,**kw):
list.__init__( self, items )
self.lock1 = 0 # these two should be properties (simplifies serializing)
self.lock2 = 0
self.verbose = 0
for key in kw.keys():
self.__dict__[key] = kw[key]
def __str__(self):
attrs = []
for item in self:
if isinstance(item,Node):
attrs.append( str(item) )
else:
attrs.append( repr(item) )
attrs = ','.join(attrs)
return "%s(%s)"%(self.__class__.__name__,attrs)
def safe_repr( self, tank ):
tank[ str(self) ] = None
attrs = []
for item in self:
if isinstance(item,Node):
attrs.append( item.safe_repr(tank) ) # can we use repr here ?
else:
attrs.append( repr(item) )
# this is the dangerous bit:
for key, val in self.__dict__.items():
if isinstance(val,Node):
if str(val) not in tank:
attrs.append( '%s=%s'%(key,val.safe_repr(tank)) )
else:
attrs.append( '%s=%s'%(key,repr(val)) )
attrs = ','.join(attrs)
return "%s(%s)"%(self.__class__.__name__,attrs)
def __repr__(self):
#attrs = ','.join( [repr(item) for item in self] + \
# [ '%s=%s'%(key,repr(val)) for key,val in self.__dict__.items() ] )
#return "%s%s"%(self.__class__.__name__,tuple(attrs))
return self.safe_repr({})
def __eq__(self,other):
if not isinstance(other,Node):
return 0
if len(self)!=len(other):
return 0
for i in range(len(self)):
if not self[i]==other[i]:
return 0
return 1
def __ne__(self,other):
return not self==other
def filter(self,cls):
return [x for x in self if isinstance(x,cls)]
#return filter( lambda x:isinstance(x,cls), self )
def deepfilter(self,cls):
" bottom-up "
return [x for x in self.nodes() if isinstance(x,cls)]
def find(self,cls):
for x in self:
if isinstance(x,cls):
return x
return None
def deepfind(self,cls):
" bottom-up isinstance search "
for x in self:
if isinstance(x,Node):
if isinstance(x,cls):
return x
node = x.deepfind(cls)
if node is not None:
return node
if isinstance(self,cls):
return self
return None
def leaves(self):
for i in self:
if isinstance( i, Node ):
for j in i.leaves():
yield j
else:
yield i
def nodes(self):
" bottom-up iteration "
for i in self:
if isinstance( i, Node ):
for j in i.nodes():
yield j
yield self
def deeplen(self):
i=0
if not self.lock2:
self.lock2=1
for item in self:
i+=1
if isinstance(item,Node):
i+=item.deeplen()
self.lock2=0
else:
i+=1
return i
def deepstr(self,level=0,comment=False,nl='\n',indent=' '):
if self.deeplen() < 4:
nl = ""; indent = ""
#else:
#nl="\n"; indent = " "
s = []
if not self.lock1:
self.lock1=1
for item in self:
if isinstance(item,Node):
s.append( indent*(level+1)+item.deepstr(level+1,False,nl,indent) )
else:
s.append( indent*(level+1)+repr(item) )
self.lock1=0
else:
for item in self:
if isinstance(item,Node):
s.append( indent*(level+1)+"<recursion...>" )
else:
s.append( indent*(level+1)+"%s"%repr(item) )
s = "%s(%s)"%(self.__class__.__name__,nl+string.join(s,","+nl))
if comment:
s = '#' + s.replace('\n','\n#')
return s
def clone(self):
items = []
for item in self:
if isinstance(item,Node):
item = item.clone()
items.append(item)
# we skip any attributes...
return self.__class__(*items)
def fastclone(self):
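# Iterative, stack-based variant of clone(); unlike clone() it also copies each
# node's __dict__ attributes, at the cost of the explicit bookkeeping below.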
# XX is it faster ???
#print "clone"
nodes = [self]
idxs = [0]
itemss = [ [] ]
while nodes:
assert len(nodes)==len(idxs)==len(itemss)
node = nodes[-1]
items = itemss[-1]
assert idxs[-1] == len(items)
while idxs[-1]==len(node):
# pop
_node = node.__class__( *items )
_node.__dict__.update( node.__dict__ )
nodes.pop(-1)
idxs.pop(-1)
itemss.pop(-1)
if not nodes:
#for node0 in self.nodes():
#for node1 in _node.nodes():
#assert node0 is not node1
#assert _node == self
return _node # Done !!
node = nodes[-1]
items = itemss[-1]
items.append(_node) # set
idxs[-1] += 1
assert idxs[-1] == len(items)
#assert idxs[-1] < len(node), str( (node,nodes,idxs,itemss) )
_node = node[ idxs[-1] ]
# while idxs[-1]<len(node):
if isinstance(_node,Node):
# push
nodes.append( _node )
idxs.append( 0 )
itemss.append( [] )
else:
# next
items.append(_node)
idxs[-1] += 1
assert idxs[-1] == len(items)
def expose(self,cls):
' expose children of any <cls> instance '
# children first
for x in self:
if isinstance(x,Node):
x.expose(cls)
# now the tricky bit
i=0
while i < len(self):
if isinstance(self[i],cls):
node=self.pop(i)
for x in node:
assert not isinstance(x,cls)
# pass on some attributes
if hasattr(node,'lines') and not hasattr(x,'lines'):
x.lines=node.lines
if hasattr(node,'file') and not hasattr(x,'file'):
x.file=node.file
self.insert(i,x) # expose
i=i+1
assert i<=len(self)
else:
i=i+1
def get_parent( self, item ): # XX 25% CPU time here XX
assert self != item
if item in self:
return self
for child in self:
if isinstance(child, Node):
parent = child.get_parent(item)
if parent is not None:
return parent
return None
def expose_node( self, item ):
assert self != item
parent = self.get_parent(item)
idx = parent.index( item )
parent[idx:idx+1] = item[:]
def delete(self,cls):
' delete any <cls> subtree '
for x in self:
if isinstance(x,Node):
x.delete(cls)
# now the tricky bit
i=0
while i < len(self):
if isinstance(self[i],cls):
self.pop(i)
else:
i=i+1
def deeprm(self,item):
' remove any items matching <item> '
for x in self:
if isinstance(x,Node):
x.deeprm(item)
# now the tricky bit
i=0
while i < len(self):
if self[i] == item:
self.pop(i)
else:
i=i+1
def idem(self,cls):
" <cls> is made idempotent "
# children first
for x in self:
if isinstance(x,Node):
x.idem(cls)
if isinstance(self,cls):
# now the tricky bit
i=0
while i < len(self):
if isinstance(self[i],cls):
node = self.pop(i)
for x in node:
assert not isinstance(x,cls)
self.insert(i,x) # idempotent
i=i+1
assert i<=len(self)
else:
i=i+1
if __name__=="__main__":
node = Node( 'a', Node(1,2), Node(Node(Node(),1)) )
print node
print node.clone()
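# A few more illustrative calls (assuming the same Python 2 environment as the prints above):
# deepfind returns the first sub-node of a given class, leaves() yields the raw leaf items,
# and deepstr() renders an indented view of the tree.
print node.deepfind(Node)
print list(node.leaves())
print node.deepstr()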
| jpflori/mpir | yasm/tools/python-yasm/pyxelator/node.py | Python | gpl-3.0 | 8,966 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Lexing error finder
~~~~~~~~~~~~~~~~~~~
For the source files given on the command line, display
the text where Error tokens are being generated, along
with some context.
:copyright: Copyright 2006-2010 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import sys, os
try:
import pygments
except ImportError:
# try parent path
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
from pygments.lexer import RegexLexer
from pygments.lexers import get_lexer_for_filename, get_lexer_by_name
from pygments.token import Error, Text, _TokenType
class DebuggingRegexLexer(RegexLexer):
"""Make the state stack, position and current match instance attributes."""
def get_tokens_unprocessed(self, text, stack=('root',)):
"""
Split ``text`` into (tokentype, text) pairs.
``stack`` is the initial stack (default: ``['root']``)
"""
self.pos = 0
tokendefs = self._tokens
self.statestack = list(stack)
statetokens = tokendefs[self.statestack[-1]]
while 1:
for rexmatch, action, new_state in statetokens:
self.m = m = rexmatch(text, self.pos)
if m:
if type(action) is _TokenType:
yield self.pos, action, m.group()
else:
for item in action(self, m):
yield item
self.pos = m.end()
if new_state is not None:
# state transition
if isinstance(new_state, tuple):
for state in new_state:
if state == '#pop':
self.statestack.pop()
elif state == '#push':
self.statestack.append(self.statestack[-1])
else:
self.statestack.append(state)
elif isinstance(new_state, int):
# pop
del self.statestack[new_state:]
elif new_state == '#push':
self.statestack.append(self.statestack[-1])
else:
assert False, 'wrong state def: %r' % new_state
statetokens = tokendefs[self.statestack[-1]]
break
else:
try:
if text[self.pos] == '\n':
# at EOL, reset state to 'root'
self.pos += 1
self.statestack = ['root']
statetokens = tokendefs['root']
yield self.pos, Text, u'\n'
continue
yield self.pos, Error, text[self.pos]
self.pos += 1
except IndexError:
break
def main(fn, lexer=None):
if lexer is not None:
lx = get_lexer_by_name(lexer)
else:
try:
lx = get_lexer_for_filename(os.path.basename(fn))
except ValueError:
try:
name, rest = fn.split('_', 1)
lx = get_lexer_by_name(name)
except ValueError:
raise AssertionError('no lexer found for file %r' % fn)
debug_lexer = False
# does not work for e.g. ExtendedRegexLexers
if lx.__class__.__bases__ == (RegexLexer,):
lx.__class__.__bases__ = (DebuggingRegexLexer,)
debug_lexer = True
lno = 1
text = file(fn, 'U').read()
text = text.strip('\n') + '\n'
text = text.decode('latin1')
tokens = []
states = []
def show_token(tok, state):
reprs = map(repr, tok)
print ' ' + reprs[1] + ' ' + ' ' * (29-len(reprs[1])) + reprs[0],
if debug_lexer:
print ' ' + ' ' * (29-len(reprs[0])) + repr(state),
print
for type, val in lx.get_tokens(text):
lno += val.count('\n')
if type == Error:
print 'Error parsing', fn, 'on line', lno
print 'Previous tokens' + (debug_lexer and ' and states' or '') + ':'
if showall:
for tok, state in zip(tokens, states):
show_token(tok, state)
else:
for i in range(len(tokens) - num, len(tokens)):
show_token(tokens[i], states[i])
print 'Error token:'
l = len(repr(val))
print ' ' + repr(val),
if debug_lexer and hasattr(lx, 'statestack'):
print ' ' * (60-l) + repr(lx.statestack),
print
print
return 1
tokens.append((type,val))
if debug_lexer:
if hasattr(lx, 'statestack'):
states.append(lx.statestack[:])
else:
states.append(None)
if showall:
for tok, state in zip(tokens, states):
show_token(tok, state)
return 0
num = 10
showall = False
lexer = None
if __name__ == '__main__':
import getopt
opts, args = getopt.getopt(sys.argv[1:], 'n:l:a')
for opt, val in opts:
if opt == '-n':
num = int(val)
elif opt == '-a':
showall = True
elif opt == '-l':
lexer = val
ret = 0
for f in args:
ret += main(f, lexer)
sys.exit(bool(ret))
| johnny-bui/pygments-sablecc | scripts/find_error.py | Python | bsd-2-clause | 5,593 |
from django.db import models
class Author(models.Model):
name = models.CharField(max_length=20)
def __unicode__(self):
return self.name
class Book(models.Model):
name = models.CharField(max_length=20)
authors = models.ManyToManyField(Author)
def __unicode__(self):
return self.name
| LethusTI/supportcenter | vendor/django/tests/regressiontests/signals_regress/models.py | Python | gpl-3.0 | 323 |
""" Tests for library reindex command """
import ddt
from django.core.management import call_command, CommandError
import mock
from xmodule.modulestore import ModuleStoreEnum
from xmodule.modulestore.django import modulestore
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from common.test.utils import nostderr
from xmodule.modulestore.tests.factories import CourseFactory, LibraryFactory
from opaque_keys import InvalidKeyError
from contentstore.management.commands.reindex_library import Command as ReindexCommand
from contentstore.courseware_index import SearchIndexingError
@ddt.ddt
class TestReindexLibrary(ModuleStoreTestCase):
""" Tests for library reindex command """
def setUp(self):
""" Setup method - create libraries and courses """
super(TestReindexLibrary, self).setUp()
self.store = modulestore()
self.first_lib = LibraryFactory.create(
org="test", library="lib1", display_name="run1", default_store=ModuleStoreEnum.Type.split
)
self.second_lib = LibraryFactory.create(
org="test", library="lib2", display_name="run2", default_store=ModuleStoreEnum.Type.split
)
self.first_course = CourseFactory.create(
org="test", course="course1", display_name="run1", default_store=ModuleStoreEnum.Type.split
)
self.second_course = CourseFactory.create(
org="test", course="course2", display_name="run1", default_store=ModuleStoreEnum.Type.split
)
REINDEX_PATH_LOCATION = 'contentstore.management.commands.reindex_library.LibrarySearchIndexer.do_library_reindex'
MODULESTORE_PATCH_LOCATION = 'contentstore.management.commands.reindex_library.modulestore'
YESNO_PATCH_LOCATION = 'contentstore.management.commands.reindex_library.query_yes_no'
def _get_lib_key(self, library):
""" Get's library key as it is passed to indexer """
return library.location.library_key
def _build_calls(self, *libraries):
""" BUilds a list of mock.call instances representing calls to reindexing method """
return [mock.call(self.store, self._get_lib_key(lib)) for lib in libraries]
def test_given_no_arguments_raises_command_error(self):
""" Test that raises CommandError for incorrect arguments """
with self.assertRaises(SystemExit), nostderr():
with self.assertRaisesRegexp(CommandError, ".* requires one or more arguments .*"):
call_command('reindex_library')
@ddt.data('qwerty', 'invalid_key', 'xblock-v1:qwe+rty')
def test_given_invalid_lib_key_raises_not_found(self, invalid_key):
""" Test that raises InvalidKeyError for invalid keys """
with self.assertRaises(InvalidKeyError):
call_command('reindex_library', invalid_key)
def test_given_course_key_raises_command_error(self):
""" Test that raises CommandError if course key is passed """
with self.assertRaises(SystemExit), nostderr():
with self.assertRaisesRegexp(CommandError, ".* is not a library key"):
call_command('reindex_library', unicode(self.first_course.id))
with self.assertRaises(SystemExit), nostderr():
with self.assertRaisesRegexp(CommandError, ".* is not a library key"):
call_command('reindex_library', unicode(self.second_course.id))
with self.assertRaises(SystemExit), nostderr():
with self.assertRaisesRegexp(CommandError, ".* is not a library key"):
call_command(
'reindex_library',
unicode(self.second_course.id),
unicode(self._get_lib_key(self.first_lib))
)
def test_given_id_list_indexes_libraries(self):
""" Test that reindexes libraries when given single library key or a list of library keys """
with mock.patch(self.REINDEX_PATH_LOCATION) as patched_index, \
mock.patch(self.MODULESTORE_PATCH_LOCATION, mock.Mock(return_value=self.store)):
call_command('reindex_library', unicode(self._get_lib_key(self.first_lib)))
self.assertEqual(patched_index.mock_calls, self._build_calls(self.first_lib))
patched_index.reset_mock()
call_command('reindex_library', unicode(self._get_lib_key(self.second_lib)))
self.assertEqual(patched_index.mock_calls, self._build_calls(self.second_lib))
patched_index.reset_mock()
call_command(
'reindex_library',
unicode(self._get_lib_key(self.first_lib)),
unicode(self._get_lib_key(self.second_lib))
)
expected_calls = self._build_calls(self.first_lib, self.second_lib)
self.assertEqual(patched_index.mock_calls, expected_calls)
def test_given_all_key_prompts_and_reindexes_all_libraries(self):
""" Test that reindexes all libraries when --all key is given and confirmed """
with mock.patch(self.YESNO_PATCH_LOCATION) as patched_yes_no:
patched_yes_no.return_value = True
with mock.patch(self.REINDEX_PATH_LOCATION) as patched_index, \
mock.patch(self.MODULESTORE_PATCH_LOCATION, mock.Mock(return_value=self.store)):
call_command('reindex_library', all=True)
patched_yes_no.assert_called_once_with(ReindexCommand.CONFIRMATION_PROMPT, default='no')
expected_calls = self._build_calls(self.first_lib, self.second_lib)
self.assertItemsEqual(patched_index.mock_calls, expected_calls)
def test_given_all_key_prompts_and_reindexes_all_libraries_cancelled(self):
""" Test that does not reindex anything when --all key is given and cancelled """
with mock.patch(self.YESNO_PATCH_LOCATION) as patched_yes_no:
patched_yes_no.return_value = False
with mock.patch(self.REINDEX_PATH_LOCATION) as patched_index, \
mock.patch(self.MODULESTORE_PATCH_LOCATION, mock.Mock(return_value=self.store)):
call_command('reindex_library', all=True)
patched_yes_no.assert_called_once_with(ReindexCommand.CONFIRMATION_PROMPT, default='no')
patched_index.assert_not_called()
def test_fail_fast_if_reindex_fails(self):
""" Test that fails on first reindexing exception """
with mock.patch(self.REINDEX_PATH_LOCATION) as patched_index:
patched_index.side_effect = SearchIndexingError("message", [])
with self.assertRaises(SearchIndexingError):
call_command('reindex_library', unicode(self._get_lib_key(self.second_lib)))
| ahmadiga/min_edx | cms/djangoapps/contentstore/management/commands/tests/test_reindex_library.py | Python | agpl-3.0 | 6,713 |
# stdlib
import logging
import unittest
# project
from dogstream.cassandra import parse_cassandra
logger = logging.getLogger(__name__)
class TestCassandraDogstream(unittest.TestCase):
def testStart(self):
events = parse_cassandra(logger, " INFO [main] 2012-12-11 21:46:26,995 StorageService.java (line 687) Bootstrap/Replace/Move completed! Now serving reads.")
self.assertTrue(events is None)
def testInfo(self):
events = parse_cassandra(logger, " INFO [CompactionExecutor:35] 2012-12-02 21:15:03,738 AutoSavingCache.java (line 268) Saved KeyCache (5 items) in 3 ms")
self.assertTrue(events is None)
def testWarn(self):
events = parse_cassandra(logger, " WARN [MemoryMeter:1] 2012-12-03 20:07:47,158 Memtable.java (line 197) setting live ratio to minimum of 1.0 instead of 0.9416553595658074")
self.assertTrue(events is None)
def testError(self):
for line in """\
ERROR [CompactionExecutor:518] 2012-12-11 21:35:29,686 AbstractCassandraDaemon.java (line 135) Exception in thread Thread[CompactionExecutor:518,1,RMI Runtime]
java.util.concurrent.RejectedExecutionException
at java.util.concurrent.ThreadPoolExecutor$AbortPolicy.rejectedExecution(ThreadPoolExecutor.java:1768)
at java.util.concurrent.ThreadPoolExecutor.reject(ThreadPoolExecutor.java:767)
at java.util.concurrent.ScheduledThreadPoolExecutor.delayedExecute(ScheduledThreadPoolExecutor.java:215)
at java.util.concurrent.ScheduledThreadPoolExecutor.schedule(ScheduledThreadPoolExecutor.java:397)
at java.util.concurrent.ScheduledThreadPoolExecutor.submit(ScheduledThreadPoolExecutor.java:470)
at org.apache.cassandra.io.sstable.SSTableDeletingTask.schedule(SSTableDeletingTask.java:67)
at org.apache.cassandra.io.sstable.SSTableReader.releaseReference(SSTableReader.java:806)
at org.apache.cassandra.db.DataTracker.removeOldSSTablesSize(DataTracker.java:358)
at org.apache.cassandra.db.DataTracker.postReplace(DataTracker.java:330)
at org.apache.cassandra.db.DataTracker.replace(DataTracker.java:324)
at org.apache.cassandra.db.DataTracker.replaceCompactedSSTables(DataTracker.java:253)
at org.apache.cassandra.db.ColumnFamilyStore.replaceCompactedSSTables(ColumnFamilyStore.java:992)
at org.apache.cassandra.db.compaction.CompactionTask.execute(CompactionTask.java:200)
at org.apache.cassandra.db.compaction.CompactionManager$1.runMayThrow(CompactionManager.java:154)
at org.apache.cassandra.utils.WrappedRunnable.run(WrappedRunnable.java:30)
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:441)
at java.util.concurrent.FutureTask$Sync.innerRun(FutureTask.java:303)
at java.util.concurrent.FutureTask.run(FutureTask.java:138)
at java.util.concurrent.ThreadPoolExecutor$Worker.runTask(ThreadPoolExecutor.java:886)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:908)
at java.lang.Thread.run(Thread.java:662)""".splitlines():
events = parse_cassandra(logger, line)
self.assertTrue(events is None)
def testCompactionStart(self):
events = parse_cassandra(logger, " INFO [CompactionExecutor:2] 2012-12-11 21:46:27,012 CompactionTask.java (line 109) Compacting [SSTableReader(path='/var/lib/cassandra/data/system/LocationInfo/system-LocationInfo-he-11-Data.db'), SSTableReader(path='/var/lib/cassandra/data/system/LocationInfo/system-LocationInfo-he-9-Data.db'), SSTableReader(path='/var/lib/cassandra/data/system/LocationInfo/system-LocationInfo-he-12-Data.db'), SSTableReader(path='/var/lib/cassandra/data/system/LocationInfo/system-LocationInfo-he-10-Data.db')]")
self.assertEquals(events, [{'alert_type': 'info', 'event_type': 'cassandra.compaction', 'timestamp': 1355262387, 'msg_title': "Compacting [SSTableReader(path='/var/lib/cassandra/data/system/LocationInfo/system-LocationInfo-he-1", 'msg_text': "Compacting [SSTableReader(path='/var/lib/cassandra/data/system/LocationInfo/system-LocationInfo-he-11-Data.db'), SSTableReader(path='/var/lib/cassandra/data/system/LocationInfo/system-LocationInfo-he-9-Data.db'), SSTableReader(path='/var/lib/cassandra/data/system/LocationInfo/system-LocationInfo-he-12-Data.db'), SSTableReader(path='/var/lib/cassandra/data/system/LocationInfo/system-LocationInfo-he-10-Data.db')]", 'auto_priority': 0}])
def testCompactionEnd(self):
events = parse_cassandra(logger, "INFO [CompactionExecutor:2] 2012-12-11 21:46:27,095 CompactionTask.java (line 221) Compacted to [/var/lib/cassandra/data/system/LocationInfo/system-LocationInfo-he-13-Data.db,]. 880 to 583 (~66% of original) bytes for 4 keys at 0.007831MB/s. Time: 71ms.")
self.assertEquals(events, [{'alert_type': 'info', 'event_type': 'cassandra.compaction', 'timestamp': 1355262387, 'msg_title': 'Compacted to [/var/lib/cassandra/data/system/LocationInfo/system-LocationInfo-he-13-Data.db,]. 880 ', 'msg_text': 'Compacted to [/var/lib/cassandra/data/system/LocationInfo/system-LocationInfo-he-13-Data.db,]. 880 to 583 (~66% of original) bytes for 4 keys at 0.007831MB/s. Time: 71ms.', 'auto_priority': 0}])
| GabrielNicolasAvellaneda/dd-agent | tests/checks/mock/test_cassandra.py | Python | bsd-3-clause | 5,234 |
#
# QAPI visitor generator
#
# Copyright IBM, Corp. 2011
#
# Authors:
# Anthony Liguori <[email protected]>
# Michael Roth <[email protected]>
#
# This work is licensed under the terms of the GNU GPLv2.
# See the COPYING.LIB file in the top-level directory.
from ordereddict import OrderedDict
from qapi import *
import sys
import os
import getopt
import errno
def generate_visit_struct_body(field_prefix, members):
ret = ""
if len(field_prefix):
field_prefix = field_prefix + "."
for argname, argentry, optional, structured in parse_args(members):
if optional:
ret += mcgen('''
visit_start_optional(m, (obj && *obj) ? &(*obj)->%(c_prefix)shas_%(c_name)s : NULL, "%(name)s", errp);
if ((*obj)->%(prefix)shas_%(c_name)s) {
''',
c_prefix=c_var(field_prefix), prefix=field_prefix,
c_name=c_var(argname), name=argname)
push_indent()
if structured:
ret += mcgen('''
visit_start_struct(m, NULL, "", "%(name)s", 0, errp);
''',
name=argname)
ret += generate_visit_struct_body(field_prefix + argname, argentry)
ret += mcgen('''
visit_end_struct(m, errp);
''')
else:
ret += mcgen('''
visit_type_%(type)s(m, (obj && *obj) ? &(*obj)->%(c_prefix)s%(c_name)s : NULL, "%(name)s", errp);
''',
c_prefix=c_var(field_prefix), prefix=field_prefix,
type=type_name(argentry), c_name=c_var(argname),
name=argname)
if optional:
pop_indent()
ret += mcgen('''
}
visit_end_optional(m, errp);
''')
return ret
def generate_visit_struct(name, members):
ret = mcgen('''
void visit_type_%(name)s(Visitor *m, %(name)s ** obj, const char *name, Error **errp)
{
visit_start_struct(m, (void **)obj, "%(name)s", name, sizeof(%(name)s), errp);
''',
name=name)
push_indent()
ret += generate_visit_struct_body("", members)
pop_indent()
ret += mcgen('''
visit_end_struct(m, errp);
}
''')
return ret
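# Illustrative sketch of the C code this template expands to (assumed example:
# a schema struct named 'Foo' with one non-optional 'int' member 'bar'; the
# names are hypothetical and not part of the original generator):
#
#   void visit_type_Foo(Visitor *m, Foo ** obj, const char *name, Error **errp)
#   {
#       visit_start_struct(m, (void **)obj, "Foo", name, sizeof(Foo), errp);
#       visit_type_int(m, (obj && *obj) ? &(*obj)->bar : NULL, "bar", errp);
#       visit_end_struct(m, errp);
#   }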
def generate_visit_list(name, members):
return mcgen('''
void visit_type_%(name)sList(Visitor *m, %(name)sList ** obj, const char *name, Error **errp)
{
GenericList *i, **head = (GenericList **)obj;
visit_start_list(m, name, errp);
for (*head = i = visit_next_list(m, head, errp); i; i = visit_next_list(m, &i, errp)) {
%(name)sList *native_i = (%(name)sList *)i;
visit_type_%(name)s(m, &native_i->value, NULL, errp);
}
visit_end_list(m, errp);
}
''',
name=name)
def generate_visit_enum(name, members):
return mcgen('''
void visit_type_%(name)s(Visitor *m, %(name)s * obj, const char *name, Error **errp)
{
visit_type_enum(m, (int *)obj, %(name)s_lookup, "%(name)s", name, errp);
}
''',
name=name)
def generate_visit_union(name, members):
ret = generate_visit_enum('%sKind' % name, members.keys())
ret += mcgen('''
void visit_type_%(name)s(Visitor *m, %(name)s ** obj, const char *name, Error **errp)
{
}
''',
name=name)
return ret
def generate_declaration(name, members, genlist=True):
ret = mcgen('''
void visit_type_%(name)s(Visitor *m, %(name)s ** obj, const char *name, Error **errp);
''',
name=name)
if genlist:
ret += mcgen('''
void visit_type_%(name)sList(Visitor *m, %(name)sList ** obj, const char *name, Error **errp);
''',
name=name)
return ret
def generate_decl_enum(name, members, genlist=True):
return mcgen('''
void visit_type_%(name)s(Visitor *m, %(name)s * obj, const char *name, Error **errp);
''',
name=name)
try:
opts, args = getopt.gnu_getopt(sys.argv[1:], "p:o:", ["prefix=", "output-dir="])
except getopt.GetoptError, err:
print str(err)
sys.exit(1)
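# Typical invocation sketch (assumed from the option handling above and the
# file naming below; the schema file name is an example). The generator reads
# a QAPI schema on stdin and writes <prefix>qapi-visit.c / <prefix>qapi-visit.h
# into the output directory:
#   python qapi-visit.py -o output_dir -p prefix- < qapi-schema.json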
output_dir = ""
prefix = ""
c_file = 'qapi-visit.c'
h_file = 'qapi-visit.h'
for o, a in opts:
if o in ("-p", "--prefix"):
prefix = a
elif o in ("-o", "--output-dir"):
output_dir = a + "/"
c_file = output_dir + prefix + c_file
h_file = output_dir + prefix + h_file
try:
os.makedirs(output_dir)
except os.error, e:
if e.errno != errno.EEXIST:
raise
fdef = open(c_file, 'w')
fdecl = open(h_file, 'w')
fdef.write(mcgen('''
/* THIS FILE IS AUTOMATICALLY GENERATED, DO NOT MODIFY */
/*
* schema-defined QAPI visitor functions
*
* Copyright IBM, Corp. 2011
*
* Authors:
* Anthony Liguori <[email protected]>
*
* This work is licensed under the terms of the GNU LGPL, version 2.1 or later.
* See the COPYING.LIB file in the top-level directory.
*
*/
#include "%(header)s"
''',
header=basename(h_file)))
fdecl.write(mcgen('''
/* THIS FILE IS AUTOMATICALLY GENERATED, DO NOT MODIFY */
/*
* schema-defined QAPI visitor function
*
* Copyright IBM, Corp. 2011
*
* Authors:
* Anthony Liguori <[email protected]>
*
* This work is licensed under the terms of the GNU LGPL, version 2.1 or later.
* See the COPYING.LIB file in the top-level directory.
*
*/
#ifndef %(guard)s
#define %(guard)s
#include "qapi/qapi-visit-core.h"
#include "%(prefix)sqapi-types.h"
''',
prefix=prefix, guard=guardname(h_file)))
exprs = parse_schema(sys.stdin)
for expr in exprs:
if expr.has_key('type'):
ret = generate_visit_struct(expr['type'], expr['data'])
ret += generate_visit_list(expr['type'], expr['data'])
fdef.write(ret)
ret = generate_declaration(expr['type'], expr['data'])
fdecl.write(ret)
elif expr.has_key('union'):
ret = generate_visit_union(expr['union'], expr['data'])
fdef.write(ret)
ret = generate_decl_enum('%sKind' % expr['union'], expr['data'].keys())
ret += generate_declaration(expr['union'], expr['data'])
fdecl.write(ret)
elif expr.has_key('enum'):
ret = generate_visit_enum(expr['enum'], expr['data'])
fdef.write(ret)
ret = generate_decl_enum(expr['enum'], expr['data'])
fdecl.write(ret)
fdecl.write('''
#endif
''')
fdecl.flush()
fdecl.close()
fdef.flush()
fdef.close()
| KernelAnalysisPlatform/KlareDbg | tracers/qemu/decaf/scripts/qapi-visit.py | Python | gpl-3.0 | 6,227 |
"""
Test for User Creation from Micro-Sites
"""
from django.test import TestCase
from student.models import UserSignupSource
import mock
import json
from django.core.urlresolvers import reverse
from django.contrib.auth.models import User
FAKE_MICROSITE = {
"SITE_NAME": "openedx.localhost",
"university": "fakeuniversity",
"course_org_filter": "fakeorg",
"REGISTRATION_EXTRA_FIELDS": {
"address1": "required",
"city": "required",
"state": "required",
"country": "required",
"company": "required",
"title": "required"
},
"extended_profile_fields": [
"address1", "state", "company", "title"
]
}
def fake_site_name(name, default=None):
"""
create a fake microsite site name
"""
if name == 'SITE_NAME':
return 'openedx.localhost'
else:
return default
def fake_microsite_get_value(name, default=None):
"""
return a fake microsite configuration value
"""
return FAKE_MICROSITE.get(name, default)
class TestMicrosite(TestCase):
"""Test for Account Creation from a white labeled Micro-Sites"""
def setUp(self):
super(TestMicrosite, self).setUp()
self.username = "test_user"
self.url = reverse("create_account")
self.params = {
"username": self.username,
"email": "[email protected]",
"password": "testpass",
"name": "Test User",
"honor_code": "true",
"terms_of_service": "true",
}
self.extended_params = dict(self.params.items() + {
"address1": "foo",
"city": "foo",
"state": "foo",
"country": "foo",
"company": "foo",
"title": "foo"
}.items())
@mock.patch("microsite_configuration.microsite.get_value", fake_site_name)
def test_user_signup_source(self):
"""
test to create a user from the microsite and see that its record has been
saved in the UserSignupSource table
"""
response = self.client.post(self.url, self.params)
self.assertEqual(response.status_code, 200)
self.assertGreater(len(UserSignupSource.objects.filter(site='openedx.localhost')), 0)
def test_user_signup_from_non_micro_site(self):
"""
test to create a user from a non-microsite. The record should not be saved
in the UserSignupSource Table
"""
response = self.client.post(self.url, self.params)
self.assertEqual(response.status_code, 200)
self.assertEqual(len(UserSignupSource.objects.filter(site='openedx.localhost')), 0)
@mock.patch("microsite_configuration.microsite.get_value", fake_microsite_get_value)
def test_user_signup_missing_enhanced_profile(self):
"""
test to create a user from the microsite but don't provide any of the microsite-specific
profile information
"""
response = self.client.post(self.url, self.params)
self.assertEqual(response.status_code, 400)
@mock.patch("microsite_configuration.microsite.get_value", fake_microsite_get_value)
def test_user_signup_including_enhanced_profile(self):
"""
test to create a user from the microsite, providing all of the microsite-specific
profile information
"""
response = self.client.post(self.url, self.extended_params)
self.assertEqual(response.status_code, 200)
user = User.objects.get(username=self.username)
meta = json.loads(user.profile.meta)
self.assertEqual(meta['address1'], 'foo')
self.assertEqual(meta['state'], 'foo')
self.assertEqual(meta['company'], 'foo')
self.assertEqual(meta['title'], 'foo')
| tiagochiavericosta/edx-platform | common/djangoapps/student/tests/test_microsite.py | Python | agpl-3.0 | 3,772 |
# -*- coding: latin-1 -*-
#
# Copyright (C) AB Strakt
# Copyright (C) Jean-Paul Calderone
# See LICENSE for details.
"""
Simple SSL client, using blocking I/O
"""
from OpenSSL import SSL
import sys, os, select, socket
def verify_cb(conn, cert, errnum, depth, ok):
# This obviously has to be updated
print 'Got certificate: %s' % cert.get_subject()
return ok
if len(sys.argv) < 3:
print 'Usage: python[2] client.py HOST PORT'
sys.exit(1)
dir = os.path.dirname(sys.argv[0])
if dir == '':
dir = os.curdir
# Initialize context
ctx = SSL.Context(SSL.SSLv23_METHOD)
ctx.set_verify(SSL.VERIFY_PEER, verify_cb) # Demand a certificate
ctx.use_privatekey_file (os.path.join(dir, 'client.pkey'))
ctx.use_certificate_file(os.path.join(dir, 'client.cert'))
ctx.load_verify_locations(os.path.join(dir, 'CA.cert'))
# Set up client
sock = SSL.Connection(ctx, socket.socket(socket.AF_INET, socket.SOCK_STREAM))
sock.connect((sys.argv[1], int(sys.argv[2])))
while 1:
line = sys.stdin.readline()
if line == '':
break
try:
sock.send(line)
sys.stdout.write(sock.recv(1024))
sys.stdout.flush()
except SSL.Error:
print 'Connection died unexpectedly'
break
sock.shutdown()
sock.close()
| mhnatiuk/phd_sociology_of_religion | scrapper/build/pyOpenSSL/examples/simple/client.py | Python | gpl-2.0 | 1,260 |
class A:
pass
class B(A):
def m(self, x):
"""
Parameters:
x (int): number
"""
return x
| asedunov/intellij-community | python/testData/refactoring/pullup/abstractMethodDocStringIndentationPreserved.py | Python | apache-2.0 | 141 |
#
# This is a minimal MicroPython variant of the run-tests script, which uses
# .exp files as generated by run-tests --write-exp. It is useful for running
# the testsuite on systems which have neither CPython3 nor a Unix shell.
# This script is intended to be run by the same interpreter executable
# which is to be tested, so it should use only minimal language functionality.
#
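# Rough workflow sketch (assumed; the --write-exp flag is taken from the
# comment above, not from this file):
#   1. On a reference build: ./run-tests --write-exp    -> writes e.g. basics/foo.py.exp
#   2. On the build under test: micropython run-tests-exp.py
#      -> compares each test's output against the stored .exp files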
import sys
import uos as os
tests = [
"basics", "micropython", "float", "import", "io",
" misc", "unicode", "extmod", "unix"
]
if sys.platform == 'win32':
MICROPYTHON = "micropython.exe"
else:
MICROPYTHON = "micropython"
def should_skip(test):
if test.startswith("native"):
return True
if test.startswith("viper"):
return True
test_count = 0
passed_count = 0
skip_count = 0
for suite in tests:
#print("Running in: %s" % suite)
if sys.platform == 'win32':
# dir /b prints only contained filenames, one on a line
# http://www.microsoft.com/resources/documentation/windows/xp/all/proddocs/en-us/dir.mspx
r = os.system("dir /b %s/*.py >tests.lst" % suite)
else:
r = os.system("ls %s/*.py | xargs -n1 basename >tests.lst" % suite)
assert r == 0
with open("tests.lst") as f:
testcases = f.readlines()
testcases = [l[:-1] for l in testcases]
assert testcases, "No tests found in dir '%s', which is implausible" % suite
#print(testcases)
for t in testcases:
if t == "native_check.py":
continue
qtest = "%s/%s" % (suite, t)
if should_skip(t):
print("skip " + qtest)
skip_count += 1
continue
exp = None
try:
f = open(qtest + ".exp")
exp = f.read()
f.close()
except OSError:
pass
if exp is not None:
#print("run " + qtest)
r = os.system(MICROPYTHON + " %s >.tst.out" % qtest)
if r == 0:
f = open(".tst.out")
out = f.read()
f.close()
else:
out = "CRASH"
if out == "SKIP\n":
print("skip " + qtest)
skip_count += 1
else:
if out == exp:
print("pass " + qtest)
passed_count += 1
else:
print("FAIL " + qtest)
test_count += 1
else:
skip_count += 1
print("%s tests performed" % test_count)
print("%s tests passed" % passed_count)
if test_count != passed_count:
print("%s tests failed" % (test_count - passed_count))
if skip_count:
print("%s tests skipped" % skip_count)
| infinnovation/micropython | tests/run-tests-exp.py | Python | mit | 2,697 |
# $Id: 311_srtp1_recv_avp.py 2036 2008-06-20 17:43:55Z nanang $
import inc_sip as sip
import inc_sdp as sdp
sdp = \
"""
v=0
o=- 0 0 IN IP4 127.0.0.1
s=tester
c=IN IP4 127.0.0.1
t=0 0
m=audio 4000 RTP/AVP 0 101
a=rtpmap:0 PCMU/8000
a=sendrecv
a=rtpmap:101 telephone-event/8000
a=fmtp:101 0-15
a=crypto:1 AES_CM_128_HMAC_SHA1_80 inline:WnD7c1ksDGs+dIefCEo8omPg4uO8DYIinNGL5yxQ
a=crypto:2 AES_CM_128_HMAC_SHA1_32 inline:t0r0/apkukU7JjjfR0mY8GEimBq4OiPEm9eKSFOx
"""
args = "--null-audio --auto-answer 200 --max-calls 1 --use-srtp 1 --srtp-secure 0"
include = ["m=audio \d+ RTP/AVP", "a=crypto"]
exclude = []
sendto_cfg = sip.SendtoCfg( "Callee has SRTP optional and receive RTP/AVP with crypto, should accept with RTP/AVP & crypto",
pjsua_args=args, sdp=sdp, resp_code=200,
resp_inc=include, resp_exc=exclude)
| lxki/pjsip | tests/pjsua/scripts-sendto/311_srtp1_recv_avp.py | Python | gpl-2.0 | 826 |
""" Test the change_enrollment command line script."""
import ddt
import unittest
from uuid import uuid4
from django.conf import settings
from django.core.management import call_command
from django.core.management.base import CommandError
from enrollment.api import get_enrollment
from student.tests.factories import UserFactory
from xmodule.modulestore.tests.django_utils import SharedModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory
@ddt.ddt
@unittest.skipUnless(settings.ROOT_URLCONF == 'lms.urls', 'Test only valid in lms')
class EnrollManagementCommandTest(SharedModuleStoreTestCase):
"""
Test the enroll_user_in_course management command
"""
@classmethod
def setUpClass(cls):
super(EnrollManagementCommandTest, cls).setUpClass()
cls.course = CourseFactory.create(org='fooX', number='007')
def setUp(self):
super(EnrollManagementCommandTest, self).setUp()
self.course_id = unicode(self.course.id)
self.username = 'ralph' + uuid4().hex
self.user_email = self.username + '@example.com'
UserFactory(username=self.username, email=self.user_email)
def test_enroll_user(self):
command_args = [
'--course', self.course_id,
'--email', self.user_email,
]
call_command(
'enroll_user_in_course',
*command_args
)
user_enroll = get_enrollment(self.username, self.course_id)
self.assertTrue(user_enroll['is_active'])
def test_enroll_user_twice(self):
"""
Ensures the command is idempotent.
"""
command_args = [
'--course', self.course_id,
'--email', self.user_email,
]
for _ in range(2):
call_command(
'enroll_user_in_course',
*command_args
)
# Second run does not impact the first run (i.e., the
# user is still enrolled, no exception was raised, etc)
user_enroll = get_enrollment(self.username, self.course_id)
self.assertTrue(user_enroll['is_active'])
@ddt.data(['--email', 'foo'], ['--course', 'bar'], ['--bad-param', 'baz'])
def test_not_enough_args(self, arg):
"""
When the command is missing certain arguments, it should
raise an exception
"""
command_args = arg
with self.assertRaises(CommandError):
call_command(
'enroll_user_in_course',
*command_args
)
| defance/edx-platform | common/djangoapps/enrollment/management/tests/test_enroll_user_in_course.py | Python | agpl-3.0 | 2,563 |
"""engine.SCons.Variables.PackageVariable
This file defines the option type for SCons implementing 'package
activation'.
To be used whenever a 'package' may be enabled/disabled and the
package path may be specified.
Usage example:
Examples:
x11=no (disables X11 support)
x11=yes (will search for the package installation dir)
x11=/usr/local/X11 (will check this path for existence)
To replace autoconf's --with-xxx=yyy
opts = Variables()
opts.Add(PackageVariable('x11',
'use X11 installed here (yes = search some places)',
'yes'))
...
if env['x11'] == True:
dir = ... search X11 in some standard places ...
env['x11'] = dir
if env['x11']:
... build with x11 ...
"""
#
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Variables/PackageVariable.py 5134 2010/08/16 23:02:40 bdeegan"
__all__ = ['PackageVariable',]
import SCons.Errors
__enable_strings = ('1', 'yes', 'true', 'on', 'enable', 'search')
__disable_strings = ('0', 'no', 'false', 'off', 'disable')
def _converter(val):
"""
"""
lval = val.lower()
if lval in __enable_strings: return True
if lval in __disable_strings: return False
#raise ValueError("Invalid value for boolean option: %s" % val)
return val
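# For example (sketch, not part of the original module):
#   _converter('yes')      -> True
#   _converter('Disable')  -> False
#   _converter('/opt/X11') -> '/opt/X11'   # anything else is passed through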
def _validator(key, val, env, searchfunc):
# NB: searchfunc is currently undocumented and unsupported
"""
"""
# todo: write validator, check for path
import os
if env[key] is True:
if searchfunc:
env[key] = searchfunc(key, val)
elif env[key] and not os.path.exists(val):
raise SCons.Errors.UserError(
'Path does not exist for option %s: %s' % (key, val))
def PackageVariable(key, help, default, searchfunc=None):
# NB: searchfunc is currently undocumented and unsupported
"""
The input parameters describe a 'package' option; they are returned with
the correct converter and validator appended. The result is usable as
input to opts.Add().
A 'package' option may be enabled ('yes', 'true', 'on', 'enable', 'search'),
disabled ('no', 'false', 'off', 'disable'), or set to an explicit path to
the package installation.
"""
help = '\n '.join(
(help, '( yes | no | /path/to/%s )' % key))
return (key, help, default,
lambda k, v, e: _validator(k,v,e,searchfunc),
_converter)
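# Illustrative SConstruct usage sketch (assumes a standard SCons Environment;
# the variable name and search path are examples, not part of this module):
#
#   opts = Variables()
#   opts.Add(PackageVariable('x11',
#                            'use X11 installed here (yes = search some places)',
#                            'yes'))
#   env = Environment(variables=opts)
#   if env['x11'] is True:
#       env['x11'] = '/usr/X11R6'   # result of whatever search you perform
#   if env['x11']:
#       pass                        # ... build with X11 rooted at env['x11'] ...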
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| xifle/greensc | tools/scons/scons-local-2.0.1/SCons/Variables/PackageVariable.py | Python | gpl-3.0 | 3,612 |
# Copyright 2015, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Defines an enum for classifying RPC methods by streaming semantics."""
import enum
@enum.unique
class Cardinality(enum.Enum):
"""Describes the streaming semantics of an RPC method."""
UNARY_UNARY = 'request-unary/response-unary'
UNARY_STREAM = 'request-unary/response-streaming'
STREAM_UNARY = 'request-streaming/response-unary'
STREAM_STREAM = 'request-streaming/response-streaming'
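# Illustrative usage sketch (hypothetical RPC method names, not part of gRPC):
#
#   METHOD_CARDINALITIES = {
#       'GetFeature': Cardinality.UNARY_UNARY,
#       'ListFeatures': Cardinality.UNARY_STREAM,
#       'RecordRoute': Cardinality.STREAM_UNARY,
#       'RouteChat': Cardinality.STREAM_STREAM,
#   }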
| kidaa/kythe | third_party/grpc/src/python/src/grpc/framework/common/cardinality.py | Python | apache-2.0 | 1,930 |
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for Sample Stats Ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.contrib.distributions.python.ops import sample_stats
from tensorflow.python.framework import dtypes
from tensorflow.python.ops import array_ops
from tensorflow.python.platform import test
rng = np.random.RandomState(0)
class PercentileTestWithLowerInterpolation(test.TestCase):
_interpolation = "lower"
def test_one_dim_odd_input(self):
x = [1., 5., 3., 2., 4.]
for q in [0, 10, 25, 49.9, 50, 50.01, 90, 95, 100]:
expected_percentile = np.percentile(
x, q=q, interpolation=self._interpolation, axis=0)
with self.test_session():
pct = sample_stats.percentile(
x, q=q, interpolation=self._interpolation, axis=[0])
self.assertAllEqual((), pct.get_shape())
self.assertAllClose(expected_percentile, pct.eval())
def test_one_dim_even_input(self):
x = [1., 5., 3., 2., 4., 5.]
for q in [0, 10, 25, 49.9, 50, 50.01, 90, 95, 100]:
expected_percentile = np.percentile(
x, q=q, interpolation=self._interpolation)
with self.test_session():
pct = sample_stats.percentile(x, q=q, interpolation=self._interpolation)
self.assertAllEqual((), pct.get_shape())
self.assertAllClose(expected_percentile, pct.eval())
def test_two_dim_odd_input_axis_0(self):
x = np.array([[-1., 50., -3.5, 2., -1], [0., 0., 3., 2., 4.]]).T
for q in [0, 10, 25, 49.9, 50, 50.01, 90, 95, 100]:
expected_percentile = np.percentile(
x, q=q, interpolation=self._interpolation, axis=0)
with self.test_session():
# Get dim 1 with negative and positive indices.
pct_neg_index = sample_stats.percentile(
x, q=q, interpolation=self._interpolation, axis=[0])
pct_pos_index = sample_stats.percentile(
x, q=q, interpolation=self._interpolation, axis=[0])
self.assertAllEqual((2,), pct_neg_index.get_shape())
self.assertAllEqual((2,), pct_pos_index.get_shape())
self.assertAllClose(expected_percentile, pct_neg_index.eval())
self.assertAllClose(expected_percentile, pct_pos_index.eval())
def test_two_dim_even_axis_0(self):
x = np.array([[1., 2., 4., 50.], [1., 2., -4., 5.]]).T
for q in [0, 10, 25, 49.9, 50, 50.01, 90, 95, 100]:
expected_percentile = np.percentile(
x, q=q, interpolation=self._interpolation, axis=0)
with self.test_session():
pct = sample_stats.percentile(
x, q=q, interpolation=self._interpolation, axis=[0])
self.assertAllEqual((2,), pct.get_shape())
self.assertAllClose(expected_percentile, pct.eval())
def test_two_dim_even_input_and_keep_dims_true(self):
x = np.array([[1., 2., 4., 50.], [1., 2., -4., 5.]]).T
for q in [0, 10, 25, 49.9, 50, 50.01, 90, 95, 100]:
expected_percentile = np.percentile(
x, q=q, interpolation=self._interpolation, keepdims=True, axis=0)
with self.test_session():
pct = sample_stats.percentile(
x,
q=q,
interpolation=self._interpolation,
keep_dims=True,
axis=[0])
self.assertAllEqual((1, 2), pct.get_shape())
self.assertAllClose(expected_percentile, pct.eval())
def test_four_dimensional_input(self):
x = rng.rand(2, 3, 4, 5)
for axis in [None, 0, 1, -2, (0,), (-1,), (-1, 1), (3, 1), (-3, 0)]:
expected_percentile = np.percentile(
x, q=0.77, interpolation=self._interpolation, axis=axis)
with self.test_session():
pct = sample_stats.percentile(
x,
q=0.77,
interpolation=self._interpolation,
axis=axis)
self.assertAllEqual(expected_percentile.shape, pct.get_shape())
self.assertAllClose(expected_percentile, pct.eval())
def test_four_dimensional_input_and_keepdims(self):
x = rng.rand(2, 3, 4, 5)
for axis in [None, 0, 1, -2, (0,), (-1,), (-1, 1), (3, 1), (-3, 0)]:
expected_percentile = np.percentile(
x,
q=0.77,
interpolation=self._interpolation,
axis=axis,
keepdims=True)
with self.test_session():
pct = sample_stats.percentile(
x,
q=0.77,
interpolation=self._interpolation,
axis=axis,
keep_dims=True)
self.assertAllEqual(expected_percentile.shape, pct.get_shape())
self.assertAllClose(expected_percentile, pct.eval())
def test_four_dimensional_input_x_static_ndims_but_dynamic_sizes(self):
x = rng.rand(2, 3, 4, 5)
x_ph = array_ops.placeholder(dtypes.float64, shape=[None, None, None, None])
for axis in [None, 0, 1, -2, (0,), (-1,), (-1, 1), (3, 1), (-3, 0)]:
expected_percentile = np.percentile(
x, q=0.77, interpolation=self._interpolation, axis=axis)
with self.test_session():
pct = sample_stats.percentile(
x_ph,
q=0.77,
interpolation=self._interpolation,
axis=axis)
self.assertAllClose(expected_percentile, pct.eval(feed_dict={x_ph: x}))
def test_four_dimensional_input_and_keepdims_x_static_ndims_dynamic_sz(self):
x = rng.rand(2, 3, 4, 5)
x_ph = array_ops.placeholder(dtypes.float64, shape=[None, None, None, None])
for axis in [None, 0, 1, -2, (0,), (-1,), (-1, 1), (3, 1), (-3, 0)]:
expected_percentile = np.percentile(
x,
q=0.77,
interpolation=self._interpolation,
axis=axis,
keepdims=True)
with self.test_session():
pct = sample_stats.percentile(
x_ph,
q=0.77,
interpolation=self._interpolation,
axis=axis,
keep_dims=True)
self.assertAllClose(expected_percentile, pct.eval(feed_dict={x_ph: x}))
def test_with_integer_dtype(self):
x = [1, 5, 3, 2, 4]
for q in [0, 10, 25, 49.9, 50, 50.01, 90, 95, 100]:
expected_percentile = np.percentile(
x, q=q, interpolation=self._interpolation)
with self.test_session():
pct = sample_stats.percentile(x, q=q, interpolation=self._interpolation)
self.assertEqual(dtypes.int32, pct.dtype)
self.assertAllEqual((), pct.get_shape())
self.assertAllClose(expected_percentile, pct.eval())
class PercentileTestWithHigherInterpolation(
PercentileTestWithLowerInterpolation):
_interpolation = "higher"
class PercentileTestWithNearestInterpolation(test.TestCase):
"""Test separately because np.round and tf.round make different choices."""
_interpolation = "nearest"
def test_one_dim_odd_input(self):
x = [1., 5., 3., 2., 4.]
for q in [0, 10.1, 25.1, 49.9, 50.1, 50.01, 89, 100]:
expected_percentile = np.percentile(
x, q=q, interpolation=self._interpolation)
with self.test_session():
pct = sample_stats.percentile(x, q=q, interpolation=self._interpolation)
self.assertAllEqual((), pct.get_shape())
self.assertAllClose(expected_percentile, pct.eval())
def test_one_dim_even_input(self):
x = [1., 5., 3., 2., 4., 5.]
for q in [0, 10.1, 25.1, 49.9, 50.1, 50.01, 89, 100]:
expected_percentile = np.percentile(
x, q=q, interpolation=self._interpolation)
with self.test_session():
pct = sample_stats.percentile(x, q=q, interpolation=self._interpolation)
self.assertAllEqual((), pct.get_shape())
self.assertAllClose(expected_percentile, pct.eval())
def test_invalid_interpolation_raises(self):
x = [1., 5., 3., 2., 4.]
with self.assertRaisesRegexp(ValueError, "interpolation"):
sample_stats.percentile(x, q=0.5, interpolation="bad")
def test_vector_q_raises_static(self):
x = [1., 5., 3., 2., 4.]
with self.assertRaisesRegexp(ValueError, "Expected.*ndims"):
sample_stats.percentile(x, q=[0.5])
def test_vector_q_raises_dynamic(self):
x = [1., 5., 3., 2., 4.]
q_ph = array_ops.placeholder(dtypes.float32)
pct = sample_stats.percentile(x, q=q_ph, validate_args=True)
with self.test_session():
with self.assertRaisesOpError("rank"):
pct.eval(feed_dict={q_ph: [0.5]})
if __name__ == "__main__":
test.main()
| npuichigo/ttsflow | third_party/tensorflow/tensorflow/contrib/distributions/python/kernel_tests/sample_stats_test.py | Python | apache-2.0 | 9,019 |
class MyClass():
@classmethod
def foo_method(cls):
spam = "eggs" | caot/intellij-community | python/testData/refactoring/extractsuperclass/moveAndMakeAbstractImportExistsPy3/source_module.py | Python | apache-2.0 | 80 |
#===- enumerations.py - Python LLVM Enumerations -------------*- python -*--===#
#
# The LLVM Compiler Infrastructure
#
# This file is distributed under the University of Illinois Open Source
# License. See LICENSE.TXT for details.
#
#===------------------------------------------------------------------------===#
r"""
LLVM Enumerations
=================
This file defines enumerations from LLVM.
Each enumeration is exposed as a list of 2-tuples. These lists are consumed by
dedicated types elsewhere in the package. The enumerations are centrally
defined in this file so they are easier to locate and maintain.
"""
__all__ = [
'Attributes',
'OpCodes',
'TypeKinds',
'Linkages',
'Visibility',
'CallConv',
'IntPredicate',
'RealPredicate',
'LandingPadClauseTy',
]
Attributes = [
('ZExt', 1 << 0),
('SExt', 1 << 1),
('NoReturn', 1 << 2),
('InReg', 1 << 3),
('StructRet', 1 << 4),
('NoUnwind', 1 << 5),
('NoAlias', 1 << 6),
('ByVal', 1 << 7),
('Nest', 1 << 8),
('ReadNone', 1 << 9),
('ReadOnly', 1 << 10),
('NoInline', 1 << 11),
('AlwaysInline', 1 << 12),
('OptimizeForSize', 1 << 13),
('StackProtect', 1 << 14),
('StackProtectReq', 1 << 15),
('Alignment', 31 << 16),
('NoCapture', 1 << 21),
('NoRedZone', 1 << 22),
('ImplicitFloat', 1 << 23),
('Naked', 1 << 24),
('InlineHint', 1 << 25),
('StackAlignment', 7 << 26),
('ReturnsTwice', 1 << 29),
('UWTable', 1 << 30),
('NonLazyBind', 1 << 31),
]
OpCodes = [
('Ret', 1),
('Br', 2),
('Switch', 3),
('IndirectBr', 4),
('Invoke', 5),
('Unreachable', 7),
('Add', 8),
('FAdd', 9),
('Sub', 10),
('FSub', 11),
('Mul', 12),
('FMul', 13),
('UDiv', 14),
('SDiv', 15),
('FDiv', 16),
('URem', 17),
('SRem', 18),
('FRem', 19),
('Shl', 20),
('LShr', 21),
('AShr', 22),
('And', 23),
('Or', 24),
('Xor', 25),
('Alloca', 26),
('Load', 27),
('Store', 28),
('GetElementPtr', 29),
('Trunc', 30),
('ZExt', 31),
('SExt', 32),
('FPToUI', 33),
('FPToSI', 34),
('UIToFP', 35),
('SIToFP', 36),
('FPTrunc', 37),
('FPExt', 38),
('PtrToInt', 39),
('IntToPtr', 40),
('BitCast', 41),
('ICmp', 42),
('FCmp', 43),
('PHI', 44),
('Call', 45),
('Select', 46),
('UserOp1', 47),
('UserOp2', 48),
('VAArg', 49),
('ExtractElement', 50),
('InsertElement', 51),
('ShuffleVector', 52),
('ExtractValue', 53),
('InsertValue', 54),
('Fence', 55),
('AtomicCmpXchg', 56),
('AtomicRMW', 57),
('Resume', 58),
('LandingPad', 59),
]
TypeKinds = [
('Void', 0),
('Half', 1),
('Float', 2),
('Double', 3),
('X86_FP80', 4),
('FP128', 5),
('PPC_FP128', 6),
('Label', 7),
('Integer', 8),
('Function', 9),
('Struct', 10),
('Array', 11),
('Pointer', 12),
('Vector', 13),
('Metadata', 14),
('X86_MMX', 15),
]
Linkages = [
('External', 0),
('AvailableExternally', 1),
('LinkOnceAny', 2),
('LinkOnceODR', 3),
('WeakAny', 4),
('WeakODR', 5),
('Appending', 6),
('Internal', 7),
('Private', 8),
('DLLImport', 9),
('DLLExport', 10),
('ExternalWeak', 11),
('Ghost', 12),
('Common', 13),
('LinkerPrivate', 14),
('LinkerPrivateWeak', 15),
('LinkerPrivateWeakDefAuto', 16),
]
Visibility = [
('Default', 0),
('Hidden', 1),
('Protected', 2),
]
CallConv = [
('CCall', 0),
('FastCall', 8),
('ColdCall', 9),
('X86StdcallCall', 64),
('X86FastcallCall', 65),
]
IntPredicate = [
('EQ', 32),
('NE', 33),
('UGT', 34),
('UGE', 35),
('ULT', 36),
('ULE', 37),
('SGT', 38),
('SGE', 39),
('SLT', 40),
('SLE', 41),
]
RealPredicate = [
('PredicateFalse', 0),
('OEQ', 1),
('OGT', 2),
('OGE', 3),
('OLT', 4),
('OLE', 5),
('ONE', 6),
('ORD', 7),
('UNO', 8),
('UEQ', 9),
('UGT', 10),
('UGE', 11),
('ULT', 12),
('ULE', 13),
('UNE', 14),
('PredicateTrue', 15),
]
LandingPadClauseTy = [
('Catch', 0),
('Filter', 1),
]
| vinutah/apps | tools/llvm/llvm_39/opt/bindings/python/llvm/enumerations.py | Python | gpl-3.0 | 4,249 |
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2009 Sharoon Thomas
# Copyright (C) 2010-Today OpenERP SA (<http://www.openerp.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
#
##############################################################################
from openerp.osv import fields, osv
class email_template_preview(osv.osv_memory):
_inherit = "email.template"
_name = "email_template.preview"
_description = "Email Template Preview"
def _get_records(self, cr, uid, context=None):
"""
Return records of the particular email template's model
"""
if context is None:
context = {}
template_id = context.get('template_id', False)
if not template_id:
return []
email_template = self.pool.get('email.template')
template = email_template.browse(cr, uid, int(template_id), context=context)
template_object = template.model_id
model = self.pool[template_object.model]
record_ids = model.search(cr, uid, [], 0, 10, 'id', context=context)
default_id = context.get('default_res_id')
if default_id and default_id not in record_ids:
record_ids.insert(0, default_id)
return model.name_get(cr, uid, record_ids, context)
def default_get(self, cr, uid, fields, context=None):
if context is None:
context = {}
result = super(email_template_preview, self).default_get(cr, uid, fields, context=context)
email_template = self.pool.get('email.template')
template_id = context.get('template_id')
if 'res_id' in fields and not result.get('res_id'):
records = self._get_records(cr, uid, context=context)
result['res_id'] = records and records[0][0] or False # select first record as a Default
if template_id and 'model_id' in fields and not result.get('model_id'):
result['model_id'] = email_template.read(cr, uid, int(template_id), ['model_id'], context).get('model_id', False)
return result
_columns = {
'res_id': fields.selection(_get_records, 'Sample Document'),
'partner_ids': fields.many2many('res.partner', string='Recipients'),
}
def on_change_res_id(self, cr, uid, ids, res_id, context=None):
if context is None:
context = {'value': {}}
if not res_id or not context.get('template_id'):
return {'value': {}}
email_template = self.pool.get('email.template')
template_id = context.get('template_id')
template = email_template.browse(cr, uid, template_id, context=context)
# generate and get template values
mail_values = email_template.generate_email(cr, uid, template_id, res_id, context=context)
vals = dict((field, mail_values.get(field, False)) for field in ('email_from', 'email_to', 'email_cc', 'reply_to', 'subject', 'body_html', 'partner_to', 'partner_ids', 'attachment_ids'))
vals['name'] = template.name
return {'value': vals}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| diogocs1/comps | web/addons/email_template/wizard/email_template_preview.py | Python | apache-2.0 | 3,851 |
# -*- coding: utf-8 -*-
#
# PyTips documentation build configuration file, created by
# sphinx-quickstart on Mon Dec 26 20:55:10 2011.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.todo', 'sphinx.ext.coverage', 'sphinx.ext.viewcode']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'PyTips'
copyright = u'2012, Hank Gay'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1.0-alpha.1'
# The full version, including alpha/beta/rc tags.
release = '0.1.0-alpha.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'PyTipsdoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'PyTips.tex', u'PyTips Documentation',
u'Hank Gay', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'pytips', u'PyTips Documentation',
[u'Hank Gay'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'PyTips', u'PyTips Documentation',
u'Hank Gay', 'PyTips', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
| gthank/pytips | docs/source/conf.py | Python | isc | 7,815 |
#!/usr/bin/env python
# coding: utf-8
from .src import *
def plugin_loaded():
distractionless.plugin_loaded(reload=False)
def plugin_unloaded():
distractionless.plugin_unloaded()
| jrappen/sublime-distractionless | main.py | Python | isc | 195 |
"""
This version of julian is currently in development and is not considered stable.
""" | ithinksw/philo | philo/contrib/julian/__init__.py | Python | isc | 89 |
# -*- coding: UTF-8 -*-
#
# generated by wxGlade 0.6.8 on Thu Apr 2 20:01:32 2015
#
import wx
# begin wxGlade: dependencies
# end wxGlade
# begin wxGlade: extracode
# end wxGlade
class SocketFrame(wx.Frame):
def __init__(self, *args, **kwds):
# begin wxGlade: SocketFrame.__init__
kwds["style"] = wx.DEFAULT_FRAME_STYLE
wx.Frame.__init__(self, *args, **kwds)
self.button_aquarium = wx.ToggleButton(self, wx.ID_ANY, "Aquarium")
self.button_kitchen = wx.ToggleButton(self, wx.ID_ANY, u"K\xfcche")
self.button_bedroom = wx.ToggleButton(self, wx.ID_ANY, "Schlafstube")
self.button_back = wx.Button(self, wx.ID_ANY, u"Zur\xfcck")
self.button_livingroom = wx.ToggleButton(self, wx.ID_ANY, "Wohnstube")
self.__set_properties()
self.__do_layout()
self.Bind(wx.EVT_TOGGLEBUTTON, self.OnBtnAquarium, self.button_aquarium)
self.Bind(wx.EVT_TOGGLEBUTTON, self.OnBtnKitchen, self.button_kitchen)
self.Bind(wx.EVT_TOGGLEBUTTON, self.OnBtnBedroom, self.button_bedroom)
self.Bind(wx.EVT_BUTTON, self.OnBtnBack, self.button_back)
self.Bind(wx.EVT_TOGGLEBUTTON, self.OnBtnLivingroom, self.button_livingroom)
# end wxGlade
def __set_properties(self):
# begin wxGlade: SocketFrame.__set_properties
self.SetTitle("frame_1")
self.SetSize((483, 273))
# end wxGlade
def __do_layout(self):
# begin wxGlade: SocketFrame.__do_layout
grid_sizer_2 = wx.GridSizer(2, 3, 0, 0)
grid_sizer_2.Add(self.button_aquarium, 0, wx.ALL | wx.ALIGN_CENTER_HORIZONTAL | wx.ALIGN_CENTER_VERTICAL, 5)
grid_sizer_2.Add(self.button_kitchen, 0, wx.ALL | wx.ALIGN_CENTER_HORIZONTAL | wx.ALIGN_CENTER_VERTICAL, 5)
grid_sizer_2.Add(self.button_bedroom, 0, wx.ALL | wx.ALIGN_CENTER_HORIZONTAL | wx.ALIGN_CENTER_VERTICAL, 5)
grid_sizer_2.Add(self.button_back, 0, wx.ALL | wx.ALIGN_CENTER_HORIZONTAL | wx.ALIGN_CENTER_VERTICAL, 5)
grid_sizer_2.Add(self.button_livingroom, 0, wx.ALL | wx.ALIGN_CENTER_HORIZONTAL | wx.ALIGN_CENTER_VERTICAL, 5)
self.SetSizer(grid_sizer_2)
self.Layout()
# end wxGlade
def OnBtnAquarium(self, event): # wxGlade: SocketFrame.<event_handler>
print "Event handler 'OnBtnAquarium' not implemented!"
event.Skip()
def OnBtnKitchen(self, event): # wxGlade: SocketFrame.<event_handler>
print "Event handler 'OnBtnKitchen' not implemented!"
event.Skip()
def OnBtnBedroom(self, event): # wxGlade: SocketFrame.<event_handler>
print "Event handler 'OnBtnBedroom' not implemented!"
event.Skip()
def OnBtnBack(self, event): # wxGlade: SocketFrame.<event_handler>
print "Event handler 'OnBtnBack' not implemented!"
event.Skip()
def OnBtnLivingroom(self, event): # wxGlade: SocketFrame.<event_handler>
print "Event handler 'OnBtnLivingroom' not implemented!"
event.Skip()
# end of class SocketFrame
| xenobyter/xbWeatherSocket | SocketFrame.py | Python | isc | 3,029 |
# coding=utf-8
import logging; logger = logging.getLogger("robots.introspection")
import threading
introspection = None
# disable introspection for now
if False:
try:
import Pyro4
import Pyro4.errors
uri = "PYRONAME:robots.introspection" # uses name server
try:
introspection = Pyro4.Proxy(uri)
introspection.initiate(str(0)) # 0 is the action ID of the main process
logger.info("Connection to the introspection server established.")
except Pyro4.errors.CommunicationError:
logger.warning("Introspection server not running. No introspection.")
introspection = None
except Pyro4.errors.NamingError:
logger.warning("Introspection server not running (no name server). No introspection.")
introspection = None
except ImportError:
pass
| chili-epfl/pyrobots | src/robots/introspection.py | Python | isc | 885 |
from django.db import models
from django.utils.translation import ugettext as _
from common import models as common_models
from hosts import models as hosts_models
class Project(common_models.TimestampedModel):
name = models.CharField(_('Name'), max_length=254)
description = models.TextField(_('Description'), blank=True, null=True)
class Meta:
verbose_name = _('Project')
verbose_name_plural = _('Projects')
def __str__(self):
return self.name
class Stage(common_models.TimestampedModel):
project = models.ForeignKey(Project, verbose_name=_('Project'))
name = models.CharField(_('Name'), max_length=254)
class Meta:
verbose_name = _('Stage')
verbose_name_plural = _('Stages')
def __str__(self):
return self.name
class HostConfig(common_models.TimestampedModel):
stage = models.ForeignKey(Stage, verbose_name=_('Stage'))
host = models.ForeignKey(hosts_models.Host, verbose_name=_('Host'))
ssh_config = models.ForeignKey(hosts_models.SSHConfig, verbose_name=_('SSH Config'))
is_active = models.BooleanField(_('Is active'))
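# Illustrative usage sketch (assumes migrations are applied and Host/SSHConfig
# rows already exist; the names are examples, not part of this module):
#
#   project = Project.objects.create(name='Website', description='Main site')
#   staging = Stage.objects.create(project=project, name='staging')
#   HostConfig.objects.create(stage=staging, host=some_host,
#                             ssh_config=some_ssh_config, is_active=True)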
| mQuadrics/videplo | projects/models.py | Python | mit | 1,122 |
import unittest
from click.testing import CliRunner
from make_dataset import main
class TestMain(unittest.TestCase):
def test_main_runs(self):
runner = CliRunner()
result = runner.invoke(main, ['.', '.'])
assert result.exit_code == 0
| dddTESTxx/Gym-Final | src/data/tests.py | Python | mit | 268 |
# -*- encoding: utf-8 -*-
'''
Created on: 2015
Author: Mizael Martinez
'''
from pyfann import libfann
from login import Login
from escribirArchivo import EscribirArchivo
import inspect, sys, os
sys.path.append("../model")
from baseDatos import BaseDatos
class CtrlEntrenarRNANormalizado:
def __init__(self):
self.__coneccion=1
self.__tasa_aprendizaje=0.7
self.__numero_entradas=0
self.__numero_salidas=0
self.__neuronas_capa_oculta=0
self.__error_deseado=0
self.__epocas=0
self.__iteraciones_entre_reporte=1000
self.__red=None
self.__error_real=0
self.__url_prueba=None
self.__url_guardar=None
self.__path="../files/"
#self.__path="files/"
self.__bd=BaseDatos()
self.__interfaz=None
def entrenar(self):
print("Entrenando ...")
self.__red=libfann.neural_net()
self.__red.create_sparse_array(self.__coneccion,(self.__numero_entradas,self.__neuronas_capa_oculta,self.__numero_salidas))
self.__red.set_learning_rate(self.__tasa_aprendizaje)
self.__red.set_activation_function_hidden(libfann.SIGMOID_SYMMETRIC_STEPWISE)
self.__red.set_activation_function_output(libfann.SIGMOID_SYMMETRIC_STEPWISE)
self.__red.train_on_file(self.__path+self.__url_prueba, self.__epocas,self.__iteraciones_entre_reporte, self.__error_deseado)
self.__error_real=self.__red.get_MSE()
datos={"numerodeneuronas":self.__neuronas_capa_oculta,"error":self.__error_real,"tipo":"normalizado"}
id=self.__bd.agregarEntrenamiento(datos)
print("id: %s"%(str(id)))
self.__url_guardar="mizael_rna%s.net"%(id)
self.__bd.actualizarRegistroEntrenamiento(self.__url_guardar,id)
self.__red.save(self.__path + self.__url_guardar)
if self.__interfaz != None:
self.__interfaz.lineEdit_4.setText("%s"%str(self.__error_real))
def entrenarGamma(self):
print("Entrenando Gamma...")
self.__red=libfann.neural_net()
self.__red.create_sparse_array(self.__coneccion,(self.__numero_entradas,self.__neuronas_capa_oculta,self.__numero_salidas))
self.__red.set_learning_rate(self.__tasa_aprendizaje)
self.__red.set_activation_function_hidden(libfann.SIGMOID_SYMMETRIC_STEPWISE)
self.__red.set_activation_function_output(libfann.LINEAR)
self.__red.train_on_file(self.__path+self.__url_prueba, self.__epocas,self.__iteraciones_entre_reporte, self.__error_deseado)
self.__error_real=self.__red.get_MSE()
datos={"numerodeneuronas":self.__neuronas_capa_oculta,"error":self.__error_real,"tipo":"gamma"}
id=self.__bd.agregarEntrenamiento(datos)
print("id: %s"%(str(id)))
self.__url_guardar="mizael_rna%s.net"%(id)
self.__bd.actualizarRegistroEntrenamiento(self.__url_guardar,id)
self.__red.save(self.__path + self.__url_guardar)
if self.__interfaz != None:
self.__interfaz.lineEdit_4.setText("%s"%str(self.__error_real))
def setConeccion(self,conexion):
self.__coneccion=conexion
def setTasaAprendizaje(self,tasa_aprendizaje):
self.__tasa_aprendizaje=tasa_aprendizaje
def setNumeroEntradas(self,numero_entradas):
self.__numero_entradas=numero_entradas
def setNumeroSalidas(self,numero_salidas):
self.__numero_salidas=numero_salidas
def setNeuronasCapaOculta(self,neuronas_capa_oculta):
self.__neuronas_capa_oculta=neuronas_capa_oculta
def setErrorDeseado(self,error_deseado):
self.__error_deseado=error_deseado
def setEpocas(self,epocas):
self.__epocas=epocas
def setIteracionesEntreReporte(self,iteraciones_entre_reporte):
self.__iteraciones_entre_reporte=iteraciones_entre_reporte
def setErrorReal(self,error_real):
self.__error_real=error_real
def setUrlPrueba(self,url_prueba):
self.__url_prueba=url_prueba
def setUrlGuardar(self,url_guardar):
self.__url_guardar=url_guardar
def setInterfaz(self,interfaz):
self.__interfaz=interfaz
def getConeccion(self):
return self.__coneccion
def getTasaAprendizaje(self):
return self.__tasa_aprendizaje
def getNumeroEntradas(self):
return self.__numero_entradas
def getNumeroSalidas(self):
return self.__numero_salidas
def getNeuronasCapaOculta(self):
return self.__neuronas_capa_oculta
def getErrorDeseado(self):
return self.__error_deseado
def getEpocas(self):
return self.__epocas
def getIteracionesEntreReporte(self):
return self.__iteraciones_entre_reporte
def getErrorReal(self):
return self.__error_real
def getUrlPrueba(self):
return self.__url_prueba
def getUrlGuardar(self):
return self.__url_guardar
def getInterfaz(self):
return self.__interfaz
'''
# Train for all values
o=CtrlEntrenarRNANormalizado()
o.setConeccion(1)
o.setTasaAprendizaje(0.7)
o.setNumeroEntradas(8)
o.setNumeroSalidas(5)
# Change the number of neurons and the desired error
o.setNeuronasCapaOculta(15)
o.setErrorDeseado(0.001)
# Change the number of epochs
o.setEpocas(130000)
o.setIteracionesEntreReporte(10000)
o.setUrlPrueba("rna_normalizado.data")
o.entrenar()
'''
'''
# Train for the Gamma values
g=CtrlEntrenarRNANormalizado()
g.setConeccion(1)
g.setTasaAprendizaje(0.7)
g.setNumeroEntradas(2)
g.setNumeroSalidas(1)
# Change the number of neurons and the desired error
g.setNeuronasCapaOculta(150)
g.setErrorDeseado(0.9)
# Change the number of epochs
g.setEpocas(30000)
g.setIteracionesEntreReporte(10000)
g.setUrlPrueba("rna_gamma_normalizado.data")
g.entrenarGamma()
''' | martinezmizael/Escribir-con-la-mente | object/entrenarFannNormalizado.py | Python | mit | 5,218 |
from threading import Thread
from flask_mail import Message
from flask import render_template, current_app
from . import mail
from .decorators import async
@async
def send_async_email(app, msg):
with app.app_context():
mail.send(msg)
def send_email(to, subject, template, **kwargs):
app = current_app._get_current_object()
msg = Message(app.config['MAIL_SUBJECT_PREFIX'] + ' ' + subject, sender=app.config['MAIL_SENDER'], recipients=[to])
msg.body = render_template(template + '.txt', **kwargs)
msg.html = render_template(template + '.html', **kwargs)
thr = Thread(target=send_async_email, args=[app, msg])
thr.start()
return thr
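# Illustrative usage sketch (assumes MAIL_SUBJECT_PREFIX / MAIL_SENDER are set
# in the app config and that 'email/welcome.txt' and 'email/welcome.html'
# templates exist; the recipient and keyword arguments are examples):
#
#   send_email('user@example.com', 'Welcome!', 'email/welcome', user=new_user)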
| delitamakanda/socialite | app/email.py | Python | mit | 678 |
"""
Useful Utils
==============
"""
from setuptools import setup, find_packages
setup(
name='utilitybelt',
version='0.2.6',
author='Halfmoon Labs',
author_email='[email protected]',
description='Generally useful tools. A python utility belt.',
keywords=('dict dictionary scrub to_dict todict json characters charset '
'hex entropy utility'),
url='https://github.com/onenameio/utilitybelt',
license='MIT',
packages=find_packages(),
install_requires=[
],
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
zip_safe=False,
)
| onenameio/utilitybelt | setup.py | Python | mit | 747 |
# Copyright 2009 Max Klymyshyn, Sonettic
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import socket
import subprocess
from apnsexceptions import *
from utils import *
class APNSConnectionContext(object):
certificate = None
def __init__(self, certificate = None):
self.certificate = certificate
def connect(self, host, port):
raise APNSNotImplementedMethod, "APNSConnectionContext.connect ssl method not implemented in context"
def write(data = None):
raise APNSNotImplementedMethod, "APNSConnectionContext.write method not implemented"
def read(self):
raise APNSNotImplementedMethod, "APNSConnectionContext.read method not implemented"
def close(self):
raise APNSNotImplementedMethod, "APNSConnectionContext.close method not implemented"
class OpenSSLCommandLine(APNSConnectionContext):
"""
This class executes and sends data with the openssl command-line tool
"""
certificate = None
host = None
port = None
executable = None
debug = False
def __init__(self, certificate = None, executable = None, debug = False):
self.certificate = certificate
self.executable = executable
self.debug = debug
def connect(self, host, port):
self.host = host
self.port = port
def _command(self):
command = "%(executable)s s_client -ssl3 -cert %(cert)s -connect %(host)s:%(port)s" % \
{
'executable' : self.executable,
'cert' : self.certificate,
'host' : self.host,
'port' : self.port
}
self.command = command  # keep the command string for debug output
return subprocess.Popen(command.split(' '), shell=False, bufsize=256, \
stdin=subprocess.PIPE, stdout = subprocess.PIPE, stderr = subprocess.PIPE)
def write(self, data = None):
pipe = self._command()
std_in = pipe.stdin
std_in.write(data)
std_in.flush()
std_in.close()
std_out = pipe.stdout
if self.debug:
print "-------------- SSL Debug Output --------------"
print self.command
print "----------------------------------------------"
print std_out.read()
std_out.close()
pipe.wait()
def read(self, blockSize = 1024):
"""
This method reads data from the feedback service.
WARNING! It's not tested and doesn't work yet!
"""
pipe = self._command()
std_out = pipe.stdout
data = std_out.read()
#pipe.wait()
std_out.close()
return data
def context(self):
return self
def close(self):
pass
class SSLModuleConnection(APNSConnectionContext):
"""
This class implements an APNS connection based on the "ssl" module.
"""
socket = None
certificate = None
connectionContext = None
ssl_module = None
def __init__(self, certificate = None, ssl_module = None):
self.socket = None
self.connectionContext = None
self.certificate = certificate
self.ssl_module = ssl_module
def context(self):
"""
Initialize SSL context.
"""
if self.connectionContext != None:
return self
self.socket = socket.socket()
self.connectionContext = self.ssl_module.wrap_socket(
self.socket,
ssl_version = self.ssl_module.PROTOCOL_TLSv1,
certfile = self.certificate
)
return self
def certificate(self, path):
self.certificate = path
return self
def read(self, blockSize = 1024):
"""
Read data from the SSL connection.
"""
return self.connectionContext.read(blockSize)
def write(self, data = None):
"""
Write data to the SSL connection.
"""
self.connectionContext.write(data)
def connect(self, host, port):
"""
Make connection to the host and port.
"""
self.connectionContext.connect((host, port))
def close(self):
"""
Close connection.
"""
self.connectionContext.close()
self.socket.close()
class APNSConnection(APNSConnectionContext):
"""
APNSConnection wraps an SSL connection to the Apple Push Notification Server.
"""
debug = False
connectionContext = None
def __init__(self, certificate = None,
ssl_command = "openssl",
force_ssl_command = False,
disable_executable_search = False,
debug = False):
self.connectionContext = None
self.debug = debug
if not os.path.exists(str(certificate)):
raise APNSCertificateNotFoundError, "Apple Push Notification Service Certificate file %s not found." % str(certificate)
try:
if force_ssl_command:
raise ImportError, "There is force_ssl_command forces command line tool"
# use ssl library to handle secure connection
import ssl as ssl_module
self.connectionContext = SSLModuleConnection(certificate, ssl_module = ssl_module)
except:
# use command line openssl tool to handle secure connection
if not disable_executable_search:
executable = find_executable(ssl_command)
else:
executable = ssl_command
if not executable:
raise APNSNoCommandFound, "SSL Executable [%s] not found in your PATH environment" % str(ssl_command)
self.connectionContext = OpenSSLCommandLine(certificate, executable, debug = debug)
self.certificate = str(certificate)
def connect(self, host, port):
"""
Make connection to the host and port.
"""
self.context().connect(host, port)
return self
def certificate(self, path):
self.context().certificate(path)
return self
def write(self, data = None):
self.context().write(data)
def read(self, blockSize = 1024):
return self.context().read(blockSize)
def context(self):
if not self.connectionContext:
raise APNSNoSSLContextFound, "There is no SSL context available in your python environment."
return self.connectionContext.context()
def close(self):
"""
Close connection.
"""
self.context().close()
| Edzvu/Edzvu.github.io | APNSWrapper-0.6.1/APNSWrapper/connection.py | Python | mit | 7,014 |
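# Usage sketch for the APNSConnection wrapper above (illustrative only). The
# certificate path is a placeholder and 'frame' stands for a binary push
# notification already packed by the rest of the APNSWrapper package.
def _push_example(frame):
    connection = APNSConnection(certificate='/path/to/apns-cert.pem')
    connection.connect('gateway.sandbox.push.apple.com', 2195)
    try:
        connection.write(frame)
    finally:
        connection.close()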
# This script produces a new list of sites extracted from the Alexa top-sites list
import re
prefix = 'http://'
#suffix = '</td><td></td></tr><tr><td>waitForPageToLoad</td><td></td><td>3000</td></tr>'
with open('top100_alexa.txt','r') as f:
newlines = []
for line in f.readlines():
found=re.sub(r'\d+', '', line)
line=found
newlines.append(line.replace(',', ''))
with open('urls.txt', 'w') as f:
for line in newlines:
#f.write('%s%s%s\n' % (prefix, line.rstrip('\n'), suffix))
f.write('%s%s\n' % (prefix, line.rstrip('\n'))) | gizas/CSS_Extractor | replace.py | Python | mit | 580 |
from flask import Flask
from flask_compress import Compress
from .. import db
app = Flask(__name__)
app.config.from_pyfile('../config.py')
from . import views
Compress(app)
@app.before_first_request
def initialize_database():
db.init_db()
| steinitzu/aptfinder | aptfinder/web/__init__.py | Python | mit | 249 |
#!/usr/bin/env python3
# Copyright (c) 2016 The nealcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Specialized SipHash-2-4 implementations.
This implements SipHash-2-4 for 256-bit integers.
"""
def rotl64(n, b):
return n >> (64 - b) | (n & ((1 << (64 - b)) - 1)) << b
def siphash_round(v0, v1, v2, v3):
v0 = (v0 + v1) & ((1 << 64) - 1)
v1 = rotl64(v1, 13)
v1 ^= v0
v0 = rotl64(v0, 32)
v2 = (v2 + v3) & ((1 << 64) - 1)
v3 = rotl64(v3, 16)
v3 ^= v2
v0 = (v0 + v3) & ((1 << 64) - 1)
v3 = rotl64(v3, 21)
v3 ^= v0
v2 = (v2 + v1) & ((1 << 64) - 1)
v1 = rotl64(v1, 17)
v1 ^= v2
v2 = rotl64(v2, 32)
return (v0, v1, v2, v3)
def siphash256(k0, k1, h):
n0 = h & ((1 << 64) - 1)
n1 = (h >> 64) & ((1 << 64) - 1)
n2 = (h >> 128) & ((1 << 64) - 1)
n3 = (h >> 192) & ((1 << 64) - 1)
v0 = 0x736f6d6570736575 ^ k0
v1 = 0x646f72616e646f6d ^ k1
v2 = 0x6c7967656e657261 ^ k0
v3 = 0x7465646279746573 ^ k1 ^ n0
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0 ^= n0
v3 ^= n1
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0 ^= n1
v3 ^= n2
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0 ^= n2
v3 ^= n3
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0 ^= n3
v3 ^= 0x2000000000000000
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0 ^= 0x2000000000000000
v2 ^= 0xFF
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
return v0 ^ v1 ^ v2 ^ v3
| appop/bitcoin | qa/rpc-tests/test_framework/siphash.py | Python | mit | 2,010 |
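# Usage sketch for siphash256() above (illustrative only): the key halves and
# the 256-bit message are arbitrary numbers, not official test vectors.
if __name__ == '__main__':
    k0 = 0x0706050403020100
    k1 = 0x0F0E0D0C0B0A0908
    h = int('deadbeef' * 8, 16)  # an arbitrary 256-bit integer
    print('siphash256 digest: %016x' % siphash256(k0, k1, h))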
#!/usr/bin/env python
# -*- coding: utf8 -*-
#
# Python script to compile and compare locally installed programs with files listed in the repository
# If the file doesn't exist in the repository, delete it, or move it to another folder
#
import os
import platform
import re
import chardet
import argparse
import logging
from subprocess import Popen, PIPE
# Argparse
# add logging todo
parser = argparse.ArgumentParser(description='A script for condensing local repository')
parser.add_argument('-v', '--verbose', help='increase output verbosity', action='store_true')
args = parser.parse_args()
if args.verbose:
logging.basicConfig(level=logging.DEBUG)
logging.info('Begin script')
# Variables
localPath = '.'
def fileList(localPath):
# list files todo
os.chdir(localPath)
listD = ['ls']
fList = Popen(listD, stdin=PIPE, stdout=PIPE, stderr=PIPE)
output, err = fList.communicate()
if args.verbose:
logging.debug('Start listing files')
if output:
print(output.decode(chardet.detect(output)['encoding']))
if err:
print('stderr:', err.decode(chardet.detect(err)['encoding']))
logging.debug('Stop listing files')
def depList():
# list dependencies todo
pass
def main():
if platform.system() == 'Linux':
pass
else:
print('Platform is not Linux. Exiting')
exit()
fileList(localPath)
'''
fileListRe = r'^\w*'
reC = re.compile(fileListRe)
'''
logging.info('End of program')
if __name__ == "__main__":
main()
| TheShellLand/pies | v3/scripts/clean-repo.py | Python | mit | 1,569 |
#!/usr/bin/python
import RPi.GPIO as GPIO
import subprocess
# Starting up
GPIO.setmode(GPIO.BCM)
GPIO.setup(3, GPIO.IN)
# Wait until power button is off
# Recommended to use GPIO.BOTH for cases with switch
GPIO.wait_for_edge(3, GPIO.BOTH)
# Shutting down
subprocess.call(['shutdown', '-h', 'now'], shell=False)
| UBayouski/RaspberryPiPowerButton | power_button.py | Python | mit | 315 |
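# Variant sketch (illustrative only). The script above relies on the fixed
# hardware pull-up wired to BCM pin 3, which is also the pin that can wake a
# halted Pi. For any other input pin an explicit software pull-up is needed:
#
# GPIO.setup(21, GPIO.IN, pull_up_down=GPIO.PUD_UP)
# GPIO.wait_for_edge(21, GPIO.FALLING)  # button press pulls the pin low
# subprocess.call(['shutdown', '-h', 'now'], shell=False)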
import os.path
import pygame.image
import time
import ConfigParser
from helpers import *
from modules import Module
class Animation(Module):
def __init__(self, screen, folder, interval = None, autoplay = True):
super(Animation, self).__init__(screen)
if folder[-1] != '/':
folder = folder + '/'
self.folder = folder
self.screen = screen
try:
if self.is_single_file():
self.load_single()
else: self.load_frames()
if len(self.frames) == 0:
raise Exception('No frames found in animation ' + self.folder)
self.screen.pixel = self.frames[0]
except Exception:
print('Failed to load ' + folder)
raise
self.screen.update()
if interval == None:
try:
self.interval = self.load_interval()
except:
print('No interval info found.')
self.interval = 100
else: self.interval = interval
self.pos = 0
if autoplay:
self.start()
def load_frames(self):
self.frames = []
i = 0
while os.path.isfile(self.folder + str(i) + '.bmp'):
try:
bmp = pygame.image.load(self.folder + str(i) + '.bmp')
except Exception:
print('Error loading ' + str(i) + '.bmp from ' + self.folder)
raise
pixel_array = pygame.PixelArray(bmp)
frame = [[pixel_array[x, y] for y in range(16)] for x in range(16)]
self.frames.append(frame)
i += 1
def is_single_file(self):
return os.path.isfile(self.folder + '0.bmp') and not os.path.isfile(self.folder + '1.bmp')
def load_single(self):
self.frames = []
bmp = pygame.image.load(self.folder + '0.bmp')
framecount = bmp.get_height() / 16
pixel_array = pygame.PixelArray(bmp)
for index in range(framecount):
frame = [[pixel_array[x, y + 16 * index] for y in range(16)] for x in range(16)]
self.frames.append(frame)
def load_interval(self):
cfg = ConfigParser.ConfigParser()
cfg.read(self.folder + 'config.ini')
return cfg.getint('animation', 'hold')
def tick(self):
self.pos += 1
if self.pos >= len(self.frames):
self.pos = 0
self.screen.pixel = self.frames[self.pos]
self.screen.update()
time.sleep(self.interval / 1000.0)
def on_start(self):
print('Starting ' + self.folder)
def play_once(self):
for frame in self.frames:
self.screen.pixel = frame
self.screen.update()
time.sleep(self.interval / 1000.0) | marian42/pixelpi | modules/animation.py | Python | mit | 2,315 |
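# Usage sketch for the Animation module above (illustrative only). 'screen' is
# whatever display object pixelpi passes around; it only needs the 16x16
# 'pixel' attribute and update() method used above, and the folder name is a
# placeholder.
def _play_demo(screen):
    anim = Animation(screen, 'animations/demo', interval=80, autoplay=False)
    anim.play_once()  # blocks for roughly len(frames) * interval milliseconds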
#!/usr/bin/python
import argparse
###basic parser for parent help statement###
def parentArgs():
parser = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter,
description='''\
Suzanne's pipeline to identify somatic CNVs from single-cell whole-genome sequencing data
=========================================================================================
You must specify a function to perform:
*preprocess (trim fastq reads to the appropriate length)
*map (map fastq files to the hg38 or mm10 genome)
*count (count number of reads in 25,000 genomic bins)
*segment (run CBS -- requires Matlab!)
*interpret (perform QC assessment and removal of low-quality CNV calls)
# [More functions coming soon...]
''')
parser.print_help()
raise SystemExit
###interpret arguments needed to perform preprocessing of fastq files###
def preprocessArgs():
parser = argparse.ArgumentParser(description='Trim fastq reads to the appropriate length')
#required arguments#
parser.add_argument('FastqDirectory',
help = 'The path to the folder that contains fastq files to be processed')
#optional arguments#
parser.add_argument('-5', '--trim5', metavar='X', type=int, default=0,
help = "Number of 5' bases to trim from fastq reads")
parser.add_argument('-l', '--length', metavar='X', type=int, default=36,
help = 'The desired read length')
parser.add_argument('-r', '--remove', action='store_true',
help = 'Set this flag if you want to delete the full length fastq files (UNTESTED)')
parser.add_argument('-s', '--samples', metavar='/path/to/sample_list.txt', default=False,
help='Path to a file containing a list of fastq files to be processed\n\tsample names only, no path or file extension needed (UNTESTED)')
return parser
###interpret arguments needed to perform mapping of fastq files###
def mapArgs():
parser = argparse.ArgumentParser(description='Map fastq files to the appropriate reference genome')
#required arguments#
parser.add_argument('FastqDirectory',
help = 'The path to the folder that contains fastq files to be processed')
parser.add_argument('MapIndex',
help='The path to the bowtie (v1) mapping references, as you would input if running bowtie directly -- MUST BE HG38 OR MM10')
# parser.add_argument('species', choices=['hg38', 'mm10'],
# help = 'The genome build of the species being assessed')
#optional arguments#
parser.add_argument('-t', '--trim', metavar='X', nargs=2, type=int, default=[0, 0],
help = "Number of 5' and 3' bases to trim from fastq reads during mapping")
parser.add_argument('-o', '--output', metavar='/path/to/output_directory/', default=False,
help = 'A filepath to the desired directory where you would like sam files saved, if not in the same parent directory as the fastq files (UNTESTED)')
parser.add_argument('-x', '--statdir', metavar='/path/to/statistics_directory/', default=False,
help = 'A filepath to the desired directory where you would like mapping statistics saved, if not in the same parent directory as the fastq files (UNTESTED)')
parser.add_argument('-s', '--samples', metavar='/path/to/sample_list.txt', default=False,
help='Path to a file containing a list of fastq files to be processed\n\tsample names only, no path or file extension needed (UNTESTED)')
parser.add_argument('-b', '--bowtie', metavar='/path/to/bowtie1', default='bowtie',
help='Path to the bowtie binary, if not in your PATH variable (UNTESTED)')
parser.add_argument('-m', '--samtools', metavar='/path/to/samtools0.1.19', default='samtools',
help='Path to the samtools (v0.1.19) binary, if not in your PATH variable (UNTESTED)')
return parser
###interpret arguments needed to perform counting of unique.sam files###
def countArgs():
parser = argparse.ArgumentParser(description='Count the reads per genomic bin from unique sam files')
#required arguments#
parser.add_argument('AnalysisDirectory',
help = 'The path to the analysis directory, which contains the Sam/ directory with unique.sam files to be processed')
parser.add_argument('species', choices=['hg38', 'mm10'],
help = 'The genome build of the species being assessed')
#optional arguments#
parser.add_argument('-m', '--mapdir', metavar='/path/to/output_directory/', default=False,
help = 'A filepath to the directory containing the sam files, if not AnalysisDirectory/Sam/ (UNTESTED)')
parser.add_argument('-x', '--statdir', metavar='/path/to/statistics_directory/', default=False,
help = 'A filepath to the desired directory where you would like mapping statistics saved, if not in the same parent directory as the sam files (UNTESTED)')
parser.add_argument('-s', '--samples', metavar='/path/to/sample_list.txt', default=False,
help='Path to a file containing a list of unique.sam files to be processed\n\tsample names only, no path or file extension needed (UNTESTED)')
return parser
###interpret arguments needed to perform normalization and segmentation of bincounts.txt files###
def segmentArgs():
parser = argparse.ArgumentParser(description='Normalize and segment bincounts files to begin CNV identification process')
#required arguments#
parser.add_argument('AnalysisDirectory',
help = 'The path to the analysis directory, which contains the BinCounts/ directory with bincounts.txt files to be processed')
parser.add_argument('species', choices=['hg38', 'mm10'],
help = 'The genome build of the species being assessed')
#optional arguments#
parser.add_argument('-b', '--bincountdir', metavar='/path/to/output_directory/', default=False,
help = 'A filepath to the folder containing the bincount files, if not AnalysisDirectory/BinCounts (UNTESTED)')
parser.add_argument('-i', '--infofile', metavar='/path/to/sample.info.txt', default=False,
help='Path to a .txt file containing information about the samples to be processed (unique name, amplification method, number of cells)\n\tIf not all are identical. This file should not have a header row (UNTESTED)')
parser.add_argument('-c', '--columns', metavar='X X X', default=[0, 1, 2], type=int, nargs=3,
help='The zero-indexed locations of the columns to import from the infofile in the order: name, method, cell number (if not the first 3 columns) (UNTESTED)')
parser.add_argument('-g', '--gconly', action='store_true',
help = 'Set this flag if you only want GC-correction to be performed during normalization (UNTESTED)')
parser.add_argument('-n', '--normalizeonly', action='store_true',
help = 'Set this flag if you do not want CBS to be performed (UNTESTED)')
parser.add_argument('-s', '--samples', metavar='/path/to/sample_list.txt', default=False,
help='Path to a file containing a list of bincounts.txt files to be processed\n\tsample names only, no path or file extension needed (UNTESTED)')
return parser
###interpret arguments needed to perform QC and CNV analysis of each single cell sample###
def interpretArgs():
parser = argparse.ArgumentParser(description='Assess sample quality, filter unreliable CNVs, and generate user-friendly output files')
#required arguments#
parser.add_argument('AnalysisDirectory',
help = 'The path to the folder to save output files')
parser.add_argument('species', choices=['hg38', 'mm10'],
help = 'The genome build of the species being assessed')
#optional arguments#
parser.add_argument('-f', '--nofilter', action='store_true',
help = 'Set this flag if you do not want to perform FUnC filtering of low-quality CNV calls (UNTESTED)')
# parser.add_argument('-i', '--infofile', metavar='/path/to/sample.info.txt', default=False,
# help='Path to a .txt file containing information about the samples to be processed (unique name, number of cells, group)\n\tIf not all are identical. This file should not have a header row (UNTESTED)')
# parser.add_argument('-c', '--columns', metavar='X X X', default=[0, 1, 2], type=int, nargs=3,
# help='The zero-indexed locations of the columns to import from the infofile in the order: name, cell number, group (if not the first 3 columns) (UNTESTED)')
parser.add_argument('-l', '--lowess', metavar='/path/to/lowess.txt/files/', default=False,
help = 'A filepath to the desired directory where all lowess.txt files are saved, if not AnalysisDirectory/Lowess/ (UNTESTED)')
parser.add_argument('-g', '--segments', metavar='/path/to/segments.txt/files/', default=False,
help = 'A filepath to the desired directory where all segments.txt files are saved, if not AnalysisDirectory/Segments/ (UNTESTED)')
parser.add_argument('-r', '--countstats', metavar='/path/to/bincounts.stats.txt/files/', default=False,
help = 'A filepath to the desired directory where all bincounts.stats.txt files are saved, if not AnalysisDirectory/PipelineStats/ (UNTESTED)')
parser.add_argument('-s', '--samples', metavar='/path/to/sample_list.txt', default=False,
help='Path to a file containing a list of sample names to be processed\n\tno path or file extension needed (UNTESTED)')
return parser
def fullParser(input):
functionDict = {
'-h': parentArgs,
'--help': parentArgs,
'preprocess': preprocessArgs,
'map': mapArgs,
'count': countArgs,
'segment': segmentArgs,
'interpret': interpretArgs,
}
if input == []:
parentArgs()
if input[0] not in functionDict.keys():
return input[0], False
parser = functionDict[input[0]]()
args = parser.parse_args(input[1:])
return input[0], args
| suzannerohrback/somaticCNVpipeline | bin/arguments.py | Python | mit | 9,538 |
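# Usage sketch for fullParser() above (illustrative only): a driver script
# would typically hand it sys.argv[1:] and dispatch on the returned command.
if __name__ == '__main__':
    import sys
    command, args = fullParser(sys.argv[1:])
    if args is False:
        print('Unknown command: {0}'.format(command))
    else:
        print('Running {0} with {1}'.format(command, vars(args)))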
import sys
import os
import pkg_resources
VERSION = 0.5
script_path = os.path.dirname(sys.argv[0])
def resource_path(relative_path):
base_path = getattr(sys, '_MEIPASS', script_path)
full_path = os.path.join(base_path, relative_path)
if os.path.isfile(full_path):
return full_path
else:
return pkg_resources.resource_filename(__name__, relative_path)
| JiapengLi/pqcom | pqcom/util.py | Python | mit | 387 |
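# Usage sketch for resource_path() above (illustrative only; the relative path
# is a placeholder, not necessarily a file that ships with pqcom). Frozen
# PyInstaller builds resolve it under sys._MEIPASS, source checkouts relative
# to the script, and installed packages via pkg_resources.
def _load_icon_example():
    with open(resource_path('img/icon.png'), 'rb') as f:
        return f.read()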
#!/usr/bin/env python3
# Copyright (c) 2012-2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''
Extract _("...") strings for translation and convert to Qt stringdefs so that
they can be picked up by Qt linguist.
'''
from subprocess import Popen, PIPE
import operator
import os
import sys
OUT_CPP = "qt/bitcoinstrings.cpp"
EMPTY = ['""']
def parse_po(text):
"""
Parse 'po' format produced by xgettext.
Return a list of (msgid,msgstr) tuples.
"""
messages = []
msgid = []
msgstr = []
in_msgid = False
in_msgstr = False
for line in text.split('\n'):
line = line.rstrip('\r')
if line.startswith('msgid '):
if in_msgstr:
messages.append((msgid, msgstr))
in_msgstr = False
# message start
in_msgid = True
msgid = [line[6:]]
elif line.startswith('msgstr '):
in_msgid = False
in_msgstr = True
msgstr = [line[7:]]
elif line.startswith('"'):
if in_msgid:
msgid.append(line)
if in_msgstr:
msgstr.append(line)
if in_msgstr:
messages.append((msgid, msgstr))
return messages
files = sys.argv[1:]
# xgettext -n --keyword=_ $FILES
XGETTEXT = os.getenv('XGETTEXT', 'xgettext')
if not XGETTEXT:
print(
'Cannot extract strings: xgettext utility is not installed or not configured.',
file=sys.stderr)
print('Please install package "gettext" and re-run \'./configure\'.',
file=sys.stderr)
sys.exit(1)
child = Popen([XGETTEXT, '--output=-', '-n',
'--keyword=_'] + files, stdout=PIPE)
(out, err) = child.communicate()
messages = parse_po(out.decode('utf-8'))
f = open(OUT_CPP, 'w', encoding="utf8")
f.write("""
#include <QtGlobal>
// Automatically generated by extract_strings_qt.py
#ifdef __GNUC__
#define UNUSED __attribute__((unused))
#else
#define UNUSED
#endif
""")
f.write('static const char UNUSED *bitcoin_strings[] = {\n')
f.write('QT_TRANSLATE_NOOP("bitcoin-core", "{}"),\n'.format(os.getenv('PACKAGE_NAME'),))
f.write('QT_TRANSLATE_NOOP("bitcoin-core", "{}"),\n'.format(os.getenv('COPYRIGHT_HOLDERS'),))
if os.getenv('COPYRIGHT_HOLDERS_SUBSTITUTION') != os.getenv('PACKAGE_NAME'):
f.write('QT_TRANSLATE_NOOP("bitcoin-core", "{}"),\n'.format(
os.getenv('COPYRIGHT_HOLDERS_SUBSTITUTION'),))
messages.sort(key=operator.itemgetter(0))
for (msgid, msgstr) in messages:
if msgid != EMPTY:
f.write('QT_TRANSLATE_NOOP("bitcoin-core", {}),\n'.format('\n'.join(msgid)))
f.write('};\n')
f.close()
| ftrader-bitcoinabc/bitcoin-abc | share/qt/extract_strings_qt.py | Python | mit | 2,747 |
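# Illustrative check for parse_po() above (not part of the original script):
# a two-entry .po fragment of the kind xgettext emits, and the (msgid, msgstr)
# pairs the parser returns, each piece still wrapped in po-style quotes.
_PO_SAMPLE = '''msgid "Hello"
msgstr ""

msgid ""
"Multi-line "
"entry"
msgstr ""
'''
assert parse_po(_PO_SAMPLE) == [
    (['"Hello"'], ['""']),
    (['""', '"Multi-line "', '"entry"'], ['""']),
]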
"""
WSGI config for django_mailing project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "django_mailing.settings")
application = get_wsgi_application()
| Aladom/django-mailing | django_mailing/wsgi.py | Python | mit | 405 |
# -*- coding: utf-8 -*-
#
# Python 101 documentation build configuration file, created by
# sphinx-quickstart on Tue Sep 16 15:47:34 2014.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.imgmath',
'sphinx.ext.viewcode',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# the path to the latex executable
pngmath_latex = "/usr/bin/latex"
# the path to the dvipng executable
pngmath_dvipng = "/usr/bin/dvipng"
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Python 101'
copyright = u'2014, Centrum Edukacji Obywatelskiej'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.5'
# The full version, including alpha/beta/rc tags.
release = '0.5'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
show_authors = True
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'Python101doc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'Python101.tex', u'Python 101 Documentation',
u'Centrum Edukacji Obywatelskiej', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'python101', u'Python 101 Documentation',
[u'Centrum Edukacji Obywatelskiej'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'Python101', u'Python 101 Documentation',
u'Centrum Edukacji Obywatelskiej', 'Python101', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
import sphinx
rst_prolog = u"""
.. raw:: html
<!-- The fonts loaded by default do not include the Latin Extended subset -->
<link href='http://fonts.googleapis.com/css?family=Lato:400,700,400italic,700italic|Roboto+Slab:400,700&subset=latin,latin-ext' rel='stylesheet' type='text/css'>
"""
rst_epilog = u"""
.. include:: /copyright.rst
.. raw:: html
<style>
div.code_no {{ background: #e3e3e3; padding: 6px 12px; }}
span.right {{ float: right; }}
div.highlight, div.highlight-python {{ margin-top: 0px; }}
.rst-content code {{ color: #E74C3C; }}
div[class^="highlight"] pre {{ line-height: normal; }}
</style>
.. |date| date::
.. |time| date:: %H:%M
.. |sphinx_version| replace:: {}
:Created: |date| at |time| with Sphinx |sphinx_version|
:Authors: :doc:`See the "Authors" file </authors>`
""".format(sphinx.__version__)
try:
import sphinx_rtd_theme
html_theme = "sphinx_rtd_theme"
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
html_style = 'css/python101.css'
except:
pass
# def setup(app):
| koduj-z-klasa/python101 | docs/conf.py | Python | mit | 9,897 |
'''
The MIT License (MIT)
Copyright (c) 2014 NTHUOJ team
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
'''
from django.http import HttpResponse, HttpResponseBadRequest, Http404
from django.shortcuts import render, redirect
from django.core.exceptions import PermissionDenied
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from django.contrib.auth.decorators import login_required
from django.core.servers.basehttp import FileWrapper
from django.utils import timezone
from django.contrib import messages
from utils.render_helper import render_index
from utils.user_info import validate_user, has_problem_auth, has_problem_ownership
from users.models import User
from problem.models import Problem, Tag, Testcase
from problem.forms import ProblemForm, TagForm, TagFilter
from utils import log_info, config_info
from problem.problem_info import *
from utils import log_info
from utils.render_helper import render_index, get_current_page
from utils.rejudge import rejudge_problem
from subprocess import check_call
import os
import json
logger = log_info.get_logger()
# Create your views here.
def problem(request):
user = validate_user(request.user)
can_add_problem = user.has_subjudge_auth()
filter_type = request.GET.get('filter')
tag_filter = TagFilter(request.GET)
if tag_filter.is_valid():
tag_name = tag_filter.cleaned_data['tag_name']
if filter_type == 'mine':
problem_list = get_owner_problem_list(user)
mine = True
else:
problem_list = get_problem_list(user)
mine = False
if tag_name:
problem_list = problem_list.filter(tags__tag_name=tag_name)
for p in problem_list:
p.in_contest = check_in_contest(p)
problems = get_current_page(request, problem_list, 15)
for p in problems:
if p.total_submission != 0:
p.pass_rate = float(p.ac_count) / float(p.total_submission) * 100.0
p.not_pass_rate = 100.0 - p.pass_rate
p.pass_rate = "%.2f" % (p.pass_rate)
p.not_pass_rate = "%.2f" % (p.not_pass_rate)
else:
p.no_submission = True
else:
problems = []
mine = False
return render_index(request, 'problem/panel.html',
{'all_problem': problems, 'mine': mine,
'can_add_problem': can_add_problem, 'tag_filter': tag_filter})
def detail(request, pid):
user = validate_user(request.user)
tag_form = TagForm()
try:
problem = Problem.objects.get(pk=pid)
if not has_problem_auth(user, problem):
logger.warning("%s has no permission to see problem %d" % (user, problem.pk))
raise PermissionDenied()
except Problem.DoesNotExist:
logger.warning('problem %s not found' % (pid))
raise Http404('problem %s does not exist' % (pid))
problem.testcase = get_testcase(problem)
problem = verify_problem_code(problem)
problem.in_contest = check_in_contest(problem)
return render_index(request, 'problem/detail.html', {'problem': problem, 'tag_form': tag_form})
@login_required
def new(request):
if request.method == "POST":
if 'pname' in request.POST and request.POST['pname'].strip() != "":
p = Problem(pname=request.POST['pname'], owner=request.user)
p.save()
logger.info("problem %s created by %s" % (p.pk, request.user))
return redirect("/problem/%d/edit/" % p.pk)
return redirect("/problem/")
@login_required
def edit(request, pid=None):
tag_form = TagForm()
try:
problem = Problem.objects.get(pk=pid)
if not request.user.has_admin_auth() and request.user != problem.owner:
logger.warning("user %s has no permission to edit problem %s" % (request.user, pid))
raise PermissionDenied()
except Problem.DoesNotExist:
logger.warning("problem %s does not exist" % (pid))
raise Http404("problem %s does not exist" % (pid))
testcase = get_testcase(problem)
tags = problem.tags.all()
if request.method == 'GET':
form = ProblemForm(instance=problem)
if request.method == 'POST':
form = ProblemForm(request.POST, request.FILES, instance=problem)
if form.is_valid():
problem = form.save()
problem.sample_in = request.POST['sample_in']
problem.sample_out = request.POST['sample_out']
problem.save()
file_ex = get_problem_file_extension(problem)
if "special_judge_code" in request.FILES:
with open('%s%s%s' % (SPECIAL_PATH, problem.pk, file_ex), 'w') as t_in:
for chunk in request.FILES['special_judge_code'].chunks():
t_in.write(chunk)
if "partial_judge_code" in request.FILES:
with open('%s%s%s' % (PARTIAL_PATH, problem.pk, file_ex), 'w') as t_in:
for chunk in request.FILES['partial_judge_code'].chunks():
t_in.write(chunk)
if "partial_judge_header" in request.FILES:
with open('%s%s.h' % (PARTIAL_PATH, problem.pk), 'w') as t_in:
for chunk in request.FILES['partial_judge_header'].chunks():
t_in.write(chunk)
problem = verify_problem_code(problem)
if problem.has_special_judge_code and \
problem.judge_type != problem.SPECIAL:
os.remove('%s%s%s' % (SPECIAL_PATH, problem.pk, file_ex))
if problem.judge_type != problem.PARTIAL:
if problem.has_partial_judge_code:
os.remove('%s%s%s' % (PARTIAL_PATH, problem.pk, file_ex))
if problem.has_partial_judge_header:
os.remove('%s%s.h' % (PARTIAL_PATH, problem.pk))
logger.info('edit problem, pid = %d by %s' % (problem.pk, request.user))
messages.success(request, 'problem %d edited' % problem.pk)
return redirect('/problem/%d' % (problem.pk))
file_ex = get_problem_file_extension(problem)
problem = verify_problem_code(problem)
return render_index(request, 'problem/edit.html',
{'form': form, 'problem': problem,
'tags': tags, 'tag_form': tag_form,
'testcase': testcase,
'path': {
'TESTCASE_PATH': TESTCASE_PATH,
'SPECIAL_PATH': SPECIAL_PATH,
'PARTIAL_PATH': PARTIAL_PATH, }
})
@login_required
def tag(request, pid):
if request.method == "POST":
tag = request.POST['tag_name']
try:
problem = Problem.objects.get(pk=pid)
except Problem.DoesNotExist:
logger.warning("problem %s does not exist" % (pid))
raise Http404("problem %s does not exist" % (pid))
if not problem.tags.filter(tag_name=tag).exists():
new_tag, created = Tag.objects.get_or_create(tag_name=tag)
problem.tags.add(new_tag)
problem.save()
logger.info("add new tag '%s' to problem %s by %s" % (tag, pid, request.user))
return HttpResponse(json.dumps({'tag_id': new_tag.pk}),
content_type="application/json")
return HttpResponseBadRequest()
return HttpResponse()
@login_required
def delete_tag(request, pid, tag_id):
try:
problem = Problem.objects.get(pk=pid)
tag = Tag.objects.get(pk=tag_id)
except Problem.DoesNotExist:
logger.warning("problem %s does not exist" % (pid))
raise Http404("problem %s does not exist" % (pid))
except Tag.DoesNotExist:
logger.warning("tag %s does not exist" % (tag_id))
raise Http404("tag %s does not exist" % (tag_id))
if not request.user.has_admin_auth() and request.user != problem.owner:
raise PermissionDenied()
logger.info("tag %s deleted by %s" % (tag.tag_name, request.user))
problem.tags.remove(tag)
return HttpResponse()
@login_required
def testcase(request, pid, tid=None):
if request.method == 'POST':
try:
problem = Problem.objects.get(pk=pid)
except Problem.DoesNotExist:
logger.warning("problem %s does not exist" % (pid))
raise Http404("problem %s does not exist" % (pid))
if tid == None:
testcase = Testcase()
testcase.problem = problem
else:
try:
testcase = Testcase.objects.get(pk=tid)
except Testcase.DoesNotExist:
logger.warning("testcase %s does not exist" % (tid))
raise Http404("testcase %s does not exist" % (tid))
if testcase.problem != problem:
logger.warning("testcase %s does not belong to problem %s" % (tid, pid))
raise Http404("testcase %s does not belong to problem %s" % (tid, pid))
has_message = False
if 'time_limit' in request.POST:
testcase.time_limit = request.POST['time_limit']
testcase.memory_limit = request.POST['memory_limit']
testcase.save()
logger.info("testcase saved, tid = %s by %s" % (testcase.pk, request.user))
messages.success(request, "testcase %s saved" % testcase.pk)
has_message = True
if 't_in' in request.FILES:
TESTCASE_PATH = config_info.get_config('path', 'testcase_path')
try:
input_filename = '%s%s.in' % (TESTCASE_PATH, testcase.pk)
output_filename = '%s%s.out' % (TESTCASE_PATH, testcase.pk)
with open(input_filename, 'w') as t_in:
for chunk in request.FILES['t_in'].chunks():
t_in.write(chunk)
check_call(['dos2unix', input_filename])
logger.info("testcase %s.in saved by %s" % (testcase.pk, request.user))
with open(output_filename, 'w') as t_out:
for chunk in request.FILES['t_out'].chunks():
t_out.write(chunk)
check_call(['dos2unix', output_filename])
logger.info("testcase %s.out saved by %s" % (testcase.pk, request.user))
if not has_message:
messages.success(request, "testcase %s saved" % testcase.pk)
except (IOError, OSError):
logger.error("saving testcase error")
return HttpResponse(json.dumps({'tid': testcase.pk}),
content_type="application/json")
return HttpResponse()
@login_required
def delete_testcase(request, pid, tid):
try:
problem = Problem.objects.get(pk=pid)
testcase = Testcase.objects.get(pk=tid)
except Problem.DoesNotExist:
logger.warning("problem %s does not exist" % (pid))
raise Http404("problem %s does not exist" % (pid))
except Testcase.DoesNotExist:
logger.warning("testcase %s does not exist" % (tid))
raise Http404("testcase %s does not exist" % (tid))
if not request.user.has_admin_auth() and request.user != problem.owner:
raise PermissionDenied
logger.info("testcase %d deleted" % (testcase.pk))
try:
os.remove('%s%d.in' % (TESTCASE_PATH, testcase.pk))
os.remove('%s%d.out' % (TESTCASE_PATH, testcase.pk))
except (IOError, OSError):
logger.error("remove testcase %s error" % (testcase.pk))
logger.info("testcase %d deleted by %s" % (testcase.pk, request.user))
messages.success(request, "testcase %s deleted" % testcase.pk)
testcase.delete()
return HttpResponse()
@login_required
def delete_problem(request, pid):
try:
problem = Problem.objects.get(pk=pid)
except Problem.DoesNotExist:
logger.warning("problem %s does not exist" % (pid))
raise Http404("problem %s does not exist" % (pid))
if not request.user.has_admin_auth() and request.user != problem.owner:
raise PermissionDenied
logger.info("problem %d deleted by %s" % (problem.pk, request.user))
messages.success(request, "problem %d deleted" % problem.pk)
problem.delete()
return redirect('/problem/')
def preview(request):
problem = Problem()
problem.pname = request.POST['pname']
problem.description = request.POST['description']
problem.input= request.POST['input']
problem.output = request.POST['output']
problem.sample_in = request.POST['sample_in']
problem.sample_out = request.POST['sample_out']
problem.tag = request.POST['tags'].split(',')
return render_index(request, 'problem/preview.html', {'problem': problem, 'preview': True})
@login_required
def download_testcase(request, filename):
tid = filename.split('.')[0]
try:
testcase = Testcase.objects.get(pk=tid)
problem = testcase.problem
except:
raise Http404()
if not has_problem_ownership(request.user, problem) and \
not request.user.has_admin_auth():
logger.warning("%s has no permission to see testcase of problem %d" % (request.user, problem.pk))
raise Http404()
try:
f = open(TESTCASE_PATH+filename, "r")
except IOError:
logger.warning("open testcase %s error" % filename)
raise Http404()
response = HttpResponse(FileWrapper(f), content_type="text/plain")
response['Content-Disposition'] = 'attachment; filename=' + filename
return response
@login_required
def download_partial(request, filename):
pid = filename.split('.')[0]
try:
problem = Problem.objects.get(pk=pid)
except:
raise Http404()
if not has_problem_auth(request.user, problem):
logger.warning("%s has no permission to download problem %d partial judge code"
% (request.user, problem.pk))
raise Http404()
try:
f = open(PARTIAL_PATH+filename, "r")
except IOError:
raise Http404()
response = HttpResponse(FileWrapper(f), content_type="text/plain")
response['Content-Disposition'] = 'attachment; filename=' + filename
return response
@login_required
def download_special(request, filename):
pid = filename.split('.')[0]
try:
problem = Problem.objects.get(pk=pid)
except:
raise Http404()
if not has_problem_ownership(request.user, problem) and \
not request.user.has_admin_auth():
logger.warning("%s has no permission to download problem %d special judge code"
% (request.user, problem.pk))
raise Http404()
try:
f = open(SPECIAL_PATH+filename, "r")
except IOError:
raise Http404()
response = HttpResponse(FileWrapper(f), content_type="text/plain")
response['Content-Disposition'] = 'attachment; filename=' + filename
return response
@login_required
def rejudge(request):
pid = request.GET.get('pid')
if pid:
try:
problem = Problem.objects.get(pk=pid)
if not has_problem_ownership(request.user, problem) and \
not request.user.has_admin_auth():
logger.warning("%s has no permission to rejudge problem %d"
% (request.user, problem.pk))
raise PermissionDenied()
rejudge_problem(problem)
logger.info("problem %s rejudged" % problem.pk)
messages.success(request, 'problem %s rejuded' % problem.pk)
except Problem.DoesNotExist:
raise Http404()
return redirect('/problem/')
| Changron/NTHUOJ_web | problem/views.py | Python | mit | 16,676 |
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Item'
db.create_table('books_item', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('name', self.gf('django.db.models.fields.CharField')(max_length=50)),
('cost', self.gf('django.db.models.fields.DecimalField')(max_digits=8, decimal_places=2)),
('quantity', self.gf('django.db.models.fields.PositiveIntegerField')(blank=True)),
))
db.send_create_signal('books', ['Item'])
# Adding model 'Time'
db.create_table('books_time', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('task', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['books.Task'], null=True, blank=True)),
('notes', self.gf('django.db.models.fields.CharField')(max_length=1000)),
('rate_per_hour', self.gf('django.db.models.fields.PositiveIntegerField')(blank=True)),
('time', self.gf('django.db.models.fields.PositiveIntegerField')(blank=True)),
))
db.send_create_signal('books', ['Time'])
def backwards(self, orm):
# Deleting model 'Item'
db.delete_table('books_item')
# Deleting model 'Time'
db.delete_table('books_time')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'books.client': {
'Meta': {'object_name': 'Client'},
'city': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'country': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '100'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'organization_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'phone': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'postal_code': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'state': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'street_adress': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'})
},
'books.expense': {
'Meta': {'object_name': 'Expense'},
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'books.invoice': {
'Meta': {'object_name': 'Invoice'},
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'client': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['books.Client']"}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_of_issue': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'invoice_number': ('django.db.models.fields.PositiveIntegerField', [], {'blank': 'True'}),
'last_modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
'notes': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),
'terms': ('django.db.models.fields.CharField', [], {'max_length': '1000'})
},
'books.item': {
'Meta': {'object_name': 'Item'},
'cost': ('django.db.models.fields.DecimalField', [], {'max_digits': '8', 'decimal_places': '2'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'quantity': ('django.db.models.fields.PositiveIntegerField', [], {'blank': 'True'})
},
'books.project': {
'Meta': {'object_name': 'Project'},
'client': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['books.Client']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'rate_per_hour': ('django.db.models.fields.PositiveIntegerField', [], {'blank': 'True'})
},
'books.task': {
'Meta': {'object_name': 'Task'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['books.Project']"}),
'rate_per_hour': ('django.db.models.fields.PositiveIntegerField', [], {'blank': 'True'})
},
'books.tax': {
'Meta': {'object_name': 'Tax'},
'compound_tax': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'number': ('django.db.models.fields.PositiveIntegerField', [], {'blank': 'True'}),
'rate': ('django.db.models.fields.PositiveIntegerField', [], {})
},
'books.time': {
'Meta': {'object_name': 'Time'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'notes': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),
'rate_per_hour': ('django.db.models.fields.PositiveIntegerField', [], {'blank': 'True'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['books.Task']", 'null': 'True', 'blank': 'True'}),
'time': ('django.db.models.fields.PositiveIntegerField', [], {'blank': 'True'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
complete_apps = ['books'] | carquois/blobon | blobon/books/migrations/0004_auto__add_item__add_time.py | Python | mit | 9,603 |
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "RandomPasswordGenerator.settings")
try:
from django.core.management import execute_from_command_line
except ImportError:
# The above import may fail for some other reason. Ensure that the
# issue is really that Django is missing to avoid masking other
# exceptions on Python 2.
try:
import django
except ImportError:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
)
raise
execute_from_command_line(sys.argv)
| AgapiGit/RandomPasswordGenerator | RandomPasswordGenerator/manage.py | Python | mit | 843 |
import os;
link = "http://media.blizzard.com/heroes/images/battlegrounds/maps/haunted-mines-v2/underground/6/"
column = 0;
rc_column = 0;
while (rc_column == 0):
row = 0;
rc_column = os.system('wget ' + link + str(column) + '/' + str(row) + '.jpg -O ' + str(1000 + column) + '-' + str(1000 + row) + '.jpg')
rc_row = rc_column
while (rc_row == 0):
row += 1
rc_row = os.system('wget ' + link + str(column) + '/' + str(row) + '.jpg -O ' + str(1000 + column) + '-' + str(1000 + row) + '.jpg')
column += 1
p = os.popen('ls -1 *.jpg | tail -n2');
second_last_file = p.readline();
last_file = p.readline();
column_end = last_file[0:4]
row_end = second_last_file[5:9]
print column_end
print row_end
os.system('rm ' + column_end + '*');
os.system('rm *-' + row_end + '.jpg');
column_end = int(column_end) - 1000;
row_end = int(row_end) - 1000;
os.system('mkdir temp')
i = 0;
for r in range(0, row_end):
for c in range(0, column_end):
file_to_move = str(1000 + c) + '-' + str(1000 + row_end - r - 1) + '.jpg'
os.system('cp ' + file_to_move + ' ./temp/' + str(100000 + i) + '.jpg');
i += 1
os.system('montage ./temp/*.jpg -tile ' + str(column_end) + 'x' + str(row_end) + ' -geometry +0+0 result.png');
os.system('montage ./temp/*.jpg -tile ' + str(column_end) + 'x' + str(row_end) + ' -geometry +0+0 result.jpg');
os.system('rm temp -r');
os.system('rm 1*.jpg');
| karellodewijk/wottactics | extra/download_hots_map/haunted-mines-underground/download_hots_map.py | Python | mit | 1,381 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
import graph
class TestGraph(unittest.TestCase):
'''
Unit test for graph.py
'''
def setUp(self):
'''
This method sets up the test graph data
'''
test_graph_data = {'1': [], '2': ['1'], '3': ['1', '4'], '4': [],
'5': ['2', '3'], '6': ['3']}
self.g = graph.Graph()
self.g.construct_from_edgeset(test_graph_data)
def test_pred_nodes(self):
preds = set(self.g.pred_nodes('5'))
expected = set(['1', '4', '2', '3', '5'])
self.assertEqual(preds, expected)
def test_succ_nodes(self):
succs = set(self.g.succ_nodes('1'))
expected = set(['1', '2', '3', '5', '6'])
self.assertEqual(succs, expected)
if __name__ == '__main__':
unittest.main()
| hirosassa/graph_search | test_graph.py | Python | mit | 858 |
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from config.template_middleware import TemplateResponse
from gaecookie.decorator import no_csrf
from gaepermission.decorator import login_not_required
from routes.campapel.home import returnIndex
from tekton import router
from tekton.gae.middleware.redirect import RedirectResponse
from gaeforms.ndb.form import ModelForm
from gaegraph.model import Node
from google.appengine.ext import ndb
from routes.campapel.modelo import CamPapel, CamPapelForm
@login_not_required
@no_csrf
def form(_resp):
contexto={'salvar_path':router.to_path(salvar)}
return TemplateResponse(contexto,'campapel/form.html')
@login_not_required
def salvar(**prop):
camPapelF=CamPapelForm(**prop)
erros=camPapelF.validate()
if erros:
contexto={'salvar_path':router.to_path(salvar),
'erros':erros,
'camPapel':prop}
return TemplateResponse(contexto,'campapel/form.html')
else:
camPapel=camPapelF.fill_model()
camPapel.put()
return RedirectResponse(returnIndex())
@login_not_required
@no_csrf
def editar_form(camPapel_id):
camPapel_id=int(camPapel_id)
camPapel=CamPapel.get_by_id(camPapel_id)
contexto={'salvar_path':router.to_path(editar,camPapel_id),'camPapel':camPapel}
return TemplateResponse(contexto,template_path='campapel/form.html')
@login_not_required
def editar(camPapel_id,**prop):
camPapel_id=int(camPapel_id)
camPapel=CamPapel.get_by_id(camPapel_id)
camPapelF=CamPapelForm(**prop)
erros=camPapelF.validate()
if erros:
        contexto={'salvar_path':router.to_path(editar, camPapel_id),
'erros':erros,
'camPapel':camPapelF}
return TemplateResponse(contexto,'campapel/form.html')
else:
camPapelF.fill_model(camPapel)
camPapel.put()
return RedirectResponse(router.to_path(returnIndex()))
@login_not_required
def deletar(camPapel_id):
chave=ndb.Key(CamPapel,int(camPapel_id))
chave.delete()
return RedirectResponse(router.to_path(returnIndex())) | marcelosandoval/tekton | backend/appengine/routes/campapel/show.py | Python | mit | 2,187 |
from rest_framework import routers
from . import views
router = routers.DefaultRouter(trailing_slash=False)
router.register(r'complaints', views.ComplaintViewSet)
urlpatterns = router.urls
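# For reference: with DefaultRouter(trailing_slash=False), the registration
# above typically yields an API root view plus routes roughly like
#   complaints        -> list/create (GET, POST)
#   complaints/<pk>   -> retrieve/update/partial_update/destroy (GET, PUT, PATCH, DELETE)
# exact names and lookups depend on ComplaintViewSet's configuration.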
| danjac/ownblock | ownblock/ownblock/apps/complaints/urls.py | Python | mit | 192 |
#! /usr/bin/python3
'''
automate_scrape.py - runs through the online version of the textbook, 'Automate the Boring Stuff with Python', and pulls out all of the practice projects and stores them in a file.
'''
import requests, os, bs4, sys
def page_download(web_page, chapter_num, no_project_count, no_chapter_projects):
'''
    Downloads the web page. Keeps the variables chapter_num, no_project_count, no_chapter_projects to then be used in chapter_loop_and_write.
'''
print('Downloading page {0}'.format(web_page))
res = requests.get(web_page)
res.raise_for_status()
soup = bs4.BeautifulSoup(res.text)
chapter_loop_and_write(soup, chapter_num, no_project_count, no_chapter_projects)
def chapter_loop_and_write(downloaded_page, chapter_num, no_project_count, no_chapter_projects):
'''
Takes a downloaded web page and pulls out the practice projects and writes them to a file. It then moves to the next chapter, gets that url. Counts the number of chapters where practice projects could not be found and stores those chapters in a list.
'''
soup = downloaded_page
projects = soup.find_all('div', {'class': "book", 'title' : 'Practice Projects'})
if projects == []:
print('Could not find Projects.')
no_project_count += 1
no_chapter_projects.append('Chapter' + str(chapter_num))
else:
with open('automateProjects.txt', 'a') as f:
for el in projects:
f.write(el.get_text())
print('Writing text to file')
chapter_num += 1
if chapter_num == 19:
print('\n{0} chapters where Practice Projects could not be found'.format(no_project_count))
print('Here is the list of those chapters:\n{0}'.format(no_chapter_projects))
print('='*20 + 'Done' + '='*20 +'\n')
return
next_link = soup.find('a', href='/chapter' + str(chapter_num))
web_page = 'http://automatetheboringstuff.com' + next_link.get('href') +'/'
page_download(web_page, chapter_num, no_project_count, no_chapter_projects)
def main():
'''
main
'''
web_page = 'https://automatetheboringstuff.com/chapter1/'
page_download(web_page, 1, 0, [])
if __name__ == '__main__':
sys.exit(main())
| ZorbaTheStrange/automate_scrape | automate_scrape.py | Python | mit | 2,297 |
#!/usr/bin/env python
# Jonas Schnelli, 2013
# make sure the Gwangcoin-Qt.app contains the right plist (including the right version)
# fix made because of several bugs in Qt mac deployment (https://bugreports.qt-project.org/browse/QTBUG-21267)
from string import Template
from datetime import date
bitcoinDir = "./";
inFile = bitcoinDir+"/share/qt/Info.plist"
outFile = "Gwangcoin-Qt.app/Contents/Info.plist"
version = "unknown";
fileForGrabbingVersion = bitcoinDir+"gwangcoin-qt.pro"
for line in open(fileForGrabbingVersion):
lineArr = line.replace(" ", "").split("=");
if lineArr[0].startswith("VERSION"):
version = lineArr[1].replace("\n", "");
fIn = open(inFile, "r")
fileContent = fIn.read()
s = Template(fileContent)
newFileContent = s.substitute(VERSION=version,YEAR=date.today().year)
fOut = open(outFile, "w");
fOut.write(newFileContent);
print "Info.plist fresh created"
| gwangjin2/gwangcoin-core | share/qt/clean_mac_info_plist.py | Python | mit | 900 |
import numpy as np
import tensorflow as tf
from agent.forward import Forward
from config import *
_EPSILON = 1e-6 # avoid nan
# local network for advantage actor-critic which are also know as A2C
class Framework(object):
def __init__(self, access, state_size, action_size, scope_name):
self.Access = access
self.action_size = action_size
self.action_space = list(range(action_size))
with tf.variable_scope(scope_name):
# placeholder
self.inputs = tf.placeholder(tf.float32, [None] + state_size, "states")
self.actions = tf.placeholder(tf.int32, [None], "actions")
self.targets = tf.placeholder(tf.float32, [None], "discounted_rewards")
# network interface
self.actor = Forward('actor')
self.critic = Forward('critic')
self.policy = tf.nn.softmax(self.actor(self.inputs, self.action_size))
self.value = self.critic(self.inputs, 1)
self.policy_step = tf.squeeze(self.policy, axis=0)
self.greedy_action = tf.argmax(self.policy_step)
# losses
self._build_losses()
# async framework
self._build_async_interface()
self._build_interface()
print('graph %s' % (str(scope_name)))
def _build_losses(self):
# value loss
targets = tf.expand_dims(self.targets, axis=1)
self.advantage = targets - self.value
self.value_loss = tf.reduce_mean(tf.square(self.advantage))
# policy loss
action_gather = tf.one_hot(self.actions, self.action_size)
policy_action = tf.reduce_sum(self.policy * action_gather,
axis=1, keep_dims=True)
log_policy_action = tf.log(policy_action + _EPSILON)
self.policy_loss = -tf.reduce_mean(
tf.stop_gradient(self.advantage) * log_policy_action)
# entropy loss
entropy_loss = tf.reduce_sum(
self.policy * tf.log(self.policy + _EPSILON),
axis=1, keep_dims=True)
self.entropy_loss = tf.reduce_mean(entropy_loss)
# regularization
self.actor_norm = tf.add_n(self.actor.get_regularization()) * ACTOR_NORM_BETA
self.critic_norm = tf.add_n(self.critic.get_regularization()) * CRITIC_NORM_BETA
# total loss
self.actor_loss = self.policy_loss + ENTROPY_BETA * self.entropy_loss + self.actor_norm
self.critic_loss = self.value_loss + self.critic_norm
# interface adjustment parameters
self.a_actor_loss = self.actor_loss
self.a_policy_mean = -tf.reduce_mean(log_policy_action)
self.a_policy_loss = self.policy_loss
self.a_entropy_loss = ENTROPY_BETA * self.entropy_loss
self.a_actor_norm = self.actor_norm
self.a_critic_loss = self.critic_loss
self.a_value_loss = self.value_loss
self.a_critic_norm = self.critic_norm
self.a_value_mean = tf.reduce_mean(self.value)
self.a_advantage = tf.reduce_mean(self.advantage)
def _build_interface(self):
self.a_interface = [self.a_actor_loss,
self.a_actor_grad,
self.a_policy_mean,
self.a_policy_loss,
self.a_entropy_loss,
self.a_actor_norm,
self.a_critic_loss,
self.a_critic_grad,
self.a_value_loss,
self.a_critic_norm,
self.a_value_mean,
self.a_advantage]
def _build_async_interface(self):
global_actor_params, global_critic_params = self.Access.get_trainable()
local_actor_params, local_critic_params = self.get_trainable()
actor_grads = tf.gradients(self.actor_loss, list(local_actor_params))
critic_grads = tf.gradients(self.critic_loss, list(local_critic_params))
# Set up optimizer with global norm clipping.
actor_grads, self.a_actor_grad = tf.clip_by_global_norm(actor_grads, MAX_GRAD_NORM)
critic_grads, self.a_critic_grad = tf.clip_by_global_norm(critic_grads, MAX_GRAD_NORM)
# update Access
actor_apply = self.Access.optimizer_actor.apply_gradients(
zip(list(actor_grads), list(global_actor_params)))
critic_apply = self.Access.optimizer_critic.apply_gradients(
zip(list(critic_grads), list(global_critic_params)))
self.update_global = [actor_apply, critic_apply]
# update ACNet
assign_list = []
for gv, lv in zip(global_actor_params, local_actor_params):
assign_list.append(tf.assign(lv, gv))
for gv, lv in zip(global_critic_params, local_critic_params):
assign_list.append(tf.assign(lv, gv))
self.update_local = assign_list
def get_trainable(self):
return [self.actor.get_variables(), self.critic.get_variables()]
def get_policy(self, sess, inputs):
return sess.run(self.policy, {self.inputs: inputs})
def get_stochastic_action(self, sess, inputs, epsilon=0.95):
# get stochastic action for train
if np.random.uniform() < epsilon:
policy = sess.run(self.policy_step,
{self.inputs: np.expand_dims(inputs, axis=0)})
return np.random.choice(self.action_space, 1, p=policy)[0]
else:
return np.random.randint(self.action_size)
def get_deterministic_policy_action(self, sess, inputs):
# get deterministic action for test
return sess.run(self.greedy_action,
{self.inputs: np.expand_dims(inputs, axis=0)})
def get_value(self, sess, inputs):
return sess.run(self.value, {self.inputs: inputs})
def train_step(self, sess, inputs, actions, targets):
feed_dict = {self.inputs: inputs,
self.actions: actions,
self.targets: targets}
sess.run(self.update_global, feed_dict)
def init_network(self, sess):
"""
init or update local network
:param sess:
:return:
"""
sess.run(self.update_local)
def get_losses(self, sess, inputs, actions, targets):
"""
get all loss functions of network
:param sess:
:param inputs:
:param actions:
:param targets:
:return:
"""
feed_dict = {self.inputs: inputs,
self.actions: actions,
self.targets: targets}
return sess.run(self.a_interface, feed_dict)
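# The `targets` placeholder above expects discounted rewards. A rough,
# self-contained sketch (not part of the original interface; gamma is only an
# illustrative default) of the usual n-step targets, bootstrapped from the
# critic's value of the state that follows the rollout, before train_step():
def discounted_targets(rewards, bootstrap_value, gamma=0.99):
    """Compute R_t = r_t + gamma * R_{t+1}, seeded with the bootstrap value."""
    targets = np.zeros(len(rewards), dtype=np.float32)
    running = float(bootstrap_value)
    for t in reversed(range(len(rewards))):
        running = rewards[t] + gamma * running
        targets[t] = running
    return targets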
| AlphaSmartDog/DeepLearningNotes | Note-6 A3CNet/Note-6.2 A3C与HS300指数择时/agent/framework.py | Python | mit | 6,700 |
from collections import Counter
from clarify_python.helper import get_embedded_items, get_link_href
MAX_METADATA_STRING_LEN = 2000
def default_to_empty_string(val):
return val if val is not None else ''
class ClarifyBrightcoveBridge:
def __init__(self, clarify_client, bc_client):
self.clarify_client = clarify_client
self.bc_client = bc_client
self.sync_stats = Counter(created=0, updated=0, deleted=0)
self.dry_run = False
def log(self, output_str):
print(output_str)
def log_sync_stats(self):
print('\nBundle stats:')
print(' created: {0}'.format(self.sync_stats['created']))
print(' updated: {0}'.format(self.sync_stats['updated']))
print(' deleted: {0}'.format(self.sync_stats['deleted']))
print(' total: {0}'.format(self.sync_stats['count']))
def _load_bundle_map(self):
'''
Return a map of all bundles in the Clarify app that have an external_id set for them.
The bundles with external_ids set are assumed to be the ones we have inserted from Brightcove.
The external_id contains the Brightcove video id.
'''
bundle_map = {}
next_href = None
has_next = True
while has_next:
bundles = self.clarify_client.get_bundle_list(href=next_href, embed_items=True)
items = get_embedded_items(bundles)
for item in items:
bc_video_id = item.get('external_id')
if bc_video_id is not None and len(bc_video_id) > 0:
bundle_map[bc_video_id] = item
next_href = get_link_href(bundles, 'next')
if next_href is None:
has_next = False
return bundle_map
def _metadata_from_video(self, video):
'''Generate the searchable metadata that we'll store in the bundle for the video'''
long_desc = video['long_description']
if long_desc is not None:
long_desc = long_desc[:MAX_METADATA_STRING_LEN]
tags = video.get('tags')
metadata = {
'name': default_to_empty_string(video.get('name')),
'description': default_to_empty_string(video.get('description')),
'long_description': default_to_empty_string(long_desc),
'tags': tags if tags is not None else [],
'updated_at': video.get('updated_at'),
'created_at': video.get('created_at'),
'state': video.get('state')
}
return metadata
def _src_media_url_for_video(self, video):
'''Get the url for the video media that we can send to Clarify'''
src_url = None
best_height = 0
best_source = None
# TODO: This assumes we have ingested videos. For remote videos, check if the remote flag is True
# and if so, use the src url from the Asset endpoint.
video_sources = self.bc_client.get_video_sources(video['id'])
# Look for codec H264 with good resolution
for source in video_sources:
height = source.get('height', 0)
codec = source.get('codec')
if source.get('src') and codec and codec.upper() == 'H264' and height <= 1080 and height > best_height:
best_source = source
if best_source is not None:
src_url = best_source['src']
return src_url
def _create_bundle_for_video(self, video):
media_url = self._src_media_url_for_video(video)
if not media_url:
self.log('SKIPPING: No suitable video src url for video {0} {1}'.format(video['id'], video['name']))
return
self.log('Creating bundle for video {0} {1}'.format(video['id'], video['name']))
if not self.dry_run:
external_id = video['id']
name = video.get('original_filename')
metadata = self._metadata_from_video(video)
self.clarify_client.create_bundle(name=name, media_url=media_url,
metadata=metadata, external_id=external_id)
self.sync_stats['created'] += 1
def _update_metadata_for_video(self, metadata_href, video):
'''
Update the metadata for the video if video has been updated in Brightcove since the bundle
metadata was last updated.
'''
current_metadata = self.clarify_client.get_metadata(metadata_href)
cur_data = current_metadata.get('data')
if cur_data.get('updated_at') != video['updated_at']:
self.log('Updating metadata for video {0}'.format(video['id']))
if not self.dry_run:
metadata = self._metadata_from_video(video)
self.clarify_client.update_metadata(metadata_href, metadata=metadata)
self.sync_stats['updated'] += 1
def sync_bundles(self, delete_bundles=True, update_metadata=True, confirm_delete_fun=None, dry_run=False):
self.dry_run = dry_run
self.sync_stats.clear()
if dry_run:
self.log('-----------------------------------')
self.log('DRY RUN - not modifying any bundles')
self.log('-----------------------------------')
self.log('Fetching bundles...')
bundle_map = self._load_bundle_map()
self.log('Fetching videos...')
videos = self.bc_client.get_all_videos()
self.log('Checking {0} videos...'.format(len(videos)))
for video in videos:
vid = video['id']
bundle = bundle_map.get(vid)
if bundle is None:
# Create a bundle for the video
self._create_bundle_for_video(video)
elif update_metadata:
# Update the metadata in the bundle for the video
self._update_metadata_for_video(get_link_href(bundle, 'clarify:metadata'), video)
if delete_bundles:
self.log('Checking deleted videos...')
# Delete bundles for missing videos
existing_vids = set([x['id'] for x in videos])
existing_bundles = set(bundle_map.keys())
missing_bundles = existing_bundles - existing_vids
if len(missing_bundles):
for vid in missing_bundles:
bundle = bundle_map.get(vid)
if dry_run or confirm_delete_fun is None or \
confirm_delete_fun(bundle['name'], bundle['external_id']):
self.log('Delete bundle for video {0}'.format(bundle['external_id']))
if not dry_run:
self.clarify_client.delete_bundle(get_link_href(bundle, 'self'))
self.sync_stats['deleted'] += 1
self.sync_stats['count'] = len(bundle_map) + self.sync_stats['created'] - self.sync_stats['deleted']
self.log('done.')
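# A minimal example of the confirm_delete_fun callback accepted by
# sync_bundles() above: it receives the bundle name and the Brightcove video
# id and should return True to allow the deletion. Illustrative only; any
# callable with this signature works (assumes Python 3's input()).
def confirm_delete_on_console(bundle_name, bc_video_id):
    answer = input('Delete bundle "{0}" (video {1})? [y/N] '.format(bundle_name, bc_video_id))
    return answer.strip().lower() in ('y', 'yes')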
| Clarify/clarify_brightcove_sync | clarify_brightcove_sync/clarify_brightcove_bridge.py | Python | mit | 6,892 |
"""Softmax."""
scores = [3.0, 1.0, 0.2]
import numpy as np
def softmax(x):
"""Compute softmax values for each sets of scores in x."""
return np.exp(x) / sum(np.exp(x))
print(softmax(scores))
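# np.exp can overflow for large scores; a common, numerically safer variant
# (shown only as an illustrative sketch) shifts the scores by their maximum
# first, which leaves the result unchanged because softmax is invariant to
# adding a constant to every score.
def softmax_stable(x):
    z = np.asarray(x, dtype=float)
    z = z - np.max(z, axis=0)
    return np.exp(z) / np.sum(np.exp(z), axis=0)
print(softmax_stable(scores))  # matches softmax(scores) up to rounding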
# Plot softmax curves
import matplotlib.pyplot as plt
x = np.arange(-2.0, 6.0, 0.1)
scores = np.vstack([x, np.ones_like(x), 0.2 * np.ones_like(x)])
plt.plot(x, softmax(scores).T, linewidth=2)
plt.show()
| ds-hwang/deeplearning_udacity | python_practice/quiz1.py | Python | mit | 409 |
'''
Authors: Donnie Marino, Kostas Stamatiou
Contact: [email protected]
Unit tests for the gbdxtools.Idaho class
'''
from gbdxtools import Interface
from gbdxtools.idaho import Idaho
from auth_mock import get_mock_gbdx_session
import vcr
from os.path import join, isfile, dirname, realpath
import tempfile
import unittest
# How to use the mock_gbdx_session and vcr to create unit tests:
# 1. Add a new test that is dependent upon actually hitting GBDX APIs.
# 2. Decorate the test with @vcr appropriately
# 3. Replace "dummytoken" with a real gbdx token
# 4. Run the tests (existing test shouldn't be affected by use of a real token). This will record a "cassette".
# 5. Replace the real gbdx token with "dummytoken" again
# 6. Edit the cassette to remove any possibly sensitive information (s3 creds for example)
class IdahoTest(unittest.TestCase):
_temp_path = None
@classmethod
def setUpClass(cls):
        mock_gbdx_session = get_mock_gbdx_session(token='dummytoken')
cls.gbdx = Interface(gbdx_connection=mock_gbdx_session)
cls._temp_path = tempfile.mkdtemp()
print("Created: {}".format(cls._temp_path))
def test_init(self):
c = Idaho(self.gbdx)
self.assertTrue(isinstance(c, Idaho))
@vcr.use_cassette('tests/unit/cassettes/test_idaho_get_images_by_catid_and_aoi.yaml', filter_headers=['authorization'])
def test_idaho_get_images_by_catid_and_aoi(self):
i = Idaho(self.gbdx)
catid = '10400100203F1300'
aoi_wkt = "POLYGON ((-105.0207996368408345 39.7338828628182839, -105.0207996368408345 39.7365972921260067, -105.0158751010894775 39.7365972921260067, -105.0158751010894775 39.7338828628182839, -105.0207996368408345 39.7338828628182839))"
results = i.get_images_by_catid_and_aoi(catid=catid, aoi_wkt=aoi_wkt)
assert len(results['results']) == 2
@vcr.use_cassette('tests/unit/cassettes/test_idaho_get_images_by_catid.yaml', filter_headers=['authorization'])
def test_idaho_get_images_by_catid(self):
i = Idaho(self.gbdx)
catid = '10400100203F1300'
results = i.get_images_by_catid(catid=catid)
assert len(results['results']) == 12
@vcr.use_cassette('tests/unit/cassettes/test_idaho_describe_images.yaml', filter_headers=['authorization'])
def test_idaho_describe_images(self):
i = Idaho(self.gbdx)
catid = '10400100203F1300'
description = i.describe_images(i.get_images_by_catid(catid=catid))
assert description['10400100203F1300']['parts'][1]['PAN']['id'] =='b1f6448b-aecd-4d9b-99ec-9cad8d079043'
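    # Illustrative skeleton following steps 1-6 above (kept commented out so
    # the suite still runs): record a fresh cassette, then point the decorator
    # at it; the cassette path and catalog id below are placeholders.
    # @vcr.use_cassette('tests/unit/cassettes/test_idaho_new_case.yaml', filter_headers=['authorization'])
    # def test_idaho_new_case(self):
    #     i = Idaho(self.gbdx)
    #     results = i.get_images_by_catid(catid='<CATALOG_ID>')
    #     assert len(results['results']) > 0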
| michaelconnor00/gbdxtools | tests/unit/test_idaho.py | Python | mit | 2,604 |
import json
from django_api_tools.APIModel import APIModel, UserAuthCode
from django_api_tools.APIView import APIUrl, ReservedURL, StatusCode
from django_api_tools.tests.models import Foo, Bar, Baz, Qux, TestProfile
from django_api_tools.tests.views import TestAPIView
from django.test import TestCase
from django.test.client import RequestFactory, Client
from django.contrib.auth.models import AnonymousUser, User
from django.core.paginator import EmptyPage, PageNotAnInteger
from django.core.exceptions import ObjectDoesNotExist
__author__ = 'szpytfire'
class APIToolsTestCase(TestCase):
def assertDictKeysEqual(self, dict, keys):
# For related fields, APIModel cuts off the special related syntax when dictifying
# We should therefore do the same when testing for the correct keys
for index, val in enumerate(keys):
prefix = filter(lambda prefix: val.startswith(prefix), APIModel._reserved_prefixes)
if prefix:
keys[index] = keys[index][len(prefix[0]) + 1:]
self.assertSetEqual(set(dict.keys()), set(keys))
class APIModelTestCase(APIToolsTestCase):
fixtures = ['user_testprofile_foo.json', 'bar_baz_qux.json']
def remove_foreign_key_fields(self, fields):
return [field for field in fields if not filter(lambda prefix: field.startswith(prefix), APIModel._reserved_prefixes)]
def test_dictify(self):
foo = Foo.objects.get(id=1)
foo._curr_user = AnonymousUser()
# Test no fields to include returns empty dict
self.assertDictEqual(foo.dictify([], False), {})
# Test random fields to include returns empty dict
self.assertDictEqual(foo.dictify(['bar1', 'bar2'], False), {})
# Test defaults to public user
self.assertDictKeysEqual(foo.dictify(Foo.long_description_fields, False), list(Foo.public_fields))
# Test correct registered user fields returned
foo._user_auth = UserAuthCode.REGISTERED_USER
self.assertDictKeysEqual(foo.dictify(Foo.long_description_fields, False), list(Foo.public_fields + Foo.registered_user_fields))
# Test correct owner fields returned
foo._user_auth = UserAuthCode.OWNER
self.assertDictKeysEqual(foo.dictify(Foo.long_description_fields, False), list(Foo.public_fields + Foo.registered_user_fields + Foo.owner_only_fields))
def test_dictify_helper(self):
user = User.objects.get(id=1)
foo = Foo.objects.get(id=1)
foo.set_user_auth(user)
# Test no dictified fields returned for empty fields to return
self.assertDictEqual(foo.dictify_helper(Foo.public_fields, [], False), {})
# Test no dictified fields returned for fields which aren't in the auth level
self.assertDictEqual(foo.dictify_helper(Foo.public_fields, ['bar1', 'bar2'], False), {})
# Test regular field is set in the dictionary
dictified_foo = foo.dictify_helper(Foo.public_fields, Foo.public_fields, False)
self.assertEqual(dictified_foo['id'], foo.id)
# Test invalid regular fields is set as None
non_existent_field = ('test', )
dictified_foo = foo.dictify_helper(non_existent_field, non_existent_field, False)
self.assertIsNone(dictified_foo[non_existent_field[0]])
# Test invalid related field is set as None
non_existent_rel_field = ('fk_short_test', )
dictified_foo = foo.dictify_helper(non_existent_rel_field, non_existent_rel_field, False)
self.assertIsNone(dictified_foo['test'])
# Test fk_short only returns the foreign model's ID
fk_short_field = ('fk_short_baz', )
bar = Bar.objects.get(id=1)
bar.set_user_auth(user)
dictified_bar = bar.dictify_helper(fk_short_field, fk_short_field, False)
self.assertEqual(len(dictified_bar), 1)
self.assertDictKeysEqual(dictified_bar['baz'], self.remove_foreign_key_fields(bar.baz.short_description_fields))
# Test fk_long returns the foreign model's dictify_long()
fk_long_field = ('fk_long_baz', )
dictified_bar = bar.dictify_helper(fk_long_field, fk_long_field, False)
self.assertEqual(len(dictified_bar), 1)
self.assertDictKeysEqual(dictified_bar['baz'], self.remove_foreign_key_fields(bar.baz.short_description_fields + bar.baz.long_description_fields))
# Test onetoone_short only returns the foreign model's ID
onetoone_short_field = ('onetoone_short_owner', )
dictified_foo = foo.dictify_helper(onetoone_short_field, onetoone_short_field, False)
self.assertEqual(len(dictified_foo), 1)
self.assertDictKeysEqual(dictified_foo['owner'], self.remove_foreign_key_fields(foo.owner.short_description_fields))
# Test onetoone_long returns the foreign model's dictify_long()
fk_long_field = ('onetoone_long_owner', )
qux = Qux.objects.get(id=1)
qux.set_user_auth(user)
dictified_qux = qux.dictify_helper(fk_long_field, fk_long_field, False)
self.assertEqual(len(dictified_qux), 1)
self.assertDictKeysEqual(dictified_qux['owner'], self.remove_foreign_key_fields(qux.owner.short_description_fields + qux.owner.long_description_fields))
# Test rel_short only returns the related models' ID's
rel_short_field = ('rel_short_bars', )
baz = Baz.objects.get(id=1)
baz.set_user_auth(user)
dictified_baz = baz.dictify_helper(rel_short_field, rel_short_field, False)
self.assertEqual(len(dictified_baz), 1)
self.assertEqual(len(dictified_baz['bars']), baz.bars.all().count())
self.assertDictKeysEqual(dictified_baz['bars'][0], self.remove_foreign_key_fields(baz.bars.all()[0].short_description_fields))
# Test rel_long returns the related models' dictify_long()
rel_long_field = ('rel_long_bars', )
dictified_baz = baz.dictify_helper(rel_long_field, rel_long_field, False)
self.assertEqual(len(dictified_baz), 1)
self.assertEqual(len(dictified_baz['bars']), baz.bars.all().count())
self.assertDictKeysEqual(dictified_baz['bars'][0], self.remove_foreign_key_fields(baz.bars.all()[0].short_description_fields + baz.bars.all()[0].long_description_fields))
# Test m2m_short only returns the related models' ID's
m2m_short_field = ('m2m_short_foos', )
qux = Qux.objects.get(id=1)
qux.set_user_auth(user)
qux.foos.add(foo)
dictified_qux = qux.dictify_helper(m2m_short_field, m2m_short_field, False)
self.assertEqual(len(dictified_qux), 1)
self.assertEqual(len(dictified_qux['foos']), qux.foos.all().count())
self.assertDictKeysEqual(dictified_qux['foos'][0], self.remove_foreign_key_fields(qux.foos.all()[0].short_description_fields))
# Test m2m_long returns the related models' dictify_long()
m2m_long_field = ('m2m_long_foos', )
dictified_qux = qux.dictify_helper(m2m_long_field, m2m_long_field, False)
self.assertEqual(len(dictified_qux), 1)
self.assertEqual(len(dictified_qux['foos']), qux.foos.all().count())
self.assertDictKeysEqual(dictified_qux['foos'][0], self.remove_foreign_key_fields(qux.foos.all()[0].short_description_fields + qux.foos.all()[0].long_description_fields))
def test_dictify_short(self):
# Test that the method only returns the short description fields
foo = Foo.objects.get(id=1)
self.assertDictKeysEqual(foo.dictify_short(False), Foo.short_description_fields)
def test_dictify_long(self):
# Test that the method returns the long and short description fields
foo = Foo.objects.get(id=1)
owner = TestProfile.objects.get(id=1).user
foo.set_user_auth(owner)
self.assertDictKeysEqual(foo.dictify_long(False), list(Foo.short_description_fields + Foo.long_description_fields))
def test_dictify_with_auth(self):
active_foo = Foo.objects.get(id=1)
deactivated_foo = Foo.objects.filter(active=0)[0]
owner = User.objects.get(id=1)
not_owner = User.objects.get(id=2)
public_user = AnonymousUser()
# Test whether a deactivated instance returns None
self.assertIsNone(deactivated_foo.dictify_with_auth(owner, False))
# Test whether a public user only sees the public fields
self.assertDictKeysEqual(active_foo.dictify_with_auth(public_user, False), list(Foo.public_fields))
# Test whether an owner can view all the fields
self.assertDictKeysEqual(active_foo.dictify_with_auth(owner, False), list(Foo.public_fields + Foo.registered_user_fields + Foo.owner_only_fields))
# Test whether a registered user sees registered user + public fields
self.assertDictKeysEqual(active_foo.dictify_with_auth(not_owner, False), list(Foo.public_fields + Foo.registered_user_fields))
def test_is_owner(self):
# Test ownership of Foo
foo = Foo.objects.get(id=1)
# Test Foo with its rightful owner
owner = User.objects.get(id=1)
self.assertTrue(foo.is_owner(owner))
# Test Foo with an incorrect owner
not_owner = User.objects.get(id=2)
self.assertFalse(foo.is_owner(not_owner))
# Test Bar with an arbitrary user - Bar's don't have an owner.
bar = Bar.objects.get(id=1)
self.assertTrue(bar.is_owner(owner))
def test_get_all(self):
user = User.objects.get(id=1)
# Test number of Foo's equal to 10
self.assertEqual(len(Foo.get_all(1, user)), Foo.pagination)
# Test number of Bar's equal to number of Bar's (< 10)
self.assertEqual(len(Bar.get_all(1, user)), Bar.objects.all().count())
# Test invalid page number raises expected exception
with self.assertRaises(EmptyPage):
Bar.get_all(2, user)
# Test invalid page value raises expected exception
with self.assertRaises(PageNotAnInteger):
Bar.get_all("foo", user)
def test_get_model_instance(self):
# Test getting a Foo object with a valid ID
valid_foo_id = 1
# Make sure the method returns the right object
foo = Foo.objects.get(id=valid_foo_id)
self.assertEqual(Foo.get_model_instance(valid_foo_id), foo)
# Test invalid lookup raises expected exception
with self.assertRaises(ValueError):
Foo.objects.get(id="foo")
with self.assertRaises(ObjectDoesNotExist):
Foo.objects.get(id=20)
class APIViewTestCase(APIToolsTestCase):
fixtures = ['user_testprofile_foo.json', 'bar_baz_qux.json']
urls = 'django_api_tools.tests.urls'
def setUp(self):
self.factory = RequestFactory()
def test_get(self):
t = TestAPIView()
# Test invalid request gives back 404
request = self.factory.get('/test_api/')
response = t.get(request)
self.assertEqual(response.status_code, StatusCode.NOT_FOUND)
# Test reserved URL gives back 200
request = self.factory.get('/test_api/{}'.format(ReservedURL.CSRFTOKEN))
response = t.get(request)
self.assertEqual(response.status_code, StatusCode.OK)
user = User.objects.get(id=1)
# Test model request returns 200
request = self.factory.get('/test_api/foo/')
request.user = user
response = t.get(request)
self.assertEqual(response.status_code, StatusCode.OK)
# Test get instance gives back 200
request = self.factory.get('/test_api/foo/1/')
request.user = user
response = t.get(request)
self.assertEqual(response.status_code, StatusCode.OK)
# Test custom request on model with custom_request implemented gives back 200
request = self.factory.get('/test_api/qux/1/custom/')
request.user = user
response = t.get(request)
self.assertEqual(response.status_code, StatusCode.OK)
# Test custom request on model without implementation gives back 404
request = self.factory.get('/test_api/foo/1/custom/')
request.user = user
response = t.get(request)
self.assertEqual(response.status_code, StatusCode.NOT_FOUND)
def test_post(self):
t = TestAPIView()
# Test invalid request gives back 404
request = self.factory.post('/test_api/')
response = t.post(request)
self.assertEqual(response.status_code, StatusCode.NOT_FOUND)
# Test reserved URL gives back 200
request = self.factory.post('/test_api/{}/'.format(ReservedURL.CSRFTOKEN))
response = t.post(request)
self.assertEqual(response.status_code, StatusCode.OK)
user = User.objects.get(id=1)
# Test post model request (create) returns 200
APIUrl.ADDITIONAL_FIELDS = list()
request = self.factory.post('/test_api/foo/', data={"f2": "foo"})
request.user = user
response = t.post(request)
self.assertEqual(response.status_code, StatusCode.OK)
# Test post instance (update) gives back 200
APIUrl.ADDITIONAL_FIELDS = list()
foo = Foo.objects.get(id=1)
request = self.factory.post('/test_api/foo/{}/'.format(foo.id), data={"f1": True})
request.user = user
response = t.post(request)
self.assertEqual(response.status_code, StatusCode.OK)
def test_get_all(self):
user = User.objects.get(id=1)
t = TestAPIView()
# Test get first page of Foo's gives back 10 results
request = self.factory.get('/test_api/foo/')
request.user = user
t._endpoint_model = Foo
response = t._get_all(request)
self.assertEqual(len(json.loads(response.content)), 10)
# Test second page of Foo's gives back 1 results
request = self.factory.get('/test_api/foo/', data={"page": 2})
request.user = user
t._endpoint_model = Foo
response = t._get_all(request)
self.assertEqual(len(json.loads(response.content)), 1)
# Test third page of Foo's gives back 404
request = self.factory.get('/test_api/foo/', data={"page": 3})
request.user = user
t._endpoint_model = Foo
response = t._get_all(request)
self.assertIsNone(json.loads(response.content))
def test_get_instance(self):
user = User.objects.get(id=1)
t = TestAPIView()
# Test Foo ID = 1 gives back 200/ correct Foo
foo = Foo.objects.get(id=1)
foo_dict = foo.dictify_with_auth(user, short_dict=False)
request = self.factory.get('/test_api/foo/{}/'.format(foo.id))
request.user = user
t._endpoint_model = Foo
t._url_validator = APIUrl(request)
response = t._get_instance(request)
self.assertDictEqual(json.loads(response.content), foo_dict)
self.assertEqual(response.status_code, StatusCode.OK)
# Test Foo ID = 22 gives back 404/ none
request = self.factory.get('/test_api/foo/22/')
request.user = user
t._endpoint_model = Foo
t._url_validator = APIUrl(request)
response = t._get_instance(request)
self.assertEqual(response.status_code, StatusCode.NOT_FOUND)
self.assertIsNone(json.loads(response.content))
# Test Foo ID = "foo" gives back 404
request = self.factory.get('/test_api/foo/foo/')
request.user = user
t._endpoint_model = Foo
t._url_validator = APIUrl(request)
response = t._get_instance(request)
self.assertEqual(response.status_code, StatusCode.NOT_FOUND)
self.assertIsNone(json.loads(response.content))
# Test Qux /custom/ gives back 200/ correct value
request = self.factory.get('/test_api/qux/custom/')
request.user = user
t._endpoint_model = Qux
t._url_validator = APIUrl(request)
response = t._get_instance(request)
self.assertEqual(response.status_code, StatusCode.OK)
self.assertEqual(json.loads(response.content), Qux.api_custom_request(request))
def test_post_handler(self):
t = TestAPIView()
# Test non-authenticated user and private endpoint gives back 404
request = self.factory.post('/test_api/qux/')
request.user = AnonymousUser()
public_endpoints = (Foo, )
t._endpoint_model = Qux
response = t._post_handler(request, public_endpoints)
self.assertEqual(response.status_code, StatusCode.NOT_FOUND)
# Test create:
f2_val = "hello"
user = User.objects.get(id=1)
request = self.factory.post('/test_api/foo/', data={"f2": f2_val})
request.user = user
public_endpoints = (Qux, )
t._endpoint_model = Foo
response = t._post_handler(request, public_endpoints)
foo_dict = json.loads(response.content)
self.assertEqual(response.status_code, StatusCode.OK)
self.assertEqual(foo_dict['f2'], f2_val)
self.assertEqual(foo_dict, Foo.objects.get(id=foo_dict['id']).dictify_with_auth(user, short_dict=False))
# Test create Foo with bad/missing fields returns 404
f1_val = "hello"
request = self.factory.post('/test_api/foo/', data={"f1": f1_val})
request.user = user
response = t._post_handler(request, public_endpoints)
self.assertEqual(response.status_code, StatusCode.NOT_FOUND)
# Test update with owner returns 200 + updated foo object
foo = Foo.objects.get(id=1)
f1_before = foo.f1
foo1_url = '/test_api/foo/{}/'.format(foo.id)
request = self.factory.post(foo1_url, data={"f1": True})
request.user = user
t._url_validator = APIUrl(request)
response = t._post_handler(request, public_endpoints, create=False)
self.assertEqual(response.status_code, StatusCode.OK)
response_content = json.loads(response.content)
self.assertEqual(response_content['f1'], f1_before + 1)
new_foo = Foo.objects.get(id=1)
self.assertDictEqual(new_foo.dictify_with_auth(user, False), response_content)
# Test update with non owner returns 404
request = self.factory.post(foo1_url, data={"f1": True})
request.user = AnonymousUser()
response = t._post_handler(request, public_endpoints, create=False)
self.assertEqual(response.status_code, StatusCode.NOT_FOUND)
# Test deactivate gives back 404 + Test that the deactivate date is set
request = self.factory.post(foo1_url, data={"deactivate": True})
request.user = user
response = t._post_handler(request, public_endpoints, create=False)
self.assertEqual(response.status_code, StatusCode.NOT_FOUND)
def test_get_json_response_for_instance(self):
foo = Foo.objects.get(id=1)
t = TestAPIView()
# Test Anonymous user gives back public fields
user = AnonymousUser()
response_content = t.get_json_response_for_instance(foo, user).content
self.assertDictKeysEqual(json.loads(response_content), Foo.public_fields)
# Test registered user gives back all fields
user = User.objects.get(id=2)
response_content = t.get_json_response_for_instance(foo, user).content
self.assertDictKeysEqual(json.loads(response_content), list(Foo.public_fields + Foo.registered_user_fields))
# Test owner gives back all fields
user = User.objects.get(id=1)
response_content = t.get_json_response_for_instance(foo, user).content
self.assertDictKeysEqual(json.loads(response_content), list(Foo.public_fields + Foo.registered_user_fields + Foo.owner_only_fields))
def test_validate_request(self):
t = TestAPIView()
# Test invalid request returns False
request = self.factory.get('/test_api/fob/')
self.assertFalse(t._validate_request(request))
request = self.factory.get('/test_api/123/123/123/')
self.assertFalse(t._validate_request(request))
# Test valid request returns True
request = self.factory.get('/test_api/foo/')
self.assertTrue(t._validate_request(request))
# Test reserved URL returns True
request = self.factory.get('/test_api/{}/'.format(ReservedURL.LOGIN))
self.assertTrue(t._validate_request(request))
def test_handle_login_logout_request(self):
# We need to use Django's Client to test the login
# as RequestFactory doesn't offer any middleware by default
c = Client()
login_url = "/test_api/{}/".format(ReservedURL.LOGIN)
# Test valid user login returns the user's profile + sets cookies
valid_user = User.objects.get(id=1)
new_password = "newpassword1"
valid_user.set_password(new_password)
valid_user.save()
response = c.post(login_url, data={"username": valid_user.username, "password": new_password})
self.assertEqual(response.status_code, StatusCode.OK)
self.assertDictEqual(json.loads(response.content), valid_user.test_profile.dictify_with_auth(valid_user, short_dict=False))
# Test that logout deletes the authenticated session
session_val_before = response.cookies['sessionid'].value
response = c.post("/test_api/{}/".format(ReservedURL.LOGOUT))
session_val_after = response.cookies['sessionid'].value
self.assertNotEqual(session_val_before, session_val_after)
# Test an invalid login returns 404
response = c.post(login_url, data={"username": valid_user.username, "password": "badpassword"})
self.assertEqual(response.status_code, StatusCode.NOT_FOUND)
# Test inactive user login returns 404
valid_user.is_active = False
valid_user.save()
response = c.post(login_url, data={"username": valid_user.username, "password": new_password})
self.assertEqual(response.status_code, StatusCode.NOT_FOUND)
def test_handle_csrf_request(self):
# Test csrf request sets a token
c = Client()
response = c.get("/test_api/{}".format(ReservedURL.CSRFTOKEN))
self.assertIsNotNone(response.cookies['csrftoken'].value)
def test_handle_custom_request(self):
t = TestAPIView()
# Test model which handles custom request returns 200
request = self.factory.get('/test_api/qux/custom/')
t._endpoint_model = Qux
response = t.handle_custom_request(request)
self.assertEqual(response.status_code, StatusCode.OK)
# Test model which doesn't handle custom request returns 404
request = self.factory.get('/test_api/foo/custom/')
t._endpoint_model = Foo
response = t.handle_custom_request(request)
self.assertEqual(response.status_code, StatusCode.NOT_FOUND)
class APIUrlTestCase(APIToolsTestCase):
def setUp(self):
self.factory = RequestFactory()
def test_split_url_components(self):
# Test an invalid request
request = self.factory.get("/api/")
splitter = APIUrl(request)
self.assertFalse(splitter.is_valid_request())
# Test a model request
MODEL_NAME = "foo"
request = self.factory.get("/api/{}/".format(MODEL_NAME))
splitter = APIUrl(request)
self.assertTrue(splitter.is_valid_request())
self.assertTrue(splitter.is_model_request())
self.assertEqual(MODEL_NAME, splitter.REQUESTED_MODEL)
# Test a model instance request
MODEL_INSTANCE = "1"
request = self.factory.get("/api/{}/{}/".format(MODEL_NAME, MODEL_INSTANCE))
splitter = APIUrl(request)
self.assertTrue(splitter.is_valid_request())
self.assertTrue(splitter.is_model_instance_request())
self.assertEqual(MODEL_NAME, splitter.REQUESTED_MODEL)
self.assertEqual(MODEL_INSTANCE, splitter.REQUESTED_MODEL_INSTANCE)
# Test a reserved URL request
reserved_url = ReservedURL.LOGOUT
request = self.factory.get("/api/{}/".format(reserved_url))
splitter = APIUrl(request)
self.assertTrue(splitter.is_valid_request())
self.assertTrue(splitter.is_reserved_url())
self.assertEqual(reserved_url, splitter.RESERVED_URL)
# Test a custom request
reserved_url = ReservedURL.LOGOUT
request = self.factory.get("/api/{}/".format(reserved_url))
splitter = APIUrl(request)
self.assertTrue(splitter.is_valid_request())
self.assertTrue(splitter.is_reserved_url())
self.assertEqual(reserved_url, splitter.RESERVED_URL) | szpytfire/django-api-tools | django_api_tools/tests/tests.py | Python | mit | 24,690 |
# -*- coding: utf-8 -*-
from django.conf import settings
from django.contrib.auth.models import User
from django.db import models
from django.db.models.signals import post_save, pre_save
from django.dispatch import receiver
from django.utils.translation import ugettext as _
class UserProfile(models.Model):
'''
    username: the username is unique and may be Null
    nickname: the nickname can be changed and may be duplicated
'''
user = models.OneToOneField(User, unique=True, related_name='profile', verbose_name=_('用户'))
username = models.CharField(blank=True, null=True, unique=True, max_length=255, verbose_name=_('用户名(唯一)'))
nickname = models.CharField(blank=True, max_length=255, db_index=True, default='', verbose_name=_('昵称(可变)'))
created_at = models.DateTimeField(auto_now_add=True, verbose_name=_('创建日期'))
updated_at = models.DateTimeField(auto_now=True, verbose_name=_('修改日期'))
class Meta:
db_table = "auth_userprofile"
verbose_name = _('用户附加信息')
verbose_name_plural = _('用户附加信息')
@receiver(pre_save, sender=User)
def pre_save_user_handler(sender, instance, **kwargs):
'''
    Before saving a user, if EMAIL_AS_USERNAME is enabled, the email field needs to be set to the username
'''
if settings.FEATURES.get('EMAIL_AS_USERNAME'):
if not instance.email or instance.email.strip() != instance.username.strip():
instance.email = instance.username
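# The handler above reads a FEATURES dict from settings; a minimal example of
# the switch it expects (illustrative only) is:
#   FEATURES = {'EMAIL_AS_USERNAME': True}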
@receiver(post_save, sender=User)
def post_save_user_handler(sender, instance, created, **kwargs):
try:
profile = instance.profile
except UserProfile.DoesNotExist:
profile = UserProfile(user=instance)
profile.save()
@receiver(pre_save, sender=UserProfile)
def pre_save_userprofile_handler(sender, instance, **kwargs):
'''
    Before saving a profile, if the username is empty, set it to None to sidestep the unique check
'''
if not instance.username:
instance.username = None
| duoduo369/django-scaffold | myauth/models.py | Python | mit | 1,991 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# This file is part of convertdate.
# http://github.com/fitnr/convertdate
# Licensed under the GPL-v3.0 license:
# http://opensource.org/licenses/MIT
# Copyright (c) 2016, fitnr <fitnr@fakeisthenewreal>
from math import trunc
from .utils import ceil, jwday, monthcalendarhelper
from . import gregorian
EPOCH = 1948439.5
WEEKDAYS = ("al-'ahad", "al-'ithnayn",
"ath-thalatha'", "al-'arb`a'",
"al-khamis", "al-jum`a", "as-sabt")
HAS_29_DAYS = (2, 4, 6, 8, 10)
HAS_30_DAYS = (1, 3, 5, 7, 9, 11)
def leap(year):
'''Is a given year a leap year in the Islamic calendar'''
return (((year * 11) + 14) % 30) < 11
def to_jd(year, month, day):
'''Determine Julian day count from Islamic date'''
return (day + ceil(29.5 * (month - 1)) + (year - 1) * 354 + trunc((3 + (11 * year)) / 30) + EPOCH) - 1
def from_jd(jd):
'''Calculate Islamic date from Julian day'''
jd = trunc(jd) + 0.5
year = trunc(((30 * (jd - EPOCH)) + 10646) / 10631)
month = min(12, ceil((jd - (29 + to_jd(year, 1, 1))) / 29.5) + 1)
day = int(jd - to_jd(year, month, 1)) + 1
return (year, month, day)
def from_gregorian(year, month, day):
return from_jd(gregorian.to_jd(year, month, day))
def to_gregorian(year, month, day):
return gregorian.from_jd(to_jd(year, month, day))
def month_length(year, month):
if month in HAS_30_DAYS or (month == 12 and leap(year)):
return 30
return 29
def monthcalendar(year, month):
start_weekday = jwday(to_jd(year, month, 1))
monthlen = month_length(year, month)
return monthcalendarhelper(start_weekday, monthlen)
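# Small illustrative round trip through the converters above (guarded so that
# importing the module stays side-effect free; the date is arbitrary).
if __name__ == "__main__":
    islamic_date = from_gregorian(2016, 7, 6)
    print(islamic_date)                 # (year, month, day) in the Islamic calendar
    print(to_gregorian(*islamic_date))  # should give back (2016, 7, 6)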
| mpercich/Calendarize | ios/dateparser/lib/python2.7/site-packages/convertdate/islamic.py | Python | mit | 1,668 |
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# @file ut.py
# @brief The main unit test program of the whole project
# README: organize the unit tests by number range
# refer to the UTGeneral functions
# print the suggested procedure in the console
# print the suggested check procedure in the console
# support the currently supported important features
# this unit test is included in the release procedure
# MODULE_ARCH:
# CLASS_ARCH: UTGeneral
# GLOBAL USAGE:
#standard
import unittest
#homemake
import lib.globalclasses as gc
from lib.const import *
##### Unit test section ####
#the test ID provide the order of testes.
class UTGeneral(unittest.TestCase):
#local
#ID:0-99
def test_01_setting_signature(self):
print("\nThe expected unit test environment is")
print("1. TBD")
self.assertEqual(gc.SETTING["SIGNATURE"],'LASS-SIM')
def test_02_check_library(self):
#check external library that need to be installed
import simpy
from configobj import ConfigObj
import urllib
import simplejson
import requests
from vincenty import vincenty
import matplotlib
import numpy
import pygrib
def test_03_check_dir_exist(self):
pass
def test_04_check_grib(self):
import pygrib # import pygrib interface to grib_api
grbs = pygrib.open('include/M-A0060-000.grb2')
print("grbs[:4] count=%i" %(len(grbs[:4])))
def test_11_loadjson(self):
gc.LASSDATA.load_site_list()
print("LASS sites count = %i" % (len(gc.LASSDATA.sites)))
self.assertTrue(len(gc.LASSDATA.sites)>0) | LinkItONEDevGroup/LASS | LASS-Simulator/codes/ut.py | Python | mit | 1,669 |
import asyncio
import unittest
import random
from gremlinpy import Gremlin
from . import ConnectionTestCases, EntityTestCases, MapperTestCases
from gizmo import Mapper, Request, Collection, Vertex, Edge
from gizmo.mapper import EntityMapper
class BaseTests(unittest.TestCase):
def setUp(self):
self.request = Request('localhost', port=8182)
self.gremlin = Gremlin('gizmo_testing')
self.mapper = Mapper(self.request, self.gremlin)
self.ioloop = asyncio.get_event_loop()
super(BaseTests, self).setUp()
def tearDown(self):
super(BaseTests, self).tearDown()
async def purge(self):
script = "%s.V().map{it.get().remove()}" % self.gremlin.gv
res = await self.mapper.query(script=script)
return res
class ConnectionTests(BaseTests, ConnectionTestCases):
pass
class EntityTests(EntityTestCases, BaseTests):
pass
class MapperTests(MapperTestCases, BaseTests):
pass
class CollectionTests(BaseTests):
pass
class TraversalTests(BaseTests):
pass
if __name__ == '__main__':
unittest.main()
| emehrkay/Gizmo | gizmo/test/integration/tinkerpop.py | Python | mit | 1,104 |
"""
This is used to pack testlib into a json
Then you can load into database by using:
manage.py loaddata <fixturename>
fixturename here is `testlib.json`
"""
import hashlib
import json
from os import path, listdir
def hash(binary):
return hashlib.sha256(binary).hexdigest()
category = ['checker', 'generator', 'validator']
father_dir = path.dirname(__file__)
output_file = open(path.join(father_dir, 'testlib.json'), 'w')
data = []
for cat in category:
for file in listdir(path.join(father_dir, cat)):
if file.startswith('.'):
continue
with open(path.join(father_dir, cat, file)) as fs:
code = fs.read()
with open(path.join(father_dir, cat, file), 'rb') as fs:
code_binary = fs.read()
data.append(dict(model='problem.SpecialProgram',
fields=dict(
fingerprint=hash(code_binary),
filename=file,
code=code,
lang='cpp',
category=cat,
builtin=True
)))
json.dump(data, output_file)
output_file.close()
| ultmaster/eoj3 | problem/testlib/pack.py | Python | mit | 1,142 |
"""ShipToasting web handlers."""
import os
import sys
import atexit
import random
import traceback
import gevent
from flask import Response
from flask import redirect
from flask import render_template
from flask import request
from flask import session
from apscheduler.schedulers.gevent import GeventScheduler
from shiptoasting import app
from shiptoasting import HEARTBEAT
from shiptoasting import requires_logged_in
from shiptoasting.storage import ShipToasts
from shiptoasting.storage import ShipToaster
@app.route("/", methods=["GET"])
def index():
"""Main index. Displays most recent then streams."""
shiptoasts = app.shiptoasts.get_shiptoasts()
return render_template(
"index.html",
shiptoasts=shiptoasts,
last_seen=shiptoasts[0].id if shiptoasts else None,
)
@app.route("/", methods=["POST"])
@requires_logged_in
def add_shiptoast():
"""Accepts the POST form, stores the content."""
post_content = request.form.get("content").strip()
if post_content:
if len(post_content) > 500:
post_content = "{}... and I've said too much.".format(
post_content[:500]
)
posted_authors = app.shiptoasts.add_shiptoast(
post_content,
session["character"]["CharacterName"],
session["character"]["CharacterID"],
)
if session["character"]["CharacterID"] not in posted_authors:
# spam filtered, time to calm the fuck down
enhance_your_calm_videos = [
"eCidRemUTKo",
"tYg6nP7yRRk",
"txQ6t4yPIM0",
"EYi5aW1GdUU",
"d-diB65scQU",
]
return redirect("https://www.youtube.com/watch?v={}".format(
random.choice(enhance_your_calm_videos)
))
return redirect("/")
@app.route("/shiptoasts")
def shiptoasts():
"""Returns the shiptoasts stream object."""
last_seen_id = request.args.get("last_seen", "None")
if last_seen_id == "None":
last_seen_id = None
else:
last_seen_id = int(last_seen_id)
return Response(
streaming_shiptoasts(last_seen_id),
mimetype="text/event-stream",
)
def streaming_shiptoasts(last_seen_id):
"""Iterator to asyncly deliver shiptoasts."""
for shiptoast in ShipToaster(last_seen_id).iter():
if shiptoast is HEARTBEAT:
data = HEARTBEAT
else:
data = (
'{id}%{author}%'
'<div class="shiptoaster">'
'<div class="prof_pic"><img src='
'"https://image.eveonline.com/Character/{author_id}_256.jpg" '
'height="256" width="256" alt="{author}" /></div>'
'<div class="author{ccp}">{author}</div>'
'</div>'
'<div class="content">{content}</div>'
'<div class="time">{time:%b %e, %H:%M:%S}</div>'
).format(
ccp=" ccp" * int(shiptoast.author.startswith("CCP ")),
**shiptoast._asdict()
)
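        # "data: ...\n\n" is the Server-Sent Events framing matching the
        # text/event-stream response declared in shiptoasts() above.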
yield "data: {}\n\n".format(data)
    return  # the generator just ends; raising StopIteration here would become a RuntimeError under PEP 479
def traceback_formatter(excpt, value, tback):
"""Catches all exceptions and re-formats the traceback raised."""
sys.stdout.write("".join(traceback.format_exception(excpt, value, tback)))
def hook_exceptions():
"""Hooks into the sys module to set our formatter."""
if hasattr(sys.stdout, "fileno"): # when testing, sys.stdout is StringIO
# reopen stdout in non buffered mode
sys.stdout = os.fdopen(sys.stdout.fileno(), "wb", 0)
# set the hook
sys.excepthook = traceback_formatter
def production(*_, **settings):
"""Hooks exceptions and returns the Flask app."""
hook_exceptions()
app.shiptoasts = ShipToasts()
app.shiptoasts.initial_fill()
scheduler = GeventScheduler()
scheduler.add_job(app.shiptoasts.periodic_call, "interval", seconds=30)
cleaner = scheduler.start()
listener = gevent.Greenlet.spawn(app.shiptoasts.listen_for_updates)
atexit.register(cleaner.join, timeout=2)
atexit.register(listener.join, timeout=2)
atexit.register(scheduler.shutdown)
return app
def development():
"""Debug/cmdline entry point."""
production().run(
host="0.0.0.0",
port=8080,
debug=True,
use_reloader=False,
threaded=True,
)
if __name__ == "__main__":
development()
| ccpgames/shiptoasting | shiptoasting/web.py | Python | mit | 4,504 |
"""
flask.ext.acl
=============
This extension provides an Access Control implementation for Flask, originally written for `tipfy <http://www.tipfy.org/>`_.
Links
-----
* `Documentation <http://www.tipfy.org/wiki/extensions/acl/>`_
* `Source Code Repository <http://code.google.com/p/tipfy-ext-acl/>`_
* `Issue Tracker <http://code.google.com/p/tipfy-ext-acl/issues/list>`_
About tipfy
-----------
* `Home page <http://www.tipfy.org/>`_
* `Extension list <http://www.tipfy.org/wiki/extensions/>`_
* `Discussion Group <http://groups.google.com/group/tipfy>`_
"""
from setuptools import setup
setup(
name = 'flask.ext.acl',
version = '0.6',
license = 'BSD',
url = 'https://github.com/guotie/flask-acl',
description = 'Access Control extension for flask',
long_description = __doc__,
author = 'guotie',
author_email = '[email protected]',
zip_safe = False,
platforms = 'any',
packages = [
'flask',
'flask.ext',
],
namespace_packages = [
'flask',
'flask.ext',
],
include_package_data = True,
install_requires = [
'flask',
'flask.ext.sqlalchemy',
'flask.ext.cache',
],
classifiers = [
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
| guotie/flask-acl | setup.py | Python | mit | 1,588 |
# -*- coding: utf-8 -*-
from django.db import models
from ..Models import *
from center.Exceptions.ModelsExceptions import *
class Tools_todo(models.Model):
STATUS_TYPES = (
(1, u'New'),
(2, u'Doing'),
(3, u'Waiting'),
(4, u'Done'),
)
SPECIES_TYPES = (
        (1, u'Task'), # query
        (2, u'Check'), # task
)
PRIORITY_TYPES = (
(1, 'Low'),
(2, 'Normal'),
(3, 'High'),
)
user = models.ForeignKey(User)
title = models.CharField(max_length=100)
summary = models.TextField()
status = models.IntegerField(choices=STATUS_TYPES, default=1)
species = models.IntegerField(choices=SPECIES_TYPES, default=1)
priority = models.IntegerField(choices=PRIORITY_TYPES, default=2)
creation_date = models.DateTimeField(auto_now_add=True)
modify_date = models.DateTimeField(auto_now=True)
model_name = "计划"
def create(self, *args, **kwargs):
try:
super(Tools_todo, self).save(*args, **kwargs)
except:
raise createException(self.model_name)
def update(self, *args, **kwargs):
try:
            # django.db.models.Model has no update(); persist changes via save()
            super(Tools_todo, self).save(*args, **kwargs)
except:
raise updateException(self.model_name)
def delete(self, *args, **kwargs):
try:
super(Tools_todo, self).delete(*args, **kwargs)
except:
raise updateException(self.model_name) | swxs/web | cloudplat/center/Models/Tools_todo_Model.py | Python | mit | 1,457 |
#!/usr/bin/env python
telescope = "ATCA"
latitude_deg = -30.312906
diameter_m = 22.0
import os
import sys
from util_misc import ascii_dat_read
#-----------------------------------------------------------------------------#
def main():
# Read the station lookup table
col, dummy = ascii_dat_read("ATCA_stations.txt", delim=" ",
doFloatCols=[2, 3])
statDict = {}
for station, N, W in zip(col[1], col[2], col[3]):
statDict[station] = (-W+1622.449, N)
# Read the array configuration file
col, dummy = ascii_dat_read("ATCA_configs.txt", delim=" ",
doFloatCols=[2, 3, 4, 5, 6, 7])
for confName, A1, A2, A3, A4, A5, A6 in zip(col[1], col[2], col[3], col[4],
col[5], col[6], col[7]):
if A1=='':
continue
outFileName = "ATCA_%s.config" % confName
FH = open(outFileName, "w")
FH.write("#" + "-"*78 + "#\n")
FH.write("#\n")
FH.write("# Array definition file for the %s %s configuration.\n"
% (telescope, confName))
FH.write("#\n")
FH.write("#" + "-"*78 + "#\n")
FH.write("\n")
FH.write("# Name of the telescope\n")
FH.write("telescope = %s\n" % telescope)
FH.write("\n")
FH.write("# Name of the configuration\n")
FH.write("config = %s\n" % confName)
FH.write("\n")
FH.write("# Latitude of the array centre\n")
FH.write("latitude_deg = %f\n" % latitude_deg)
FH.write("\n")
FH.write("# Antenna diameter\n")
FH.write("diameter_m = %f\n" % diameter_m)
FH.write("\n")
FH.write("# Antenna coordinates (offset E, offset N)\n")
FH.write("%f, %f\n" % (statDict[A1][0], statDict[A1][1]))
FH.write("%f, %f\n" % (statDict[A2][0], statDict[A2][1]))
FH.write("%f, %f\n" % (statDict[A3][0], statDict[A3][1]))
FH.write("%f, %f\n" % (statDict[A4][0], statDict[A4][1]))
FH.write("%f, %f\n" % (statDict[A5][0], statDict[A5][1]))
FH.write("%f, %f\n" % (statDict[A6][0], statDict[A6][1]))
FH.close()
for confName, A1, A2, A3, A4, A5 in zip(col[1], col[2], col[3], col[4],
col[5], col[6]):
if A1=='':
continue
confName += "_No_6"
outFileName = "ATCA_%s.config" % confName
FH = open(outFileName, "w")
FH.write("#" + "-"*78 + "#\n")
FH.write("#\n")
FH.write("# Array definition file for the %s %s configuration.\n"
% (telescope, confName))
FH.write("#\n")
FH.write("#" + "-"*78 + "#\n")
FH.write("\n")
FH.write("# Name of the telescope\n")
FH.write("telescope = %s\n" % telescope)
FH.write("\n")
FH.write("# Name of the configuration\n")
FH.write("config = %s\n" % confName)
FH.write("\n")
FH.write("# Latitude of the array centre\n")
FH.write("latitude_deg = %f\n" % latitude_deg)
FH.write("\n")
FH.write("# Antenna diameter\n")
FH.write("diameter_m = %f\n" % diameter_m)
FH.write("\n")
FH.write("# Antenna coordinates (offset E, offset N)\n")
FH.write("%f, %f\n" % (statDict[A1][0], statDict[A1][1]))
FH.write("%f, %f\n" % (statDict[A2][0], statDict[A2][1]))
FH.write("%f, %f\n" % (statDict[A3][0], statDict[A3][1]))
FH.write("%f, %f\n" % (statDict[A4][0], statDict[A4][1]))
FH.write("%f, %f\n" % (statDict[A5][0], statDict[A5][1]))
FH.close()
#-----------------------------------------------------------------------------#
if __name__ == "__main__":
    main()
| crpurcell/friendlyVRI | arrays/array_data/ATCA/mk_ATCA_array_configs.py | Python | mit | 3,761 |
__author__ = 'USER'
from learning.mlp.neuralnetwork import NeuralNetwork
from learning.mlp import learning
number_of_layers = 3
size_array = [2, 2, 1]
learning_rate = 0.3
momentum = 0.1
bnn = NeuralNetwork(number_of_layers, size_array, learning_rate, momentum)
xor_in = [
[0, 0],
[0, 1],
[1, 0],
[1, 1]
]
xor_out = [  # XOR truth-table outputs for the inputs above
    [0],
    [1],
    [1],
    [0]
]
learning.learning(bnn, xor_in, xor_out)
print("************** Result ****************")
learning.predict(bnn, xor_in)
| ParkJinSang/Logle | sample/mlp_learn.py | Python | mit | 498 |
import os
import unittest
from conans.model.ref import ConanFileReference, PackageReference
from conans.test.utils.conanfile import TestConanFile
from conans.test.utils.tools import TestClient, TestServer,\
NO_SETTINGS_PACKAGE_ID
from conans.util.files import set_dirty
class PackageIntegrityTest(unittest.TestCase):
def remove_locks_test(self):
client = TestClient()
client.save({"conanfile.py": str(TestConanFile())})
client.run("create . lasote/testing")
self.assertNotIn('does not contain a number!', client.out)
ref = ConanFileReference.loads("Hello/0.1@lasote/testing")
conan_folder = client.cache.package_layout(ref).base_folder()
self.assertIn("locks", os.listdir(conan_folder))
self.assertTrue(os.path.exists(conan_folder + ".count"))
self.assertTrue(os.path.exists(conan_folder + ".count.lock"))
client.run("remove * --locks", assert_error=True)
self.assertIn("ERROR: Specifying a pattern is not supported", client.out)
client.run("remove", assert_error=True)
self.assertIn('ERROR: Please specify a pattern to be removed ("*" for all)', client.out)
client.run("remove --locks")
self.assertNotIn("locks", os.listdir(conan_folder))
self.assertFalse(os.path.exists(conan_folder + ".count"))
self.assertFalse(os.path.exists(conan_folder + ".count.lock"))
def upload_dirty_test(self):
test_server = TestServer([], users={"lasote": "mypass"})
client = TestClient(servers={"default": test_server},
users={"default": [("lasote", "mypass")]})
client.save({"conanfile.py": str(TestConanFile())})
client.run("create . lasote/testing")
ref = ConanFileReference.loads("Hello/0.1@lasote/testing")
pref = PackageReference(ref, NO_SETTINGS_PACKAGE_ID)
package_folder = client.cache.package_layout(pref.ref).package(pref)
set_dirty(package_folder)
client.run("upload * --all --confirm", assert_error=True)
self.assertIn("ERROR: Package %s is corrupted, aborting upload" % str(pref),
client.out)
self.assertIn("Remove it with 'conan remove Hello/0.1@lasote/testing -p=%s'"
% NO_SETTINGS_PACKAGE_ID, client.out)
client.run("remove Hello/0.1@lasote/testing -p=%s -f" % NO_SETTINGS_PACKAGE_ID)
client.run("upload * --all --confirm")
| memsharded/conan | conans/test/functional/old/package_integrity_test.py | Python | mit | 2,451 |
#coding=utf-8
#author='Shichao-Dong'
import unittest
import Web_Method_Baspd
import Public_Base_Method
import requests
import time
import HTMLTestRunner
class ST_Bas_pd(unittest.TestCase):
    u'Product functionality tests'
def setUp(self):
global cookie
r = Public_Base_Method.login_func("172.31.3.73:6020", "dongshichao", "dong", "a111111")
cookie="WQSESSIONID="+"".join(r.cookies["WQSESSIONID"])
print cookie
def tearDown(self):
pass
    def test_pd_queryData_all(self):
        u'Query all products'
        r=Web_Method_Baspd.pd_query(cookie)
        print r.status_code
        print r.content
        # assertTrue on a non-empty string literal always passes; compare the status code itself
        self.assertEqual(r.status_code, 200, msg='failed to fetch product data')
    def test_pd_queryData_onsale(self):
        u'Query on-sale products'
        r=Web_Method_Baspd.pd_query_onsale(cookie)
        print r.status_code
        print r.content
        self.assertEqual(r.status_code, 200, msg='failed to fetch on-sale products')
    def test_pd_queryData_offsale(self):
        u'Query discontinued products'
        r=Web_Method_Baspd.pd_query_offsale(cookie)
        print r.status_code
        self.assertEqual(r.status_code, 200, msg='failed to fetch discontinued products')
if __name__=="__main__":
suite=unittest.TestSuite()
suite.addTest(ST_Bas_pd("test_pd_queryData_all"))
suite.addTest(ST_Bas_pd("test_pd_queryData_onsale"))
suite.addTest(ST_Bas_pd("test_pd_queryData_offsale"))
runner=unittest.TextTestRunner()
runner.run(suite)
    # # Define the report path and file name
# now = time.strftime("%Y-%m-%d-%H-%M-%S",time.localtime(time.time()))
# filename=r'D://Ptest/Report/'+now+'result.html'
# fp=open(filename,'wb')
    # runner=HTMLTestRunner.HTMLTestRunner(stream=fp,title='test result',description=u'Test results:')
#
    # # Run the tests
# runner.run(suite)
# fp.close() | NJ-zero/Android | Request/Test_pd/TestCase_Web_Baspd.py | Python | mit | 1,855 |
# from index import db
# class MyObject():
# def __init__(self):
# pass
# @staticmethod
# def get_something(arg1, arg2):
# return something
| tferreira/Flask-Redis | application/models.py | Python | mit | 170 |
import glob
import numpy as np
import pandas as pd
from numpy import nan
import os
os.chdir("/gpfs/commons/home/biederstedte-934/evan_projects/RRBS_anno_clean")
repeats = pd.read_csv("repeats_hg19.csv")
annofiles = glob.glob("RRBS_NormalBCD19pCD27mcell23_44_GTAGAGGA.A*")
def between_range(row):
    # Return the repeat class(es) of every hg19 repeat interval on the same
    # chromosome that contains this row's start coordinate (NaN if none).
    subset = repeats.loc[(row["chr"] == repeats.chr) & (row.start >= repeats.start) & (row.start <= repeats.end), :]
    if subset.empty:
        return np.nan
    return subset.repeat_class
#newdf1 = pd.DataFrame()
for filename in annofiles:
df = pd.read_table(filename)
df["hg19_repeats"] = df.apply(between_range, axis = 1)
df.to_csv(str("repeatregions_") + filename + ".csv", index=False)
| evanbiederstedt/RRBSfun | scripts/repeat_finder_scripts/repeat_finder_RRBS_NormalBCD19pCD27mcell23_44_GTAGAGGA.A.py | Python | mit | 706 |
#!/usr/bin/env python
from livereload import Server, shell
server = Server()
style = ("style.scss", "style.css")
script = ("typing-test.js", "typing-test-compiled.js")
server.watch(style[0], shell(["sass", style[0]], output=style[1]))
server.watch(script[0], shell(["babel", script[0]], output=script[1]))
server.watch("index.html")
server.serve(port=8080, host="localhost", open_url=True)
| daschwa/typing-test | server.py | Python | mit | 395 |
"""
EXAMPLE: Working with the universe class
"""
from universe import universe
import pandas as pd
# Create Instance of universe for US Equities
usEqUniverse = universe('usEquityConfig.txt')
# Get Summary Statistics
usEqUniverse.computeSummary()
# Plot Return
usEqUniverse.assetReturns.AA.plot() | pli1988/portfolioFactory | portfolioFactory/universe/example_generateUSEquityUniverse.py | Python | mit | 298 |
#!/usr/bin/python3
import sys
import getpass
import helper
from argparse import ArgumentParser
if __name__ == "__main__":
parser = ArgumentParser(description="Simple file password based encryption/decryption tools. When run as pipe, use standard in/out.")
parser.add_argument("-a", "--action", choices=["encrypt", "decrypt"], required=True, help="encrypt/decrypt the file/stream")
parser.add_argument("-p", "--password", required=False, help="password for operation or passphase for key")
parser.add_argument("-k", "--key", required=False, help="key path")
parser.add_argument("INPUT", default="", nargs="?", help="input file, default stdin")
parser.add_argument("OUTPUT", default="", nargs="?", help="output file, default to stdout")
parser.add_argument("-d", "--debug", action="store_true", help="debug print")
args = parser.parse_args()
if not args.password:
passwd = getpass.getpass("Password: ")
if args.action == "encrypt" and getpass.getpass("Verify password: ") != passwd:
print("Password mismatch...")
exit()
else:
passwd = args.password
# setup params
inF = sys.stdin.buffer if not sys.stdin.isatty() else args.INPUT
if not sys.stdin.isatty():
args.OUTPUT = args.INPUT
outF = sys.stdout.buffer if not sys.stdout.isatty() or args.OUTPUT == "" else args.OUTPUT
if inF == "":
print("No input source provided...")
exit()
_log_out = None if outF is sys.stdout.buffer else sys.stdout
op_type = helper.Type.with_password
if args.key:
op_type = helper.Type.with_rsa_key
if inF is not sys.stdin.buffer:
inF = open(inF, "rb")
if outF is not sys.stdout.buffer:
outF = open(outF, "wb")
if args.action == "encrypt":
helper.encrypt_stream(inF, outF, op_type, key_file=args.key, secret=passwd, log_out=_log_out)
if args.action == "decrypt":
helper.decrypt_stream(inF, outF, key_file=args.key, secret=passwd, log_out=_log_out)
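# --- Usage sketch (illustrative only; file names and the password below are placeholders) ---
#   python3 demo.py -a encrypt plain.txt cipher.bin
#   python3 demo.py -a decrypt cipher.bin plain.txt
# In pipe mode the script reads stdin and writes stdout:
#   cat plain.txt | python3 demo.py -a encrypt -p secret > cipher.bin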
| tingtingths/cipherhelper | demo.py | Python | mit | 2,028 |
import logging
from requests_oauthlib import OAuth1Session
from django.http import HttpResponse, HttpResponseRedirect
from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.shortcuts import render_to_response
from .models import QuickbooksToken, get_quickbooks_token
from .api import QuickbooksApi, AuthenticationFailure
from .signals import qb_connected
REQUEST_TOKEN_URL = 'https://oauth.intuit.com/oauth/v1/get_request_token'
ACCESS_TOKEN_URL = 'https://oauth.intuit.com/oauth/v1/get_access_token'
AUTHORIZATION_URL = 'https://appcenter.intuit.com/Connect/Begin'
BLUE_DOT_CACHE_KEY = 'quickbooks:blue_dot_menu'
@login_required
def request_oauth_token(request):
# We'll require a refresh in the blue dot cache
if BLUE_DOT_CACHE_KEY in request.session:
del request.session[BLUE_DOT_CACHE_KEY]
access_token_callback = settings.QUICKBOOKS['OAUTH_CALLBACK_URL']
if callable(access_token_callback):
access_token_callback = access_token_callback(request)
session = OAuth1Session(client_key=settings.QUICKBOOKS['CONSUMER_KEY'],
client_secret=settings.QUICKBOOKS['CONSUMER_SECRET'],
callback_uri=access_token_callback)
response = session.fetch_request_token(REQUEST_TOKEN_URL)
try:
request_token = response['oauth_token']
request_token_secret = response['oauth_token_secret']
request.session['qb_oauth_token'] = request_token
request.session['qb_oauth_token_secret'] = request_token_secret
except:
logger = logging.getLogger('quickbooks.views.request_oauth_token')
logger.exception(("Couldn't extract oAuth parameters from token " +
"request response. Response was '%s'"), response)
raise
return HttpResponseRedirect("%s?oauth_token=%s" % (AUTHORIZATION_URL, request_token))
@login_required
def get_access_token(request):
# [todo] - add doc string for get_access_token
session = OAuth1Session(client_key=settings.QUICKBOOKS['CONSUMER_KEY'],
client_secret=settings.QUICKBOOKS['CONSUMER_SECRET'],
resource_owner_key=request.session['qb_oauth_token'],
resource_owner_secret=request.session['qb_oauth_token_secret'])
remote_response = session.parse_authorization_response('?{}'.format(request.META.get('QUERY_STRING')))
realm_id = remote_response['realmId']
data_source = remote_response['dataSource']
oauth_verifier = remote_response['oauth_verifier']
# [review] - Possible bug? This should be taken care of by session.parse_authorization_response
session.auth.client.verifier = unicode(oauth_verifier)
response = session.fetch_access_token(ACCESS_TOKEN_URL)
# Delete any existing access tokens
request.user.quickbookstoken_set.all().delete()
token = QuickbooksToken.objects.create(
user=request.user,
access_token=response['oauth_token'],
access_token_secret=response['oauth_token_secret'],
realm_id=realm_id,
data_source=data_source)
# Cache blue dot menu
try:
request.session[BLUE_DOT_CACHE_KEY] = None
blue_dot_menu(request)
except AttributeError:
raise Exception('The sessions framework must be installed for this ' +
'application to work.')
    # Let everyone else know we connected
qb_connected.send(None, token=token)
return render_to_response('oauth_callback.html',
{'complete_url': settings.QUICKBOOKS['ACCESS_COMPLETE_URL']})
@login_required
def blue_dot_menu(request):
""" Returns the blue dot menu. If possible a cached copy is returned.
"""
html = request.session.get(BLUE_DOT_CACHE_KEY)
if not html:
html = request.session[BLUE_DOT_CACHE_KEY] = \
HttpResponse(QuickbooksApi(request.user).app_menu())
return html
@login_required
def disconnect(request):
""" Try to disconnect from Intuit, then destroy our tokens."""
token = get_quickbooks_token(request)
try:
QuickbooksApi(token).disconnect()
except AuthenticationFailure:
# If there is an authentication error, then these tokens are bad
# We need to destroy them in any case.
pass
request.user.quickbookstoken_set.all().delete()
return HttpResponseRedirect(settings.QUICKBOOKS['ACCESS_COMPLETE_URL'])
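# --- Wiring sketch (illustrative only; URL patterns and prefixes are assumptions,
# not part of this module). The views above are typically mapped in the host
# project's urls.py, e.g.:
#
#   from django.conf.urls import url
#   from quickbooks import views
#
#   urlpatterns = [
#       url(r'^qb/connect/$', views.request_oauth_token),
#       url(r'^qb/callback/$', views.get_access_token),
#       url(r'^qb/bluedot/$', views.blue_dot_menu),
#       url(r'^qb/disconnect/$', views.disconnect),
#   ]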
| grue/django-quickbooks-online | quickbooks/views.py | Python | mit | 4,488 |
import re
import abc
from collections import deque
from copy import copy
from rdp.ast import Node
from rdp.exceptions import InvalidGrammar, ParseError, UnexpectedToken
from rdp.utils import chain
def to_symbol(str_or_symbol, copy_if_not_created=False):
if isinstance(str_or_symbol, Symbol):
if copy_if_not_created:
return copy(str_or_symbol)
return str_or_symbol
if isinstance(str_or_symbol, str):
return Terminal(str_or_symbol)
raise TypeError("str or Symbol expected")
def flatten(symbol):
symbol = to_symbol(symbol, True)
symbol.flatten = True
return symbol
def drop(symbol):
symbol = to_symbol(symbol, True)
symbol.drop = True
return symbol
def keep(symbol):
symbol = to_symbol(symbol, True)
symbol.drop = False
return symbol
def group(symbol):
if isinstance(symbol, CompoundSymbol):
symbol.grouped = True
return symbol
class Symbol(metaclass=abc.ABCMeta):
def __init__(self, name=None):
self.flatten = False
self.transform = lambda x: x
self.drop = None
self.position = -1
self._name = name
def named(self, name):
if self._name:
return Alias(self, name)
self._name = name
return self
@property
def name(self):
return self._name
@abc.abstractmethod
def __call__(self, parser):
assert False
def __iter__(self):
yield from ()
def __str__(self):
return '<{0}>'.format(self.name)
def __add__(self, other):
return Sequence([self, to_symbol(other)])
def __radd__(self, other):
return to_symbol(other) + self
def __or__(self, other):
return OneOf([self, to_symbol(other)])
def __ror__(self, other):
return to_symbol(other) | self
def __ge__(self, func):
clone = copy(self)
clone.transform = chain(func, self.transform)
return clone
def __pos__(self):
return NonEmpty(self)
def iter(self):
visited = set()
next_symbols = deque([self])
while next_symbols:
next_symbol = next_symbols.popleft()
if next_symbol in visited:
continue
yield next_symbol
visited.add(next_symbol)
next_symbols.extend(next_symbol)
def terminals(self):
return (symbol for symbol in self.iter() if isinstance(symbol, Terminal))
def apply_transform(self, node):
return self.transform(node)
def is_rule(self):
return bool(self.name)
class Terminal(Symbol):
def __init__(self, lexeme, name=''):
super(Terminal, self).__init__(name=name)
self.lexeme = lexeme
self.priority = -1
@property
def pattern(self):
return re.escape(self.lexeme)
def apply_transform(self, node):
return self.transform(node.token.lexeme)
def __call__(self, parser):
token = parser.read()
if token.symbol != self:
raise UnexpectedToken(token, self)
yield parser.node(self, token, -1)
def __pos__(self):
return self
def __repr__(self):
name = '{0}='.format(self.name) if self.name else ''
return '<{0} {1}{2}>'.format(
self.__class__.__name__,
name,
repr(self.lexeme)
)
def __str__(self):
if self.name:
return '<{0}>'.format(self.name)
return repr(self.lexeme)
def __eq__(self, other):
return isinstance(other, type(self)) and self.lexeme == other.lexeme
def __hash__(self):
return hash(self.lexeme)
class Marker(Terminal):
def __init__(self, name):
super().__init__('', name=name)
@property
def pattern(self):
return None
def __pos__(self):
raise InvalidGrammar('Marker symbols cannot be non-empty')
class Epsilon(Marker):
def __call__(self, parser):
yield parser.node(self)
epsilon = Epsilon('')
empty_match = Node(None, None)
class Regexp(Terminal):
def __init__(self, pattern):
if re.match(pattern, ''):
raise ValueError('Regexp terminals may not match the empty string, use rdp.epsilon instead')
super().__init__(pattern)
@property
def pattern(self):
return self.lexeme
def __pos__(self):
return self
class CompoundSymbol(Symbol):
repr_sep = ', '
def __init__(self, symbols):
super().__init__()
self.symbols = symbols
self.grouped = False
def __iter__(self):
yield from self.symbols
def __repr__(self):
name = '{0} = '.format(self.name) if self.name else ''
return '<{0} {1}{2}>'.format(
self.__class__.__name__,
name,
self.repr_sep.join(repr(symbol) for symbol in self.symbols),
)
def apply_transform(self, node):
return self.transform([child.transform() for child in node])
class OneOf(CompoundSymbol):
repr_sep = ' | '
def __call__(self, parser):
node = parser.node(self)
longest_match_error = None
for symbol in self.symbols:
try:
child = yield symbol
node.append(child)
yield node
except ParseError as e:
if not longest_match_error or longest_match_error < e:
longest_match_error = e
continue
raise longest_match_error
def __or__(self, other):
if self.grouped:
return super().__or__(other)
return self.__class__(self.symbols + [to_symbol(other)])
def apply_transform(self, node):
return self.transform(node.children[0].transform())
class Sequence(CompoundSymbol):
repr_sep = ' + '
def __call__(self, parser):
node = parser.node(self)
for symbol in self.symbols:
value = yield symbol
node.append(value)
yield node
def __add__(self, other):
if self.grouped:
return super().__add__(other)
return self.__class__(self.symbols + [to_symbol(other)])
class Repeat(Symbol):
def __init__(self, symbol, min_matches=0):
super().__init__()
self.symbol = to_symbol(symbol)
self.min_matches = min_matches
def __iter__(self):
yield self.symbol
def __pos__(self):
if self.min_matches > 0:
return self
clone = copy(self)
clone.min_matches = 1
return clone
def __call__(self, parser):
node = parser.node(self)
n = 0
while True:
try:
child = yield self.symbol
n += 1
node.append(child)
except ParseError:
break
if n < self.min_matches:
raise ParseError("too few {0}".format(self.symbol))
yield node
def apply_transform(self, node):
return self.transform([child.transform() for child in node])
def repeat(symbol, separator=None, leading=False, trailing=False, min_matches=0):
if not separator:
return Repeat(symbol)
separator = to_symbol(separator)
tail = Repeat(flatten(separator + symbol), min_matches=max(min_matches - 1, 0))
r = group(symbol) + flatten(tail)
if leading:
r = Optional(separator) + flatten(r)
if trailing:
r = flatten(r) + Optional(separator)
if min_matches > 0:
return r
return flatten(r) | drop(epsilon)
class SymbolWrapper(Symbol):
def __init__(self, symbol, name=''):
super().__init__(name=name)
self.symbol = None if symbol is None else to_symbol(symbol)
def __iter__(self):
yield self.symbol
def apply_transform(self, node):
return self.transform(self.symbol.transform())
class SymbolProxy(SymbolWrapper):
def __init__(self, symbol=None, name=None):
super().__init__(symbol=symbol, name=name)
def __call__(self, parser):
node = yield self.symbol
yield node
def __str__(self):
return '<SymbolProxy {0}>'.format(self.symbol)
def __eq__(self, other):
return isinstance(other, SymbolProxy) and self.symbol == other.symbol
def __hash__(self):
return hash(self.symbol)
@property
def name(self):
return self.symbol.name
def is_rule(self):
return False
class Alias(SymbolProxy):
def __init__(self, symbol, name):
super().__init__(symbol)
self.alias = name
@property
def name(self):
return self.alias
def named(self, name):
return Alias(self.symbol, name)
def __call__(self, parser):
node = yield self.symbol
if node.symbol == self.symbol:
node.symbol = self
yield node
class Optional(SymbolWrapper):
def __call__(self, parser):
try:
node = yield self.symbol
except ParseError:
node = empty_match
yield node
class Lookahead(SymbolWrapper):
def __call__(self, parser):
node = yield self.symbol
parser.backtrack(node)
yield empty_match
def __pos__(self):
raise InvalidGrammar('Lookahead cannot be non-empty')
class NonEmpty(SymbolWrapper):
def __call__(self, parser):
node = yield self.symbol
if not node:
raise ParseError('non-empty match expected')
yield node
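if __name__ == '__main__':  # pragma: no cover
    # Illustrative sketch only (not part of the library): compose a tiny grammar
    # from the symbols defined above. The rule names are arbitrary.
    number = Regexp(r'[0-9]+').named('number')
    number_list = repeat(number, separator=drop(',')).named('number_list')
    print(number_list)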
| emulbreh/rdp | rdp/symbols.py | Python | mit | 9,473 |
from logging import getLogger
from yarl import URL
from aiohttp import BasicAuth
try:
from aiosocks import Socks4Auth, Socks5Auth
except ImportError:
class Socks4Auth(Exception):
def __init__(*args, **kwargs):
raise ImportError(
'You must install aiosocks to use a SOCKS proxy.')
Socks5Auth = Socks4Auth
from . import __title__, __version__
from .rpc_api import RpcApi, RpcState
from .auth_ptc import AuthPtc
from .auth_google import AuthGoogle
from .hash_server import HashServer
from .exceptions import AuthTokenExpiredException, InvalidCredentialsException, NoPlayerPositionSetException, ServerApiEndpointRedirectException
from .pogoprotos.networking.requests.request_type_pb2 import RequestType
from .pogoprotos.networking.platform.platform_request_type_pb2 import PlatformRequestType
class PGoApi:
log = getLogger(__name__)
log.info('%s v%s', __title__, __version__)
def __init__(self, lat=None, lon=None, alt=None, proxy=None, device_info=None):
self.auth_provider = None
self.state = RpcState()
self._api_endpoint = 'https://pgorelease.nianticlabs.com/plfe/rpc'
self.latitude = lat
self.longitude = lon
self.altitude = alt
self.proxy_auth = None
self.proxy = proxy
self.device_info = device_info
async def set_authentication(self, provider='ptc', username=None, password=None, timeout=10, locale='en_US', refresh_token=None):
if provider == 'ptc':
self.auth_provider = AuthPtc(
username,
password,
proxy=self._proxy,
proxy_auth=self.proxy_auth,
timeout=timeout)
elif provider == 'google':
self.auth_provider = AuthGoogle(
proxy=self._proxy, refresh_token=refresh_token)
if refresh_token:
return await self.auth_provider.get_access_token()
else:
raise InvalidCredentialsException(
"Invalid authentication provider - only ptc/google available.")
await self.auth_provider.user_login(username, password)
def set_position(self, lat, lon, alt=None):
self.log.debug('Set Position - Lat: %s Lon: %s Alt: %s', lat, lon, alt)
self.latitude = lat
self.longitude = lon
self.altitude = alt
def create_request(self):
return PGoApiRequest(self)
@staticmethod
def activate_hash_server(hash_token, conn_limit=300):
HashServer.set_token(hash_token)
HashServer.activate_session(conn_limit)
@property
def position(self):
return self.latitude, self.longitude, self.altitude
@property
def api_endpoint(self):
return self._api_endpoint
@api_endpoint.setter
def api_endpoint(self, api_url):
if api_url.startswith("https"):
self._api_endpoint = URL(api_url)
else:
self._api_endpoint = URL('https://' + api_url + '/rpc')
@property
def proxy(self):
return self._proxy
@proxy.setter
def proxy(self, proxy):
if proxy is None:
self._proxy = proxy
else:
self._proxy = URL(proxy)
if self._proxy.user:
scheme = self._proxy.scheme
if scheme == 'http':
self.proxy_auth = BasicAuth(
self._proxy.user, self._proxy.password)
elif scheme == 'socks5':
self.proxy_auth = Socks5Auth(
self._proxy.user, self._proxy.password)
elif scheme == 'socks4':
self.proxy_auth = Socks4Auth(self._proxy.user)
else:
raise ValueError(
'Proxy protocol must be http, socks5, or socks4.')
@property
def start_time(self):
return self.state.start_time
def __getattr__(self, func):
async def function(**kwargs):
request = self.create_request()
getattr(request, func)(**kwargs)
return await request.call()
if func.upper() in RequestType.keys():
return function
else:
raise AttributeError('{} not known.'.format(func))
class PGoApiRequest:
log = getLogger(__name__)
def __init__(self, parent):
self.__parent__ = parent
self._req_method_list = []
self._req_platform_list = []
async def call(self):
parent = self.__parent__
auth_provider = parent.auth_provider
position = parent.position
try:
assert position[0] is not None and position[1] is not None
except AssertionError:
raise NoPlayerPositionSetException('No position set.')
request = RpcApi(auth_provider, parent.state)
while True:
try:
response = await request.request(parent.api_endpoint, self._req_method_list, self._req_platform_list, position, parent.device_info, parent._proxy, parent.proxy_auth)
break
except AuthTokenExpiredException:
self.log.info('Access token rejected! Requesting new one...')
await auth_provider.get_access_token(force_refresh=True)
except ServerApiEndpointRedirectException as e:
self.log.debug('API endpoint redirect... re-executing call')
parent.api_endpoint = e.endpoint
# cleanup after call execution
self._req_method_list = []
return response
def list_curr_methods(self):
for i in self._req_method_list:
print("{} ({})".format(RequestType.Name(i), i))
def __getattr__(self, func):
func = func.upper()
def function(**kwargs):
self.log.debug('Creating a new request...')
try:
if func in RequestType.keys():
if kwargs:
self._req_method_list.append((RequestType.Value(func), kwargs))
self.log.debug("Arguments of '%s': \n\r%s", func, kwargs)
else:
self._req_method_list.append(RequestType.Value(func))
self.log.debug("Adding '%s' to RPC request", func)
elif func in PlatformRequestType.keys():
if kwargs:
self._req_platform_list.append((PlatformRequestType.Value(func), kwargs))
self.log.debug("Arguments of '%s': \n\r%s", func, kwargs)
else:
self._req_platform_list.append(PlatformRequestType.Value(func))
self.log.debug("Adding '%s' to RPC request", func)
except ValueError:
raise AttributeError('{} not known.'.format(func))
return self
return function
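if __name__ == '__main__':  # pragma: no cover
    # Illustrative sketch only (not part of the library). Coordinates and credentials
    # are placeholders; the network round-trip is left commented out.
    import asyncio
    async def _demo():
        api = PGoApi(lat=40.7580, lon=-73.9855, alt=10)
        await api.set_authentication(provider='ptc', username='user', password='pass')
        # any RequestType name can be invoked as a coroutine, e.g.:
        return await api.get_player()
    # asyncio.get_event_loop().run_until_complete(_demo())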
| bellowsj/aiopogo | aiopogo/pgoapi.py | Python | mit | 6,917 |
from pandac.PandaModules import *
from toontown.toonbase.ToonBaseGlobal import *
from direct.gui.DirectGui import *
from pandac.PandaModules import *
from direct.interval.IntervalGlobal import *
from direct.fsm import ClassicFSM, State
from direct.fsm import State
from direct.fsm import StateData
from toontown.toontowngui import TTDialog
from toontown.toonbase import ToontownGlobals
from toontown.toonbase import TTLocalizer
from direct.directnotify import DirectNotifyGlobal
class Trolley(StateData.StateData):
notify = DirectNotifyGlobal.directNotify.newCategory('Trolley')
def __init__(self, safeZone, parentFSM, doneEvent):
StateData.StateData.__init__(self, doneEvent)
self.fsm = ClassicFSM.ClassicFSM('Trolley', [
State.State('start',
self.enterStart,
self.exitStart,
['requestBoard',
'trolleyHFA',
'trolleyTFA']),
State.State('trolleyHFA',
self.enterTrolleyHFA,
self.exitTrolleyHFA,
['final']),
State.State('trolleyTFA',
self.enterTrolleyTFA,
self.exitTrolleyTFA,
['final']),
State.State('requestBoard',
self.enterRequestBoard,
self.exitRequestBoard,
['boarding']),
State.State('boarding',
self.enterBoarding,
self.exitBoarding,
['boarded']),
State.State('boarded',
self.enterBoarded,
self.exitBoarded,
['requestExit',
'trolleyLeaving',
'final']),
State.State('requestExit',
self.enterRequestExit,
self.exitRequestExit,
['exiting',
'trolleyLeaving']),
State.State('trolleyLeaving',
self.enterTrolleyLeaving,
self.exitTrolleyLeaving,
['final']),
State.State('exiting',
self.enterExiting,
self.exitExiting,
['final']),
State.State('final',
self.enterFinal,
self.exitFinal,
['start'])],
'start', 'final')
self.parentFSM = parentFSM
return None
def load(self):
self.parentFSM.getStateNamed('trolley').addChild(self.fsm)
self.buttonModels = loader.loadModel('phase_3.5/models/gui/inventory_gui')
        self.upButton = self.buttonModels.find('**/InventoryButtonUp')
self.downButton = self.buttonModels.find('**/InventoryButtonDown')
self.rolloverButton = self.buttonModels.find('**/InventoryButtonRollover')
def unload(self):
self.parentFSM.getStateNamed('trolley').removeChild(self.fsm)
del self.fsm
del self.parentFSM
self.buttonModels.removeNode()
del self.buttonModels
del self.upButton
del self.downButton
del self.rolloverButton
def enter(self):
self.fsm.enterInitialState()
if base.localAvatar.hp > 0:
messenger.send('enterTrolleyOK')
self.fsm.request('requestBoard')
else:
self.fsm.request('trolleyHFA')
return None
def exit(self):
self.ignoreAll()
return None
def enterStart(self):
return None
def exitStart(self):
return None
def enterTrolleyHFA(self):
self.noTrolleyBox = TTDialog.TTGlobalDialog(message=TTLocalizer.TrolleyHFAMessage, doneEvent='noTrolleyAck', style=TTDialog.Acknowledge)
self.noTrolleyBox.show()
base.localAvatar.b_setAnimState('neutral', 1)
self.accept('noTrolleyAck', self.__handleNoTrolleyAck)
def exitTrolleyHFA(self):
self.ignore('noTrolleyAck')
self.noTrolleyBox.cleanup()
del self.noTrolleyBox
def enterTrolleyTFA(self):
self.noTrolleyBox = TTDialog.TTGlobalDialog(message=TTLocalizer.TrolleyTFAMessage, doneEvent='noTrolleyAck', style=TTDialog.Acknowledge)
self.noTrolleyBox.show()
base.localAvatar.b_setAnimState('neutral', 1)
self.accept('noTrolleyAck', self.__handleNoTrolleyAck)
def exitTrolleyTFA(self):
self.ignore('noTrolleyAck')
self.noTrolleyBox.cleanup()
del self.noTrolleyBox
def __handleNoTrolleyAck(self):
ntbDoneStatus = self.noTrolleyBox.doneStatus
if ntbDoneStatus == 'ok':
doneStatus = {}
doneStatus['mode'] = 'reject'
messenger.send(self.doneEvent, [doneStatus])
else:
self.notify.error('Unrecognized doneStatus: ' + str(ntbDoneStatus))
def enterRequestBoard(self):
return None
def handleRejectBoard(self):
doneStatus = {}
doneStatus['mode'] = 'reject'
messenger.send(self.doneEvent, [doneStatus])
def exitRequestBoard(self):
return None
def enterBoarding(self, nodePath):
camera.wrtReparentTo(nodePath)
self.cameraBoardTrack = LerpPosHprInterval(camera, 1.5, Point3(-35, 0, 8), Point3(-90, 0, 0))
self.cameraBoardTrack.start()
return None
def exitBoarding(self):
self.ignore('boardedTrolley')
return None
def enterBoarded(self):
if base.config.GetBool('want-qa-regression', 0):
self.notify.info('QA-REGRESSION: RIDETHETROLLEY: Ride the Trolley')
self.enableExitButton()
return None
def exitBoarded(self):
self.cameraBoardTrack.finish()
self.disableExitButton()
return None
def enableExitButton(self):
        self.exitButton = DirectButton(
            relief=None,
            text=TTLocalizer.TrolleyHopOff,
            text_fg=(1, 1, 0.65, 1),
            text_pos=(0, -0.23),
            text_scale=TTLocalizer.TexitButton,
            image=(self.upButton, self.downButton, self.rolloverButton),
            image_color=(1, 0, 0, 1),
            image_scale=(20, 1, 11),
            pos=(0, 0, 0.8),
            scale=0.15,
            command=lambda self = self: self.fsm.request('requestExit'))
return
def disableExitButton(self):
self.exitButton.destroy()
def enterRequestExit(self):
messenger.send('trolleyExitButton')
return None
def exitRequestExit(self):
return None
def enterTrolleyLeaving(self):
camera.lerpPosHprXYZHPR(0, 18.55, 3.75, -180, 0, 0, 3, blendType='easeInOut', task='leavingCamera')
self.acceptOnce('playMinigame', self.handlePlayMinigame)
return None
def handlePlayMinigame(self, zoneId, minigameId):
base.localAvatar.b_setParent(ToontownGlobals.SPHidden)
doneStatus = {}
doneStatus['mode'] = 'minigame'
doneStatus['zoneId'] = zoneId
doneStatus['minigameId'] = minigameId
messenger.send(self.doneEvent, [doneStatus])
def exitTrolleyLeaving(self):
self.ignore('playMinigame')
taskMgr.remove('leavingCamera')
return None
def enterExiting(self):
return None
def handleOffTrolley(self):
doneStatus = {}
doneStatus['mode'] = 'exit'
messenger.send(self.doneEvent, [doneStatus])
return None
def exitExiting(self):
return None
def enterFinal(self):
return None
def exitFinal(self):
return None
| ksmit799/Toontown-Source | toontown/trolley/Trolley.py | Python | mit | 7,361 |
from __future__ import unicode_literals
from httoop import URI
def test_simple_uri_comparison(uri):
u1 = URI(b'http://abc.com:80/~smith/home.html')
u2 = URI(b'http://ABC.com/%7Esmith/home.html')
u3 = URI(b'http://ABC.com:/%7esmith/home.html')
u4 = URI(b'http://ABC.com:/%7esmith/./home.html')
u5 = URI(b'http://ABC.com:/%7esmith/foo/../home.html')
assert u1 == u2
assert u2 == u3
assert u1 == u3
assert u1 == u4
assert u1 == u5
def test_request_uri_maxlength():
pass
def test_request_uri_is_star():
pass
def test_request_uri_containig_fragment():
pass
def test_invalid_uri_scheme():
pass
def test_invalid_port():
pass
def test_normalized_uri_redirects():
pass
def test_uri_composing_username_and_password():
assert bytes(URI(b'http://[email protected]')) == b'http://[email protected]'
assert bytes(URI(b'http://username:[email protected]')) == b'http://username:[email protected]'
| spaceone/httoop | tests/uri/test_uri.py | Python | mit | 930 |
# Copyright (c) 2017, Udacity
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# The views and conclusions contained in the software and documentation are those
# of the authors and should not be interpreted as representing official policies,
# either expressed or implied, of the FreeBSD Project
# Author: Devin Anzelmo
import os
import glob
import numpy as np
import matplotlib.patches as mpatches
import matplotlib.pyplot as plt
from tensorflow.contrib.keras.python import keras
from scipy import misc
def make_dir_if_not_exist(path):
if not os.path.exists(path):
os.makedirs(path)
def show(im, x=5, y=5):
plt.figure(figsize=(x,y))
plt.imshow(im)
plt.show()
def show_images(maybe_ims, x=4, y=4):
if isinstance(maybe_ims, (list, tuple)):
border = np.ones((maybe_ims[0].shape[0], 10, 3))
border = border.astype(np.uint8)
new_im = maybe_ims[0]
for i in maybe_ims[1:]:
new_im = np.concatenate((new_im, border, i), axis=1)
show(new_im, len(maybe_ims)*x, y)
else:
show(maybe_ims)
# helpers for loading a few images from the grading data
def get_im_files(path, subset_name):
return sorted(glob.glob(os.path.join(path, subset_name, 'images', '*.jpeg')))
def get_mask_files(path, subset_name):
return sorted(glob.glob(os.path.join(path, subset_name, 'masks', '*.png')))
def get_pred_files(subset_name):
return sorted(glob.glob(os.path.join('..','data', 'runs', subset_name, '*.png')))
def get_im_file_sample(grading_data_dir_name, subset_name, pred_dir_suffix=None, n_file_names=10):
path = os.path.join('..', 'data', grading_data_dir_name)
ims = np.array(get_im_files(path, subset_name))
masks = np.array(get_mask_files(path, subset_name))
shuffed_inds = np.random.permutation(np.arange(masks.shape[0]))
ims_subset = ims[shuffed_inds[:n_file_names]]
masks_subset = masks[shuffed_inds[:n_file_names]]
if not pred_dir_suffix:
return list(zip(ims_subset, masks_subset))
else:
preds = np.array(get_pred_files(subset_name+'_'+pred_dir_suffix))
preds_subset = preds[shuffed_inds[:n_file_names]]
return list(zip(ims_subset, masks_subset, preds_subset))
def load_images(file_tuple):
im = misc.imread(file_tuple[0])
mask = misc.imread(file_tuple[1])
if len(file_tuple) == 2:
return im, mask
else:
pred = misc.imread(file_tuple[2])
if pred.shape[0] != im.shape[0]:
mask = misc.imresize(mask, pred.shape)
im = misc.imresize(im, pred.shape)
return im, mask, pred
def plot_keras_model(model, fig_name):
base_path = os.path.join('..', 'data', 'figures')
make_dir_if_not_exist(base_path)
keras.utils.vis_utils.plot_model(model, os.path.join(base_path, fig_name))
keras.utils.vis_utils.plot_model(model, os.path.join(base_path, fig_name +'_with_shapes'), show_shapes=True)
def train_val_curve(train_loss, val_loss=None):
train_line = plt.plot(train_loss, label='train_loss')
train_patch = mpatches.Patch(color='blue',label='train_loss')
handles = [train_patch]
if val_loss:
val_line = plt.plot(val_loss, label='val_loss')
val_patch = mpatches.Patch(color='orange',label='val_loss')
handles.append(val_patch)
plt.legend(handles=handles, loc=2)
plt.title('training curves')
plt.ylabel('loss')
plt.xlabel('epochs')
plt.show()
# modified from the BaseLogger in file linked below
# https://github.com/fchollet/keras/blob/master/keras/callbacks.py
class LoggerPlotter(keras.callbacks.Callback):
"""Callback that accumulates epoch averages of metrics.
and plots train and validation curves on end of epoch
"""
def __init__(self):
self.hist_dict = {'loss':[], 'val_loss':[]}
def on_epoch_begin(self, epoch, logs=None):
self.seen = 0
self.totals = {}
def on_batch_end(self, batch, logs=None):
logs = logs or {}
batch_size = logs.get('size', 0)
self.seen += batch_size
for k, v in logs.items():
if k in self.totals:
self.totals[k] += v * batch_size
else:
self.totals[k] = v * batch_size
def on_epoch_end(self, epoch, logs=None):
if logs is not None:
for k in self.params['metrics']:
if k in self.totals:
# Make value available to next callbacks.
logs[k] = self.totals[k] / self.seen
self.hist_dict['loss'].append(logs['loss'])
if 'val_loss' in self.params['metrics']:
self.hist_dict['val_loss'].append(logs['val_loss'])
train_val_curve(self.hist_dict['loss'], self.hist_dict['val_loss'])
else:
train_val_curve(self.hist_dict['loss'])
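if __name__ == '__main__':
    # Illustrative sketch only (not part of the project): plot a toy loss curve.
    # LoggerPlotter() itself is meant to be passed to Keras, e.g.
    # model.fit(..., callbacks=[LoggerPlotter()]), to produce this plot every epoch.
    train_val_curve([1.0, 0.62, 0.41, 0.33], [1.05, 0.80, 0.63, 0.58])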
| squared9/Robotics | Follow_Me-Semantic_Segmentation/code/utils/plotting_tools.py | Python | mit | 6,172 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import re
from user import make_anonymous_user
from exeptions import HttpStatusError, RegexError
def make_subject_url(url):
if url.endswith("/"):
return url + "subject.txt"
else:
return url + "/subject.txt"
def parse_board(string):
if not isinstance(string, unicode):
raise TypeError("unsupported string type:" + str(type(string)))
thread_expressions = re.compile(
r"^(?P<dat>\d+\.dat)<>(?P<title>.*) \((?P<n_comments>\d*)\)$")
results = []
for thread_string in string.split("\n"):
thread_data = thread_expressions.search(thread_string)
if thread_data:
results.append({
"title": thread_data.group("title"),
"n_comments": int(thread_data.group("n_comments")),
"dat": thread_data.group("dat"),
})
elif len(thread_string) != 0:
raise RegexError(
"Regex unmatched in parsing the thread's data",
thread_expressions)
return results
def retrieve_board(board_url, user=None):
my_user = user if user else make_anonymous_user()
subject_url = make_subject_url(board_url)
response = my_user.urlopen(subject_url, gzip=False)
if response.code == 200:
retrieved_string = unicode(response.read(), "Shift_JIS", "ignore")
return parse_board(retrieved_string)
else:
message = "HTTP status is invalid: " + str(response.code)
raise HttpStatusError(message, response)
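if __name__ == "__main__":
    # Illustrative sketch only; the board URL below is a placeholder, not a real endpoint.
    for thread in retrieve_board("http://example.2ch.net/board")[:5]:
        print thread["title"], thread["n_comments"]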
| rezoo/twopy | twopy/board.py | Python | mit | 1,589 |
from tehbot.plugins import *
import tehbot.plugins as plugins
import wolframalpha
import prettytable
class WolframAlphaPlugin(StandardPlugin):
def __init__(self):
StandardPlugin.__init__(self)
self.parser.add_argument("query", nargs="+")
def initialize(self, dbconn):
StandardPlugin.initialize(self, dbconn)
try:
self.client = wolframalpha.Client(self.settings["wolframalpha_app_id"])
except:
self.settings["enabled"] = False
@staticmethod
def remove_empty_columns(table, nr_cols):
t = [[] for n in range(len(table))]
for i in range(nr_cols):
keep = False
for line in table:
if line[i]:
keep = True
break
if keep:
for j in range(len(table)):
t[j].append(table[j][i])
return t
@staticmethod
def format_table(s):
table = [[y.strip() for y in x.strip().split("|")] for x in s.splitlines()]
nr_cols = max(map(len, table))
table = [[x[i] if i < len(x) else "" for i in range(nr_cols)] for x in table]
table = WolframAlphaPlugin.remove_empty_columns(table, nr_cols)
if len(table) < 2:
s2 = " | ".join(table[0])
return s2
pt = prettytable.PrettyTable()
pt.header = False
for line in table:
pt.add_row(line)
s = pt.get_string()
return s
def execute(self, connection, event, extra, dbconn):
try:
pargs = self.parser.parse_args(extra["args"])
if self.parser.help_requested:
return self.parser.format_help().strip()
except Exception as e:
return u"Error: %s" % str(e)
txt = "\x0303[Wolfram|Alpha]\x03 "
try:
res = None
misc = []
for p in self.client.query(" ".join(pargs.query)).pods:
if p.id == "Input":
inp = " | ".join(p.text.splitlines())
elif p.id == "Result" and p.text:
res = self.format_table(p.text)
elif p.title and p.text:
misc.append("%s\n%s" % (p.title, self.format_table(p.text)))
txt += inp + "\n"
if res:
txt += res + "\n"
elif misc:
txt += "\n".join(misc)
else:
raise NameError
except (NameError, AttributeError):
txt += "No results."
except Exception as e:
txt = "Error: %s" % e
return plugins.shorten(txt, 450)
register_plugin(["wolframalpha", "wa"], WolframAlphaPlugin())
| spaceone/tehbot | tehbot/plugins/wolframalpha/__init__.py | Python | mit | 2,727 |
#!/usr/bin/env python
#
# Use the raw transactions API to spend bitcoins received on particular addresses,
# and send any change back to that same address.
#
# Example usage:
# spendfrom.py # Lists available funds
# spendfrom.py --from=ADDRESS --to=ADDRESS --amount=11.00
#
# Assumes it will talk to a bitcoind or Bitcoin-Qt running
# on localhost.
#
# Depends on jsonrpc
#
from decimal import *
import getpass
import math
import os
import os.path
import platform
import sys
import time
from jsonrpc import ServiceProxy, json
BASE_FEE=Decimal("0.001")
def check_json_precision():
"""Make sure json library being used does not lose precision converting SUN values"""
n = Decimal("20000000.00000003")
satoshis = int(json.loads(json.dumps(float(n)))*1.0e8)
if satoshis != 2000000000000003:
raise RuntimeError("JSON encode/decode loses precision")
def determine_db_dir():
"""Return the default location of the bitcoin data directory"""
if platform.system() == "Darwin":
return os.path.expanduser("~/Library/Application Support/Bitcoin/")
elif platform.system() == "Windows":
return os.path.join(os.environ['APPDATA'], "Bitcoin")
return os.path.expanduser("~/.bitcoin")
def read_bitcoin_config(dbdir):
"""Read the bitcoin.conf file from dbdir, returns dictionary of settings"""
from ConfigParser import SafeConfigParser
class FakeSecHead(object):
def __init__(self, fp):
self.fp = fp
self.sechead = '[all]\n'
def readline(self):
if self.sechead:
try: return self.sechead
finally: self.sechead = None
else:
s = self.fp.readline()
if s.find('#') != -1:
s = s[0:s.find('#')].strip() +"\n"
return s
config_parser = SafeConfigParser()
config_parser.readfp(FakeSecHead(open(os.path.join(dbdir, "bitcoin.conf"))))
return dict(config_parser.items("all"))
def connect_JSON(config):
"""Connect to a bitcoin JSON-RPC server"""
testnet = config.get('testnet', '0')
testnet = (int(testnet) > 0) # 0/1 in config file, convert to True/False
if not 'rpcport' in config:
config['rpcport'] = 19332 if testnet else 9332
connect = "http://%s:%[email protected]:%s"%(config['rpcuser'], config['rpcpassword'], config['rpcport'])
try:
result = ServiceProxy(connect)
# ServiceProxy is lazy-connect, so send an RPC command mostly to catch connection errors,
# but also make sure the bitcoind we're talking to is/isn't testnet:
if result.getmininginfo()['testnet'] != testnet:
sys.stderr.write("RPC server at "+connect+" testnet setting mismatch\n")
sys.exit(1)
return result
except:
sys.stderr.write("Error connecting to RPC server at "+connect+"\n")
sys.exit(1)
def unlock_wallet(bitcoind):
info = bitcoind.getinfo()
if 'unlocked_until' not in info:
return True # wallet is not encrypted
t = int(info['unlocked_until'])
if t <= time.time():
try:
passphrase = getpass.getpass("Wallet is locked; enter passphrase: ")
bitcoind.walletpassphrase(passphrase, 5)
except:
sys.stderr.write("Wrong passphrase\n")
info = bitcoind.getinfo()
return int(info['unlocked_until']) > time.time()
def list_available(bitcoind):
address_summary = dict()
address_to_account = dict()
for info in bitcoind.listreceivedbyaddress(0):
address_to_account[info["address"]] = info["account"]
unspent = bitcoind.listunspent(0)
for output in unspent:
# listunspent doesn't give addresses, so:
rawtx = bitcoind.getrawtransaction(output['txid'], 1)
vout = rawtx["vout"][output['vout']]
pk = vout["scriptPubKey"]
# This code only deals with ordinary pay-to-bitcoin-address
# or pay-to-script-hash outputs right now; anything exotic is ignored.
if pk["type"] != "pubkeyhash" and pk["type"] != "scripthash":
continue
address = pk["addresses"][0]
if address in address_summary:
address_summary[address]["total"] += vout["value"]
address_summary[address]["outputs"].append(output)
else:
address_summary[address] = {
"total" : vout["value"],
"outputs" : [output],
"account" : address_to_account.get(address, "")
}
return address_summary
def select_coins(needed, inputs):
# Feel free to improve this, this is good enough for my simple needs:
outputs = []
have = Decimal("0.0")
n = 0
while have < needed and n < len(inputs):
outputs.append({ "txid":inputs[n]["txid"], "vout":inputs[n]["vout"]})
have += inputs[n]["amount"]
n += 1
return (outputs, have-needed)
def create_tx(bitcoind, fromaddresses, toaddress, amount, fee):
all_coins = list_available(bitcoind)
total_available = Decimal("0.0")
needed = amount+fee
potential_inputs = []
for addr in fromaddresses:
if addr not in all_coins:
continue
potential_inputs.extend(all_coins[addr]["outputs"])
total_available += all_coins[addr]["total"]
if total_available < needed:
sys.stderr.write("Error, only %f SUN available, need %f\n"%(total_available, needed));
sys.exit(1)
#
# Note:
# Python's json/jsonrpc modules have inconsistent support for Decimal numbers.
# Instead of wrestling with getting json.dumps() (used by jsonrpc) to encode
# Decimals, I'm casting amounts to float before sending them to bitcoind.
#
outputs = { toaddress : float(amount) }
(inputs, change_amount) = select_coins(needed, potential_inputs)
if change_amount > BASE_FEE: # don't bother with zero or tiny change
change_address = fromaddresses[-1]
if change_address in outputs:
outputs[change_address] += float(change_amount)
else:
outputs[change_address] = float(change_amount)
rawtx = bitcoind.createrawtransaction(inputs, outputs)
signed_rawtx = bitcoind.signrawtransaction(rawtx)
if not signed_rawtx["complete"]:
sys.stderr.write("signrawtransaction failed\n")
sys.exit(1)
txdata = signed_rawtx["hex"]
return txdata
def compute_amount_in(bitcoind, txinfo):
result = Decimal("0.0")
for vin in txinfo['vin']:
in_info = bitcoind.getrawtransaction(vin['txid'], 1)
vout = in_info['vout'][vin['vout']]
result = result + vout['value']
return result
def compute_amount_out(txinfo):
result = Decimal("0.0")
for vout in txinfo['vout']:
result = result + vout['value']
return result
def sanity_test_fee(bitcoind, txdata_hex, max_fee):
class FeeError(RuntimeError):
pass
try:
txinfo = bitcoind.decoderawtransaction(txdata_hex)
total_in = compute_amount_in(bitcoind, txinfo)
total_out = compute_amount_out(txinfo)
if total_in-total_out > max_fee:
raise FeeError("Rejecting transaction, unreasonable fee of "+str(total_in-total_out))
tx_size = len(txdata_hex)/2
kb = tx_size/1000 # integer division rounds down
        fee = total_in - total_out  # fee actually paid by this transaction
        if kb > 1 and fee < BASE_FEE:
raise FeeError("Rejecting no-fee transaction, larger than 1000 bytes")
if total_in < 0.01 and fee < BASE_FEE:
raise FeeError("Rejecting no-fee, tiny-amount transaction")
# Exercise for the reader: compute transaction priority, and
# warn if this is a very-low-priority transaction
except FeeError as err:
sys.stderr.write((str(err)+"\n"))
sys.exit(1)
def main():
import optparse
parser = optparse.OptionParser(usage="%prog [options]")
parser.add_option("--from", dest="fromaddresses", default=None,
help="addresses to get bitcoins from")
parser.add_option("--to", dest="to", default=None,
help="address to get send bitcoins to")
parser.add_option("--amount", dest="amount", default=None,
help="amount to send")
parser.add_option("--fee", dest="fee", default="0.0",
help="fee to include")
parser.add_option("--datadir", dest="datadir", default=determine_db_dir(),
help="location of bitcoin.conf file with RPC username/password (default: %default)")
parser.add_option("--testnet", dest="testnet", default=False, action="store_true",
help="Use the test network")
parser.add_option("--dry_run", dest="dry_run", default=False, action="store_true",
help="Don't broadcast the transaction, just create and print the transaction data")
(options, args) = parser.parse_args()
check_json_precision()
config = read_bitcoin_config(options.datadir)
if options.testnet: config['testnet'] = True
bitcoind = connect_JSON(config)
if options.amount is None:
address_summary = list_available(bitcoind)
for address,info in address_summary.iteritems():
n_transactions = len(info['outputs'])
if n_transactions > 1:
print("%s %.8f %s (%d transactions)"%(address, info['total'], info['account'], n_transactions))
else:
print("%s %.8f %s"%(address, info['total'], info['account']))
else:
fee = Decimal(options.fee)
amount = Decimal(options.amount)
while unlock_wallet(bitcoind) == False:
pass # Keep asking for passphrase until they get it right
txdata = create_tx(bitcoind, options.fromaddresses.split(","), options.to, amount, fee)
sanity_test_fee(bitcoind, txdata, amount*Decimal("0.01"))
if options.dry_run:
print(txdata)
else:
txid = bitcoind.sendrawtransaction(txdata)
print(txid)
if __name__ == '__main__':
main()
| tuaris/suncoin | contrib/spendfrom/spendfrom.py | Python | mit | 10,053 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Detect same images and try to merge metadatas before deleting duplicates"""
import PIL.Image as Image
import PIL.ImageChops as ImageChops
import sys
import os
import pyexiv2
import datetime
import logging
#Handler for merging properties
def handler_mergeList(a, b):
"""List merger"""
#FIXME : there is certainly a better python way !
for p in b :
if p not in a:
a.append(p)
return a
def handler_minDate(a, b):
"""Minimum date"""
if a < b :
return a
else :
return b
def handler_keepMain(a, b):
"""Keep left"""
return a
def handler_Exif_Image_Orientation(a, b):
"""Assert : the higher is better, mainly because 1 is 'no orientation'"""
if a > b :
return a
else :
return b
def handler_concat(a, b):
return (a+b)
def handler_Iptc_Application2_ProgramVersion(a, b):
try :
la = [int(x) for x in a[0].split(".")]
lb = [int(x) for x in b[0].split(".")]
if la > lb :
return [".".join([str(x) for x in la])]
else :
return [".".join([str(x) for x in lb])]
except :
if a > b :
return a
else :
return b
exiv_changed_keywords = ["merged_kmd"] #Tag set when pictures are merged (if IPTC is in use)
#Match exif/iptc properties to do the merge
exiv_handlers = {
#Keep left (main pictures)
"Iptc.Application2.ProgramVersion" : handler_Iptc_Application2_ProgramVersion,
"Exif.Image.Software" : handler_keepMain,
#Concat
"Exif.Photo.UserComment" : handler_concat,
#Lists
"Iptc.Application2.Keywords" : handler_mergeList,
#Orientation
"Exif.Image.Orientation" : handler_Exif_Image_Orientation,
"Exif.Thumbnail.Orientation" : handler_Exif_Image_Orientation,
#Dates
"Exif.Image.DateTime" : handler_minDate,
"Exif.Photo.DateTimeOriginal" : handler_minDate,
}
#Don't try to do anything with these properties
exiv_ignored_properties = ["Exif.Thumbnail.JPEGInterchangeFormat", "Exif.Image.ExifTag", "Exif.Photo.InteroperabilityTag", "Exif.Photo.MakerNote", "Exif.MakerNote.Offset"]
def comparePilImages(img1, img2):
"""Compare 2 PIL.Images and return True if there is no difference"""
try :
diff = ImageChops.difference(img1, img2)
bbox = diff.getbbox()
del(diff)
except :
return False
return bbox == None
def compareImagesFiles(f1, f2):
"""Load two files in PIL, and compare"""
img1 = Image.open(f1)
img2 = Image.open(f2)
return comparePilImages(img1, img2)
def compareImagesFolder(folder, quick = False):
"""Compare images in a folder"""
logging.debug("Comparing images in %s", folder)
files = [os.path.join(folder, x) for x in os.listdir(folder)]
return compareImagesCollection(files, quick)
def compareImagesCollection(files, quick = True):
imgf = []
samef = []
for fpath in files :
if not os.path.isfile(fpath):
#Only try to load files !
logging.info("Not a file : %s" % fpath)
continue
try:
Image.open(fpath)
imgf.append(fpath)
except:
logging.info("Not an image : %s" % fpath)
pass
for f1 in imgf:
imgf1 = Image.open(f1)
for f2 in imgf:
if f2 <= f1 :
continue
if quick and abs(os.path.getsize(f1) - os.path.getsize(f2)) > 1000 :
continue
imgf2 = Image.open(f2)
if comparePilImages(imgf1, imgf2):
logging.debug("Match Images : %s = %s", f1, f2)
found = False
for fl in samef:
#FIXME : Should rewrite this !
if f1 in fl and f2 in fl :
found = True
break
if f1 in fl:
fl.append(f2)
found = True
break
if f2 in fl:
fl.append(f1)
found = True
break
if not found :
#Images are the same, but not already matched
samef.append([f1,f2])
del(imgf)
return(samef)
def readExivMetadata(path):
try :
meta = pyexiv2.ImageMetadata(path)
meta.read()
return meta
except :
logging.debug("This file has no exiv metadatas : %s", path)
return False
def mergeExivMetadata(sameImages, doit = False):
    #FIXME : should be cleverer about choosing the mainImage
write = False
mainI = sameImages[0]
metas = {}
for pathI in sameImages:
meta = pyexiv2.ImageMetadata(pathI)
meta.read()
metas[pathI] = meta
for pathI in sameImages[1:]:
logging.debug("Comparing %s and %s", mainI, pathI)
for k in metas[pathI].iptc_keys + metas[pathI].exif_keys:
if k in exiv_ignored_properties:
continue
newval = None
if k in metas[mainI].iptc_keys + metas[mainI].exif_keys :
try :
if metas[mainI][k].value != metas[pathI][k].value :
logging.debug("Difference for %s", k)
logging.debug("%s <> %s", metas[mainI][k].value, metas[pathI][k].value)
if k in exiv_handlers :
newval = exiv_handlers[k](metas[mainI][k].value, metas[pathI][k].value)
logging.info("Merged property %s : %s", k, newval)
else :
logging.warn("NO HANDLER for %s", k)
except :
logging.warn("Coulnd't compare %s exif property for %s", k, mainI)
else :
newval = metas[pathI][k].value
logging.info("Imported property %s : %s", k, newval)
if newval != None :
try :
metas[mainI][k] = newval
write = True
except :
logging.warn("Coulnd't setup %s exif property for %s", k, mainI)
if write :
if "Iptc.Application2.Keywords" in metas[mainI].iptc_keys:
metas[mainI]["Iptc.Application2.Keywords"] = handler_mergeList(metas[mainI]["Iptc.Application2.Keywords"].value, exiv_changed_keywords)
logging.info("Writing properties to %s", mainI)
if doit :
metas[mainI].write()
for f in sameImages[1:] :
logging.info("Removing %s", f)
if doit :
os.remove(f)
for m in metas.keys():
del(metas[m])
del(metas)
def cleanDir(folder, doit = False, quick = True):
logging.info("Cleaning %s", folder)
samef = compareImagesFolder(folder, quick = True)
for s in samef :
mergeExivMetadata(s, doit)
del(samef)
for f in os.listdir(folder):
p = os.path.join(folder, f)
if os.path.isdir(p):
logging.debug("Testing %s", p)
cleanDir(p, doit, quick = True)
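if __name__ == "__main__":
    # Illustrative sketch only: dry-run duplicate detection on a placeholder folder.
    # Passing doit=True would actually merge metadata and delete duplicate files.
    logging.basicConfig(level=logging.INFO)
    cleanDir("/tmp/photos", doit=False, quick=True)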
| pzia/keepmydatas | src/KmdImages.py | Python | mit | 7,111 |
import pykka
from rx.subjects import *
from TorrentPython.DownloadManager import *
from TorrentPython.RoutingTable import *
class DownloaderActor(pykka.ThreadingActor):
def __init__(self, downloader):
super(DownloaderActor, self).__init__()
self.downloader = downloader
self.download_manager = DownloadManager(downloader)
def on_receive(self, message):
return message.get('func')(self)
def from_start(self):
pass
def from_stop(self):
pass
class Downloader(Subject):
def __init__(self, client_id, metainfo, path, routing_table=None):
super(Downloader, self).__init__()
self.client_id = client_id
self.metainfo = metainfo
self.path = path
self.routing_table = routing_table or RoutingTable.INITIAL_ROUTING_TABLE
self.actor = DownloaderActor.start(self)
def __del__(self):
self.destroy()
def destroy(self):
if self.actor.is_alive():
self.actor.stop()
def start(self):
self.actor.tell({'func': lambda x: x.from_start()})
def stop(self):
self.actor.tell({'func': lambda x: x.from_stop()})
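# Usage sketch (illustrative; client_id and metainfo come from elsewhere in
# TorrentPython and the download path below is a placeholder):
#
#     downloader = Downloader(client_id, metainfo, '/tmp/downloads')
#     downloader.start()
#     ...
#     downloader.stop()
#     downloader.destroy()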
| reignofmiracle/RM_Torrent | TorrentPython/TorrentPython/Downloader.py | Python | mit | 1,176 |
'''
Created by auto_sdk on 2014-12-17 17:22:51
'''
from top.api.base import RestApi
class ItemUpdateDelistingRequest(RestApi):
def __init__(self,domain='gw.api.taobao.com',port=80):
RestApi.__init__(self,domain, port)
self.num_iid = None
def getapiname(self):
return 'taobao.item.update.delisting'
| CooperLuan/devops.notes | taobao/top/api/rest/ItemUpdateDelistingRequest.py | Python | mit | 318 |
# -*- coding: utf-8 -*-
# GMate - Plugin Based Programmer's Text Editor
# Copyright © 2008 Alexandre da Silva / Carlos Antonio da Silva
#
# This file is part of Gmate.
#
# See LICENTE.TXT for licence information
import os
import gnomevfs
from datetime import datetime
def get_file_info(uri):
"""Return the File information if uri exists"""
if uri is not None and gnomevfs.exists(uri):
return gnomevfs.get_file_info(uri)
return False
def is_uri_dir(uri):
"""Checks if given uri is a dir"""
file_info = get_file_info(uri)
if file_info:
return is_dir(file_info)
return False
def is_dir(file_info):
"""Checks to see if the file is a directory."""
if file_info is not None:
return file_info.type == gnomevfs.FILE_TYPE_DIRECTORY
return False
def is_file(file_info):
"""Checks to see if the file is a directory."""
if file_info is not None:
return file_info.type != gnomevfs.FILE_TYPE_DIRECTORY
return False
def is_hidden(file_info):
"""Checks to see if the file is hidden."""
if file_info is not None:
return file_info.name.startswith(u'.') or file_info.name.endswith(u'~')
return False
def is_hidden_dir(file_info):
"""Checks to see if the file is a hidden directory."""
return is_dir(file_info) and is_hidden(file_info)
def is_hidden_file(file_info):
"""Checks to see if the file is a hidden file."""
return not is_dir(file_info) and is_hidden(file_info)
def is_visible_dir(file_info):
"""Checks to see if the file is a visible directory."""
return is_dir(file_info) and not is_hidden(file_info)
def is_visible_file(file_info):
"""Checks to see if the file is a visible file."""
return not is_dir(file_info) and not is_hidden(file_info)
def get_user_home_uri():
"""Gets a URI pointing to the user's home directory '~'."""
return gnomevfs.URI(u'file://%s' % os.path.expanduser(u'~'))
def get_mime_type(uri):
"""Gets the mime type of given file uri"""
return gnomevfs.get_mime_type(uri)
def get_path_from_uri(uri):
return gnomevfs.get_local_path_from_uri(uri)
def get_last_modification(uri):
"""Gigen a file uri return the last modification date"""
file_info = get_file_info(uri)
if file_info:
return datetime.fromtimestamp(file_info.mtime)
| lexrupy/gmate-editor | GMATE/files.py | Python | mit | 2,328 |
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault('DJANGO_SETTINGS_MODULE',
'video_gallery.tests.south_settings')
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| bitmazk/cmsplugin-video-gallery | manage.py | Python | mit | 294 |
# python -m unittest discover
import unittest
from datetime import datetime
from tasks import old_issues as c
class TestCloseOldIssue(unittest.TestCase):
def test_is_closed_issue(self):
self.assertEquals(c.is_closed({'closed_at': None}), False)
self.assertEquals(c.is_closed({'closed_at': "2014-10-10T00:09:51Z"}), True)
def test_is_pull_request(self):
self.assertEquals(c.is_pull_request({}), False)
self.assertEquals(c.is_pull_request({'pull_request': {}}), True)
def test_has_milestone(self):
self.assertEquals(c.has_milestone({'milestone': None}), False)
self.assertEquals(c.has_milestone({'milestone': "v1.1"}), True)
def test_is_old_issue(self):
self.assertEquals(c.is_old_issue(datetime(2000, 1, 1), now=datetime(2000, 1, 9), close_inactive_after=10), False)
self.assertEquals(c.is_old_issue(datetime(2000, 1, 1), now=datetime(2000, 1, 11), close_inactive_after=10), False)
self.assertEquals(c.is_old_issue(datetime(2000, 1, 1), now=datetime(2000, 1, 12), close_inactive_after=10), True)
def test_has_labels_preventing_close(self):
self.assertEquals(c.has_labels_preventing_close({
'labels': [{
'name': 'bug'
}]
}, ['in progress', 'ready', 'high priority']), False)
self.assertEquals(c.has_labels_preventing_close({}, ['in progress', 'ready', 'high priority']), False)
self.assertEquals(c.has_labels_preventing_close({ 'labels': [] }, ['in progress', 'ready', 'high priority']), False)
self.assertEquals(c.has_labels_preventing_close({
'labels': [{
'name': 'ready'
}]
}, ['in progress', 'ready', 'high priority']), True)
def test_has_comments_preventing_close(self):
self.assertEquals(c.has_comments_preventing_close({
'comments': None
}, 2), False)
self.assertEquals(c.has_comments_preventing_close({
'comments': 0
}, 2), False)
self.assertEquals(c.has_comments_preventing_close({
'comments': 2
}, 2), False)
self.assertEquals(c.has_comments_preventing_close({
'comments': 3
}, 2), True)
def test_has_assignee_preventing_close(self):
self.assertEquals(c.has_assignee_preventing_close({
'assignee': None
}), False)
self.assertEquals(c.has_assignee_preventing_close({
'assignee': {}
}), False)
self.assertEquals(c.has_assignee_preventing_close({
'assignee': { 'login': 'steve' }
}), True)
def test_has_milestone_preventing_close(self):
self.assertEquals(c.has_milestone_preventing_close({}), False)
self.assertEquals(c.has_milestone_preventing_close({
'milestone': None
}), False)
self.assertEquals(c.has_milestone_preventing_close({
'milestone': {}
}), False)
self.assertEquals(c.has_milestone_preventing_close({
'milestone': { 'url': 'https://api.github.com/repos/octocat/Hello-World/milestones/1' }
}), True)
def test_has_events_preventing_close(self):
self.assertEquals(c.has_events_preventing_close(None), False)
self.assertEquals(c.has_events_preventing_close([
{ 'event': 'closed' },
{ 'event': 'labeled' }
]), False)
self.assertEquals(c.has_events_preventing_close([
{ 'event': 'closed' },
{ 'event': 'referenced' }
]), True)
| driftyco/ionitron-issues | tests/test_close_old_issue.py | Python | mit | 3,578 |
# (c) 2016 Douglas Roark
# Licensed under the MIT License. See LICENSE for the details.
from __future__ import print_function, division # Valid as of 2.6
import sys
sys.path.insert(0, '/home/droark/Projects/etotheipi-BitcoinArmory')
import binascii, hashlib, string, os
from collections import namedtuple
from math import ceil, log
from copy import deepcopy
# from armoryengine.BinaryPacker import * # Armory
from armoryengine.BinaryUnpacker import * # Armory
from armoryengine.ArmoryUtils import * # Armory
preBufZero = '\x00' * 4
# Quick-and-dirty enum.
class TxType:
p2pKey = 1
p2pHash = 2
p2sh = 3
multiSig = 4
opReturn = 5
unknownTx = 6
# Transaction opcodes. Include only the ones we care about.
OP_0 = '\x00'
OP_1 = '\x51'
OP_2 = '\x52'
OP_3 = '\x53'
OP_RETURN = '\x6a'
OP_DUP = '\x76'
OP_EQUAL = '\x87'
OP_EQUALVERIFY = '\x88'
OP_HASH160 = '\xa9'
OP_CHECKSIG = '\xac'
OP_CHECKMULTISIG = '\xae'
# OP_CHECKSIG hashtype values (https://en.bitcoin.it/wiki/OP_CHECKSIG)
NULL_BYTE = '\x00' # A few sig/pubKey combos use a null byte.
SIGHASH_ALL = '\x01'
SIGHASH_NONE = '\x02'
SIGHASH_SINGLE = '\x03'
SIGHASH_ANYONECANPAY = '\x80'
# ASN.1 encoding bytes
ASN1_INTEGER = '\x02'
ASN1_SEQUENCE = '\x30'
# A global variable 'cause I'm being a lazy bastard. :) Used when printing data
# about a weird transaction. Useful for blockchain.info, blockexplorer.com, etc.
curTxHash = '\x00'
# Get a block file name based on a given number (blkXXXXX.dat).
# Input: A number between 0-99,999.
# Output: Block file name based on the input (e.g., blk00008.dat).
def getBCBlkFilename(fileNum):
'''Create a Bitcoin block data file name based on the incoming number.'''
if(fileNum < 0 or fileNum > 99999):
print("Bad blkXXXXX.dat input number. Defaulting to blk99999.dat.")
fileNum = 99999
blkFile = os.path.join(BLKFILE_DIR, 'blk%05d.dat' % fileNum)
return blkFile
# Read block header values and get the block header 2xSHA256 hash. The block
# header values are as follows, in the given order, and also returned in the
# same order. (The block header hash is returned last.)
# 1)Block header version (4 bytes - Little endian)
# 2)Previous block header's hash (32 bytes - Big endian)
# 3)Block transactions' merkle root (32 bytes - Big endian)
# 4)Block timestamp (4 bytes - Little endian)
# 5)Block difficulty "bits" (4 bytes - Little endian)
# 6)Block nonce (4 bytes - Little endian)
# Input: Raw data pointing to a block header.
# Output: The individual block header pieces, and the block header hash.
def getBlkHdrValues(header):
'''Get the block header values & hash. Will read the data itself.'''
# Get the block hash (endian-flipped result of 2xSHA256 block header
# hash), then get the individual block pieces and return everything.
blkHdrData = header.read(80)
blkHdrHash = hash256(blkHdrData) # BE
blkHdrUnpack = BinaryUnpacker(blkHdrData)
blkVer = blkHdrUnpack.get(UINT32) # LE
prevBlkHash = blkHdrUnpack.get(BINARY_CHUNK, 32) # BE
blkMerkleRoot = blkHdrUnpack.get(BINARY_CHUNK, 32) # BE
blkTimestamp = blkHdrUnpack.get(UINT32) # LE
blkBits = blkHdrUnpack.get(UINT32) # LE
blkNonce = blkHdrUnpack.get(UINT32) # LE
return (blkVer, prevBlkHash, blkMerkleRoot, blkTimestamp, blkBits, \
blkNonce, blkHdrHash)
# Look in a BinaryUnpacker object with a transaction input item and gets the
# pieces. The transaction input includes, in the following order:
# 1)A 2xSHA256 hash of a transaction being used as an input. (32 bytes - BE)
# 2)The index of the referenced output in the referenced trans. (4 bytes - LE)
# 3)Transaction input's script length. (VAR_INT - Little endian)
# 4)Transaction input's script. (VAR_LEN - Big endian)
# 5)Sequence # (usually 0xFFFFFFFF, usually irrelevant). (4 bytes - LE)
# Input: A BinaryUnpacker object with the transaction input.
# Output: The Tx input's individual objects and the TxIn binary string.
def getTxIn(txUnpack):
'''Function that unpacks the items inside a transaction input.'''
txStartPos = txUnpack.getPosition()
# Get the individual Tx pieces.
txInPrevHash = txUnpack.get(BINARY_CHUNK, 32) # BE
txInPrevTxOutHashIdx = txUnpack.get(UINT32)
txInScrLen = txUnpack.get(VAR_INT)
txInScr = txUnpack.get(BINARY_CHUNK, txInScrLen)
txInSeqNum = txUnpack.get(UINT32)
# While we're here, let's get the Tx binary string itself.
txLen = txUnpack.getPosition() - txStartPos
txUnpack.rewind(txLen)
txInStr = txUnpack.get(BINARY_CHUNK, txLen)
return (txInPrevHash, txInPrevTxOutHashIdx, txInScrLen, txInScr, \
txInSeqNum, txInStr)
# Look in a BinaryUnpacker object with a transaction output item and gets the
# pieces. The transaction output includes, in the following order:
# 1)The amount sent in the transaction. (8 bytes - LE)
# 2)Transaction output's script length. (VAR_INT - LE)
# 3)Transaction output's script. (VAR_LEN - BE)
# Input: A BinaryUnpacker object with the transaction input.
# Output: The Tx output's individual objects and the TxOut binary string.
def getTxOut(txUnpack):
'''Function that unpacks the items inside a transaction output.'''
txStartPos = txUnpack.getPosition()
# Get the individual Tx pieces.
txOutVal = txUnpack.get(UINT64)
txOutScrLen = txUnpack.get(VAR_INT)
txOutScr = txUnpack.get(BINARY_CHUNK, txOutScrLen)
# While we're here, let's get the Tx binary string itself.
txLen = txUnpack.getPosition() - txStartPos
txUnpack.rewind(txLen)
txOutStr = txUnpack.get(BINARY_CHUNK, txLen)
return (txOutVal, txOutScrLen, txOutScr, txOutStr)
# Look in a BinaryUnpacker object with a transaction item and gets the pieces.
# The transaction includes, in the following order:
# 1)Transaction version number. (4 bytes - Little endian)
# 2)Number of transaction inputs. (VAR INT - LE)
# 3)Transaction inputs. (VAR_LEN - Big endian)
# 4)Number of transaction outputs. (VAR INT - LE)
# 5)Transaction outputs. (VAR_LEN - BE)
# 6)Transaction lock time. (4 bytes - LE)
# Input: A BinaryUnpacker object with the transaction.
# Output: The transaction's individual objects, and the 2xSHA256 hash of the
# transaction. The inputs & outputs will be returned in lists.
def getTxObj(txUnpack):
'''Function that unpacks the items inside a transaction.'''
txInList = []
txOutList = []
txInStr = b''
txOutStr = b''
unpackStartPos = txUnpack.getPosition()
# Get the Tx version and the inputs. Put the inputs in a list.
txVer = txUnpack.get(UINT32) # Item 1
numTxIn = txUnpack.get(VAR_INT) # Item 2
txInCtr = numTxIn
while(txInCtr > 0):
txInPrevHash, txInPrevTxOutHashIdx, txInScrLen, txInScr, txInSeqNum, \
txInStr = getTxIn(txUnpack) # Item 3
txInList.append(txInStr)
txInCtr -= 1
# Get the Tx outputs and put them in a list.
numTxOut = txUnpack.get(VAR_INT) # Item 4
txOutCtr = numTxOut
while(txOutCtr > 0):
txOutVal, txOutScrLen, txOutScr, txOutStr = getTxOut(txUnpack) # Item 5
txOutList.append(txOutStr)
txOutCtr -= 1
# Get the Tx lock time.
txLockTime = txUnpack.get(UINT32) # Item 6
# Because the reference Bitcoin client currently tolerates non-canonical
# VAR_INT objects, we're not allowed to recreate a hash from the individual
# Tx elements. It's a possible Tx malleability attack. Once the client
# disallows non-canonical VAR_INTs, we can hash the pieces. 'Til then, we
# must rewind and hash the entire Tx.
txLen = txUnpack.getPosition() - unpackStartPos
txUnpack.rewind(txLen)
txStr = txUnpack.get(BINARY_CHUNK, txLen)
txHash = hash256(txStr)
return (txVer, numTxIn, txInList, numTxOut, txOutList, txLockTime, txHash)
# Function that determines if a BinaryUnpacker object contains an ECDSA public
# key, as defined by X9.62. The function only determines if the format is
# correct, not if the key is actually valid. The format is:
# Compressed key - (0x02 or 0x03) + 32 bytes
# Uncompressed key - 0x04 + 64 bytes
# Input: BinaryUnpacker object pointing to a supposed ECDSA public key. The
# object will be rewound to its starting position at the end.
# Output: 0 if the key is invalid, or the key length if the key is valid.
def isPubKey(pkIn):
'''Determine if a chunk of data is an ECDSA public key (X9.62 encoding).'''
retVal = 0
rewindVal = 0 # Here only so that future changes are made obvious.
# There must be at least 33 bytes left to read.
if(pkIn.getRemainingSize() >= 33):
initByte = pkIn.get(BINARY_CHUNK, 1)
rewindVal += 1
if(initByte == '\x02' or initByte == '\x03'): # Compressed key
retVal = 33
elif(initByte == '\x04'): # Uncompressed key
# The previous length check wasn't adequate for uncompressed keys.
# Make sure there's enough data before confirming the key is valid.
if(pkIn.getRemainingSize() >= 64):
retVal = 65
# Rewind and return.
pkIn.rewind(rewindVal)
return retVal
# Function that determines if a hash byte is a valid SIGHASH byte. We'll also
# accept NULL (i.e., 0x00) byte. See https://en.bitcoin.it/wiki/OP_CHECKSIG for
# more details.
# Input: A byte to check.
# Output: True if a byte's valid, False if not.
def isValidSigHashByte(inByte):
'''Determine if a byte is a valid SIGHASH byte.'''
retVal = False
# HACK ALERT: Some scriptSig objects have a NULL byte present instead of a
# proper SIGHASH byte. We'll accept it.
if(inByte == NULL_BYTE or inByte == SIGHASH_ALL or inByte == SIGHASH_NONE or
inByte == SIGHASH_SINGLE or inByte == SIGHASH_ANYONECANPAY):
retVal = True
return retVal
# Function that determines if a BinaryUnpacker object contains an ECDSA
# signature or signatures, as defined by X9.62, plus a byte used by OP_CHECKSIG
# to determine how the Tx will be hashed. The function only determines if the
# format is correct, not if the signature is actually valid.
# NB 1: For 256-bit ECDSA, r & s are 32 byte values. However, due to DER
# encoding quirks (and Satoshi quirks), the values can be 30-34 bytes.
# NB 2: This code, strictly speaking, shouldn't handle the initial size byte or
# the SIGHASH byte. A revision for a future HW problem....
# The format is:
# Length of signature (not part of the X9.62 sig - 1 byte - Must be 67-75)
# 0x30 (1 byte - "ASN.1 SEQUENCE" byte)
# Length of "r" & "s" segments (1 byte - Must be 64-70)
# 0x02 (1 byte - "ASN.1 INTEGER" byte)
# Length of "r" variable (1 byte - Must be 30-34)
# "r" variable (31-33 bytes)
# 0x02 (1 byte - "ASN.1 INTEGER" byte)
# Length of "s" variable (1 byte - Must be 30-34)
# "s" variable (31-33 bytes)
# The OP_CHECKSIG "SIGHASH" byte (not part of the X9.62 sig - 1 byte)
# Input: BinaryUnpacker object pointing to a supposed ECDSA signature. The
# object will be rewound to its starting position at the end.
# Output: 0 if the sig's invalid, or the valid sig's overall length (68-76,
#         including the initial length byte and the SIGHASH byte).
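# Worked example (illustrative bytes, not taken from a real transaction): a
# common 71-byte sig pushed by a 0x47 length byte is laid out as
#    47 30 44 02 20 <32-byte r> 02 20 <32-byte s> 01
# i.e. push length 0x47, ASN.1 SEQUENCE, sequence length 0x44, INTEGER, r
# length 0x20, r, INTEGER, s length 0x20, s, and the SIGHASH_ALL byte. For
# that input, isSig() below would return 0x47 + 1 = 72.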
def isSig(sigIn):
'''Determine if a data chunk is an ECDSA signature (X9.62 DER encoding).'''
retVal = 0
rewindVal = 0
sigValid = False
chunkSize = sigIn.getRemainingSize()
# Signatures can be 67-75 bytes. We need at least 1 byte to read, at which
# point the signature will help us determine if we have enough data to read.
if(chunkSize >= 1):
initByte = sigIn.get(BINARY_CHUNK, 1)
sigSize = sigIn.getRemainingSize()
rewindVal += 1
if(initByte >= '\x43' and initByte <= '\x4b' and sigSize >= binary_to_int(initByte)):
if(sigIn.getRemainingSize() >= binary_to_int(initByte)):
asn1SeqByte = sigIn.get(BINARY_CHUNK, 1)
rewindVal += 1
if(asn1SeqByte == ASN1_SEQUENCE):
lenByte1 = sigIn.get(BINARY_CHUNK, 1)
rewindVal += 1
if(lenByte1 >= '\x40' and lenByte1 <= '\x48'):
asn1IntByte1 = sigIn.get(BINARY_CHUNK, 1)
rewindVal += 1
if(asn1IntByte1 == ASN1_INTEGER):
rLen = sigIn.get(BINARY_CHUNK, 1)
rewindVal += 1
if(rLen >= '\x1e' and rLen <= '\x22'):
sigIn.advance(binary_to_int(rLen))
rewindVal += binary_to_int(rLen)
asn1IntByte2 = sigIn.get(BINARY_CHUNK, 1)
rewindVal += 1
if(asn1IntByte2 == ASN1_INTEGER):
sLen = sigIn.get(BINARY_CHUNK, 1)
rewindVal += 1
if(sLen >= '\x1e' and sLen <= '\x22'):
sigIn.advance(binary_to_int(sLen))
rewindVal += binary_to_int(sLen)
lastByte = sigIn.get(BINARY_CHUNK, 1)
rewindVal += 1
if(isValidSigHashByte(lastByte) == True):
sigValid = True
# Do final cleanup. Rewind and, if necessary, set the sig size.
sigIn.rewind(rewindVal)
if(sigValid == True):
retVal = binary_to_int(initByte) + 1
return retVal
# "Shell" function that acts as a starting point for figuring out if a chunk of
# data is an ECDSA, DER-encoded signature. This can handle chunks of data that
# may actually have multiple signatures. If we get up to 3 sigs and there's
# still more data, we'll say the data's valid and assume the user will check the
# remaining data to see if it's valid for any particular purpose.
# Input: BinaryUnpacker object pointing to a supposed ECDSA signature (it'll be
# rewound to its starting position at the end) and a boolean flag
# indicating whether or not the data may be multi-sig.
# Output: 0 if there are no valid sigs, or the number of bytes (from the
# beginning) that contain actual sigs.
def isSigShell(sigIn, isMultiSig):
   '''Determine if a data chunk contains a valid ECDSA signature (X9.62 DER
   encoding).'''
validSigs = True
numSigs = 0
rewindVal = 0
# For now, assume valid scripts can have only up to 3 sigs.
maxNumSigs = 1
if(isMultiSig == True):
maxNumSigs = 3
   # While there's data to be read, we believe there are valid sigs, and we
   # haven't hit the maximum number of signatures,
   while(sigIn.getRemainingSize() != 0 and numSigs < maxNumSigs and validSigs == True):
readByte = sigIn.get(BINARY_CHUNK, 1)
rewindVal += 1
if(readByte >= '\x43' and readByte <= '\x4b'):
sigIn.rewind(1)
sigAdv = isSig(sigIn)
if(sigAdv != 0):
rewindVal += sigAdv
sigIn.advance(sigAdv)
numSigs += 1
else:
validSigs = False
else:
sigIn.rewind(1)
rewindVal -= 1
validSigs = False
# Rewind to where we started and return how many bytes are part of actual
# sigs.
sigIn.rewind(rewindVal)
return rewindVal
# Process TxOut from a Tx. It basically unpacks a TxOut and allows other functs
# to perform more specific tasks. Use it as a starting point!
# Input: The TxOut object, the TxOut block's hash, the TxOut block's position,
# the TxOut Tx's index, the TxOut index, and the file where unknown
# TxOuts will go.
# Output: None
def processTxOut(txOut, blkHash, blkPos, txIdx, txOutIdx, txOutFile):
'''Function used to start processing of a TxOut object.'''
advanceVal = 0
txOutUnpack = BinaryUnpacker(txOut)
# Simple sanity check before proceeding.
txOutLen = len(txOut)
if(txOutLen > 0):
txOutVal, txOutScrLen, txOutScr, txOutStr = getTxOut(txOutUnpack)
scrType = processTxOutScr(txOutScr, blkHash, blkPos, txIdx, \
txOutIdx, txOutFile)
# Determines the type of an incoming TxOut script. If it's not any of the five
# standard types (pay-to-pub-key, pay-to-pub-hash, pay-to-script-hash (P2SH -
# BIP16), multisig (BIP11), or OP_RETURN (BC-Core v0.9+)), it's saved to a file.
# Input: The TxOut object's script, the TxOut block's hash, the TxOut block's
# position, the TxOut Tx's index, the TxOut index, and the file where
# unknown TxOuts will go.
# Output: The TxOut type, as defined by class TxType.
def processTxOutScr(txOutScr, blkHash, blkPos, txIdx, txOutIdx, txOutFile):
'''Function processing a TxOut script.'''
# Proceed only if there's data to read.
txOutScrUnpack = BinaryUnpacker(txOutScr)
retVal = TxType.unknownTx
txOutScrSize = txOutScrUnpack.getRemainingSize()
if(txOutScrSize > 0):
# Read the initial byte and determine what TxOut type it is.
initByte = txOutScrUnpack.get(BINARY_CHUNK, 1)
# 0x21/0x41 = Pay2PubKey
if(initByte == '\x21' or initByte == '\x41'):
# Make sure it's a valid pub key before declaring it valid.
pkLen = isPubKey(txOutScrUnpack)
if(pkLen != 0):
retVal = TxType.p2pKey
# OP_DUP = Pay2PubKeyHash
elif(initByte == OP_DUP):
# HACK ALERT: Some bright bulb has created OP_* TxOuts that have
# nothing but the OP_* code. Check the remaining size upfront.
# (Checking after every read is more robust, really. I'm just lazy
# and don't want to retrofit this chunk of code. :) )
if(txOutScrUnpack.getRemainingSize() > 0):
hashByte = txOutScrUnpack.get(BINARY_CHUNK, 1)
if(hashByte == OP_HASH160):
hashSize = txOutScrUnpack.get(BINARY_CHUNK, 1)
hashRemSize = txOutScrUnpack.getRemainingSize()
if(hashSize == '\x14' and hashRemSize >= binary_to_int(hashSize)):
txOutScrUnpack.advance(binary_to_int(hashSize))
eqVerByte = txOutScrUnpack.get(BINARY_CHUNK, 1)
if(eqVerByte == OP_EQUALVERIFY):
checkSigByte = txOutScrUnpack.get(BINARY_CHUNK, 1)
if(checkSigByte == OP_CHECKSIG):
retVal = TxType.p2pHash
# OP_HASH160 = Pay2ScriptHash
elif(initByte == OP_HASH160):
hashSize = txOutScrUnpack.get(BINARY_CHUNK, 1)
hashRemSize = txOutScrUnpack.getRemainingSize()
if(hashSize == '\x14' and hashRemSize >= binary_to_int(hashSize)):
txOutScrUnpack.advance(binary_to_int(hashSize))
eqByte = txOutScrUnpack.get(BINARY_CHUNK, 1)
if(eqByte == OP_EQUAL):
retVal = TxType.p2sh
# OP_1/2/3 = MultiSig
elif(initByte == OP_1 or initByte == OP_2 or initByte == OP_3):
validKeys = True
readByte = 0
numKeys = 0
# HACK ALERT 1: Some scripts are weird and initially appear to be
# multi-sig but really aren't. We should compensate. One particular
# way is to require at least 36 bytes (assume 1-of-1 w/ compressed
# key) beyond the initial byte.
#
# HACK ALERT 2: There are some multisig TxOuts that, for unknown
# reasons have things like compressed keys that where the first byte
# is 0x00, not 0x02 or 0x03. For now, we just mark them as unknown
# Tx and move on.
if(txOutScrUnpack.getRemainingSize() >= 36):
readByte = txOutScrUnpack.get(BINARY_CHUNK, 1)
while((readByte == '\x21' or readByte == '\x41') and numKeys < 3
and validKeys == True):
pkLen = isPubKey(txOutScrUnpack)
if(pkLen != 0):
txOutScrUnpack.advance(pkLen)
numKeys += 1
readByte = txOutScrUnpack.get(BINARY_CHUNK, 1)
else:
validKeys = False
else:
validKeys = False
if(validKeys == True):
if((readByte == OP_1 or readByte == OP_2 or readByte == OP_3) \
and binary_to_int(initByte) <= binary_to_int(readByte)):
cmsByte = txOutScrUnpack.get(BINARY_CHUNK, 1)
if(cmsByte == OP_CHECKMULTISIG):
retVal = TxType.multiSig
# OP_RETURN = Arbitrary data attached to a Tx.
# Official as of BC-Core 0.9. https://bitcoinfoundation.org/blog/?p=290
# and https://github.com/bitcoin/bitcoin/pull/2738 have the details of
# the initial commit, with https://github.com/bitcoin/bitcoin/pull/3737
# having the revision down to 40 bytes.
elif(initByte == OP_RETURN):
# If the 1st byte is OP_RETURN, as of BC-Core v0.9, there can be
# arbitrary data placed afterwards. This makes the TxOut immediately
# prunable, meaning it can never be used as a TxIn. (It can still be
# spent, mind you.) The final BC-Core 0.9 only accepts <=40 bytes,
# but preview versions accepted <=80. In theory, any amount of data
# is valid, but miners won't accept non-standard amounts by default.
#
# Anyway, since it's arbitrary, we don't care what's present and
# just assume it's valid.
retVal = TxType.opReturn
# Everything else is weird and should be reported.
else:
print("DEBUG: Block {0} - Tx Hash {1}: 1st BYTE (TxOut) IS " \
"TOTALLY UNKNOWN!!! BYTE={2}".format(blkPos, \
binary_to_hex(curTxHash, endOut=BIGENDIAN), \
binary_to_hex(initByte)))
# Write the script to the file if necessary.
if(retVal == TxType.unknownTx):
print("TxOut: {0}".format(binary_to_hex(txOutScr)), \
file=txOutFile)
print("Block Number: {0}".format(blkPos), file=txOutFile)
print("Block Hash: {0}".format(binary_to_hex(blkHash)), \
file=txOutFile)
print("Tx Hash: {0}", binary_to_hex(curTxHash, endOut=BIGENDIAN), \
file=txOutFile)
print("Tx Index: {0}", txIdx, file=txOutFile)
print("TxOut Index: {0}", txOutIdx, file=txOutFile)
print("---------------------------------------", file=txOutFile)
return retVal
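# For reference, the standard TxOut script patterns recognized above (sketch of
# the usual encodings; push lengths are shown in hex):
#    P2PK:      <21|41> <pubKey> OP_CHECKSIG
#    P2PKH:     OP_DUP OP_HASH160 14 <20-byte hash> OP_EQUALVERIFY OP_CHECKSIG
#    P2SH:      OP_HASH160 14 <20-byte hash> OP_EQUAL
#    Multisig:  OP_m <pubKeys> OP_n OP_CHECKMULTISIG (m <= n, both 1-3 here)
#    OP_RETURN: OP_RETURN <arbitrary data>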
# Process TxIn from a Tx. It basically unpacks a TxIn and allows other functs to
# perform more specific tasks. Coinbase TxIns are ignored. Use this function as
# a starting point!
# Input: The TxIn object, the TxIn block's hash, the TxIn block's position, the
# TxIn Tx's index, the TxIn index, and the file where unknown TxIns will
# go.
# Output: None
def processTxIn(txIn, blkHash, blkPos, txIdx, txInIdx, txInFile):
'''Function used to start processing of a TxIn object.'''
advanceVal = 0
txInUnpack = BinaryUnpacker(txIn)
# Simple sanity check before proceeding.
txInLen = len(txIn)
if(txInLen > 0):
txInPrevHash, txInPrevTxOutHashIdx, txInScrLen, txInScr, txInSeqNum, \
txInStr = getTxIn(txInUnpack)
# Proceed only if there's a script and if this isn't a coinbase TxIn.
# For now, assume it's coinbase if the hash of the referenced TxOut
# is a 32-byte NULL object.
if(txInScrLen != 0 and txInPrevHash != ('\x00' * 32)):
scrType = processTxInScr(txInScr, blkHash, blkPos, txIdx, \
txInIdx, txInFile)
# Process TxIn from a Tx. Determines which TxIn type it is. If it's not any of
# the four standard types (pay-to-pub-key, pay-to-pub-hash, pay-to-script-hash
# (P2SH - BIP16), multisig (BIP11)), or if it's a P2SH script, it's saved to a
# file. (The OP_RETURN script type has no use for TxIns.) P2SH TxIn scripts are
# saved because the scripts inside the TxIn, as of Apr. 2014, are never
# standard TxOut scripts.
# Input: The TxIn script, the TxIn block's hash, the TxIn block's position,
#        the TxIn Tx's index, the TxIn index, and the file where unknown TxIns will go.
# Output: The TxIn type, as defined by class TxType.
def processTxInScr(txInScr, blkHash, blkPos, txIdx, txInIdx, txInFile):
'''Function processing a TxIn script.'''
# Proceed only if there's data to read.
txInScrUnpack = BinaryUnpacker(txInScr)
retVal = TxType.unknownTx
txInScrSize = txInScrUnpack.getRemainingSize()
if(txInScrSize > 0):
# Read the initial byte and determine what TxOut type it is.
initByte = txInScrUnpack.get(BINARY_CHUNK, 1)
# Except for multisig and possibly OP_RETURN, all should start with a
# sig.
if(initByte >= '\x43' and initByte <= '\x4b'):
# Make sure it's a valid pub key before declaring it valid.
# CATCH: We'll rewind because the first byte of the sig isn't
# repeated, meaning the stack uses the first byte of the sig to push
# the rest of the sig onto the stack. The rewind isn't necessary but
# I'd like to keep the sig verification whole.
txInScrUnpack.rewind(1)
sigLen = isSigShell(txInScrUnpack, False)
if(sigLen != 0):
txInScrUnpack.advance(sigLen)
if(txInScrUnpack.getRemainingSize() == 0):
retVal = TxType.p2pKey
else:
readByte = txInScrUnpack.get(BINARY_CHUNK, 1)
if(readByte == '\x21' or readByte == '\x41'):
pkLen = isPubKey(txInScrUnpack)
if(pkLen != 0):
retVal = TxType.p2pHash
# OP_0 = P2SH or MultiSig
elif(initByte == OP_0):
numBytesAdv = isSigShell(txInScrUnpack, True)
# Proceed only if there was at least 1 valid sig.
if(numBytesAdv != 0):
txInScrUnpack.advance(numBytesAdv)
numBytesRem = txInScrUnpack.getRemainingSize()
if(numBytesRem != 0):
# Confirm that the remaining bytes are a standard script
# before marking this as a P2SH script. (There are P2SH
# scripts that aren't standard, so we'll mark the entire
# script as unknown and save it.) In a fully robust system,
# we'd Hash160 and compare against the Hash160 in the
# ref'd TxOut to confirm that this is valid.
# NB: In the real world, it looks like all scripts don't
# match the normal TxOut types! Just mark this as P2SH and
# write it out anyway.
# p2shScript = txInScrUnpack.get(BINARY_CHUNK, numBytesRem)
# if(processTxOutScr(p2shScript, blkHash, blkPos, txIdx, \
# txOutIdx, txOutFile) != TxType.unknownTx):
# retVal = TxType.p2sh
# print("HEY, WE GOT A GOOD SCRIPT! {0}".format(binary_to_hex(p2shScript)))
# else:
# print("OH NO, WE HAVE A BAD SCRIPT! {0}".format(binary_to_hex(p2shScript)))
retVal = TxType.p2sh
else:
# We have multi-sig.
retVal = TxType.multiSig
# We have an unknown script type. We'll report it. There's a chance it
# refers to an OP_RETURN TxOut script but we'll ignore that possibility
# for now in order to keep things simple.
else:
print("DEBUG: Block {0}: 1st BYTE (TxIn) IS TOTALLY UNKNOWN!!! " \
"BYTE={1}".format(blkPos, binary_to_hex(initByte)))
# If a script is unknown or is P2SH, write it out here.
# NB: After running this code several times, it appears that the multisig
# code uses keys with invalid first bytes. I'm not sure what's going on. The
# scripts seem valid otherwise.
# if(retVal == TxType.unknownTx or retVal == TxType.p2sh):
# if(retVal == TxType.p2sh):
# print("P2SH script")
# else:
# print("Unknown TxIn script")
if retVal == TxType.unknownTx:
print("TxIn: {0}".format(binary_to_hex(txInScr)), \
file=txInFile)
print("Block Number: {0}".format(blkPos), file=txInFile)
print("Block Hash: {0}".format(binary_to_hex(blkHash)), \
file=txInFile)
print("Tx Hash: {0}", binary_to_hex(curTxHash, endOut=BIGENDIAN), \
file=txInFile)
print("Tx Index: {0}", txIdx, file=txInFile)
print("TxIn Index: {0}", txInIdx, file=txInFile)
print("---------------------------------------", file=txInFile)
return retVal
if __name__ == '__main__':
# Variables
# curBlkFile = 138 ################################################################## ONLY IN SPECIAL CIRCUMSTANCES
curBlkFile = 0
numBlks = 0
fileName = getBCBlkFilename(curBlkFile)
# Open a file which will receive the TxOut materials.
txInFilename = "prob3TxIn.txt"
txInFile = open(txInFilename, "wt")
txOutFilename = "prob3TxOut.txt"
txOutFile = open(txOutFilename, "wt")
# Write the starts of the TxOut/TxIn files.
print("Unknown TxOuts", file=txOutFile)
print("---------------------------------------", file=txOutFile)
print("Unknown/P2SH TxIns", file=txInFile)
print("---------------------------------------", file=txInFile)
# Iterate through each block by going through each file. Note that the code
# assumes blocks are in order. In the future, this may not be case.
while(os.path.isfile(fileName) is True):
# if(os.path.isfile(fileName) == True): # SPECIAL DEBUG: ONLY 1 FILE PARSED
print("DEBUG: File blk%05d.dat exists." % curBlkFile)
# While reading the files, read data only as needed, and not all at
# once. More I/O but it keeps memory usage down.
with open(fileName, "rb") as rawData:
try:
# Read the magic bytes (4 bytes) & block size (4 bytes). Proceed
# only if there's data to read.
readData = rawData.read(8)
while(readData != ""):
# If the magic bytes are legit, proceed.
readUnpack = BinaryUnpacker(readData)
read_magic = readUnpack.get(BINARY_CHUNK, 4)
if(read_magic == MAGIC_BYTES):
# Get the block header data.
blockLen = readUnpack.get(UINT32)
blockVer, prevBlkHash, merkRoot, timestamp, bits, \
nonce, blkHdrHash = getBlkHdrValues(rawData)
# Get the transaction data and process it.
rawTxData = rawData.read(blockLen - 80)
txUnpack = BinaryUnpacker(rawTxData)
txVarInt = txUnpack.get(VAR_INT)
txIdx = 0
# Process all Tx objects.
while(txVarInt > 0):
txVer, numTxIn, txInList, numTxOut, txOutList, \
txLockTime, txHash = getTxObj(txUnpack)
curTxHash = txHash # Global hack 'cause I'm lazy.
# Process every TxOut & TxIn in a Tx.
txOutIdx = 0
txInIdx = 0
for txOutObj in txOutList:
processTxOut(txOutObj, blkHdrHash, numBlks, \
txIdx, txOutIdx, txOutFile)
txOutIdx += 1
for txInObj in txInList:
processTxIn(txInObj, blkHdrHash, numBlks, \
txIdx, txInIdx, txInFile)
txInIdx += 1
txIdx += 1
txVarInt -= 1
# Increment the # of blocks we've processed.
numBlks += 1
# If magic bytes aren't magic, assume we've hit the
# end. In theory, Bitcoin-Qt pre-buffers w/ 0s, but
# in practice, the pre-buffering seems to be anything.
else:
break
# Before looping back, try reading data again.
readData = rawData.read(8)
# Always close a file once it's done.
finally:
rawData.close()
# Get ready to read the next file.
curBlkFile += 1
fileName = getBCBlkFilename(curBlkFile)
txInFile.close()
txOutFile.close()
| droark/Misc-Blockchain-Parse-Tools | Find-Unusual-Scripts.py | Python | mit | 33,452 |
'''
Online link spider test
'''
from __future__ import print_function
from future import standard_library
standard_library.install_aliases()
from builtins import next
import unittest
from unittest import TestCase
import time
import sys
from os import path
sys.path.append(path.dirname(path.dirname(path.abspath(__file__))))
import scrapy
import redis
from redis.exceptions import ConnectionError
import json
import threading, time
from crawling.spiders.link_spider import LinkSpider
from scrapy.utils.project import get_project_settings
from twisted.internet import reactor
from scrapy.crawler import CrawlerRunner
from kafka import KafkaConsumer
class CustomSpider(LinkSpider):
'''
Overridden link spider for testing
'''
name = "test-spider"
class TestLinkSpider(TestCase):
example_feed = "{\"allowed_domains\":null,\"allow_regex\":null,\""\
"crawlid\":\"abc12345\",\"url\":\"http://dmoztools.net/\",\"expires\":0,\""\
"ts\":1461549923.7956631184,\"priority\":1,\"deny_regex\":null,\""\
"cookie\":null,\"attrs\":null,\"appid\":\"test\",\"spiderid\":\""\
"test-link\",\"useragent\":null,\"deny_extensions\":null,\"maxdepth\":0}"
def setUp(self):
self.settings = get_project_settings()
self.settings.set('KAFKA_TOPIC_PREFIX', "demo_test")
# set up redis
self.redis_conn = redis.Redis(host=self.settings['REDIS_HOST'],
port=self.settings['REDIS_PORT'],
db=self.settings['REDIS_DB'])
try:
self.redis_conn.info()
except ConnectionError:
print("Could not connect to Redis")
# plugin is essential to functionality
sys.exit(1)
# clear out older test keys if any
keys = self.redis_conn.keys("test-spider:*")
for key in keys:
self.redis_conn.delete(key)
# set up kafka to consumer potential result
self.consumer = KafkaConsumer(
"demo_test.crawled_firehose",
bootstrap_servers=self.settings['KAFKA_HOSTS'],
group_id="demo-id",
auto_commit_interval_ms=10,
consumer_timeout_ms=5000,
auto_offset_reset='earliest'
)
time.sleep(1)
def test_crawler_process(self):
runner = CrawlerRunner(self.settings)
d = runner.crawl(CustomSpider)
d.addBoth(lambda _: reactor.stop())
# add crawl to redis
key = "test-spider:dmoztools.net:queue"
self.redis_conn.zadd(key, self.example_feed, -99)
# run the spider, give 20 seconds to see the url, crawl it,
# and send to kafka. Then we kill the reactor
def thread_func():
time.sleep(20)
reactor.stop()
thread = threading.Thread(target=thread_func)
thread.start()
reactor.run()
message_count = 0
m = next(self.consumer)
if m is None:
pass
else:
the_dict = json.loads(m.value)
if the_dict is not None and the_dict['appid'] == 'test' \
and the_dict['crawlid'] == 'abc12345':
message_count += 1
self.assertEquals(message_count, 1)
def tearDown(self):
keys = self.redis_conn.keys('stats:crawler:*:test-spider:*')
keys = keys + self.redis_conn.keys('test-spider:*')
for key in keys:
self.redis_conn.delete(key)
# if for some reason the tests fail, we end up falling behind on
# the consumer
for m in self.consumer:
pass
self.consumer.close()
if __name__ == '__main__':
unittest.main()
| istresearch/scrapy-cluster | crawler/tests/online.py | Python | mit | 3,708 |
# oxAuth is available under the MIT License (2008). See http://opensource.org/licenses/MIT for full text.
# Copyright (c) 2016, Gluu
#
# Author: Yuriy Movchan
#
from org.gluu.model.custom.script.type.user import CacheRefreshType
from org.gluu.util import StringHelper, ArrayHelper
from java.util import Arrays, ArrayList
from org.gluu.oxtrust.model import GluuCustomAttribute
from org.gluu.model.custom.script.model.bind import BindCredentials
import java
class CacheRefresh(CacheRefreshType):
def __init__(self, currentTimeMillis):
self.currentTimeMillis = currentTimeMillis
def init(self, configurationAttributes):
print "Cache refresh. Initialization"
print "Cache refresh. Initialized successfully"
return True
def destroy(self, configurationAttributes):
print "Cache refresh. Destroy"
print "Cache refresh. Destroyed successfully"
return True
# Get bind credentials required to access source server
# configId is the source server
# configurationAttributes is java.util.Map<String, SimpleCustomProperty>
# return None (use password from configuration) or org.gluu.model.custom.script.model.bind.BindCredentials
def getBindCredentials(self, configId, configurationAttributes):
print "Cache refresh. GetBindCredentials method"
return None
# Update user entry before persist it
# user is org.gluu.oxtrust.model.GluuCustomPerson
# configurationAttributes is java.util.Map<String, SimpleCustomProperty>
def updateUser(self, user, configurationAttributes):
print "Cache refresh. UpdateUser method"
attributes = user.getCustomAttributes()
# Add new attribute preferredLanguage
attrPrefferedLanguage = GluuCustomAttribute("preferredLanguage", "en-us")
attributes.add(attrPrefferedLanguage)
# Add new attribute userPassword
attrUserPassword = GluuCustomAttribute("userPassword", "test")
attributes.add(attrUserPassword)
# Update givenName attribute
for attribute in attributes:
attrName = attribute.getName()
if (("givenname" == StringHelper.toLowerCase(attrName)) and StringHelper.isNotEmpty(attribute.getValue())):
attribute.setValue(StringHelper.removeMultipleSpaces(attribute.getValue()) + " (updated)")
return True
def getApiVersion(self):
return 2
| GluuFederation/oxExternal | cache_refresh/sample/SampleScript.py | Python | mit | 2,443 |
import _plotly_utils.basevalidators
class ShowtickprefixValidator(_plotly_utils.basevalidators.EnumeratedValidator):
def __init__(
self, plotly_name="showtickprefix", parent_name="layout.yaxis", **kwargs
):
super(ShowtickprefixValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "ticks"),
role=kwargs.pop("role", "style"),
values=kwargs.pop("values", ["all", "first", "last", "none"]),
**kwargs
)
| plotly/python-api | packages/python/plotly/plotly/validators/layout/yaxis/_showtickprefix.py | Python | mit | 562 |
from allauth.socialaccount import providers
from allauth.socialaccount.providers.base import ProviderAccount
from allauth.socialaccount.providers.oauth2.provider import OAuth2Provider
class ShopifyAccount(ProviderAccount):
pass
class ShopifyProvider(OAuth2Provider):
id = 'shopify'
name = 'Shopify'
account_class = ShopifyAccount
def get_auth_params(self, request, action):
ret = super(ShopifyProvider, self).get_auth_params(request, action)
shop = request.GET.get('shop', None)
if shop:
ret.update({'shop': shop})
return ret
def get_default_scope(self):
return ['read_orders', 'read_products']
def extract_uid(self, data):
return str(data['shop']['id'])
def extract_common_fields(self, data):
# See: https://docs.shopify.com/api/shop
# User is only available with Shopify Plus, email is the only
# common field
return dict(email=data['shop']['email'])
providers.registry.register(ShopifyProvider)
| wli/django-allauth | allauth/socialaccount/providers/shopify/provider.py | Python | mit | 1,032 |
import math
import copy
def print_matrix(matrix):
"""
This function prettyprints a matrix
:param matrix: The matrix to prettyprint
"""
for i in range(len(matrix)):
print(matrix[i])
def transpose(matrix):
"""
This function transposes a matrix
:param matrix: The matrix to transpose
:return: The transposed matrix
"""
num_cols = len(matrix[0])
trans = []
for i in range(num_cols):
temp = []
for row in matrix:
temp.append(row[i])
trans.append(temp)
return trans
def minor_matrix(matrix, row_index, col_index):
"""
This function calculates the minor of a matrix for a given row and column
index. The matrix should be a square matrix, and the row and column
should be positive and smaller than the width and height of the matrix.
:param matrix: The matrix to calculate the minor
:param row_index: The row index of the minor to calculate
:param col_index: The column index of the minor to calculate
:return: The minor for the given row and column
"""
minor = matrix
num_rows = len(matrix)
num_cols = len(matrix[0])
if num_cols != num_rows:
raise ValueError("You should pass a square matrix")
if row_index > num_rows or col_index > num_cols or row_index < 1 or col_index < 1:
raise ValueError("Invalid row or column")
# remove the specified row
minor.pop(row_index - 1)
# remove the specified column
for row in minor:
row.pop(col_index - 1)
return minor
def determinant(matrix):
"""
This function calculates the determinant of a square matrix.
:param m_copy: The matrix to find the determinant
:return: The determinant of the matrix
"""
num_rows = len(matrix)
num_cols = len(matrix[0])
if num_cols != num_rows:
raise ValueError("You should pass a square matrix")
dim = num_cols
det = 0
if dim == 1:
return matrix[0][0]
if dim == 2:
det = matrix[0][0] * matrix[1][1] - matrix[0][1] * matrix[1][0]
return det
for j in range(dim):
m_copy = copy.deepcopy(matrix)
minor = minor_matrix(m_copy, 1, j+1)
d = determinant(minor)
det += matrix[0][j] * d * math.pow(-1, j)
return det
def inverse(matrix):
"""
This function inverts a square matrix. If the matrix is not square,
it returns nothing
:param matrix: The matrix to invert
:return: The inverse of the matrix passed as parameter
"""
num_rows = len(matrix)
num_cols = len(matrix[0])
if num_rows != num_cols:
raise ValueError("You should pass a square matrix")
dim = num_rows
denom = determinant(matrix)
if denom == 0:
raise ValueError("The determinant is 0. Can't invert matrix")
cofactors = [] # the matrix of cofactors, transposed
for i in range(dim):
cofactor_row = []
for j in range(dim):
m_copy = copy.deepcopy(matrix)
minor = minor_matrix(m_copy, j+1, i+1)
minor_det = determinant(minor) * math.pow(-1, i + j)
cofactor_row.append(minor_det)
cofactors.append(cofactor_row)
# multiply every cofactor with 1/denom
scalar_multiply(cofactors, 1 / denom)
return cofactors
def scalar_multiply(matrix, const):
"""
This function makes the scalar multiplication between a matrix and a number.
:param matrix: The matrix to multiply
:param const: The constant number which will multiply the matrix
:return: The result of the multiplication
"""
for i in range(len(matrix)):
for j in range(len(matrix[0])):
matrix[i][j] *= const
return matrix
def multiply(matrix1, matrix2):
"""
This function multiplies two matrices. In order to multiply, it makes sure
the width of matrix1 is the same as the height of matrix2
:param matrix1: Left matrix
:param matrix2: Right matrix
:return: The product matrix of the multiplication
"""
width1 = len(matrix1[0])
height2 = len(matrix2)
if width1 != height2:
raise ValueError("Can't multiply these matrices")
length = len(matrix1)
width = len(matrix2[0])
product_matrix = [] # product_matrix = matrix_A * matrix_B
for i in range(length):
product_row = [] # one row of product_matrix
for j in range(width):
val = 0
for a in range(height2):
val += matrix1[i][a] * matrix2[a][j]
product_row.append(val)
product_matrix.append(product_row)
return product_matrix
def linear_solver(coef, const):
"""
This function solves a system of linear equations of the standard form Ax = B
:param coef: The matrix of coefficients, A
:param const: The matrix of constant terms, B
:returns: A list of the solutions
"""
if len(coef) == 2:
y = (const[0][0] * coef[1][0] - coef[0][0] * const[1][0]) / (-coef[0][0] * coef[1][1] + coef[1][0] * coef[0][1])
x = (const[1][0] - coef[1][1] * y) / coef[1][0]
return [x, y]
return multiply(inverse(coef), const)
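# A minimal usage sketch (assumption: this module may also be run directly;
# the 2x2 system below is illustrative and not part of the original project).
if __name__ == '__main__':
    coef = [[2.0, 1.0], [1.0, 3.0]]    # A
    const = [[5.0], [10.0]]            # B
    print(determinant(coef))           # 5.0
    print_matrix(inverse(coef))        # rows of roughly [[0.6, -0.2], [-0.2, 0.4]]
    print(linear_solver(coef, const))  # [1.0, 3.0], i.e. x = 1, y = 3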
| aziflaj/numberoid | src/matrix/pymatrix.py | Python | mit | 5,173 |
from django.contrib import admin
from .models import User
# Register your models here.
admin.site.register(User) | aaronsnig501/foreign-guides | apps/accounts/admin.py | Python | mit | 113 |
# This is a separate module for parser functions to be added.
# This is being created as static, so only one parser exists for the whole game.
from nota import Nota
from timingpoint import TimingPoint
from tools import *
import random
import math
def get_Name (osufile):
Splitlines = osufile.split('\n')
for Line in Splitlines:
if len(Line) > 0:
if Line.find('Title:', 0, len(Line)) != -1:
title = Line.split(':', 1)
return title[1].replace("\r", "")
def get_PreviewTime (osufile):
Splitlines = osufile.split('\n')
for Line in Splitlines:
if len(Line) > 0:
if Line.find('PreviewTime:', 0, len(Line)) != -1:
time = Line.split(':', 1)
return int(time[1])
def get_Artist (osufile):
Splitlines = osufile.split('\n')
for Line in Splitlines:
if len(Line) > 0:
if Line.find('Artist:', 0, len(Line)) != -1:
artist = Line.split(':', 1)
return artist[1].replace("\r", "")
def get_TimingPoints(osufile):
Lines = osufile.split('\n')
TimingPointString = []
TimingPoints = []
save = False;
for line in Lines:
if len(line) > 2:
if save:
TimingPointString.append(line)
else:
save = False
if line.find("[TimingPoints]") != -1:
save = True
for point in TimingPointString:
# self, offset, mpb, meter, sampleType, sampleSet, volume, inherited, kiai
params = point.split(',')
#print params
offset = float(params[0])
mpb = float(params[1])
meter = int(params[2])
sampleType = int(params[3])
sampleSet = int(params[4])
volume = int(params[5])
inherited = int(params[6])
kiai = int(params[7])
newPoint = TimingPoint(offset, mpb, meter, sampleType, sampleSet, volume, inherited, kiai)
TimingPoints.append(newPoint)
return TimingPoints
def get_NoteList (osufile, sprites, screen_width, screen_height, bpm):
NoteList = []
SplitLines = []
#This function returns a list of notes with all their properties to the user
#Make sure you have a list to receive it
SplitLines = osufile.split('[HitObjects]\r\n', 1)
SplitObjects = SplitLines[1].split('\n')
for Line in SplitObjects:
if len(Line) > 0:
params = Line.split(',')
posx = int(params[0])
posy = int(params[1])
time = int(params[2])
ntype = int(params[3])
IgnoreFirstLine = True
if ntype == 1 or ntype == 5:
nota = Nota(posx, posy, time, sprites[random.randint(0,3)], screen_width, screen_height, 1)
NoteList.append(nota)
elif ntype == 2 or ntype == 6:
## THE GOD LINE
## this.sliderTime = game.getBeatLength() * (hitObject.getPixelLength() / sliderMultiplier) / 100f;
curva = params[5]
repeat = int(params[6])
pixellength = float(params[7])
sliderEndTime = (bpm * (pixellength/1.4) / 100.0)
curveParams = curva.split('|')[1:]
xCoords = []
for i in curveParams:
xCoords.append(int(i.split(':')[0]))
#notai = Nota(posx, posy, time, spritinhotexture, screen_width, screen_height)
#NoteList.append(notai)
numSteps = (int)(math.ceil(sliderEndTime * 0.01))
#print(curveParams)
SpriteValue = random.randint(0,3)
for k in range(numSteps+1):
t = float(k) / (numSteps)
mnx = int(B(xCoords, 0, len(xCoords) - 1, t))
#print("time: " + str(time))
mny = time + (float(k)/float(numSteps)) * float(sliderEndTime)
#print("mnx: " + str(mnx))
#print("t: " + str(t))
if t == 0 or t==1:
notam = Nota(mnx, mny, mny, sprites[SpriteValue], screen_width, screen_height, 1)
else:
notam = Nota((random.randint(-11, 11)+mnx), mny, mny, sprites[4], screen_width, screen_height, 2)
NoteList.append(notam)
elif ntype == 8 or ntype == 12:
endTime = int(params[5])
for i in range(20):
notasp = Nota(random.randint(0, 512), posy, random.randint(time, endTime), sprites[5], screen_width, screen_height, 3)
NoteList.append(notasp)
return NoteList
def get_BreakPeriods(osufile):
Lines = osufile.split('\n')
BreakPString = []
BreakPoints = []
save = False;
for line in Lines:
if line.find("//") == -1:
if save:
BreakPString.append(line)
else:
save = False
if line.find("//Break Periods") != -1:
save = True
for splitted in BreakPString:
params = splitted.split(",")
StartBreakTime = int(params[1])
EndBreakTime = int(params[2])
BreakPoints.append((StartBreakTime, EndBreakTime))
#print(BreakPoints)
return BreakPoints
| danielpontello/beatfever-legacy | FileParser.py | Python | mit | 4,395 |
import _plotly_utils.basevalidators
class SizeValidator(_plotly_utils.basevalidators.AnyValidator):
def __init__(self, plotly_name="size", parent_name="histogram2d.xbins", **kwargs):
super(SizeValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "calc"),
**kwargs
)
| plotly/plotly.py | packages/python/plotly/plotly/validators/histogram2d/xbins/_size.py | Python | mit | 394 |
# Download the Python helper library from twilio.com/docs/python/install
from twilio.rest import TwilioTaskRouterClient
# Your Account Sid and Auth Token from twilio.com/user/account
account_sid = "ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"
auth_token = "your_auth_token"
workspace_sid = "WSXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"
client = TwilioTaskRouterClient(account_sid, auth_token)
workspace = client.workspaces.get(workspace_sid)
statistics = workspace.statistics.get()
print(statistics.cumulative["avg_task_acceptance_time"])
print(statistics.cumulative["tasks_created"])
print(statistics.realtime["tasks_by_status"]["pending"])
print(statistics.realtime["tasks_by_status"]["assigned"])
| teoreteetik/api-snippets | rest/taskrouter/statistics/workspace/example-1/example-1.5.x.py | Python | mit | 685 |
# -*- coding: utf-8 -*-
# Copyright (C) 2008-2011, Luis Pedro Coelho <[email protected]>
# vim: set ts=4 sts=4 sw=4 expandtab smartindent:
#
# License: MIT. See COPYING.MIT file in the milk distribution
import numpy as np
__all__ = [
'multi_view_learner',
]
class multi_view_model(object):
def __init__(self, models):
self.models = models
def apply(self, features):
if len(features) != len(self.models):
            raise ValueError('milk.supervised.multi_view: Nr of features does not match training data (got %s, expected %s)' % (len(features), len(self.models)))
Ps = np.array([model.apply(f) for model,f in zip(self.models, features)])
if np.any(Ps <= 0.): return False
if np.any(Ps >= 1.): return True
# This is binary only:
# if \prod Pi > \prod (1-Pi) return 1
# is equivalent to
# if \prod Pi/(1-Pi) > 1. return 1
# if \sum \log( Pi/(1-Pi) ) > 0. return 1
return np.sum( np.log(Ps/(1-Ps)) ) > 0
class multi_view_learner(object):
'''
Multi View Learner
This learner learns different classifiers on multiple sets of features and
combines them for classification.
'''
def __init__(self, bases):
self.bases = bases
def train(self, features, labels, normalisedlabels=False):
features = zip(*features)
if len(features) != len(self.bases):
raise ValueError('milk.supervised.multi_view_learner: ' +
                'Nr of features does not match classifier construction (got %s, expected %s)'
% (len(features) ,len(self.bases)))
models = []
for basis,f in zip(self.bases, features):
try:
f = np.array(f)
except:
f = np.array(f, dtype=object)
models.append(basis.train(f, labels))
return multi_view_model(models)
multi_view_classifier = multi_view_learner
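# Usage sketch (illustrative names; base_a/base_b stand for any two milk
# learners, one per feature view):
#
#     learner = multi_view_learner([base_a, base_b])
#     model = learner.train(zip(view_a_features, view_b_features), labels)
#     prediction = model.apply([fa, fb])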
| arnaudsj/milk | milk/supervised/multi_view.py | Python | mit | 1,957 |
"""
The MIT License (MIT)
Copyright (c) 2015 Ricardo Yorky
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from __future__ import absolute_import, division, print_function, with_statement
__all__ = ['MongoDbService']
from tornado import gen
import motor
import datetime
import json
import bson.objectid
def json_default(obj):
"""Default JSON serializer."""
if isinstance(obj, bson.objectid.ObjectId):
return str(obj)
elif isinstance(obj, datetime.datetime):
return obj.strftime("%Y-%m-%d %H:%M")
else:
raise TypeError("%s is not JSON-serializable" % repr(obj))
def json_loads(data):
return json.loads(data)
def json_dumps(instance):
return json.dumps(instance, default=json_default)
class MongoDbService(object):
def __init__(self, uri_connection, database):
client = motor.MotorClient(uri_connection)
self.db = client[database]
@gen.coroutine
def insert(self, collection, document):
result = yield self.db[collection].insert(document)
json_data = json_dumps(result)
raise gen.Return(json_data)
@gen.coroutine
def update(self, collection, _id, document):
result = yield self.db[collection].update({'_id': _id}, {'$set': document})
json_data = json_dumps(result)
raise gen.Return(json_data)
@gen.coroutine
def find(self, collection, query=None, skip=0, limit=1000):
if query:
cursor = self.db[collection]\
.find(query)\
.limit(limit)\
.skip(skip)
else:
cursor = self.db[collection]\
.find()\
.limit(limit)\
.skip(skip)
result = yield cursor.to_list(length=limit)
raise gen.Return(result)
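# Usage sketch inside a tornado coroutine (illustrative connection string,
# database and collection names):
#
#     service = MongoDbService("mongodb://localhost:27017", "emailqueue")
#     new_id = yield service.insert("messages", {"to": "[email protected]"})
#     pending = yield service.find("messages", {"sent": False}, limit=50)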
| ricardoyorky/emailqueue | emailqueue/mongodb.py | Python | mit | 2,778 |
import os
import copy
import scipy.interpolate as spi
import math
import numpy as np
import matplotlib.pyplot as plt
from scipy.optimize import curve_fit
data_root = 'toneclassifier'
train_data_path = "%s/train" % data_root
val_data_path = "%s/test" % data_root
test_data_path = "%s/test_new" % data_root
def SetPath(root):
global data_root, train_data_path, val_data_path, test_data_path
data_root = root
train_data_path = "%s/train" % data_root
val_data_path = "%s/test" % data_root
test_data_path = "%s/test_new" % data_root
labels = {
'one': 0,
'two': 1,
'three': 2,
'four': 3
}
def LoadData(mode='train'):
data_path = train_data_path
if mode == 'val':
data_path = val_data_path
elif mode == 'test':
data_path = test_data_path
Engy = []
F0 = []
y = []
for labelName, label in labels.iteritems():
data_subset_path = "%s/%s" % (data_path, labelName)
data_names = set()
for filename in os.listdir(data_subset_path):
if filename[0] == ".":
continue
if ".engy" in filename:
data_names.add(filename[0:-5])
elif ".f0" in filename:
data_names.add(filename[0:-3])
for data_name in data_names:
engy = map(float, open("%s/%s.engy" % (data_subset_path, data_name)).readlines())
f0 = map(float, open("%s/%s.f0" % (data_subset_path, data_name)).readlines())
Engy.append(engy)
F0.append(f0)
y.append(label)
return Engy, F0, y
def IgnoreLowEnergyFrequence(Engy, F0):
    """Zero out F0 values whose frame energy is no higher than the mean
    energy of the unvoiced (zero-frequency) frames."""
    data_num = len(Engy)
    if data_num != len(F0):
        raise ValueError("the number of input data mismatched. len(Engy)==%d and len(F0)==%d" % (len(Engy), len(F0)))
    resEngy = []
    resF0 = []
    for i in xrange(data_num):
        engy = copy.copy(Engy[i])
        f0 = copy.copy(F0[i])
        data_len = len(engy)
        if data_len != len(f0):
            raise ValueError("the length of %d-th data mismatched. len(engy)==%d and len(f0)==%d" % (i, len(engy), len(f0)))
        zero_freq_engy_sum = 0.0
        zero_freq_count = 0.0
        for j in xrange(data_len):
            if f0[j] < 1e-4:
                zero_freq_count += 1
                zero_freq_engy_sum += math.sqrt(engy[j])
        mean_engy = zero_freq_engy_sum / zero_freq_count
        for j in xrange(data_len):
            if math.sqrt(engy[j]) <= max(mean_engy, 1.0):
                f0[j] = 0.0
        resEngy.append(engy)
        resF0.append(f0)
    return resEngy, resF0
def TrimData(Engy, F0):
    """Trim leading and trailing unvoiced (zero-F0) frames from each sample."""
    data_num = len(Engy)
    if data_num != len(F0):
        raise ValueError("the number of input data mismatched. len(Engy)==%d and len(F0)==%d" % (len(Engy), len(F0)))
    resEngy = []
    resF0 = []
    for i in xrange(data_num):
        engy = copy.copy(Engy[i])
        f0 = copy.copy(F0[i])
        data_len = len(engy)
        if data_len != len(f0):
            raise ValueError("the length of %d-th data mismatched. len(engy)==%d and len(f0)==%d" % (i, len(engy), len(f0)))
        start = None
        end = None
        # Use a separate index so the outer loop variable is not shadowed.
        for j in xrange(len(f0)):
            if f0[j] > 1e-5:
                start = j
                break
        for j in xrange(len(f0) - 1, -1, -1):
            if f0[j] > 1e-5:
                end = j + 1
                break
        resEngy.append(copy.copy(engy[start:end]))
        resF0.append(copy.copy(f0[start:end]))
    return resEngy, resF0
def TransformToMelFrequencyScale(F0):
    """Convert each F0 value from Hz to the mel scale."""
    data_num = len(F0)
    resF0 = []
    for i in xrange(data_num):
        f0 = copy.copy(F0[i])
        data_len = len(f0)
        for j in xrange(data_len):
            f0[j] = 1127 * math.log(1 + f0[j] / 700)
        resF0.append(f0)
    return resF0


def DivSingleDataStd(F0):
    """Divide each F0 contour by its own standard deviation."""
    data_num = len(F0)
    resF0 = []
    for i in xrange(data_num):
        f0 = copy.copy(F0[i])
        data_len = len(f0)
        f0arr = np.asarray(f0)
        std = f0arr.std()
        f0arr = f0arr / std
        for j in xrange(data_len):
            f0[j] = f0arr[j]
        resF0.append(f0)
    return resF0


def DivDataStd(F0):
    """Divide every F0 value by the standard deviation of the whole data set."""
    data_num = len(F0)
    resF0 = []
    tmp = []
    for i in xrange(data_num):
        for j in xrange(len(F0[i])):
            tmp.append(F0[i][j])
    F0arr = np.asarray(tmp)
    std = F0arr.std()
    for i in xrange(data_num):
        f0 = copy.copy(F0[i])
        data_len = len(f0)
        for j in xrange(data_len):
            f0[j] = f0[j] / std
        resF0.append(f0)
    return resF0
def SmoothRawF0(F0):
    C1 = 15
    data_num = len(F0)
    resF0 = []
    for i in xrange(data_num):
        f0 = copy.copy(F0[i])
        data_len = len(f0)
        for k in xrange(data_len - 1, -1, -1):
            for j in xrange(k, data_len):
                if abs(f0[j] - f0[j - 1]) < C1:
                    continue
                if abs(f0[j] / 2 - f0[j - 1]) < C1:
                    f0[j] /= 2
                elif abs(2 * f0[j] - f0[j - 1]) < C1:
                    f0[j] *= 2
        resF0.append(f0)
    return resF0
def SmoothF0(F0):
    """Smooth each F0 contour: correct octave jumps, repair outliers with a
    forward pass (and a backward pass if the forward pass drifts), then apply
    a 5-point smoothing pass."""
    C1 = 0.16
    C2 = 0.4
    data_num = len(F0)
    resF0 = []
    for i in xrange(data_num):
        f0 = copy.copy(F0[i])
        data_len = len(f0)
        # Correct halving/doubling errors relative to the previous frame.
        for j in xrange(1, data_len):
            if abs(f0[j] - f0[j - 1]) < C1:
                continue
            if abs(f0[j] / 2 - f0[j - 1]) < C1:
                f0[j] /= 2
            elif abs(2 * f0[j] - f0[j - 1]) < C1:
                f0[j] *= 2
        # Pad both ends; ff0 receives the forward repair pass, fff0 keeps an
        # untouched copy for the backward pass.
        ff0 = copy.copy([f0[0]] + f0 + [f0[-1]])
        fff0 = copy.copy(ff0)
        data_len = len(ff0)
        f0_2 = (ff0[0], ff0[0])
        for j in xrange(1, data_len - 1):
            if abs(ff0[j] - ff0[j - 1]) > C1 and abs(ff0[j + 1] - ff0[j - 1]) > C2:
                ff0[j] = 2 * f0_2[1] - f0_2[0]
            elif abs(ff0[j] - ff0[j - 1]) > C1 and abs(ff0[j + 1] - ff0[j - 1]) <= C2:
                ff0[j] = (ff0[j - 1] + ff0[j + 1]) / 2
            f0_2 = (f0_2[1], ff0[j])
        res_f0 = None
        if abs(ff0[-1] - fff0[-1]) <= C1:
            res_f0 = ff0
        else:
            # The forward pass drifted at the end; run a backward pass and
            # splice the two passes at the last frame where they agree.
            f0_2 = (fff0[-1], fff0[-1])
            for j in xrange(data_len - 2, 0, -1):
                if abs(fff0[j] - fff0[j + 1]) > C1 and abs(fff0[j - 1] - fff0[j + 1]) > C2:
                    fff0[j] = 2 * f0_2[1] - f0_2[0]
                elif abs(fff0[j] - fff0[j + 1]) > C1 and abs(fff0[j - 1] - fff0[j + 1]) <= C2:
                    fff0[j] = (fff0[j - 1] + fff0[j + 1]) / 2
                f0_2 = (f0_2[1], fff0[j])
            s = 0
            for j in xrange(data_len - 2, 0, -1):
                if abs(fff0[j] - ff0[j]) < C1:
                    s = j
                    break
            res_f0 = ff0[: s + 1] + fff0[s + 1:]
        # Pad again and apply a 5-point window (in place, so earlier smoothed
        # values feed the later ones), then strip the padding.
        res_f0 = [res_f0[0]] + res_f0 + [res_f0[-1]]
        data_len = len(res_f0)
        for j in xrange(2, data_len - 2):
            res_f0[j] = (res_f0[j - 2] + res_f0[j - 1] + res_f0[j] + res_f0[j + 1] + res_f0[j + 2]) / 5.0
        resF0.append(res_f0[2:-2])
    return resF0
def NormalizeDataLengthWithInterpolation(Engy, F0, result_len=200):
    """Resample every (engy, f0) pair to result_len points using cubic interpolation."""
    data_num = len(Engy)
    if data_num != len(F0):
        raise ValueError("the number of input data mismatched. len(Engy)==%d and len(F0)==%d" % (len(Engy), len(F0)))
    resEngy = []
    resF0 = []
    for i in xrange(data_num):
        engy = copy.copy(Engy[i])
        f0 = copy.copy(F0[i])
        data_len = len(engy)
        if data_len != len(f0):
            raise ValueError(
                "the length of %d-th data mismatched. len(engy)==%d and len(f0)==%d" % (i, len(engy), len(f0)))
        k = float(result_len - 1) / float(data_len - 1)
        x = [j * k for j in xrange(data_len)]
        newX = [j * 1.0 for j in xrange(result_len)]
        newX[-1] = x[-1]
        new_engy = spi.interp1d(x, engy, kind='cubic')(newX)
        new_f0 = spi.interp1d(x, f0, kind='cubic')(newX)
        resEngy.append(new_engy)
        resF0.append(new_f0)
    return resEngy, resF0
def SingleDataDivideMax(data):
    # Scale one sequence by dividing by its maximum value.
    max_val = np.asarray(data).max()
    for i in xrange(len(data)):
        data[i] /= max_val
    return data


def DataSetDivideMax(Data):
    for i in xrange(len(Data)):
        Data[i] = SingleDataDivideMax(Data[i])
    return Data


def SingleDataMinusMean(data):
    # Center one sequence by subtracting its mean.
    mean = np.asarray(data).mean()
    for i in xrange(len(data)):
        data[i] -= mean
    return data


def DataSetMinusMean(Data):
    for i in xrange(len(Data)):
        Data[i] = SingleDataMinusMean(Data[i])
    return Data
def SaveData(Engy, F0, y, mode='train'):
    save_engy_name = 'train_engys'
    save_f0_name = 'train_f0s'
    save_y_name = 'train_labels'
    if mode == 'val':
        save_engy_name = 'val_engys'
        save_f0_name = 'val_f0s'
        save_y_name = 'val_labels'
    elif mode == 'test':
        save_engy_name = 'test_engys'
        save_f0_name = 'test_f0s'
        save_y_name = 'test_labels'
    engy_file = open(save_engy_name, "w")
    f0_file = open(save_f0_name, "w")
    y_file = open(save_y_name, "w")
    data_num = len(Engy)
    if data_num != len(F0) or data_num != len(y):
        raise ValueError("the number of data mismatched, Engy:%d, F0:%d, y:%d" % (len(Engy), len(F0), len(y)))
    for i in xrange(data_num):
        engy_file.write("%s\n" % (' '.join(map(lambda x: "%.5f" % x, Engy[i]))))
        f0_file.write("%s\n" % (' '.join(map(lambda x: "%.5f" % x, F0[i]))))
        y_file.write("%d\n" % y[i])
    engy_file.close()
    f0_file.close()
    y_file.close()
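# A typical preprocessing pipeline built from the functions above (a sketch;
# the exact order and choice of steps used by the original experiments is an
# assumption):
#
#   Engy, F0, y = LoadData(mode='train')
#   Engy, F0 = IgnoreLowEnergyFrequence(Engy, F0)
#   Engy, F0 = TrimData(Engy, F0)
#   F0 = TransformToMelFrequencyScale(F0)
#   F0 = DivSingleDataStd(F0)
#   F0 = SmoothF0(F0)
#   Engy, F0 = NormalizeDataLengthWithInterpolation(Engy, F0, result_len=200)
#   SaveData(Engy, F0, y, mode='train')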
def PlotAndSaveF0(plot_prefix='train', F0=None, y=None):
    """Plot all F0 contours of each tone class and save one figure per class."""
    max_len = max(map(len, F0))
    for label in xrange(4):
        for i in xrange(len(F0)):
            if y[i] != label:
                continue
            coff = float(max_len - 1) / (len(F0[i]) - 1)
            x = np.arange(0, len(F0[i]), 1)
            x = coff * x
            fx = np.asarray(F0[i])
            plt.plot(x, fx)
        plt.savefig('%s-plt_%d' % (plot_prefix, label))
        plt.clf()


def Amplify(Data, times):
    for i in xrange(len(Data)):
        for j in xrange(len(Data[i])):
            Data[i][j] *= times
    return Data


def unison_shuffled_copies(a, b):
    # Shuffle two numpy arrays with the same permutation.
    assert len(a) == len(b)
    p = np.random.permutation(len(a))
    return a[p], b[p]
def FitMissPoint(F0):
    """Replace near-zero F0 frames with values from a quadratic fit of the
    remaining frames."""
    data_num = len(F0)
    resF0 = []
    for i in xrange(data_num):
        f0 = F0[i]
        data_len = len(f0)
        f0arr = np.asarray(f0)
        mean = f0arr.mean()
        x = []
        y = []
        for j in xrange(data_len):
            if f0[j] > 0.1 * mean:
                x.append(j)
                y.append(f0[j])
        z = np.poly1d(np.polyfit(x, y, 2))
        for j in xrange(data_len):
            if f0[j] <= 0.1 * mean:
                f0[j] = z(j)
        resF0.append(f0)
    return resF0


def AddWhiteNoise(F0):
    # Add small Gaussian noise to every frame (simple data augmentation).
    data_num = len(F0)
    for i in xrange(data_num):
        data_len = len(F0[i])
        for j in xrange(data_len):
            F0[i][j] += np.random.normal(0, 1e-4)
    return F0
def FitSingleData(f0):
    """Fill unreliable frames of one contour using polynomial fits: a
    quadratic fit over the whole contour, then linear fits on the two halves
    split at the quadratic's extremum."""
    data_len = len(f0)
    flag = []
    x = []
    y = []
    for i in xrange(data_len):
        if f0[i] > 10:
            x.append(i)
            y.append(f0[i])
            flag.append(True)
        else:
            flag.append(False)
    z = np.polyfit(x, y, 2)
    a, b, c = z
    z = np.poly1d(z)
    for i in xrange(data_len):
        if f0[i] <= 1.:
            f0[i] = z(i)
    # Solve 2a * x + b == 0 to locate the extremum of the quadratic fit.
    g = -b / (2 * a)
    g = int(g)
    if g > 1 and g < data_len - 1:
        part_a = f0[: g]
        flag_a = flag[: g]
        part_b = f0[g:]
        flag_b = flag[g:]
        # Linear fit on the first half; replace its unreliable frames.
        x = []
        y = []
        for i in xrange(len(part_a)):
            x.append(i)
            y.append(part_a[i])
        z = np.poly1d(np.polyfit(x, y, 1))
        for i in xrange(len(part_a)):
            if not flag_a[i]:
                part_a[i] = z(i)
        # Linear fit on the second half; replace its unreliable frames.
        x = []
        y = []
        for i in xrange(len(part_b)):
            x.append(i)
            y.append(part_b[i])
        z = np.poly1d(np.polyfit(x, y, 1))
        for i in xrange(len(part_b)):
            if not flag_b[i]:
                part_b[i] = z(i)
        f0 = part_a + part_b
    else:
        # The extremum falls outside the contour; use a single linear fit.
        x = []
        y = []
        for i in xrange(data_len):
            x.append(i)
            y.append(f0[i])
        z = np.poly1d(np.polyfit(x, y, 1))
        for i in xrange(data_len):
            if not flag[i]:
                f0[i] = z(i)
    return f0
def FitData(F0):
    data_num = len(F0)
    for i in xrange(data_num):
        F0[i] = FitSingleData(F0[i])
    return F0
| BreakVoid/DL_Project | data_utils.py | Python | mit | 12,608 |