| code | repo_name | path | language | license | size |
|---|---|---|---|---|---|
| stringlengths 2–1.05M | stringlengths 5–104 | stringlengths 4–251 | stringclasses 1 value | stringclasses 15 values | int32 2–1.05M |
# -*- coding: utf-8 -*-
"""
Production Configurations
- Use djangosecure
- Use Amazon's S3 for storing static files and uploaded media
- Use mailgun to send emails
- Use Redis on Heroku
"""
from __future__ import absolute_import, unicode_literals
from boto.s3.connection import OrdinaryCallingFormat
from django.utils import six
from .common import * # noqa
# SECRET CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#secret-key
# Raises ImproperlyConfigured exception if DJANGO_SECRET_KEY not in os.environ
SECRET_KEY = env('DJANGO_SECRET_KEY')
# This ensures that Django will be able to detect a secure connection
# properly on Heroku.
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
# SECURITY CONFIGURATION
# ------------------------------------------------------------------------------
# See https://docs.djangoproject.com/en/1.9/ref/middleware/#module-django.middleware.security
# and https://docs.djangoproject.com/ja/1.9/howto/deployment/checklist/#run-manage-py-check-deploy
# set this to 60 seconds and then to 518400 when you can prove it works
SECURE_HSTS_SECONDS = 60
SECURE_HSTS_INCLUDE_SUBDOMAINS = env.bool(
'DJANGO_SECURE_HSTS_INCLUDE_SUBDOMAINS', default=True)
SECURE_CONTENT_TYPE_NOSNIFF = env.bool(
'DJANGO_SECURE_CONTENT_TYPE_NOSNIFF', default=True)
SECURE_BROWSER_XSS_FILTER = True
SESSION_COOKIE_SECURE = True
SESSION_COOKIE_HTTPONLY = True
SECURE_SSL_REDIRECT = env.bool('DJANGO_SECURE_SSL_REDIRECT', default=True)
CSRF_COOKIE_SECURE = True
CSRF_COOKIE_HTTPONLY = True
X_FRAME_OPTIONS = 'DENY'
# SITE CONFIGURATION
# ------------------------------------------------------------------------------
# Hosts/domain names that are valid for this site
# See https://docs.djangoproject.com/en/1.6/ref/settings/#allowed-hosts
ALLOWED_HOSTS = env.list('DJANGO_ALLOWED_HOSTS', default=['mertisconsulting.com'])
# END SITE CONFIGURATION
INSTALLED_APPS += ('gunicorn', )
# STORAGE CONFIGURATION
# ------------------------------------------------------------------------------
# Uploaded Media Files
# ------------------------
# See: http://django-storages.readthedocs.io/en/latest/index.html
INSTALLED_APPS += (
'storages',
)
AWS_ACCESS_KEY_ID = env('DJANGO_AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = env('DJANGO_AWS_SECRET_ACCESS_KEY')
AWS_STORAGE_BUCKET_NAME = env('DJANGO_AWS_STORAGE_BUCKET_NAME')
AWS_AUTO_CREATE_BUCKET = True
AWS_QUERYSTRING_AUTH = False
AWS_S3_CALLING_FORMAT = OrdinaryCallingFormat()
# AWS cache settings, don't change unless you know what you're doing:
AWS_EXPIRY = 60 * 60 * 24 * 7
# TODO See: https://github.com/jschneier/django-storages/issues/47
# Revert the following and use str after the above-mentioned bug is fixed in
# either django-storage-redux or boto
AWS_HEADERS = {
'Cache-Control': six.b('max-age=%d, s-maxage=%d, must-revalidate' % (
AWS_EXPIRY, AWS_EXPIRY))
}
# URL that handles the media served from MEDIA_ROOT, used for managing
# stored files.
# See: http://stackoverflow.com/questions/10390244/
from storages.backends.s3boto import S3BotoStorage
def StaticRootS3BotoStorage():
    return S3BotoStorage(location='static')
def MediaRootS3BotoStorage():
    return S3BotoStorage(location='media')
DEFAULT_FILE_STORAGE = 'config.settings.production.MediaRootS3BotoStorage'
MEDIA_URL = 'https://s3.amazonaws.com/%s/media/' % AWS_STORAGE_BUCKET_NAME
# Static Assets
# ------------------------
STATIC_URL = 'https://s3.amazonaws.com/%s/static/' % AWS_STORAGE_BUCKET_NAME
STATICFILES_STORAGE = 'config.settings.production.StaticRootS3BotoStorage'
# See: https://github.com/antonagestam/collectfast
# For Django 1.7+, 'collectfast' should come before
# 'django.contrib.staticfiles'
AWS_PRELOAD_METADATA = True
INSTALLED_APPS = ('collectfast', ) + INSTALLED_APPS
# EMAIL
# ------------------------------------------------------------------------------
DEFAULT_FROM_EMAIL = env('DJANGO_DEFAULT_FROM_EMAIL',
default='cpq-exporter <[email protected]>')
EMAIL_SUBJECT_PREFIX = env('DJANGO_EMAIL_SUBJECT_PREFIX', default='[cpq-exporter] ')
SERVER_EMAIL = env('DJANGO_SERVER_EMAIL', default=DEFAULT_FROM_EMAIL)
# Anymail with Mailgun
INSTALLED_APPS += ("anymail", )
ANYMAIL = {
"MAILGUN_API_KEY": env('DJANGO_MAILGUN_API_KEY'),
}
EMAIL_BACKEND = "anymail.backends.mailgun.MailgunBackend"
# TEMPLATE CONFIGURATION
# ------------------------------------------------------------------------------
# See:
# https://docs.djangoproject.com/en/dev/ref/templates/api/#django.template.loaders.cached.Loader
TEMPLATES[0]['OPTIONS']['loaders'] = [
    ('django.template.loaders.cached.Loader', [
        'django.template.loaders.filesystem.Loader',
        'django.template.loaders.app_directories.Loader',
    ]),
]
# DATABASE CONFIGURATION
# ------------------------------------------------------------------------------
# Raises ImproperlyConfigured exception if DATABASE_URL not in os.environ
DATABASES['default'] = env.db('DATABASE_URL')
# CACHING
# ------------------------------------------------------------------------------
# Heroku URL does not pass the DB number, so we parse it in
CACHES = {
'default': {
'BACKEND': 'django_redis.cache.RedisCache',
'LOCATION': '{0}/{1}'.format(env('REDIS_URL', default='redis://127.0.0.1:6379'), 0),
'OPTIONS': {
'CLIENT_CLASS': 'django_redis.client.DefaultClient',
'IGNORE_EXCEPTIONS': True, # mimics memcache behavior.
# http://niwinz.github.io/django-redis/latest/#_memcached_exceptions_behavior
}
}
}
# LOGGING CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#logging
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'formatters': {
'verbose': {
'format': '%(levelname)s %(asctime)s %(module)s '
'%(process)d %(thread)d %(message)s'
},
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
},
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
'formatter': 'verbose',
},
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True
},
'django.security.DisallowedHost': {
'level': 'ERROR',
'handlers': ['console', 'mail_admins'],
'propagate': True
}
}
}
# Custom Admin URL, use {% url 'admin:index' %}
ADMIN_URL = env('DJANGO_ADMIN_URL')
# Your production stuff: Below this line define 3rd party library settings
| mjj55409/cpq-exporter | config/settings/production.py | Python | mit | 7,354 |
from .base import * # TODO: import the relevant names instead of importing everything.
import cuda_convnet
import corrmm
| dnouri/Lasagne | nntools/layers/__init__.py | Python | mit | 120 |
# coding: utf-8
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
"""
FILE: sample_recognize_business_cards.py
DESCRIPTION:
This sample demonstrates how to recognize fields on business cards.
See fields found on a business card here:
https://aka.ms/formrecognizer/businesscardfields
USAGE:
python sample_recognize_business_cards.py
Set the environment variables with your own values before running the sample:
1) AZURE_FORM_RECOGNIZER_ENDPOINT - the endpoint to your Cognitive Services resource.
2) AZURE_FORM_RECOGNIZER_KEY - your Form Recognizer API key
"""
import os
class RecognizeBusinessCardSample(object):
def recognize_business_card(self):
path_to_sample_forms = os.path.abspath(os.path.join(os.path.abspath(__file__),
"..", "..", "./sample_forms/business_cards/business-card-english.jpg"))
# [START recognize_business_cards]
from azure.core.credentials import AzureKeyCredential
from azure.ai.formrecognizer import FormRecognizerClient
endpoint = os.environ["AZURE_FORM_RECOGNIZER_ENDPOINT"]
key = os.environ["AZURE_FORM_RECOGNIZER_KEY"]
form_recognizer_client = FormRecognizerClient(
endpoint=endpoint, credential=AzureKeyCredential(key)
)
with open(path_to_sample_forms, "rb") as f:
poller = form_recognizer_client.begin_recognize_business_cards(business_card=f, locale="en-US")
business_cards = poller.result()
for idx, business_card in enumerate(business_cards):
print("--------Recognizing business card #{}--------".format(idx+1))
contact_names = business_card.fields.get("ContactNames")
if contact_names:
for contact_name in contact_names.value:
print("Contact First Name: {} has confidence: {}".format(
contact_name.value["FirstName"].value, contact_name.value["FirstName"].confidence
))
print("Contact Last Name: {} has confidence: {}".format(
contact_name.value["LastName"].value, contact_name.value["LastName"].confidence
))
company_names = business_card.fields.get("CompanyNames")
if company_names:
for company_name in company_names.value:
print("Company Name: {} has confidence: {}".format(company_name.value, company_name.confidence))
departments = business_card.fields.get("Departments")
if departments:
for department in departments.value:
print("Department: {} has confidence: {}".format(department.value, department.confidence))
job_titles = business_card.fields.get("JobTitles")
if job_titles:
for job_title in job_titles.value:
print("Job Title: {} has confidence: {}".format(job_title.value, job_title.confidence))
emails = business_card.fields.get("Emails")
if emails:
for email in emails.value:
print("Email: {} has confidence: {}".format(email.value, email.confidence))
websites = business_card.fields.get("Websites")
if websites:
for website in websites.value:
print("Website: {} has confidence: {}".format(website.value, website.confidence))
addresses = business_card.fields.get("Addresses")
if addresses:
for address in addresses.value:
print("Address: {} has confidence: {}".format(address.value, address.confidence))
mobile_phones = business_card.fields.get("MobilePhones")
if mobile_phones:
for phone in mobile_phones.value:
print("Mobile phone number: {} has confidence: {}".format(phone.value, phone.confidence))
faxes = business_card.fields.get("Faxes")
if faxes:
for fax in faxes.value:
print("Fax number: {} has confidence: {}".format(fax.value, fax.confidence))
work_phones = business_card.fields.get("WorkPhones")
if work_phones:
for work_phone in work_phones.value:
print("Work phone number: {} has confidence: {}".format(work_phone.value, work_phone.confidence))
other_phones = business_card.fields.get("OtherPhones")
if other_phones:
for other_phone in other_phones.value:
print("Other phone number: {} has confidence: {}".format(other_phone.value, other_phone.confidence))
# [END recognize_business_cards]
if __name__ == '__main__':
sample = RecognizeBusinessCardSample()
sample.recognize_business_card()
| Azure/azure-sdk-for-python | sdk/formrecognizer/azure-ai-formrecognizer/samples/v3.1/sample_recognize_business_cards.py | Python | mit | 5,129 |
from time import sleep
import os
import shutil
import merfi
from merfi import logger
from merfi import util
from merfi.collector import RepoCollector
from merfi.backends import base
class RpmSign(base.BaseBackend):
help_menu = 'rpm-sign handler for signing files'
_help = """
Signs files with rpm-sign. Crawls a given path looking for Debian repos.
Note: this sub-command tells merfi to use Red Hat's internal signing tool
inconveniently named "rpm-sign", not the rpmsign(8) command that is a part of
the http://rpm.org open-source project.
%s
Options
--key Name of the key to use (see rpm-sign --list-keys)
--keyfile File path location of the public keyfile, for example
/etc/pki/rpm-gpg/RPM-GPG-KEY-redhat-release
or /etc/pki/rpm-gpg/RPM-GPG-KEY-redhat-beta
--nat A NAT is between this system and the signing server.
Positional Arguments:
[path] The path to crawl for signing repos. Defaults to current
working directory
"""
executable = 'rpm-sign'
name = 'rpm-sign'
options = ['--key', '--keyfile', '--nat']
def clear_sign(self, path, command):
"""
When doing a "clearsign" with rpm-sign, the output goes to stdout, so
that needs to be captured and written to the default output file for
clear signed signatures (InRelease).
"""
logger.info('signing: %s' % path)
out, err, code = util.run_output(command)
# Sometimes rpm-sign will fail with this error. I've opened
# rhbz#1557014 to resolve this server-side. For now, sleep and retry
# as a workaround. These sleep/retry values are suggestions from the
# team that runs the signing service.
known_failure = "ERROR: unhandled exception occurred: ('')."
tries = 1
while known_failure in err and tries < 30:
logger.warning('hit known rpm-sign failure.')
tries += 1
logger.warning('sleeping, running try #%d in 30 seconds.' % tries)
            sleep(30)
out, err, code = util.run_output(command)
if code != 0:
for line in err.split('\n'):
logger.error('stderr: %s' % line)
for line in out.split('\n'):
logger.error('stdout: %s' % line)
            raise RuntimeError('rpm-sign non-zero exit code %d' % code)
if out.strip() == '':
for line in err.split('\n'):
logger.error('stderr: %s' % line)
logger.error('rpm-sign clearsign provided nothing on stdout')
raise RuntimeError('no clearsign signature available')
absolute_directory = os.path.dirname(os.path.abspath(path))
with open(os.path.join(absolute_directory, 'InRelease'), 'w') as f:
f.write(out)
def detached(self, command):
return util.run(command)
def sign(self):
self.keyfile = self.parser.get('--keyfile')
if self.keyfile:
self.keyfile = os.path.abspath(self.keyfile)
if not os.path.isfile(self.keyfile):
raise RuntimeError('%s is not a file' % self.keyfile)
logger.info('using keyfile "%s" as release.asc' % self.keyfile)
self.key = self.parser.get('--key')
if not self.key:
raise RuntimeError('specify a --key for signing')
logger.info('Starting path collection, looking for files to sign')
repos = RepoCollector(self.path)
if repos:
logger.info('%s repos found' % len(repos))
            # FIXME: this should print the actual verified command
logger.info('will sign with the following commands:')
logger.info('rpm-sign --key "%s" --detachsign Release --output Release.gpg' % self.key)
logger.info('rpm-sign --key "%s" --clearsign Release --output InRelease' % self.key)
else:
logger.warning('No paths found that matched')
for repo in repos:
# Debian "Release" files:
for path in repo.releases:
self.sign_release(path)
# Public key:
if self.keyfile:
logger.info('placing release.asc in %s' % repo.path)
if merfi.config.get('check'):
logger.info('[CHECKMODE] writing release.asc')
else:
shutil.copyfile(
self.keyfile,
os.path.join(repo.path, 'release.asc'))
def sign_release(self, path):
""" Sign a "Release" file from a Debian repo. """
if merfi.config.get('check'):
new_gpg_path = path.split('Release')[0]+'Release.gpg'
new_in_path = path.split('Release')[0]+'InRelease'
logger.info('[CHECKMODE] signing: %s' % path)
logger.info('[CHECKMODE] signed: %s' % new_gpg_path)
logger.info('[CHECKMODE] signed: %s' % new_in_path)
else:
os.chdir(os.path.dirname(path))
detached = ['rpm-sign', '--key', self.key, '--detachsign',
'Release', '--output', 'Release.gpg']
clearsign = ['rpm-sign', '--key', self.key, '--clearsign',
'Release']
if self.parser.has('--nat'):
detached.insert(1, '--nat')
clearsign.insert(1, '--nat')
logger.info('signing: %s' % path)
self.detached(detached)
self.clear_sign(path, clearsign)
| alfredodeza/merfi | merfi/backends/rpm_sign.py | Python | mit | 5,502 |
# -*- coding: utf-8 -*-
import arrow
import datetime
import ujson
from flask_login import login_required
from flask import (
Blueprint, render_template
)
from feedback.dashboard.vendorsurveys import (
get_rating_scale, get_surveys_by_role,
get_surveys_by_completion, get_surveys_by_purpose,
get_all_survey_responses, get_rating_by_lang,
get_rating_by_purpose, get_rating_by_role
)
from feedback.surveys.constants import SURVEY_DAYS
from feedback.surveys.models import Survey
from feedback.dashboard.permits import (
api_health, get_lifespan,
get_permit_types, trade,
get_master_permit_counts,
dump_socrata_api
)
blueprint = Blueprint(
"dashboard", __name__,
template_folder='../templates',
static_folder="../static"
)
def to_bucket(str_date):
''' Converts the DB string time to a MM-DD string format.
'''
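    # For example (illustrative timestamp): to_bucket('2016-08-26T14:00:00')
    # returns '08-26'.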
result = arrow.get(str_date)
return result.strftime("%m-%d")
@blueprint.route("/", methods=["GET", "POST"])
def home():
json_obj_home = {}
surveys_by_date = {}
surveys_date_array = []
surveys_value_array = []
for i in range(SURVEY_DAYS, -1, -1):
time_i = (datetime.date.today() - datetime.timedelta(i))
date_index = time_i.strftime("%m-%d")
surveys_by_date[date_index] = 0
surveys_date_array.append(date_index)
survey_table = get_all_survey_responses(SURVEY_DAYS)
sms_rows = [x.lang for x in survey_table if x.method == 'sms']
web_rows = [x.lang for x in survey_table if x.method == 'web']
# ANALYTICS CODE
for i in range(SURVEY_DAYS, -1, -1):
time_i = (datetime.date.today() - datetime.timedelta(i))
date_index = time_i.strftime("%m-%d")
surveys_value_array.append(
len([x for x in survey_table if to_bucket(x.date_submitted) == date_index]))
dashboard_collection_home = [
{
"id": "graph",
"title": "Surveys Submitted".format(SURVEY_DAYS),
"data": {
"graph": {
"datetime": {
"data": surveys_date_array
},
"series": [
{
"data": surveys_value_array
}
]
}
}
},
{
"title": "Satisfaction Rating".format(SURVEY_DAYS),
"data": "{0:.2f}".format(get_rating_scale(survey_table))
},
{
"title": "Survey Type".format(SURVEY_DAYS),
"data": {
"web_en": web_rows.count('en'),
"web_es": web_rows.count('es'),
"sms_en": sms_rows.count('en'),
"sms_es": sms_rows.count('es')
},
"labels": {
"web_en": "Web (English)",
"web_es": "Web (Spanish)",
"sms_en": "Text (English)",
"sms_es": "Text (Spanish)"
}
},
{},
{},
{},
{},
{},
{},
{},
{
"title": "Surveys by Survey Role",
"data": get_surveys_by_role(survey_table)
},
{},
{
"title": "How many completions?",
"data": get_surveys_by_completion(survey_table)
},
{
"title": "Respondents by Purpose",
"data": get_surveys_by_purpose(survey_table)
},
{
"title": "Ratings",
"data": {
"en": get_rating_by_lang(survey_table, 'en'),
"es": get_rating_by_lang(survey_table, 'es'),
"p1": get_rating_by_purpose(survey_table, 1),
"p2": get_rating_by_purpose(survey_table, 2),
"p3": get_rating_by_purpose(survey_table, 3),
"p4": get_rating_by_purpose(survey_table, 4),
"p5": get_rating_by_purpose(survey_table, 5),
"contractor": get_rating_by_role(survey_table, 1),
"architect": get_rating_by_role(survey_table, 2),
"permitconsultant": get_rating_by_role(survey_table, 3),
"homeowner": get_rating_by_role(survey_table, 4),
"bizowner": get_rating_by_role(survey_table, 5)
}
}
]
json_obj_home['daily_graph'] = ujson.dumps(dashboard_collection_home[0]['data']['graph'])
json_obj_home['surveys_type'] = ujson.dumps(dashboard_collection_home[2])
json_obj_home['survey_role'] = ujson.dumps(dashboard_collection_home[10])
json_obj_home['survey_complete'] = ujson.dumps(dashboard_collection_home[12])
json_obj_home['survey_purpose'] = ujson.dumps(dashboard_collection_home[13])
today = datetime.date.today()
return render_template(
"public/home.html",
api=1,
date=today.strftime('%B %d, %Y'),
json_obj=json_obj_home,
dash_obj=dashboard_collection_home,
resp_obj=survey_table,
title='Dashboard - Main'
)
@blueprint.route("/metrics", methods=["GET", "POST"])
def metrics():
json_obj = {}
surveys_by_date = {}
surveys_date_array = []
surveys_value_array = []
for i in range(SURVEY_DAYS, -1, -1):
time_i = (datetime.date.today() - datetime.timedelta(i))
date_index = time_i.strftime("%m-%d")
surveys_by_date[date_index] = 0
surveys_date_array.append(date_index)
survey_table = get_all_survey_responses(SURVEY_DAYS)
sms_rows = [x.lang for x in survey_table if x.method == 'sms']
web_rows = [x.lang for x in survey_table if x.method == 'web']
# ANALYTICS CODE
for i in range(SURVEY_DAYS, -1, -1):
time_i = (datetime.date.today() - datetime.timedelta(i))
date_index = time_i.strftime("%m-%d")
surveys_value_array.append(
len([x for x in survey_table if to_bucket(x.date_submitted) == date_index]))
dashboard_collection = [
{
"id": "graph",
"title": "Surveys Submitted".format(SURVEY_DAYS),
"data": {
"graph": {
"datetime": {
"data": surveys_date_array
},
"series": [
{
"data": surveys_value_array
}
]
}
}
},
{
"title": "Satisfaction Rating".format(SURVEY_DAYS),
"data": "{0:.2f}".format(get_rating_scale(survey_table))
},
{
"title": "Survey Type".format(SURVEY_DAYS),
"data": {
"web_en": web_rows.count('en'),
"web_es": web_rows.count('es'),
"sms_en": sms_rows.count('en'),
"sms_es": sms_rows.count('es')
},
"labels": {
"web_en": "Web (English)",
"web_es": "Web (Spanish)",
"sms_en": "Text (English)",
"sms_es": "Text (Spanish)"
}
},
{
"title": "Commercial",
"data": {
"nc": get_lifespan('nc'),
"rc": get_lifespan('rc'),
"s": get_lifespan('s')
}
},
{
"title": "Residential",
"data": {
"nr": get_lifespan('nr'),
"rr": get_lifespan('rr'),
"p": get_lifespan('p'),
"f": get_lifespan('f'),
"e": get_lifespan('e')
}
},
{
"title": "Average time from application date to permit issuance, Owner/Builder Permits, Last 30 Days",
"data": 0
},
{
"title": "Same Day Trade Permits",
"data": {
"PLUM": trade(30, 'PLUM'),
"BLDG": trade(30, 'BLDG'),
"ELEC": trade(30, 'ELEC'),
"FIRE": trade(30, 'FIRE'),
"ZIPS": trade(30, 'ZIPS')
}
},
{
"title": "(UNUSED) Avg Cost of an Open Residential Permit",
"data": 0
},
{
"title": "(UNUSED) Avg Cost of an Owner/Builder Permit",
"data": 0
},
{
"title": "Permits & sub-permits issued by type, Last 30 Days",
"data": get_permit_types()
},
{
"title": "Surveys by Survey Role",
"data": get_surveys_by_role(survey_table)
},
{
"title": "Master Permits Issued, Last 30 Days",
"data": get_master_permit_counts('permit_issued_date')
},
{
"title": "How many completions?",
"data": get_surveys_by_completion(survey_table)
},
{
"title": "Purpose",
"data": get_surveys_by_purpose(survey_table)
},
{
"title": "Ratings",
"data": {
"en": get_rating_by_lang(survey_table, 'en'),
"es": get_rating_by_lang(survey_table, 'es'),
"p1": get_rating_by_purpose(survey_table, 1),
"p2": get_rating_by_purpose(survey_table, 2),
"p3": get_rating_by_purpose(survey_table, 3),
"p4": get_rating_by_purpose(survey_table, 4),
"p5": get_rating_by_purpose(survey_table, 5),
"contractor": get_rating_by_role(survey_table, 1),
"architect": get_rating_by_role(survey_table, 2),
"permitconsultant": get_rating_by_role(survey_table, 3),
"homeowner": get_rating_by_role(survey_table, 4),
"bizowner": get_rating_by_role(survey_table, 5)
}
}
]
json_obj['daily_graph'] = ujson.dumps(dashboard_collection[0]['data']['graph'])
json_obj['surveys_type'] = ujson.dumps(dashboard_collection[2])
json_obj['permits_type'] = ujson.dumps(dashboard_collection[9])
json_obj['survey_role'] = ujson.dumps(dashboard_collection[10])
json_obj['survey_complete'] = ujson.dumps(dashboard_collection[12])
json_obj['survey_purpose'] = ujson.dumps(dashboard_collection[13])
json_obj['permits_rawjson'] = ujson.dumps(dump_socrata_api('p'))
json_obj['violations_rawjson'] = ujson.dumps(dump_socrata_api('v'))
json_obj['violations_locations_json'] = ujson.dumps(dump_socrata_api('vl'))
json_obj['violations_type_json'] = ujson.dumps(dump_socrata_api('vt'))
json_obj['violations_per_month_json'] = ujson.dumps(dump_socrata_api('vm'))
today = datetime.date.today()
return render_template(
"public/home-metrics.html",
api=api_health(),
date=today.strftime('%B %d, %Y'),
json_obj=json_obj,
dash_obj=dashboard_collection,
resp_obj=survey_table,
title='Dashboard - PIC Metrics'
)
@blueprint.route("/violations", methods=["GET", "POST"])
def violations():
json_obj = {}
surveys_by_date = {}
surveys_date_array = []
surveys_value_array = []
for i in range(SURVEY_DAYS, -1, -1):
time_i = (datetime.date.today() - datetime.timedelta(i))
date_index = time_i.strftime("%m-%d")
surveys_by_date[date_index] = 0
surveys_date_array.append(date_index)
survey_table = get_all_survey_responses(SURVEY_DAYS)
sms_rows = [x.lang for x in survey_table if x.method == 'sms']
web_rows = [x.lang for x in survey_table if x.method == 'web']
# ANALYTICS CODE
for i in range(SURVEY_DAYS, -1, -1):
time_i = (datetime.date.today() - datetime.timedelta(i))
date_index = time_i.strftime("%m-%d")
surveys_value_array.append(
len([x for x in survey_table if to_bucket(x.date_submitted) == date_index]))
dashboard_collection = [
{
"id": "graph",
"title": "Surveys Submitted".format(SURVEY_DAYS),
"data": {
"graph": {
"datetime": {
"data": surveys_date_array
},
"series": [
{
"data": surveys_value_array
}
]
}
}
},
{
"title": "Satisfaction Rating".format(SURVEY_DAYS),
"data": "{0:.2f}".format(get_rating_scale(survey_table))
},
{
"title": "Survey Type".format(SURVEY_DAYS),
"data": {
"web_en": web_rows.count('en'),
"web_es": web_rows.count('es'),
"sms_en": sms_rows.count('en'),
"sms_es": sms_rows.count('es')
},
"labels": {
"web_en": "Web (English)",
"web_es": "Web (Spanish)",
"sms_en": "Text (English)",
"sms_es": "Text (Spanish)"
}
},
{
"title": "Commercial",
"data": {
"nc": get_lifespan('nc'),
"rc": get_lifespan('rc'),
"s": get_lifespan('s')
}
},
{
"title": "Residential",
"data": {
"nr": get_lifespan('nr'),
"rr": get_lifespan('rr'),
"p": get_lifespan('p'),
"f": get_lifespan('f'),
"e": get_lifespan('e')
}
},
{
"title": "Average time from application date to permit issuance, Owner/Builder Permits, Last 30 Days",
"data": 0
},
{
"title": "Same Day Trade Permits",
"data": {
"PLUM": trade(30, 'PLUM'),
"BLDG": trade(30, 'BLDG'),
"ELEC": trade(30, 'ELEC'),
"FIRE": trade(30, 'FIRE'),
"ZIPS": trade(30, 'ZIPS')
}
},
{
"title": "(UNUSED) Avg Cost of an Open Residential Permit",
"data": 0
},
{
"title": "(UNUSED) Avg Cost of an Owner/Builder Permit",
"data": 0
},
{
"title": "Permits & sub-permits issued by type, Last 30 Days",
"data": get_permit_types()
},
{
"title": "Surveys by Survey Role",
"data": get_surveys_by_role(survey_table)
},
{
"title": "Master Permits Issued, Last 30 Days",
"data": get_master_permit_counts('permit_issued_date')
},
{
"title": "How many completions?",
"data": get_surveys_by_completion(survey_table)
},
{
"title": "Purpose",
"data": get_surveys_by_purpose(survey_table)
},
{
"title": "Ratings",
"data": {
"en": get_rating_by_lang(survey_table, 'en'),
"es": get_rating_by_lang(survey_table, 'es'),
"p1": get_rating_by_purpose(survey_table, 1),
"p2": get_rating_by_purpose(survey_table, 2),
"p3": get_rating_by_purpose(survey_table, 3),
"p4": get_rating_by_purpose(survey_table, 4),
"p5": get_rating_by_purpose(survey_table, 5),
"contractor": get_rating_by_role(survey_table, 1),
"architect": get_rating_by_role(survey_table, 2),
"permitconsultant": get_rating_by_role(survey_table, 3),
"homeowner": get_rating_by_role(survey_table, 4),
"bizowner": get_rating_by_role(survey_table, 5)
}
}
]
json_obj['daily_graph'] = ujson.dumps(dashboard_collection[0]['data']['graph'])
json_obj['surveys_type'] = ujson.dumps(dashboard_collection[2])
json_obj['permits_type'] = ujson.dumps(dashboard_collection[9])
json_obj['survey_role'] = ujson.dumps(dashboard_collection[10])
json_obj['survey_complete'] = ujson.dumps(dashboard_collection[12])
json_obj['survey_purpose'] = ujson.dumps(dashboard_collection[13])
json_obj['permits_rawjson'] = ujson.dumps(dump_socrata_api('p'))
json_obj['violations_rawjson'] = ujson.dumps(dump_socrata_api('v'))
json_obj['violations_locations_json'] = ujson.dumps(dump_socrata_api('vl'))
json_obj['violations_type_json'] = ujson.dumps(dump_socrata_api('vt'))
json_obj['violations_per_month_json'] = ujson.dumps(dump_socrata_api('vm'))
today = datetime.date.today()
return render_template(
"public/home-violations.html",
api=api_health(),
date=today.strftime('%B %d, %Y'),
json_obj=json_obj,
dash_obj=dashboard_collection,
resp_obj=survey_table,
title='Dashboard - Neighborhood Compliance'
)
@blueprint.route('/dashboard/feedback/', methods=['GET'])
def all_surveys():
survey_table = get_all_survey_responses(SURVEY_DAYS)
today = datetime.date.today()
return render_template(
"dashboard/all-surveys.html",
resp_obj=survey_table,
title='All Survey Responses',
date=today.strftime('%B %d, %Y')
)
@blueprint.route('/dashboard/feedback/<id>', methods=['GET'])
@login_required
def survey_detail(id):
survey = Survey.query.filter_by(id=id)
today = datetime.date.today()
return render_template(
"dashboard/survey-detail.html",
resp_obj=survey,
title='Permitting & Inspection Center User Survey Metrics: Detail',
date=today.strftime('%B %d, %Y'))
@blueprint.route("/dashboard/violations/", methods=['GET'])
def violations_detail():
json_obj = {}
json_obj['violations_type_json'] = ujson.dumps(dump_socrata_api('vt'))
today = datetime.date.today()
return render_template(
"public/violations-detail.html",
title='Violations by Type: Detail',
json_obj=json_obj,
date=today.strftime('%B %d, %Y'))
| codeforamerica/mdc-feedback | feedback/dashboard/views.py | Python | mit | 18,098 |
import unittest
import requests
class TranslationTests(unittest.TestCase):
def setUp(self):
self.url = 'http://127.0.0.1/api/translate'
def test_given_words(self):
"""Should pass for the basic test cases provided"""
test_words = ['pig', 'banana', 'trash', 'happy', 'duck', 'glove',
'eat', 'omelet', 'are']
expected_words = ['igpay', 'ananabay', 'ashtray', 'appyhay', 'uckday',
'oveglay', 'eatyay', 'omeletyay', 'areyay']
responses = [requests.post(self.url, x).text for x in test_words]
self.assertEqual(responses, expected_words,
'Should pass for the basic test cases provided')
def test_capitalization(self):
"""Should preserve capitalization in words"""
test_words = ['Capitalized', 'Words', 'Should', 'Work']
expected_words = ['Apitalizedcay', 'Ordsway', 'Ouldshay', 'Orkway']
responses = [requests.post(self.url, x).text for x in test_words]
self.assertEqual(responses, expected_words,
'Words should preserve their capitalization')
def test_sentences(self):
"""Should translate sentences with preserved punctuation"""
test_sentence = ('Long sentences should retain their capitalization, '
'as well as punctuation - hopefully!!')
expected_result = ('Onglay entencessay ouldshay etainray eirthay '
'apitalizationcay, asyay ellway asyay unctuationpay'
' - opefullyhay!!')
response = requests.post(self.url, test_sentence).text
self.assertEqual(response, expected_result,
'Should translate sentences accurately')
def test_edge_cases(self):
"""Should be able to handle words with no vowels"""
test_word = 'sky'
expected_result = 'skyay'
response = requests.post(self.url, test_word).text
self.assertEqual(response, expected_result,
'Should be able to translate words without vowels')
def test_error_cases(self):
"""Should return errors for invalid input"""
self.assertEqual(requests.post(self.url, '').status_code, 406,
'Should return HTTP/406 for empty strings')
def test_long_paragraphs(self):
"""Should translate long paragraphs with new lines intact"""
self.maxDiff = None
expected_result = ''
test_paragraph = ''
with open('tests/lorem_ipsum.txt') as input_paragraph:
test_paragraph = input_paragraph.read()
with open('tests/lorem_ipsum_translated.txt') as expected:
expected_result = expected.read()
response = requests.post(self.url, test_paragraph).text
self.assertEqual(response, expected_result,
'Should translate long paragraphs accurately')
if __name__ == '__main__':
    unittest.main()
| chrswt/vicarious-microservice | tests/translation.py | Python | mit | 2,992 |
# encoding: utf-8
import datetime
__all__ = [
'info',
]
def info():
return {
'birthday': datetime.date(1992, 2, 10),
'class': 3,
'family_name_en': u'nakagawa',
'family_name_kana': u'なかがわ',
'first_name_en': u'haruka',
'first_name_kana': u'はるか',
'graduate_date': None,
'hometown': u'東京',
'name_en': u'Nakagawa Haruka',
'name_ja': u'仲川遥香',
'name_kana': u'なかがわ はるか',
'nick': u'はるごん',
'team': u'A',
}
| moriyoshi/pyakb48 | akb48/member/nakagawa_haruka.py | Python | mit | 655 |
<!DOCTYPE html>
<html lang="en" class="">
<head prefix="og: http://ogp.me/ns# fb: http://ogp.me/ns/fb# object: http://ogp.me/ns/object# article: http://ogp.me/ns/article# profile: http://ogp.me/ns/profile#">
<meta charset='utf-8'>
<link crossorigin="anonymous" href="https://assets-cdn.github.com/assets/frameworks-130b94ff796a9660d814b59665547ebaf99cc439323c908f41c6ff46e4255c8e.css" media="all" rel="stylesheet" />
<link crossorigin="anonymous" href="https://assets-cdn.github.com/assets/github-907704789dc9e0c1cd78c2f3adfc91e42ed23a0a97b2790c4171d9d8959f7cdc.css" media="all" rel="stylesheet" />
<link crossorigin="anonymous" href="https://assets-cdn.github.com/assets/site-becbb68a5e0ae3f94214b9e9edea2c49974f6d60b9eae715b70e5d017ff1b935.css" media="all" rel="stylesheet" />
<link as="script" href="https://assets-cdn.github.com/assets/frameworks-74e2880351ce368d8f0a52f12a7452b422bef6397d5477d8120207ea79f0dfd9.js" rel="preload" />
<link as="script" href="https://assets-cdn.github.com/assets/github-2a591b51a4438c7a3e39b82d3119de5d8894bf09aeb9148fc057632c7a2aca9f.js" rel="preload" />
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta http-equiv="Content-Language" content="en">
<meta name="viewport" content="width=device-width">
<title>android-platform_sdk/monkey_playback.py at master · miracle2k/android-platform_sdk · GitHub</title>
<link rel="search" type="application/opensearchdescription+xml" href="/opensearch.xml" title="GitHub">
<link rel="fluid-icon" href="https://github.com/fluidicon.png" title="GitHub">
<link rel="apple-touch-icon" href="/apple-touch-icon.png">
<link rel="apple-touch-icon" sizes="57x57" href="/apple-touch-icon-57x57.png">
<link rel="apple-touch-icon" sizes="60x60" href="/apple-touch-icon-60x60.png">
<link rel="apple-touch-icon" sizes="72x72" href="/apple-touch-icon-72x72.png">
<link rel="apple-touch-icon" sizes="76x76" href="/apple-touch-icon-76x76.png">
<link rel="apple-touch-icon" sizes="114x114" href="/apple-touch-icon-114x114.png">
<link rel="apple-touch-icon" sizes="120x120" href="/apple-touch-icon-120x120.png">
<link rel="apple-touch-icon" sizes="144x144" href="/apple-touch-icon-144x144.png">
<link rel="apple-touch-icon" sizes="152x152" href="/apple-touch-icon-152x152.png">
<link rel="apple-touch-icon" sizes="180x180" href="/apple-touch-icon-180x180.png">
<meta property="fb:app_id" content="1401488693436528">
<meta content="https://avatars0.githubusercontent.com/u/13807?v=3&s=400" name="twitter:image:src" /><meta content="@github" name="twitter:site" /><meta content="summary" name="twitter:card" /><meta content="miracle2k/android-platform_sdk" name="twitter:title" /><meta content="android-platform_sdk - To keep the deprecated apkbuilder tool alive should it break." name="twitter:description" />
<meta content="https://avatars0.githubusercontent.com/u/13807?v=3&s=400" property="og:image" /><meta content="GitHub" property="og:site_name" /><meta content="object" property="og:type" /><meta content="miracle2k/android-platform_sdk" property="og:title" /><meta content="https://github.com/miracle2k/android-platform_sdk" property="og:url" /><meta content="android-platform_sdk - To keep the deprecated apkbuilder tool alive should it break." property="og:description" />
<meta name="browser-stats-url" content="https://api.github.com/_private/browser/stats">
<meta name="browser-errors-url" content="https://api.github.com/_private/browser/errors">
<link rel="assets" href="https://assets-cdn.github.com/">
<meta name="pjax-timeout" content="1000">
<meta name="request-id" content="DABD7F08:88D1:CDCD340:57C0022F" data-pjax-transient>
<meta name="msapplication-TileImage" content="/windows-tile.png">
<meta name="msapplication-TileColor" content="#ffffff">
<meta name="selected-link" value="repo_source" data-pjax-transient>
<meta name="google-site-verification" content="KT5gs8h0wvaagLKAVWq8bbeNwnZZK1r1XQysX3xurLU">
<meta name="google-site-verification" content="ZzhVyEFwb7w3e0-uOTltm8Jsck2F5StVihD0exw2fsA">
<meta name="google-analytics" content="UA-3769691-2">
<meta content="collector.githubapp.com" name="octolytics-host" /><meta content="github" name="octolytics-app-id" /><meta content="DABD7F08:88D1:CDCD340:57C0022F" name="octolytics-dimension-request_id" />
<meta content="/<user-name>/<repo-name>/blob/show" data-pjax-transient="true" name="analytics-location" />
<meta class="js-ga-set" name="dimension1" content="Logged Out">
<meta name="hostname" content="github.com">
<meta name="user-login" content="">
<meta name="expected-hostname" content="github.com">
<meta name="js-proxy-site-detection-payload" content="M2U1NjI5YmZkMTkzMDk4NWZjNzdlMmU1YjU5NzZjMzhmYjZhMTc4M2Y3YzUzODI3N2M4YmEwYTA0ZmY2YTBhZXx7InJlbW90ZV9hZGRyZXNzIjoiMjE4LjE4OS4xMjcuOCIsInJlcXVlc3RfaWQiOiJEQUJEN0YwODo4OEQxOkNEQ0QzNDA6NTdDMDAyMkYiLCJ0aW1lc3RhbXAiOjE0NzIyMDEyNjR9">
<link rel="mask-icon" href="https://assets-cdn.github.com/pinned-octocat.svg" color="#4078c0">
<link rel="icon" type="image/x-icon" href="https://assets-cdn.github.com/favicon.ico">
<meta name="html-safe-nonce" content="a7a43ef8cf73c3d6bb870f4fca27a80272027589">
<meta content="4c09cb75b1ca694c4c736f01d9c7db0f3117e090" name="form-nonce" />
<meta http-equiv="x-pjax-version" content="8c57ae2406b1076c4bddfbcd9b742e31">
<meta name="description" content="android-platform_sdk - To keep the deprecated apkbuilder tool alive should it break.">
<meta name="go-import" content="github.com/miracle2k/android-platform_sdk git https://github.com/miracle2k/android-platform_sdk.git">
<meta content="13807" name="octolytics-dimension-user_id" /><meta content="miracle2k" name="octolytics-dimension-user_login" /><meta content="1420024" name="octolytics-dimension-repository_id" /><meta content="miracle2k/android-platform_sdk" name="octolytics-dimension-repository_nwo" /><meta content="true" name="octolytics-dimension-repository_public" /><meta content="false" name="octolytics-dimension-repository_is_fork" /><meta content="1420024" name="octolytics-dimension-repository_network_root_id" /><meta content="miracle2k/android-platform_sdk" name="octolytics-dimension-repository_network_root_nwo" />
<link href="https://github.com/miracle2k/android-platform_sdk/commits/master.atom" rel="alternate" title="Recent Commits to android-platform_sdk:master" type="application/atom+xml">
<link rel="canonical" href="https://github.com/miracle2k/android-platform_sdk/blob/master/monkeyrunner/scripts/monkey_playback.py" data-pjax-transient>
</head>
<body class="logged-out env-production vis-public page-blob">
<div id="js-pjax-loader-bar" class="pjax-loader-bar"><div class="progress"></div></div>
<a href="#start-of-content" tabindex="1" class="accessibility-aid js-skip-to-content">Skip to content</a>
<header class="site-header js-details-container" role="banner">
<div class="container-responsive">
<a class="header-logo-invertocat" href="https://github.com/" aria-label="Homepage" data-ga-click="(Logged out) Header, go to homepage, icon:logo-wordmark">
<svg aria-hidden="true" class="octicon octicon-mark-github" height="32" version="1.1" viewBox="0 0 16 16" width="32"><path d="M8 0C3.58 0 0 3.58 0 8c0 3.54 2.29 6.53 5.47 7.59.4.07.55-.17.55-.38 0-.19-.01-.82-.01-1.49-2.01.37-2.53-.49-2.69-.94-.09-.23-.48-.94-.82-1.13-.28-.15-.68-.52-.01-.53.63-.01 1.08.58 1.23.82.72 1.21 1.87.87 2.33.66.07-.52.28-.87.51-1.07-1.78-.2-3.64-.89-3.64-3.95 0-.87.31-1.59.82-2.15-.08-.2-.36-1.02.08-2.12 0 0 .67-.21 2.2.82.64-.18 1.32-.27 2-.27.68 0 1.36.09 2 .27 1.53-1.04 2.2-.82 2.2-.82.44 1.1.16 1.92.08 2.12.51.56.82 1.27.82 2.15 0 3.07-1.87 3.75-3.65 3.95.29.25.54.73.54 1.48 0 1.07-.01 1.93-.01 2.2 0 .21.15.46.55.38A8.013 8.013 0 0 0 16 8c0-4.42-3.58-8-8-8z"></path></svg>
</a>
<button class="btn-link float-right site-header-toggle js-details-target" type="button" aria-label="Toggle navigation">
<svg aria-hidden="true" class="octicon octicon-three-bars" height="24" version="1.1" viewBox="0 0 12 16" width="18"><path d="M11.41 9H.59C0 9 0 8.59 0 8c0-.59 0-1 .59-1H11.4c.59 0 .59.41.59 1 0 .59 0 1-.59 1h.01zm0-4H.59C0 5 0 4.59 0 4c0-.59 0-1 .59-1H11.4c.59 0 .59.41.59 1 0 .59 0 1-.59 1h.01zM.59 11H11.4c.59 0 .59.41.59 1 0 .59 0 1-.59 1H.59C0 13 0 12.59 0 12c0-.59 0-1 .59-1z"></path></svg>
</button>
<div class="site-header-menu">
<nav class="site-header-nav site-header-nav-main">
<a href="/personal" class="js-selected-navigation-item nav-item nav-item-personal" data-ga-click="Header, click, Nav menu - item:personal" data-selected-links="/personal /personal">
Personal
</a> <a href="/open-source" class="js-selected-navigation-item nav-item nav-item-opensource" data-ga-click="Header, click, Nav menu - item:opensource" data-selected-links="/open-source /open-source">
Open source
</a> <a href="/business" class="js-selected-navigation-item nav-item nav-item-business" data-ga-click="Header, click, Nav menu - item:business" data-selected-links="/business /business/partners /business/features /business/customers /business">
Business
</a> <a href="/explore" class="js-selected-navigation-item nav-item nav-item-explore" data-ga-click="Header, click, Nav menu - item:explore" data-selected-links="/explore /trending /trending/developers /integrations /integrations/feature/code /integrations/feature/collaborate /integrations/feature/ship /explore">
Explore
</a> </nav>
<div class="site-header-actions">
<a class="btn btn-primary site-header-actions-btn" href="/join?source=header-repo" data-ga-click="(Logged out) Header, clicked Sign up, text:sign-up">Sign up</a>
<a class="btn site-header-actions-btn mr-2" href="/login?return_to=%2Fmiracle2k%2Fandroid-platform_sdk%2Fblob%2Fmaster%2Fmonkeyrunner%2Fscripts%2Fmonkey_playback.py" data-ga-click="(Logged out) Header, clicked Sign in, text:sign-in">Sign in</a>
</div>
<nav class="site-header-nav site-header-nav-secondary">
<a class="nav-item" href="/pricing">Pricing</a>
<a class="nav-item" href="/blog">Blog</a>
<a class="nav-item" href="https://help.github.com">Support</a>
<a class="nav-item header-search-link" href="https://github.com/search">Search GitHub</a>
<div class="header-search scoped-search site-scoped-search js-site-search" role="search">
<!-- </textarea> --><!-- '"` --><form accept-charset="UTF-8" action="/miracle2k/android-platform_sdk/search" class="js-site-search-form" data-scoped-search-url="/miracle2k/android-platform_sdk/search" data-unscoped-search-url="/search" method="get"><div style="margin:0;padding:0;display:inline"><input name="utf8" type="hidden" value="✓" /></div>
<label class="form-control header-search-wrapper js-chromeless-input-container">
<div class="header-search-scope">This repository</div>
<input type="text"
class="form-control header-search-input js-site-search-focus js-site-search-field is-clearable"
data-hotkey="s"
name="q"
placeholder="Search"
aria-label="Search this repository"
data-unscoped-placeholder="Search GitHub"
data-scoped-placeholder="Search"
autocapitalize="off">
</label>
</form></div>
</nav>
</div>
</div>
</header>
<div id="start-of-content" class="accessibility-aid"></div>
<div id="js-flash-container">
</div>
<div role="main">
<div itemscope itemtype="http://schema.org/SoftwareSourceCode">
<div id="js-repo-pjax-container" data-pjax-container>
<div class="pagehead repohead instapaper_ignore readability-menu experiment-repo-nav">
<div class="container repohead-details-container">
<ul class="pagehead-actions">
<li>
<a href="/login?return_to=%2Fmiracle2k%2Fandroid-platform_sdk"
class="btn btn-sm btn-with-count tooltipped tooltipped-n"
aria-label="You must be signed in to watch a repository" rel="nofollow">
<svg aria-hidden="true" class="octicon octicon-eye" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path d="M8.06 2C3 2 0 8 0 8s3 6 8.06 6C13 14 16 8 16 8s-3-6-7.94-6zM8 12c-2.2 0-4-1.78-4-4 0-2.2 1.8-4 4-4 2.22 0 4 1.8 4 4 0 2.22-1.78 4-4 4zm2-4c0 1.11-.89 2-2 2-1.11 0-2-.89-2-2 0-1.11.89-2 2-2 1.11 0 2 .89 2 2z"></path></svg>
Watch
</a>
<a class="social-count" href="/miracle2k/android-platform_sdk/watchers"
aria-label="8 users are watching this repository">
8
</a>
</li>
<li>
<a href="/login?return_to=%2Fmiracle2k%2Fandroid-platform_sdk"
class="btn btn-sm btn-with-count tooltipped tooltipped-n"
aria-label="You must be signed in to star a repository" rel="nofollow">
<svg aria-hidden="true" class="octicon octicon-star" height="16" version="1.1" viewBox="0 0 14 16" width="14"><path d="M14 6l-4.9-.64L7 1 4.9 5.36 0 6l3.6 3.26L2.67 14 7 11.67 11.33 14l-.93-4.74z"></path></svg>
Star
</a>
<a class="social-count js-social-count" href="/miracle2k/android-platform_sdk/stargazers"
aria-label="24 users starred this repository">
24
</a>
</li>
<li>
<a href="/login?return_to=%2Fmiracle2k%2Fandroid-platform_sdk"
class="btn btn-sm btn-with-count tooltipped tooltipped-n"
aria-label="You must be signed in to fork a repository" rel="nofollow">
<svg aria-hidden="true" class="octicon octicon-repo-forked" height="16" version="1.1" viewBox="0 0 10 16" width="10"><path d="M8 1a1.993 1.993 0 0 0-1 3.72V6L5 8 3 6V4.72A1.993 1.993 0 0 0 2 1a1.993 1.993 0 0 0-1 3.72V6.5l3 3v1.78A1.993 1.993 0 0 0 5 15a1.993 1.993 0 0 0 1-3.72V9.5l3-3V4.72A1.993 1.993 0 0 0 8 1zM2 4.2C1.34 4.2.8 3.65.8 3c0-.65.55-1.2 1.2-1.2.65 0 1.2.55 1.2 1.2 0 .65-.55 1.2-1.2 1.2zm3 10c-.66 0-1.2-.55-1.2-1.2 0-.65.55-1.2 1.2-1.2.65 0 1.2.55 1.2 1.2 0 .65-.55 1.2-1.2 1.2zm3-10c-.66 0-1.2-.55-1.2-1.2 0-.65.55-1.2 1.2-1.2.65 0 1.2.55 1.2 1.2 0 .65-.55 1.2-1.2 1.2z"></path></svg>
Fork
</a>
<a href="/miracle2k/android-platform_sdk/network" class="social-count"
aria-label="66 users are forked this repository">
66
</a>
</li>
</ul>
<h1 class="public ">
<svg aria-hidden="true" class="octicon octicon-repo" height="16" version="1.1" viewBox="0 0 12 16" width="12"><path d="M4 9H3V8h1v1zm0-3H3v1h1V6zm0-2H3v1h1V4zm0-2H3v1h1V2zm8-1v12c0 .55-.45 1-1 1H6v2l-1.5-1.5L3 16v-2H1c-.55 0-1-.45-1-1V1c0-.55.45-1 1-1h10c.55 0 1 .45 1 1zm-1 10H1v2h2v-1h3v1h5v-2zm0-10H2v9h9V1z"></path></svg>
<span class="author" itemprop="author"><a href="/miracle2k" class="url fn" rel="author">miracle2k</a></span><!--
--><span class="path-divider">/</span><!--
--><strong itemprop="name"><a href="/miracle2k/android-platform_sdk" data-pjax="#js-repo-pjax-container">android-platform_sdk</a></strong>
</h1>
</div>
<div class="container">
<nav class="reponav js-repo-nav js-sidenav-container-pjax"
itemscope
itemtype="http://schema.org/BreadcrumbList"
role="navigation"
data-pjax="#js-repo-pjax-container">
<span itemscope itemtype="http://schema.org/ListItem" itemprop="itemListElement">
<a href="/miracle2k/android-platform_sdk" aria-selected="true" class="js-selected-navigation-item selected reponav-item" data-hotkey="g c" data-selected-links="repo_source repo_downloads repo_commits repo_releases repo_tags repo_branches /miracle2k/android-platform_sdk" itemprop="url">
<svg aria-hidden="true" class="octicon octicon-code" height="16" version="1.1" viewBox="0 0 14 16" width="14"><path d="M9.5 3L8 4.5 11.5 8 8 11.5 9.5 13 14 8 9.5 3zm-5 0L0 8l4.5 5L6 11.5 2.5 8 6 4.5 4.5 3z"></path></svg>
<span itemprop="name">Code</span>
<meta itemprop="position" content="1">
</a> </span>
<span itemscope itemtype="http://schema.org/ListItem" itemprop="itemListElement">
<a href="/miracle2k/android-platform_sdk/issues" class="js-selected-navigation-item reponav-item" data-hotkey="g i" data-selected-links="repo_issues repo_labels repo_milestones /miracle2k/android-platform_sdk/issues" itemprop="url">
<svg aria-hidden="true" class="octicon octicon-issue-opened" height="16" version="1.1" viewBox="0 0 14 16" width="14"><path d="M7 2.3c3.14 0 5.7 2.56 5.7 5.7s-2.56 5.7-5.7 5.7A5.71 5.71 0 0 1 1.3 8c0-3.14 2.56-5.7 5.7-5.7zM7 1C3.14 1 0 4.14 0 8s3.14 7 7 7 7-3.14 7-7-3.14-7-7-7zm1 3H6v5h2V4zm0 6H6v2h2v-2z"></path></svg>
<span itemprop="name">Issues</span>
<span class="counter">0</span>
<meta itemprop="position" content="2">
</a> </span>
<span itemscope itemtype="http://schema.org/ListItem" itemprop="itemListElement">
<a href="/miracle2k/android-platform_sdk/pulls" class="js-selected-navigation-item reponav-item" data-hotkey="g p" data-selected-links="repo_pulls /miracle2k/android-platform_sdk/pulls" itemprop="url">
<svg aria-hidden="true" class="octicon octicon-git-pull-request" height="16" version="1.1" viewBox="0 0 12 16" width="12"><path d="M11 11.28V5c-.03-.78-.34-1.47-.94-2.06C9.46 2.35 8.78 2.03 8 2H7V0L4 3l3 3V4h1c.27.02.48.11.69.31.21.2.3.42.31.69v6.28A1.993 1.993 0 0 0 10 15a1.993 1.993 0 0 0 1-3.72zm-1 2.92c-.66 0-1.2-.55-1.2-1.2 0-.65.55-1.2 1.2-1.2.65 0 1.2.55 1.2 1.2 0 .65-.55 1.2-1.2 1.2zM4 3c0-1.11-.89-2-2-2a1.993 1.993 0 0 0-1 3.72v6.56A1.993 1.993 0 0 0 2 15a1.993 1.993 0 0 0 1-3.72V4.72c.59-.34 1-.98 1-1.72zm-.8 10c0 .66-.55 1.2-1.2 1.2-.65 0-1.2-.55-1.2-1.2 0-.65.55-1.2 1.2-1.2.65 0 1.2.55 1.2 1.2zM2 4.2C1.34 4.2.8 3.65.8 3c0-.65.55-1.2 1.2-1.2.65 0 1.2.55 1.2 1.2 0 .65-.55 1.2-1.2 1.2z"></path></svg>
<span itemprop="name">Pull requests</span>
<span class="counter">1</span>
<meta itemprop="position" content="3">
</a> </span>
<a href="/miracle2k/android-platform_sdk/pulse" class="js-selected-navigation-item reponav-item" data-selected-links="pulse /miracle2k/android-platform_sdk/pulse">
<svg aria-hidden="true" class="octicon octicon-pulse" height="16" version="1.1" viewBox="0 0 14 16" width="14"><path d="M11.5 8L8.8 5.4 6.6 8.5 5.5 1.6 2.38 8H0v2h3.6l.9-1.8.9 5.4L9 8.5l1.6 1.5H14V8z"></path></svg>
Pulse
</a>
<a href="/miracle2k/android-platform_sdk/graphs" class="js-selected-navigation-item reponav-item" data-selected-links="repo_graphs repo_contributors /miracle2k/android-platform_sdk/graphs">
<svg aria-hidden="true" class="octicon octicon-graph" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path d="M16 14v1H0V0h1v14h15zM5 13H3V8h2v5zm4 0H7V3h2v10zm4 0h-2V6h2v7z"></path></svg>
Graphs
</a>
</nav>
</div>
</div>
<div class="container new-discussion-timeline experiment-repo-nav">
<div class="repository-content">
<a href="/miracle2k/android-platform_sdk/blob/a43666cc4f8d3e9eaf0ef7bdb24696c02c2622b5/monkeyrunner/scripts/monkey_playback.py" class="d-none js-permalink-shortcut" data-hotkey="y">Permalink</a>
<!-- blob contrib key: blob_contributors:v21:662d0810ab048d9a87d95357160e8c3f -->
<div class="file-navigation js-zeroclipboard-container">
<div class="select-menu branch-select-menu js-menu-container js-select-menu float-left">
<button class="btn btn-sm select-menu-button js-menu-target css-truncate" data-hotkey="w"
type="button" aria-label="Switch branches or tags" tabindex="0" aria-haspopup="true">
<i>Branch:</i>
<span class="js-select-button css-truncate-target">master</span>
</button>
<div class="select-menu-modal-holder js-menu-content js-navigation-container" data-pjax aria-hidden="true">
<div class="select-menu-modal">
<div class="select-menu-header">
<svg aria-label="Close" class="octicon octicon-x js-menu-close" height="16" role="img" version="1.1" viewBox="0 0 12 16" width="12"><path d="M7.48 8l3.75 3.75-1.48 1.48L6 9.48l-3.75 3.75-1.48-1.48L4.52 8 .77 4.25l1.48-1.48L6 6.52l3.75-3.75 1.48 1.48z"></path></svg>
<span class="select-menu-title">Switch branches/tags</span>
</div>
<div class="select-menu-filters">
<div class="select-menu-text-filter">
<input type="text" aria-label="Filter branches/tags" id="context-commitish-filter-field" class="form-control js-filterable-field js-navigation-enable" placeholder="Filter branches/tags">
</div>
<div class="select-menu-tabs">
<ul>
<li class="select-menu-tab">
<a href="#" data-tab-filter="branches" data-filter-placeholder="Filter branches/tags" class="js-select-menu-tab" role="tab">Branches</a>
</li>
<li class="select-menu-tab">
<a href="#" data-tab-filter="tags" data-filter-placeholder="Find a tag…" class="js-select-menu-tab" role="tab">Tags</a>
</li>
</ul>
</div>
</div>
<div class="select-menu-list select-menu-tab-bucket js-select-menu-tab-bucket" data-tab-filter="branches" role="menu">
<div data-filterable-for="context-commitish-filter-field" data-filterable-type="substring">
<a class="select-menu-item js-navigation-item js-navigation-open selected"
href="/miracle2k/android-platform_sdk/blob/master/monkeyrunner/scripts/monkey_playback.py"
data-name="master"
data-skip-pjax="true"
rel="nofollow">
<svg aria-hidden="true" class="octicon octicon-check select-menu-item-icon" height="16" version="1.1" viewBox="0 0 12 16" width="12"><path d="M12 5l-8 8-4-4 1.5-1.5L4 10l6.5-6.5z"></path></svg>
<span class="select-menu-item-text css-truncate-target js-select-menu-filter-text">
master
</span>
</a>
</div>
<div class="select-menu-no-results">Nothing to show</div>
</div>
<div class="select-menu-list select-menu-tab-bucket js-select-menu-tab-bucket" data-tab-filter="tags">
<div data-filterable-for="context-commitish-filter-field" data-filterable-type="substring">
</div>
<div class="select-menu-no-results">Nothing to show</div>
</div>
</div>
</div>
</div>
<div class="btn-group float-right">
<a href="/miracle2k/android-platform_sdk/find/master"
class="js-pjax-capture-input btn btn-sm"
data-pjax
data-hotkey="t">
Find file
</a>
<button aria-label="Copy file path to clipboard" class="js-zeroclipboard btn btn-sm zeroclipboard-button tooltipped tooltipped-s" data-copied-hint="Copied!" type="button">Copy path</button>
</div>
<div class="breadcrumb js-zeroclipboard-target">
<span class="repo-root js-repo-root"><span class="js-path-segment"><a href="/miracle2k/android-platform_sdk"><span>android-platform_sdk</span></a></span></span><span class="separator">/</span><span class="js-path-segment"><a href="/miracle2k/android-platform_sdk/tree/master/monkeyrunner"><span>monkeyrunner</span></a></span><span class="separator">/</span><span class="js-path-segment"><a href="/miracle2k/android-platform_sdk/tree/master/monkeyrunner/scripts"><span>scripts</span></a></span><span class="separator">/</span><strong class="final-path">monkey_playback.py</strong>
</div>
</div>
<div class="commit-tease">
<span class="right">
<a class="commit-tease-sha" href="/miracle2k/android-platform_sdk/commit/6db57208c8fb964bba0bc6da098e8aac94ea6b93" data-pjax>
6db5720
</a>
<relative-time datetime="2010-10-18T20:54:38Z">Oct 19, 2010</relative-time>
</span>
<div>
<img alt="@billnapier" class="avatar" height="20" src="https://avatars2.githubusercontent.com/u/163577?v=3&s=40" width="20" />
<a href="/billnapier" class="user-mention" rel="contributor">billnapier</a>
<a href="/miracle2k/android-platform_sdk/commit/6db57208c8fb964bba0bc6da098e8aac94ea6b93" class="message" data-pjax="true" title="Initial cut at MonkeyRecorder.
MonkeyRecorder (and MonkeyPlayback) are a set of tools for using MonkeyRunner to record and playback actions. The current implementation is not very sophisticated, but it works.
Please don't review yet. Needs a lot of style cleanup.
Change-Id: Id300a27294b5dc13a842fade900e8b9916b8a17b">Initial cut at MonkeyRecorder.</a>
</div>
<div class="commit-tease-contributors">
<button type="button" class="btn-link muted-link contributors-toggle" data-facebox="#blob_contributors_box">
<strong>1</strong>
contributor
</button>
</div>
<div id="blob_contributors_box" style="display:none">
<h2 class="facebox-header" data-facebox-id="facebox-header">Users who have contributed to this file</h2>
<ul class="facebox-user-list" data-facebox-id="facebox-description">
<li class="facebox-user-list-item">
<img alt="@billnapier" height="24" src="https://avatars0.githubusercontent.com/u/163577?v=3&s=48" width="24" />
<a href="/billnapier">billnapier</a>
</li>
</ul>
</div>
</div>
<div class="file">
<div class="file-header">
<div class="file-actions">
<div class="btn-group">
<a href="/miracle2k/android-platform_sdk/raw/master/monkeyrunner/scripts/monkey_playback.py" class="btn btn-sm " id="raw-url">Raw</a>
<a href="/miracle2k/android-platform_sdk/blame/master/monkeyrunner/scripts/monkey_playback.py" class="btn btn-sm js-update-url-with-hash">Blame</a>
<a href="/miracle2k/android-platform_sdk/commits/master/monkeyrunner/scripts/monkey_playback.py" class="btn btn-sm " rel="nofollow">History</a>
</div>
<button type="button" class="btn-octicon disabled tooltipped tooltipped-nw"
aria-label="You must be signed in to make or propose changes">
<svg aria-hidden="true" class="octicon octicon-pencil" height="16" version="1.1" viewBox="0 0 14 16" width="14"><path d="M0 12v3h3l8-8-3-3-8 8zm3 2H1v-2h1v1h1v1zm10.3-9.3L12 6 9 3l1.3-1.3a.996.996 0 0 1 1.41 0l1.59 1.59c.39.39.39 1.02 0 1.41z"></path></svg>
</button>
<button type="button" class="btn-octicon btn-octicon-danger disabled tooltipped tooltipped-nw"
aria-label="You must be signed in to make or propose changes">
<svg aria-hidden="true" class="octicon octicon-trashcan" height="16" version="1.1" viewBox="0 0 12 16" width="12"><path d="M11 2H9c0-.55-.45-1-1-1H5c-.55 0-1 .45-1 1H2c-.55 0-1 .45-1 1v1c0 .55.45 1 1 1v9c0 .55.45 1 1 1h7c.55 0 1-.45 1-1V5c.55 0 1-.45 1-1V3c0-.55-.45-1-1-1zm-1 12H3V5h1v8h1V5h1v8h1V5h1v8h1V5h1v9zm1-10H2V3h9v1z"></path></svg>
</button>
</div>
<div class="file-info">
71 lines (55 sloc)
<span class="file-info-divider"></span>
2.11 KB
</div>
</div>
#!/usr/bin/env monkeyrunner
# Copyright 2010, The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import sys
from com.android.monkeyrunner import MonkeyRunner

# The format of the file we are parsing is very carefully constructed.
# Each line corresponds to a single command. The line is split into 2
# parts with a | character. Text to the left of the pipe denotes
# which command to run. The text to the right of the pipe is a python
# dictionary (it can be evaled into existence) that specifies the
# arguments for the command. In most cases, this directly maps to the
# keyword argument dictionary that could be passed to the underlying
# command.

# Lookup table to map command strings to functions that implement that
# command.
CMD_MAP = {
    'TOUCH': lambda dev, arg: dev.touch(**arg),
    'DRAG': lambda dev, arg: dev.drag(**arg),
    'PRESS': lambda dev, arg: dev.press(**arg),
    'TYPE': lambda dev, arg: dev.type(**arg),
    'WAIT': lambda dev, arg: MonkeyRunner.sleep(**arg)
    }

# Process a single file for the specified device.
def process_file(fp, device):
    for line in fp:
        (cmd, rest) = line.split('|')
        try:
            # Parse the pydict
            rest = eval(rest)
        except:
            print 'unable to parse options'
            continue

        if cmd not in CMD_MAP:
            print 'unknown command: ' + cmd
            continue

        CMD_MAP[cmd](device, rest)


def main():
    file = sys.argv[1]
    fp = open(file, 'r')

    device = MonkeyRunner.waitForConnection()

    process_file(fp, device)
    fp.close()


if __name__ == '__main__':
    main()
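
# A hypothetical recording that process_file could consume; each line is
# COMMAND|{kwargs}. The exact keyword names depend on the MonkeyDevice API
# in use, so treat these values as illustrative only, not a verified spec:
#
#   TOUCH|{'x': 100, 'y': 200, 'type': 'downAndUp'}
#   WAIT|{'seconds': 1.0}
#   PRESS|{'name': 'KEYCODE_BACK', 'type': 'downAndUp'}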
| hjhjw1991/monkeyJ | monkey_playback.py | Python | mit | 50,743 |
'''
modified by Chongxuan Li ([email protected])
'''
import sys
sys.path.append('..')
sys.path.append('../../data/')
import os, numpy as np
import scipy.io as sio
import time
import anglepy as ap
import anglepy.paramgraphics as paramgraphics
import anglepy.ndict as ndict
import theano
import theano.tensor as T
from collections import OrderedDict
import preprocessing as pp
import color
def zca_dec(zca_mean, zca_winv, data):
return zca_winv.dot(data) + zca_mean
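# zca_dec inverts the ZCA whitening applied in the cifar10_zca branch below,
# where whitened data is produced as zca_w.dot(data - zca_mean).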
def labelToMat(y):
label = np.unique(y)
newy = np.zeros((len(y), len(label)))
for i in range(len(y)):
newy[i, y[i]] = 1
return newy.T
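# A minimal sketch of what labelToMat produces (traced from the code above,
# not from an external spec): integer labels become a one-hot matrix with one
# row per class and one column per example, e.g.
#   labelToMat(np.array([0, 2, 1]))
#   -> array([[1., 0., 0.],
#             [0., 0., 1.],
#             [0., 1., 0.]])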
def main(n_z, n_hidden, dataset, seed, comment, gfx=True):
# Initialize logdir
import time
pre_dir = 'models/gpulearn_z_x_mnist_96-(500, 500)'
if os.environ.has_key('pretrain') and bool(int(os.environ['pretrain'])) == True:
comment+='_pre-train'
if os.environ.has_key('prior') and bool(int(os.environ['prior'])) == True:
comment+='_prior'
pre_dir+='_prior'
if os.environ.has_key('cutoff'):
comment+=('_'+str(int(os.environ['cutoff'])))
if os.environ.has_key('train_residual') and bool(int(os.environ['train_residual'])) == True:
comment+='_train-residual'
pre_dir+='_train-residual'
if os.environ.has_key('sigma_square'):
comment+=('_'+str(float(os.environ['sigma_square'])))
pre_dir+=('_'+str(float(os.environ['sigma_square'])))
pre_dir+='/'
logdir = 'results/gpulearn_z_x_'+dataset+'_'+str(n_z)+'-'+str(n_hidden)+comment+'_'+str(int(time.time()))+'/'
if not os.path.exists(logdir): os.makedirs(logdir)
print 'logdir:', logdir
print 'gpulearn_z_x', n_z, n_hidden, dataset, seed
with open(logdir+'hook.txt', 'a') as f:
print >>f, 'learn_z_x', n_z, n_hidden, dataset, seed
np.random.seed(seed)
gfx_freq = 1
weight_decay = 0
# Init data
if dataset == 'mnist':
import anglepy.data.mnist as mnist
# MNIST
size = 28
train_x, train_y, valid_x, valid_y, test_x, test_y = mnist.load_numpy(size)
f_enc, f_dec = pp.Identity()
if os.environ.has_key('prior') and bool(int(os.environ['prior'])) == True:
color.printBlue('Loading prior')
mnist_prior = sio.loadmat('data/mnist_prior/mnist_prior.mat')
train_mean_prior = mnist_prior['z_train']
test_mean_prior = mnist_prior['z_test']
valid_mean_prior = mnist_prior['z_valid']
else:
train_mean_prior = np.zeros((n_z,train_x.shape[1]))
test_mean_prior = np.zeros((n_z,test_x.shape[1]))
valid_mean_prior = np.zeros((n_z,valid_x.shape[1]))
x = {'x': train_x.astype(np.float32), 'mean_prior': train_mean_prior.astype(np.float32)}
x_train = x
x_valid = {'x': valid_x.astype(np.float32), 'mean_prior': valid_mean_prior.astype(np.float32)}
x_test = {'x': test_x.astype(np.float32), 'mean_prior': test_mean_prior.astype(np.float32)}
L_valid = 1
dim_input = (size,size)
n_x = size*size
type_qz = 'gaussianmarg'
type_pz = 'gaussianmarg'
nonlinear = 'softplus'
type_px = 'bernoulli'
n_train = 50000
n_test = 10000
n_valid = 10000
n_batch = 1000
colorImg = False
bernoulli_x = True
byteToFloat = False
weight_decay = float(n_batch)/n_train
elif dataset == 'higgs':
size = 28
f_enc, f_dec = pp.Identity()
inputfile = 'data/higgs/HIGGS.csv'
print 'loading file.'
x = np.loadtxt(inputfile, dtype='f4', delimiter=',')
print 'done.'
y = x[:,0].reshape((-1,1))
x = x[:,1:]
x = np.array(x, dtype='float32')
y = np.array(y, dtype='float32')
n_train = 10000000
n_valid = 500000
n_test = 500000
n_batch = 1000
derived_feat = 'all'
if os.environ.has_key('derived_feat'):
derived_feat = os.environ['derived_feat']
color.printBlue(derived_feat)
if derived_feat == 'high':
# Only the 7 high level features.
x = x[:, 21:28]
elif derived_feat == 'low':
# Only the 21 raw features.
x = x[:, 0:21]
else:
pass
train_x = x[0:n_train, :].T
y_train = y[0:n_train, :]
valid_x = x[n_train:n_train+n_valid, :].T
y_valid = y[n_train:n_train+n_valid, :]
test_x = x[n_train+n_valid:n_train+n_valid+n_test, :].T
y_test = y[n_train+n_valid:n_train+n_valid+n_test, :]
n_y = 2
n_x = train_x.shape[0]
train_mean_prior = np.zeros((n_z,train_x.shape[1]))
test_mean_prior = np.zeros((n_z,test_x.shape[1]))
valid_mean_prior = np.zeros((n_z,valid_x.shape[1]))
x = {'x': train_x.astype(np.float32), 'mean_prior': train_mean_prior.astype(np.float32)}
x_train = x
x_valid = {'x': valid_x.astype(np.float32), 'mean_prior': valid_mean_prior.astype(np.float32)}
x_test = {'x': test_x.astype(np.float32), 'mean_prior': test_mean_prior.astype(np.float32)}
type_qz = 'gaussianmarg'
type_pz = 'gaussianmarg'
nonlinear = 'tanh'
if os.environ.has_key('nonlinear'):
nonlinear = os.environ['nonlinear']
color.printBlue(nonlinear)
L_valid = 1
dim_input = (1,size)
type_px = 'gaussian'
colorImg = False
bernoulli_x = False
byteToFloat = False
weight_decay = float(n_batch)/n_train
elif dataset == 'cifar10':
import anglepy.data.cifar10 as cifar10
size = 32
train_x, train_y, test_x, test_y = cifar10.load_numpy()
train_x = train_x.astype(np.float32).T
test_x = test_x.astype(np.float32).T
##
f_enc, f_dec = pp.Identity()
if os.environ.has_key('prior') and bool(int(os.environ['prior'])) == True:
color.printBlue('Loading prior')
cifar_prior = sio.loadmat('data/cifar10_prior/cifar10_prior.mat')
train_mean_prior = cifar_prior['z_train']
test_mean_prior = cifar_prior['z_test']
else:
train_mean_prior = np.zeros((n_z,train_x.shape[1]))
test_mean_prior = np.zeros((n_z,test_x.shape[1]))
x = {'x': train_x.astype(np.float32), 'mean_prior': train_mean_prior.astype(np.float32)}
x_train = x
x_test = {'x': test_x.astype(np.float32), 'mean_prior': test_mean_prior.astype(np.float32)}
x_valid = x_test
L_valid = 1
n_y = 10
dim_input = (size,size)
n_x = x['x'].shape[0]
type_qz = 'gaussianmarg'
type_pz = 'gaussianmarg'
nonlinear = 'softplus'
type_px = 'gaussian'
if os.environ.has_key('type_px'):
type_px = os.environ['type_px']
color.printBlue('Generative type: '+type_px)
n_train = 50000
n_test = 10000
n_batch = 5000
colorImg = True
bernoulli_x = False
byteToFloat = False
#weight_decay = float(n_batch)/n_train
elif dataset == 'cifar10_zca':
import anglepy.data.cifar10 as cifar10
size = 32
train_x, train_y, test_x, test_y = cifar10.load_numpy()
train_x = train_x.astype(np.float32).T
test_x = test_x.astype(np.float32).T
##
f_enc, f_dec = pp.Identity()
zca_mean, zca_w, zca_winv = cifar10.zca(train_x)
train_x = zca_w.dot(train_x-zca_mean)
test_x = zca_w.dot(test_x-zca_mean)
if os.environ.has_key('prior') and bool(int(os.environ['prior'])) == True:
color.printBlue('Loading prior')
cifar_prior = sio.loadmat('data/cifar10_prior/cifar10_prior.mat')
train_mean_prior = cifar_prior['z_train']
test_mean_prior = cifar_prior['z_test']
else:
train_mean_prior = np.zeros((n_z,train_x.shape[1]))
test_mean_prior = np.zeros((n_z,test_x.shape[1]))
x = {'x': train_x.astype(np.float32), 'mean_prior': train_mean_prior.astype(np.float32)}
x_train = x
x_test = {'x': test_x.astype(np.float32), 'mean_prior': test_mean_prior.astype(np.float32)}
x_valid = x_test
L_valid = 1
dim_input = (size,size)
n_y = 10
n_x = x['x'].shape[0]
type_qz = 'gaussianmarg'
type_pz = 'gaussianmarg'
nonlinear = 'softplus'
type_px = 'gaussian'
n_train = 50000
n_test = 10000
n_batch = 5000
colorImg = True
bernoulli_x = False
byteToFloat = False
if os.environ.has_key('type_px'):
type_px = os.environ['type_px']
color.printBlue('Generative type: '+type_px)
nonlinear = 'softplus'
elif dataset == 'mnist_basic':
# MNIST
size = 28
data_dir = os.environ['ML_DATA_PATH']+'/mnist_variations/'+'mnist_'
tmp = sio.loadmat(data_dir+'train.mat')
#color.printRed(data_dir+'train.mat')
train_x = tmp['x_train'].T
train_y = tmp['t_train'].T.astype(np.int32)
# validation 2000
valid_x = train_x[:,10000:]
valid_y = train_y[10000:]
train_x = train_x[:,:10000]
train_y = train_y[:10000]
tmp = sio.loadmat(data_dir+'test.mat')
test_x = tmp['x_test'].T
test_y = tmp['t_test'].T.astype(np.int32)
print train_x.shape
print train_y.shape
print test_x.shape
print test_y.shape
f_enc, f_dec = pp.Identity()
train_mean_prior = np.zeros((n_z,train_x.shape[1]))
test_mean_prior = np.zeros((n_z,test_x.shape[1]))
valid_mean_prior = np.zeros((n_z,valid_x.shape[1]))
'''
x = {'x': train_x.astype(np.float32), 'y': labelToMat(train_y).astype(np.float32)}
x_train = x
x_valid = {'x': valid_x.astype(np.float32), 'y': labelToMat(valid_y).astype(np.float32)}
x_test = {'x': test_x.astype(np.float32), 'y': labelToMat(test_y).astype(np.float32)}
'''
x = {'x': train_x.astype(np.float32), 'mean_prior': train_mean_prior.astype(np.float32)}
x_train = x
x_valid = {'x': valid_x.astype(np.float32), 'mean_prior': valid_mean_prior.astype(np.float32)}
x_test = {'x': test_x.astype(np.float32), 'mean_prior': test_mean_prior.astype(np.float32)}
L_valid = 1
dim_input = (size,size)
n_x = size*size
n_y = 10
type_qz = 'gaussianmarg'
type_pz = 'gaussianmarg'
nonlinear = 'softplus'
type_px = 'bernoulli'
n_train = 10000
n_valid = 2000
n_test = 50000
n_batch = 200
colorImg = False
bernoulli_x = True
byteToFloat = False
weight_decay = float(n_batch)/n_train
elif dataset == 'rectangle':
# MNIST
size = 28
data_dir = os.environ['ML_DATA_PATH']+'/mnist_variations/'+'rectangles_'
tmp = sio.loadmat(data_dir+'train.mat')
color.printRed(data_dir+'train.mat')
train_x = tmp['x_train'].T
train_y = tmp['t_train'].T.astype(np.int32)
# validation 2000
valid_x = train_x[:,1000:]
valid_y = train_y[1000:]
train_x = train_x[:,:1000]
train_y = train_y[:1000]
tmp = sio.loadmat(data_dir+'test.mat')
test_x = tmp['x_test'].T
test_y = tmp['t_test'].T.astype(np.int32)
print train_x.shape
print train_y.shape
print test_x.shape
print test_y.shape
f_enc, f_dec = pp.Identity()
train_mean_prior = np.zeros((n_z,train_x.shape[1]))
test_mean_prior = np.zeros((n_z,test_x.shape[1]))
valid_mean_prior = np.zeros((n_z,valid_x.shape[1]))
'''
x = {'x': train_x.astype(np.float32), 'y': labelToMat(train_y).astype(np.float32)}
x_train = x
x_valid = {'x': valid_x.astype(np.float32), 'y': labelToMat(valid_y).astype(np.float32)}
x_test = {'x': test_x.astype(np.float32), 'y': labelToMat(test_y).astype(np.float32)}
'''
x = {'x': train_x.astype(np.float32), 'mean_prior': train_mean_prior.astype(np.float32)}
x_train = x
x_valid = {'x': valid_x.astype(np.float32), 'mean_prior': valid_mean_prior.astype(np.float32)}
x_test = {'x': test_x.astype(np.float32), 'mean_prior': test_mean_prior.astype(np.float32)}
L_valid = 1
dim_input = (size,size)
n_x = size*size
n_y = 2
type_qz = 'gaussianmarg'
type_pz = 'gaussianmarg'
nonlinear = 'softplus'
type_px = 'bernoulli'
n_train = 1000
n_valid = 200
n_test = 50000
n_batch = 500
colorImg = False
bernoulli_x = True
byteToFloat = False
weight_decay = float(n_batch)/n_train
#print '3', n_x
elif dataset == 'convex':
# MNIST
size = 28
data_dir = os.environ['ML_DATA_PATH']+'/mnist_variations/'+'convex_'
tmp = sio.loadmat(data_dir+'train.mat')
train_x = tmp['x_train'].T
train_y = tmp['t_train'].T.astype(np.int32)
# validation 2000
valid_x = train_x[:,6000:]
valid_y = train_y[6000:]
train_x = train_x[:,:6000]
train_y = train_y[:6000]
tmp = sio.loadmat(data_dir+'test.mat')
test_x = tmp['x_test'].T
test_y = tmp['t_test'].T.astype(np.int32)
print train_x.shape
print train_y.shape
print test_x.shape
print test_y.shape
f_enc, f_dec = pp.Identity()
train_mean_prior = np.zeros((n_z,train_x.shape[1]))
test_mean_prior = np.zeros((n_z,test_x.shape[1]))
valid_mean_prior = np.zeros((n_z,valid_x.shape[1]))
'''
x = {'x': train_x.astype(np.float32), 'y': labelToMat(train_y).astype(np.float32)}
x_train = x
x_valid = {'x': valid_x.astype(np.float32), 'y': labelToMat(valid_y).astype(np.float32)}
x_test = {'x': test_x.astype(np.float32), 'y': labelToMat(test_y).astype(np.float32)}
'''
x = {'x': train_x.astype(np.float32), 'mean_prior': train_mean_prior.astype(np.float32)}
x_train = x
x_valid = {'x': valid_x.astype(np.float32), 'mean_prior': valid_mean_prior.astype(np.float32)}
x_test = {'x': test_x.astype(np.float32), 'mean_prior': test_mean_prior.astype(np.float32)}
L_valid = 1
dim_input = (size,size)
n_x = size*size
n_y = 2
type_qz = 'gaussianmarg'
type_pz = 'gaussianmarg'
nonlinear = 'softplus'
type_px = 'bernoulli'
n_train = 6000
n_valid = 2000
n_test = 50000
n_batch = 120
colorImg = False
bernoulli_x = True
byteToFloat = False
weight_decay = float(n_batch)/n_train
elif dataset == 'rectangle_image':
# MNIST
size = 28
data_dir = os.environ['ML_DATA_PATH']+'/mnist_variations/'+'rectangles_im_'
tmp = sio.loadmat(data_dir+'train.mat')
train_x = tmp['x_train'].T
train_y = tmp['t_train'].T.astype(np.int32)
# validation 2000
valid_x = train_x[:,10000:]
valid_y = train_y[10000:]
train_x = train_x[:,:10000]
train_y = train_y[:10000]
tmp = sio.loadmat(data_dir+'test.mat')
test_x = tmp['x_test'].T
test_y = tmp['t_test'].T.astype(np.int32)
print train_x.shape
print train_y.shape
print test_x.shape
print test_y.shape
f_enc, f_dec = pp.Identity()
train_mean_prior = np.zeros((n_z,train_x.shape[1]))
test_mean_prior = np.zeros((n_z,test_x.shape[1]))
valid_mean_prior = np.zeros((n_z,valid_x.shape[1]))
'''
x = {'x': train_x.astype(np.float32), 'y': labelToMat(train_y).astype(np.float32)}
x_train = x
x_valid = {'x': valid_x.astype(np.float32), 'y': labelToMat(valid_y).astype(np.float32)}
x_test = {'x': test_x.astype(np.float32), 'y': labelToMat(test_y).astype(np.float32)}
'''
x = {'x': train_x.astype(np.float32), 'mean_prior': train_mean_prior.astype(np.float32)}
x_train = x
x_valid = {'x': valid_x.astype(np.float32), 'mean_prior': valid_mean_prior.astype(np.float32)}
x_test = {'x': test_x.astype(np.float32), 'mean_prior': test_mean_prior.astype(np.float32)}
L_valid = 1
dim_input = (size,size)
n_x = size*size
n_y = 2
type_qz = 'gaussianmarg'
type_pz = 'gaussianmarg'
nonlinear = 'softplus'
type_px = 'bernoulli'
n_train = 10000
n_valid = 2000
n_test = 50000
n_batch = 200
colorImg = False
bernoulli_x = True
byteToFloat = False
weight_decay = float(n_batch)/n_train
elif dataset == 'mnist_rot':
# MNIST
size = 28
data_dir = os.environ['ML_DATA_PATH']+'/mnist_variations/'+'mnist_all_rotation_normalized_float_'
tmp = sio.loadmat(data_dir+'train.mat')
train_x = tmp['x_train'].T
train_y = tmp['t_train'].T.astype(np.int32)
# validation 2000
valid_x = train_x[:,10000:]
valid_y = train_y[10000:]
train_x = train_x[:,:10000]
train_y = train_y[:10000]
tmp = sio.loadmat(data_dir+'test.mat')
test_x = tmp['x_test'].T
test_y = tmp['t_test'].T.astype(np.int32)
print train_x.shape
print train_y.shape
print test_x.shape
print test_y.shape
train_mean_prior = np.zeros((n_z,train_x.shape[1]))
test_mean_prior = np.zeros((n_z,test_x.shape[1]))
valid_mean_prior = np.zeros((n_z,valid_x.shape[1]))
f_enc, f_dec = pp.Identity()
x = {'x': train_x.astype(np.float32), 'mean_prior': train_mean_prior.astype(np.float32)}
x_train = x
x_valid = {'x': valid_x.astype(np.float32), 'mean_prior': valid_mean_prior.astype(np.float32)}
x_test = {'x': test_x.astype(np.float32), 'mean_prior': test_mean_prior.astype(np.float32)}
L_valid = 1
dim_input = (size,size)
n_x = size*size
n_y = 10
type_qz = 'gaussianmarg'
type_pz = 'gaussianmarg'
nonlinear = 'softplus'
type_px = 'bernoulli'
n_train = 10000
n_valid = 2000
n_test = 50000
n_batch = 200
colorImg = False
bernoulli_x = True
byteToFloat = False
weight_decay = float(n_batch)/n_train
elif dataset == 'mnist_back_rand':
# MNIST
size = 28
data_dir = os.environ['ML_DATA_PATH']+'/mnist_variations/'+'mnist_background_random_'
tmp = sio.loadmat(data_dir+'train.mat')
train_x = tmp['x_train'].T
train_y = tmp['t_train'].T.astype(np.int32)
# validation 2000
valid_x = train_x[:,10000:]
valid_y = train_y[10000:]
train_x = train_x[:,:10000]
train_y = train_y[:10000]
tmp = sio.loadmat(data_dir+'test.mat')
test_x = tmp['x_test'].T
test_y = tmp['t_test'].T.astype(np.int32)
print train_x.shape
print train_y.shape
print test_x.shape
print test_y.shape
train_mean_prior = np.zeros((n_z,train_x.shape[1]))
test_mean_prior = np.zeros((n_z,test_x.shape[1]))
valid_mean_prior = np.zeros((n_z,valid_x.shape[1]))
f_enc, f_dec = pp.Identity()
x = {'x': train_x.astype(np.float32), 'mean_prior': train_mean_prior.astype(np.float32)}
x_train = x
x_valid = {'x': valid_x.astype(np.float32), 'mean_prior': valid_mean_prior.astype(np.float32)}
x_test = {'x': test_x.astype(np.float32), 'mean_prior': test_mean_prior.astype(np.float32)}
L_valid = 1
dim_input = (size,size)
n_x = size*size
n_y = 10
type_qz = 'gaussianmarg'
type_pz = 'gaussianmarg'
nonlinear = 'softplus'
type_px = 'bernoulli'
n_train = 10000
n_valid = 2000
n_test = 50000
n_batch = 200
colorImg = False
bernoulli_x = True
byteToFloat = False
weight_decay = float(n_batch)/n_train
elif dataset == 'mnist_back_image':
# MNIST
size = 28
data_dir = os.environ['ML_DATA_PATH']+'/mnist_variations/'+'mnist_background_images_'
tmp = sio.loadmat(data_dir+'train.mat')
train_x = tmp['x_train'].T
train_y = tmp['t_train'].T.astype(np.int32)
# validation 2000
valid_x = train_x[:,10000:]
valid_y = train_y[10000:]
train_x = train_x[:,:10000]
train_y = train_y[:10000]
tmp = sio.loadmat(data_dir+'test.mat')
test_x = tmp['x_test'].T
test_y = tmp['t_test'].T.astype(np.int32)
print train_x.shape
print train_y.shape
print test_x.shape
print test_y.shape
train_mean_prior = np.zeros((n_z,train_x.shape[1]))
test_mean_prior = np.zeros((n_z,test_x.shape[1]))
valid_mean_prior = np.zeros((n_z,valid_x.shape[1]))
f_enc, f_dec = pp.Identity()
x = {'x': train_x.astype(np.float32), 'mean_prior': train_mean_prior.astype(np.float32)}
x_train = x
x_valid = {'x': valid_x.astype(np.float32), 'mean_prior': valid_mean_prior.astype(np.float32)}
x_test = {'x': test_x.astype(np.float32), 'mean_prior': test_mean_prior.astype(np.float32)}
L_valid = 1
dim_input = (size,size)
n_x = size*size
n_y = 10
type_qz = 'gaussianmarg'
type_pz = 'gaussianmarg'
nonlinear = 'softplus'
type_px = 'bernoulli'
n_train = 10000
n_valid = 2000
n_test = 50000
n_batch = 200
colorImg = False
bernoulli_x = True
byteToFloat = False
weight_decay = float(n_batch)/n_train
elif dataset == 'mnist_back_image_rot':
# MNIST
size = 28
data_dir = os.environ['ML_DATA_PATH']+'/mnist_variations/'+'mnist_all_background_images_rotation_normalized_'
tmp = sio.loadmat(data_dir+'train.mat')
train_x = tmp['x_train'].T
train_y = tmp['t_train'].T.astype(np.int32)
# validation 2000
valid_x = train_x[:,10000:]
valid_y = train_y[10000:]
train_x = train_x[:,:10000]
train_y = train_y[:10000]
tmp = sio.loadmat(data_dir+'test.mat')
test_x = tmp['x_test'].T
test_y = tmp['t_test'].T.astype(np.int32)
print train_x.shape
print train_y.shape
print test_x.shape
print test_y.shape
train_mean_prior = np.zeros((n_z,train_x.shape[1]))
test_mean_prior = np.zeros((n_z,test_x.shape[1]))
valid_mean_prior = np.zeros((n_z,valid_x.shape[1]))
f_enc, f_dec = pp.Identity()
x = {'x': train_x.astype(np.float32), 'mean_prior': train_mean_prior.astype(np.float32)}
x_train = x
x_valid = {'x': valid_x.astype(np.float32), 'mean_prior': valid_mean_prior.astype(np.float32)}
x_test = {'x': test_x.astype(np.float32), 'mean_prior': test_mean_prior.astype(np.float32)}
L_valid = 1
dim_input = (size,size)
n_x = size*size
n_y = 10
type_qz = 'gaussianmarg'
type_pz = 'gaussianmarg'
nonlinear = 'softplus'
type_px = 'bernoulli'
n_train = 10000
n_valid = 2000
n_test = 50000
n_batch = 200
colorImg = False
bernoulli_x = True
byteToFloat = False
weight_decay = float(n_batch)/n_train
elif dataset == 'mnist_binarized':
#import anglepy.data.mnist_binarized as mnist_binarized
# MNIST
import anglepy.data.mnist as mnist
size = 28
data_dir = '/home/lichongxuan/regbayes2/data/mat_data/'+'binarized_mnist_'
tmp = sio.loadmat(data_dir+'train.mat')
train_x = tmp['x_train'].T
#train_y = tmp['t_train'].T.astype(np.int32)
tmp = sio.loadmat(data_dir+'test.mat')
test_x = tmp['x_test'].T
tmp = sio.loadmat(data_dir+'valid.mat')
#print tmp.keys()
valid_x = tmp['x_valid'].T
#test_y = tmp['t_test'].T.astype(np.int32)
f_enc, f_dec = pp.Identity()
train_mean_prior = np.zeros((n_z,train_x.shape[1]))
test_mean_prior = np.zeros((n_z,test_x.shape[1]))
valid_mean_prior = np.zeros((n_z,valid_x.shape[1]))
train_x = np.hstack((train_x, valid_x)).astype(np.float32)
train_mean_prior = np.hstack((train_mean_prior,valid_mean_prior)).astype(np.float32)
print train_mean_prior.shape
print train_x.shape
x = {'x': train_x.astype(np.float32), 'mean_prior':train_mean_prior.astype(np.float32)}
x_train = x
x_valid = {'x': test_x.astype(np.float32),'mean_prior':test_mean_prior.astype(np.float32)}
x_test = x_valid
L_valid = 1
dim_input = (28,28)
n_x = 28*28
n_y = 10
type_qz = 'gaussianmarg'
type_pz = 'gaussianmarg'
nonlinear = 'softplus'
type_px = 'bernoulli'
n_train = 60000
n_valid = 10000
n_batch = 1000
colorImg = False
bernoulli_x = False
byteToFloat = False
weight_decay = float(n_batch)/n_train
elif dataset == 'mnist_binarized_own':
#import anglepy.data.mnist_binarized as mnist_binarized
# MNIST
import anglepy.data.mnist as mnist
size = 28
data_dir = 'data/mnist_binarized_own/'+'binarized_mnist_'
tmp = sio.loadmat(data_dir+'train.mat')
train_x = tmp['train_x'].T
#train_y = tmp['t_train'].T.astype(np.int32)
tmp = sio.loadmat(data_dir+'test.mat')
test_x = tmp['test_x'].T
tmp = sio.loadmat(data_dir+'valid.mat')
#print tmp.keys()
valid_x = tmp['valid_x'].T
#test_y = tmp['t_test'].T.astype(np.int32)
f_enc, f_dec = pp.Identity()
train_mean_prior = np.zeros((n_z,train_x.shape[1]))
test_mean_prior = np.zeros((n_z,test_x.shape[1]))
valid_mean_prior = np.zeros((n_z,valid_x.shape[1]))
train_x = np.hstack((train_x, valid_x)).astype(np.float32)
train_mean_prior = np.hstack((train_mean_prior,valid_mean_prior)).astype(np.float32)
print train_mean_prior.shape
print train_x.shape
x = {'x': train_x.astype(np.float32), 'mean_prior':train_mean_prior.astype(np.float32)}
x_train = x
x_valid = {'x': test_x.astype(np.float32),'mean_prior':test_mean_prior.astype(np.float32)}
x_test = x_valid
L_valid = 1
dim_input = (28,28)
n_x = 28*28
n_y = 10
type_qz = 'gaussianmarg'
type_pz = 'gaussianmarg'
nonlinear = 'softplus'
type_px = 'bernoulli'
n_train = 60000
n_valid = 10000
n_batch = 1000
colorImg = False
bernoulli_x = False
byteToFloat = False
weight_decay = float(n_batch)/n_train
elif dataset == 'freyface':
# Frey's face
import anglepy.data.freyface as freyface
n_train = 1600
train_x = freyface.load_numpy()
np.random.shuffle(train_x)
x = {'x': train_x.T[:,0:n_train]}
x_valid = {'x': train_x.T[:,n_train:]}
L_valid = 1
dim_input = (28,20)
n_x = 20*28
type_qz = 'gaussianmarg'
type_pz = 'gaussianmarg'
type_px = 'bounded01'
nonlinear = 'tanh' #tanh works better with freyface #'softplus'
n_batch = 100
colorImg = False
bernoulli_x = False
byteToFloat = False
weight_decay = float(n_batch)/n_train
elif dataset == 'freyface_pca':
# Frey's face
import anglepy.data.freyface as freyface
n_train = 1600
train_x = freyface.load_numpy().T
np.random.shuffle(train_x.T)
f_enc, f_dec, _ = pp.PCA(train_x, 0.99)
train_x = f_enc(train_x)
x = {'x': train_x[:,0:n_train].astype(np.float32)}
x_valid = {'x': train_x[:,n_train:].astype(np.float32)}
L_valid = 1
dim_input = (28,20)
n_x = train_x.shape[0]
type_qz = 'gaussianmarg'
type_pz = 'gaussianmarg'
type_px = 'gaussian'
nonlinear = 'softplus'
n_batch = 100
colorImg = False
bernoulli_x = False
byteToFloat = False
elif dataset == 'freyface_bernoulli':
# Frey's face
import anglepy.data.freyface as freyface
n_train = 1600
train_x = freyface.load_numpy().T
np.random.shuffle(train_x.T)
x = {'x': train_x[:,0:n_train].astype(np.float32)}
x_valid = {'x': train_x[:,n_train:].astype(np.float32)}
L_valid = 1
dim_input = (28,20)
n_x = train_x.shape[0]
type_pz = 'gaussianmarg'
type_px = 'bernoulli'
nonlinear = 'softplus'
n_batch = 100
colorImg = False
bernoulli_x = False
byteToFloat = False
elif dataset == 'norb_48_24300_pca':
size = 48
train_x, train_y, test_x, test_y = np.load('data/norb/norb_48_24300.npy')
_x = {'x': train_x, 'y': train_y}
#ndict.shuffleCols(_x)
#train_x = _x['x']
#train_y = _x['y']
#print _x['x'][:,:10000].shape
# Do PCA
print 'pca'
f_enc, f_dec, pca_params = pp.PCA(_x['x'][:,:10000], cutoff=500, toFloat=False)
ndict.savez(pca_params, logdir+'pca_params')
print 'done'
train_mean_prior = np.zeros((n_z,train_x.shape[1]))
test_mean_prior = np.zeros((n_z,test_x.shape[1]))
x = {'x': f_enc(train_x).astype(np.float32), 'mean_prior' : train_mean_prior.astype(np.float32)}
x_valid = {'x': f_enc(test_x).astype(np.float32), 'mean_prior' : test_mean_prior.astype(np.float32)}
x_test = {'x': f_enc(test_x).astype(np.float32), 'mean_prior' : test_mean_prior.astype(np.float32)}
x_train = x
print x['x'].shape
print x['mean_prior'].shape
L_valid = 1
n_y = 5
n_x = x['x'].shape[0]
dim_input = (size,size)
type_qz = 'gaussianmarg'
type_pz = 'gaussianmarg'
type_px = 'gaussian'
nonlinear = 'softplus'
        n_batch = 900 # 24300/900 = 27
colorImg = False
#binarize = False
bernoulli_x = False
byteToFloat = False
weight_decay= float(n_batch)/train_x.shape[1]
elif dataset == 'norb':
# small NORB dataset
import anglepy.data.norb as norb
size = 48
train_x, train_y, test_x, test_y = norb.load_resized(size, binarize_y=True)
x = {'x': train_x.astype(np.float32)}
x_valid = {'x': test_x.astype(np.float32)}
L_valid = 1
n_x = train_x.shape[0]
dim_input = (size,size)
type_qz = 'gaussianmarg'
type_pz = 'gaussianmarg'
type_px = 'gaussian'
nonlinear = 'softplus'
        n_batch = 900 # 24300/900 = 27
colorImg = False
#binarize = False
byteToFloat = False
bernoulli_x = False
weight_decay= float(n_batch)/train_x.shape[1]
elif dataset == 'norb_pca':
# small NORB dataset
import anglepy.data.norb as norb
size = 48
train_x, train_y, test_x, test_y = norb.load_resized(size, binarize_y=True)
f_enc, f_dec, _ = pp.PCA(train_x, 0.999)
#f_enc, f_dec, _ = pp.normalize_random(train_x)
train_x = f_enc(train_x)
test_x = f_enc(test_x)
x = {'x': train_x.astype(np.float32)}
x_valid = {'x': test_x.astype(np.float32)}
L_valid = 1
n_x = train_x.shape[0]
dim_input = (size,size)
type_qz = 'gaussianmarg'
type_pz = 'gaussianmarg'
type_px = 'gaussian'
nonlinear = 'softplus'
        n_batch = 900 # 24300/900 = 27
colorImg = False
#binarize = False
bernoulli_x = False
byteToFloat = False
weight_decay= float(n_batch)/train_x.shape[1]
elif dataset == 'norb_normalized':
# small NORB dataset
import anglepy.data.norb as norb
size = 48
train_x, train_y, test_x, test_y = norb.load_resized(size, binarize_y=True)
#f_enc, f_dec, _ = pp.PCA(train_x, 0.99)
#f_enc, f_dec, _ = pp.normalize_random(train_x)
f_enc, f_dec, _ = pp.normalize(train_x)
train_x = f_enc(train_x)
test_x = f_enc(test_x)
x = {'x': train_x.astype(np.float32)}
x_valid = {'x': test_x.astype(np.float32)}
L_valid = 1
n_x = train_x.shape[0]
dim_input = (size,size)
type_qz = 'gaussianmarg'
type_pz = 'gaussianmarg'
type_px = 'gaussian'
nonlinear = 'softplus'
        n_batch = 900 # 24300/900 = 27
colorImg = False
#binarize = False
bernoulli_x = False
byteToFloat = False
weight_decay= float(n_batch)/train_x.shape[1]
elif dataset == 'svhn':
# SVHN dataset
#import anglepy.data.svhn as svhn
size = 32
train_x, train_y, test_x, test_y = np.load('data/svhn/svhn.npy')
#extra_x, extra_y = svhn.load_numpy_extra(False, binarize_y=True)
#x = {'x': np.hstack((train_x, extra_x)), 'y':np.hstack((train_y, extra_y))}
#ndict.shuffleCols(x)
x = {'x' : train_x, 'y': train_y}
print 'Performing PCA, can take a few minutes... '
cutoff = 300
if os.environ.has_key('cutoff'):
cutoff = int(os.environ['cutoff'])
color.printBlue('cutoff: '+str(cutoff))
f_enc, f_dec, pca_params = pp.PCA(x['x'][:,:10000], cutoff=cutoff, toFloat=True)
ndict.savez(pca_params, logdir+'pca_params')
print 'Done.'
n_y = 10
if os.environ.has_key('prior') and bool(int(os.environ['prior'])) == True:
color.printBlue('Loading prior')
train_mean_prior, train_y1, test_mean_prior, test_y1 = np.load('data/svhn/svhn_prior.npy')
print np.sum((train_y1 == train_y).astype(np.int32))
print np.sum((test_y1 == test_y).astype(np.int32))
else:
train_mean_prior = np.zeros((n_z,train_x.shape[1]))
test_mean_prior = np.zeros((n_z,test_x.shape[1]))
x = {'x': f_enc(x['x']).astype(np.float32), 'mean_prior':train_mean_prior.astype(np.float32)}
x_train = x
x_test = {'x': f_enc(test_x).astype(np.float32), 'mean_prior':test_mean_prior.astype(np.float32)}
x_valid = x_test
print x_train['x'].shape
print x_test['x'].shape
print train_y.shape
print test_y.shape
print x_train['mean_prior'].shape
print x_test['mean_prior'].shape
L_valid = 1
n_x = x['x'].shape[0]
dim_input = (size,size)
n_batch = 5000
n_train = 604388
n_valid = 26032
n_test = 26032
colorImg = True
bernoulli_x = False
byteToFloat = False
type_qz = 'gaussianmarg'
type_pz = 'gaussianmarg'
type_px = 'gaussian'
nonlinear = 'softplus'
else:
print 'invalid data set'
exit()
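    # By this point every dataset branch is expected to have set up the same
    # contract: x_train/x_valid/x_test dicts (with 'x' and, in most branches,
    # 'mean_prior'), plus n_x, dim_input, n_batch, type_qz/type_pz/type_px,
    # nonlinear, and the colorImg/bernoulli_x/byteToFloat flags used below.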
#print '2', n_x
# Construct model
from anglepy.models import GPUVAE_Z_X
learning_rate1 = 3e-4
if os.environ.has_key('stepsize'):
learning_rate1 = float(os.environ['stepsize'])
color.printBlue(str(learning_rate1))
if os.environ.has_key('preoption'):
pre = int(os.environ['preoption'])
if pre == 1:
updates = get_adam_optimizer(learning_rate=3e-4, decay1=0.9, decay2=0.999, weight_decay=0)
        elif pre == 2:
            updates = get_adam_optimizer(learning_rate=3e-4, decay1=0.9, decay2=0.999, weight_decay=weight_decay)
        else:
            raise Exception('preoption unknown')
with open(logdir+'hook.txt', 'a') as f:
print >>f, 'preoption ' + str(pre)
else:
updates = get_adam_optimizer(learning_rate=learning_rate1, weight_decay=weight_decay)
#print '1', n_x
model = GPUVAE_Z_X(updates, n_x, n_hidden, n_z, n_hidden[::-1], nonlinear, nonlinear, type_px, type_qz=type_qz, type_pz=type_pz, prior_sd=100, init_sd=1e-3)
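    # GPUVAE_Z_X is constructed with n_hidden for the recognition model and
    # the reversed tuple n_hidden[::-1] for the generative model, so the
    # decoder mirrors the encoder; prior_sd and init_sd presumably set the
    # prior scale and initial weight scale (not verified against anglepy).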
if os.environ.has_key('pretrain') and bool(int(os.environ['pretrain'])) == True:
#dir = '/Users/dpkingma/results/learn_z_x_mnist_binarized_50-(500, 500)_mog_1412689061/'
#dir = '/Users/dpkingma/results/learn_z_x_svhn_bernoulli_300-(1000, 1000)_l1l2_sharing_and_1000HU_1412676966/'
#dir = '/Users/dpkingma/results/learn_z_x_svhn_bernoulli_300-(1000, 1000)_l1l2_sharing_and_1000HU_1412695481/'
#dir = '/Users/dpkingma/results/learn_z_x_mnist_binarized_50-(500, 500)_mog_1412695455/'
#dir = '/Users/dpkingma/results/gpulearn_z_x_svhn_pca_300-(500, 500)__1413904756/'
if len(n_hidden) == 1:
color.printBlue('pre-training-1-layer')
layer_str = '-500'
elif len(n_hidden) == 2:
color.printBlue('pre-training-2-layers')
layer_str = '-(500, 500)'
else:
raise Exception()
pre_str = 'models/gpulearn_z_x_'
if dataset == 'mnist':
#dir = pre_str + 'mnist_'+str(n_z)+layer_str+'_longrun/'
dir = 'models/mnist_z_x_50-500-500_longrun/'
elif dataset == 'mnist_rot':
dir = pre_str + 'mnist_rot_'+str(n_z)+layer_str+'_longrun/'
elif dataset == 'mnist_back_rand':
dir = pre_str + 'mnist_back_rand_'+str(n_z)+layer_str+'_longrun/'
elif dataset == 'mnist_back_image':
dir = pre_str + 'mnist_back_image_'+str(n_z)+layer_str+'_longrun/'
elif dataset == 'mnist_back_image_rot':
dir = pre_str + 'mnist_back_image_rot_'+str(n_z)+layer_str+'_longrun/'
elif dataset == 'rectangle':
dir = pre_str + 'rectangle_'+str(n_z)+layer_str+'_longrun/'
elif dataset == 'rectangle_image':
dir = pre_str + 'rectangle_image_'+str(n_z)+layer_str+'_longrun/'
elif dataset == 'convex':
dir = pre_str + 'convex_'+str(n_z)+layer_str+'_longrun/'
elif dataset == 'mnist_basic':
dir = pre_str + 'mnist_basic_'+str(n_z)+layer_str+'_longrun/'
if dataset == 'svhn':
if (os.environ.has_key('prior') and bool(int(os.environ['prior'])) == True):
print 'prior-------------------'
pre_dir = 'results/gpulearn_z_x_svhn_'+str(n_z)+'-500-500_prior_'+str(cutoff)+'_longrun/'
else:
pre_dir = 'results/gpulearn_z_x_svhn_'+str(n_z)+'-500-500_'+str(cutoff)+'_longrun/'
color.printBlue(pre_dir)
w = ndict.loadz(pre_dir+'w_best.ndict.tar.gz')
v = ndict.loadz(pre_dir+'v_best.ndict.tar.gz')
elif n_z == 50:
print 'n_z = 50', dir
w = ndict.loadz(dir+'w_best.ndict.tar.gz')
v = ndict.loadz(dir+'v_best.ndict.tar.gz')
else:
print 'n_z != 50'
w = ndict.loadz(pre_dir+'w_best.ndict.tar.gz')
v = ndict.loadz(pre_dir+'v_best.ndict.tar.gz')
ndict.set_value2(model.w, w)
ndict.set_value2(model.v, v)
# Some statistics for optimization
ll_valid_stats = [-1e99, 0]
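    # ll_valid_stats[0] tracks the best validation log-likelihood seen so far;
    # ll_valid_stats[1] counts consecutive evaluations without improvement.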
# Progress hook
def hook(epoch, t, ll):
if epoch%10 != 0: return
n_batch_n = n_batch
if n_batch_n > n_valid:
n_batch_n = n_valid
ll_valid, _ = model.est_loglik(x_valid, n_samples=L_valid, n_batch=n_batch_n, byteToFloat=byteToFloat)
ll_test = ll_valid
#if not dataset == 'mnist_binarized':
if not dataset == 'svhn':
ll_test, _ = model.est_loglik(x_test, n_samples=L_valid, n_batch=n_batch, byteToFloat=byteToFloat)
# Log
ndict.savez(ndict.get_value(model.v), logdir+'v')
ndict.savez(ndict.get_value(model.w), logdir+'w')
def infer(data, n_batch=1000):
#print '--', n_batch
size = data['x'].shape[1]
res = np.zeros((sum(n_hidden), size))
res1 = np.zeros((n_z,size))
res2 = np.zeros((n_hidden[-1],size))
res3 = np.zeros((n_z,size))
for i in range(0, size, n_batch):
idx_to = min(size, i+n_batch)
x_batch = ndict.getCols(data, i, idx_to)
# may have bugs
nn_batch = idx_to - i
_x, _z, _z_confab = model.gen_xz(x_batch, {}, nn_batch)
x_samples = _z_confab['x']
for (hi, hidden) in enumerate(_z_confab['hidden']):
res[sum(n_hidden[:hi]):sum(n_hidden[:hi+1]),i:i+nn_batch] = hidden
res1[:,i:i+nn_batch] = _z_confab['mean']
res2[:,i:i+nn_batch] = _z_confab['hidden'][-1]
res3[:,i:i+nn_batch] = _z_confab['logvar']
#print '--'
return res, res1, res2, res3
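        # infer returns, with one column per example: the stacked activations
        # of all hidden layers (res), the posterior means of q(z|x) (res1),
        # the top hidden layer (res2), and the posterior log-variances (res3).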
#print '..', n_batch
#if not dataset == 'mnist_binarized':
if not dataset == 'svhn':
z_test, z_test1, z_test2, vv_test = infer(x_test)
z_train, z_train1, z_train2, vv_train = infer(x_train)
if ll_valid > ll_valid_stats[0]:
ll_valid_stats[0] = ll_valid
ll_valid_stats[1] = 0
ndict.savez(ndict.get_value(model.v), logdir+'v_best')
ndict.savez(ndict.get_value(model.w), logdir+'w_best')
#if not dataset == 'mnist_binarized':
if dataset == 'svhn':
pass
#np.save(logdir+'full_latent', ('z_test': z_test, 'train_y':train_y, 'test_y':test_y, 'z_train': z_train))
#np.save(logdir+'last_latent', ('z_test': z_test2, 'train_y':train_y, 'test_y':test_y, 'z_train': z_train2))
else:
sio.savemat(logdir+'full_latent.mat', {'z_test': z_test, 'train_y':train_y, 'test_y':test_y, 'z_train': z_train})
sio.savemat(logdir+'mean_latent.mat', {'z_test': z_test1, 'train_y':train_y, 'test_y':test_y, 'z_train': z_train1})
sio.savemat(logdir+'last_latent.mat', {'z_test': z_test2, 'train_y':train_y, 'test_y':test_y, 'z_train': z_train2})
else:
ll_valid_stats[1] += 1
            # Stop when the validation log-likelihood has not improved for
            # 1000 consecutive evaluations (the hook only runs every 10 epochs)
if ll_valid_stats[1] > 1000:
print "Finished"
with open(logdir+'hook.txt', 'a') as f:
print >>f, "Finished"
exit()
print epoch, t, ll, ll_valid, ll_test, ll_valid_stats
with open(logdir+'hook.txt', 'a') as f:
print >>f, epoch, t, ll, ll_valid, ll_test, ll_valid_stats
'''
if dataset != 'svhn':
l_t, px_t, pz_t, qz_t = model.test(x_train, n_samples=1, n_batch=n_batch, byteToFloat=byteToFloat)
print 'Elogpx', px_t, 'Elogpz', pz_t, '-Elogqz', qz_t
#sigma_square = float(os.environ['sigma_square'])
print 'var', np.mean(np.exp(vv_train)), 'q', np.mean(np.abs(z_train1)), 'p', np.mean(np.abs(train_mean_prior)), 'd', np.mean(np.abs(z_train1-train_mean_prior))
with open(logdir+'hook.txt', 'a') as f:
print >>f, 'Elogpx', px_t, 'Elogpz', pz_t, '-Elogqz', qz_t
print >>f, 'var', np.mean(np.exp(vv_train)), 'q', np.mean(np.abs(z_train1)), 'p', np.mean(np.abs(train_mean_prior)), 'd', np.mean(np.abs(z_train1-train_mean_prior))
'''
# Graphics
if gfx and epoch%gfx_freq == 0:
#tail = '.png'
tail = '-'+str(epoch)+'.png'
v = {i: model.v[i].get_value() for i in model.v}
w = {i: model.w[i].get_value() for i in model.w}
if 'pca' not in dataset and 'random' not in dataset and 'normalized' not in dataset and 'zca' not in dataset:
if 'w0' in v:
image = paramgraphics.mat_to_img(f_dec(v['w0'][:].T), dim_input, True, colorImg=colorImg)
image.save(logdir+'q_w0'+tail, 'PNG')
image = paramgraphics.mat_to_img(f_dec(w['out_w'][:]), dim_input, True, colorImg=colorImg)
image.save(logdir+'out_w'+tail, 'PNG')
if 'out_unif' in w:
image = paramgraphics.mat_to_img(f_dec(w['out_unif'].reshape((-1,1))), dim_input, True, colorImg=colorImg)
image.save(logdir+'out_unif'+tail, 'PNG')
if n_z == 2:
n_width = 10
import scipy.stats
z = {'z':np.zeros((2,n_width**2))}
for i in range(0,n_width):
for j in range(0,n_width):
z['z'][0,n_width*i+j] = scipy.stats.norm.ppf(float(i)/n_width+0.5/n_width)
z['z'][1,n_width*i+j] = scipy.stats.norm.ppf(float(j)/n_width+0.5/n_width)
x, _, _z = model.gen_xz({}, z, n_width**2)
if dataset == 'mnist':
x = 1 - _z['x']
image = paramgraphics.mat_to_img(f_dec(_z['x']), dim_input)
image.save(logdir+'2dmanifold'+tail, 'PNG')
else:
if 'norb' in dataset or dataset=='svhn':
nn_batch_nn = 64
else:
nn_batch_nn = 144
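# Sample from the learned conditional prior only when the 'prior' env var
# is set (and residual training is off); otherwise fall back to plain
# gen_xz sampling in the else-branch below.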
if not('train_residual' in os.environ and bool(int(os.environ['train_residual']))) and ('prior' in os.environ and bool(int(os.environ['prior']))):
mp_in = np.random.randint(0,x_train['mean_prior'].shape[1],nn_batch_nn)
m_p = x_train['mean_prior'][:,mp_in]
s_s = 1
if 'sigma_square' in os.environ:
s_s = float(os.environ['sigma_square'])
x_samples = model.gen_xz_prior({}, {}, m_p, s_s, n_batch=nn_batch_nn)
x_samples = x_samples['x']
m_p1 = (np.ones((n_z, nn_batch_nn)).T * np.mean(x_train['mean_prior'], axis = 1)).T
x_samples1 = model.gen_xz_prior({}, {}, m_p1.astype(np.float32), s_s, n_batch=nn_batch_nn)
image = paramgraphics.mat_to_img(f_dec(x_samples1['x']), dim_input, colorImg=colorImg)
image.save(logdir+'mean_samples-prior'+tail, 'PNG')
x_samples11 = model.gen_xz_prior11({}, {}, m_p, s_s, n_batch=nn_batch_nn)
image = paramgraphics.mat_to_img(f_dec(x_samples11['x']), dim_input, colorImg=colorImg)
image.save(logdir+'prior-image'+tail, 'PNG')
else:
_x, _, _z_confab = model.gen_xz({}, {}, n_batch=nn_batch_nn)
x_samples = _z_confab['x']
image = paramgraphics.mat_to_img(f_dec(x_samples), dim_input, colorImg=colorImg)
image.save(logdir+'samples-prior'+tail, 'PNG')
#x_samples = _x['x']
#image = paramgraphics.mat_to_img(x_samples, dim_input, colorImg=colorImg)
#image.save(logdir+'samples2'+tail, 'PNG')
else:
# Model with preprocessing
if 'w0' in v:
tmp = f_dec(v['w0'][:].T)
#print dim_input
#print tmp.shape
if 'zca' in dataset or dataset=='svhn':
tmp = zca_dec(zca_mean, zca_winv, tmp)
image = paramgraphics.mat_to_img(tmp, dim_input, True, colorImg=colorImg)
image.save(logdir+'q_w0'+tail, 'PNG')
tmp = f_dec(w['out_w'][:])
if 'zca' in dataset:
tmp = zca_dec(zca_mean, zca_winv, tmp)
image = paramgraphics.mat_to_img(tmp, dim_input, True, colorImg=colorImg)
image.save(logdir+'out_w'+tail, 'PNG')
if dataset == 'svhn':
nn_batch_nn = 64
else:
nn_batch_nn = 144
if not('train_residual' in os.environ and bool(int(os.environ['train_residual']))) and ('prior' in os.environ and bool(int(os.environ['prior']))):
mp_in = np.random.randint(0,x_train['mean_prior'].shape[1],nn_batch_nn)
m_p = x_train['mean_prior'][:,mp_in]
s_s = 1
if 'sigma_square' in os.environ:
s_s = float(os.environ['sigma_square'])
x_samples = model.gen_xz_prior({}, {}, m_p, s_s, n_batch=nn_batch_nn)
x_samples = zca_dec(zca_mean, zca_winv,x_samples['x'])
x_samples = np.minimum(np.maximum(x_samples, 0), 1)
x_samples11 = model.gen_xz_prior11({}, {}, m_p, s_s, n_batch=nn_batch_nn)
x_samples11 = zca_dec(zca_mean,zca_winv,x_samples11['x'])
x_samples11 = np.minimum(np.maximum(x_samples11, 0), 1)
image = paramgraphics.mat_to_img(x_samples11, dim_input, colorImg=colorImg)
image.save(logdir+'prior-image'+tail, 'PNG')
else:
_x, _z, _z_confab = model.gen_xz({}, {}, n_batch=nn_batch_nn)
x_samples = f_dec(_z_confab['x'])
x_samples = np.minimum(np.maximum(x_samples, 0), 1)
image = paramgraphics.mat_to_img(x_samples, dim_input, colorImg=colorImg)
image.save(logdir+'samples'+tail, 'PNG')
'''
def infer(data, n_batch=1000):
#print '--', n_batch
size = data['x'].shape[1]
res = np.zeros((sum(n_hidden), size))
res1 = np.zeros((n_z,size))
res2 = np.zeros((n_hidden[-1],size))
res3 = np.zeros((n_z,size))
for i in range(0, size, n_batch):
idx_to = min(size, i+n_batch)
x_batch = ndict.getCols(data, i, idx_to)
# may have bugs
nn_batch = idx_to - i
_x, _z, _z_confab = model.gen_xz(x_batch, {}, nn_batch)
x_samples = _z_confab['x']
for (hi, hidden) in enumerate(_z_confab['hidden']):
res[sum(n_hidden[:hi]):sum(n_hidden[:hi+1]),i:i+nn_batch] = hidden
res1[:,i:i+nn_batch] = _z_confab['mean']
res2[:,i:i+nn_batch] = _z_confab['hidden'][-1]
res3[:,i:i+nn_batch] = _z_confab['logvar']
#
return res, res1, res2, res3
#print n_batch
#if not dataset == 'mnist_binarized':
z_test, z_test1, z_test2, vv_test = infer(x_test)
z_train, z_train1, z_train2, vv_train = infer(x_train)
l_t, px_t, pz_t, qz_t = model.test(x_train, n_samples=1, n_batch=n_batch, byteToFloat=byteToFloat)
print 'Elogpx', px_t, 'Elogpz', pz_t, '-Elogqz', qz_t
#sigma_square = float(os.environ['sigma_square'])
print 'var', np.mean(np.exp(vv_train)), 'q', np.mean(np.abs(z_train1)), 'p', np.mean(np.abs(train_mean_prior)), 'd', np.mean(np.abs(z_train1-train_mean_prior))
with open(logdir+'hook.txt', 'a') as f:
print >>f, 'Elogpx', px_t, 'Elogpz', pz_t, '-Elogqz', qz_t
print >>f, 'var', np.mean(np.exp(vv_train)), 'q', np.mean(np.abs(z_train1)), 'p', np.mean(np.abs(train_mean_prior)), 'd', np.mean(np.abs(z_train1-train_mean_prior))
#if not dataset == 'mnist_binarized':
sio.savemat(logdir+'full_latent.mat', {'z_test': z_test, 'train_y':train_y, 'test_y':test_y, 'z_train': z_train})
sio.savemat(logdir+'mean_latent.mat', {'z_test': z_test1, 'train_y':train_y, 'test_y':test_y, 'z_train': z_train1})
sio.savemat(logdir+'last_latent.mat', {'z_test': z_test2, 'train_y':train_y, 'test_y':test_y, 'z_train': z_train2})
'''
# Optimize
#SFO
dostep = epoch_vae_adam(model, x, n_batch=n_batch, bernoulli_x=bernoulli_x, byteToFloat=byteToFloat)
loop_va(dostep, hook)
pass
# Training loop for variational autoencoder
def loop_va(doEpoch, hook, n_epochs=1201):
t0 = time.time()
for t in xrange(1, n_epochs):
L = doEpoch()
hook(t, time.time() - t0, L)
print 'Optimization loop finished'
# Learning step for variational auto-encoder
def epoch_vae_adam(model, x, n_batch=100, convertImgs=False, bernoulli_x=False, byteToFloat=False):
print 'Variational Auto-Encoder', n_batch
def doEpoch():
from collections import OrderedDict
n_tot = x.itervalues().next().shape[1]
idx_from = 0
L = 0
while idx_from < n_tot:
idx_to = min(n_tot, idx_from+n_batch)
x_minibatch = ndict.getCols(x, idx_from, idx_to)
idx_from += n_batch
if byteToFloat: x_minibatch['x'] = x_minibatch['x'].astype(np.float32)/256.
if bernoulli_x: x_minibatch['x'] = np.random.binomial(n=1, p=x_minibatch['x']).astype(np.float32)
# Do gradient ascent step
L += model.evalAndUpdate(x_minibatch, {}).sum()
#model.profmode.print_summary()
L /= n_tot
return L
return doEpoch
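# Illustrative composition of the two helpers above, mirroring the call
# made from the hook-setup code earlier in this file:
#
#   dostep = epoch_vae_adam(model, x, n_batch=n_batch,
#                           bernoulli_x=bernoulli_x, byteToFloat=byteToFloat)
#   loop_va(dostep, hook)  # runs the epochs, calling hook(t, elapsed, L)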
def get_adam_optimizer(learning_rate=0.001, decay1=0.1, decay2=0.001, weight_decay=0.0):
print 'AdaM', learning_rate, decay1, decay2, weight_decay
def shared32(x, name=None, borrow=False):
return theano.shared(np.asarray(x, dtype='float32'), name=name, borrow=borrow)
def get_optimizer(w, g):
updates = OrderedDict()
it = shared32(0.)
updates[it] = it + 1.
fix1 = 1.-(1.-decay1)**(it+1.) # To make estimates unbiased
fix2 = 1.-(1.-decay2)**(it+1.) # To make estimates unbiased
lr_t = learning_rate * T.sqrt(fix2) / fix1
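# With beta1 = 1 - decay1 and beta2 = 1 - decay2, fix1 and fix2 are
# Adam's bias corrections (1 - beta1^t) and (1 - beta2^t); folding
# sqrt(fix2)/fix1 into the step size is equivalent to bias-correcting
# the moment estimates themselves.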
for i in w:
gi = g[i]
if weight_decay > 0:
gi -= weight_decay * w[i] #T.tanh(w[i])
# mom1 := E[g]_{t-1} (first moment), mom2 := E[g^2]_{t-1} (second moment)
mom1 = shared32(w[i].get_value() * 0.)
mom2 = shared32(w[i].get_value() * 0.)
# Update moments
mom1_new = mom1 + decay1 * (gi - mom1)
mom2_new = mom2 + decay2 * (T.sqr(gi) - mom2)
# Compute the effective gradient and effective learning rate
effgrad = mom1_new / (T.sqrt(mom2_new) + 1e-10)
effstep_new = lr_t * effgrad
# Do update
w_new = w[i] + effstep_new
# Apply update
updates[w[i]] = w_new
updates[mom1] = mom1_new
updates[mom2] = mom2_new
return updates
return get_optimizer | zhenxuan00/mmdgm | mlp-mmdgm/gpulearn_z_x.py | Python | mit | 50,776 |
#!/usr/bin/env python3
import praw
import smtplib
import requests
import parsel
import re
import io
import json
import os
from email.header import Header
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from argparse import ArgumentParser
from premailer import Premailer
HEADERS = requests.utils.default_headers()
HEADERS.update({'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:52.0) Gecko/20100101 Firefox/52.0'})
SCRIPT_PATH = os.path.dirname(os.path.realpath(__file__))
REDDIT_CSS = os.path.join(SCRIPT_PATH, 'css', 'reddit.css')
def _concat_css(input_name, output):
with open(input_name, encoding='utf-8') as f:
output.write('\n<style>\n')
output.write(f.read())
output.write('\n</style>\n')
def _extract_external_css(selector):
for p in selector.xpath("/html/head/link[@rel='stylesheet']"):
href = re.sub(r"^//", r"https://", p.xpath("@href").extract_first())
sheet = requests.get(href, headers=HEADERS).text if href else ""
yield sheet
def weekly_page(subreddit, file, css=None):
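# css selects how stylesheets end up in the generated page:
#   1           -> download the page's external stylesheets and inline them
#   2           -> keep the original <head> with external stylesheet links
#   str or list -> inline the given local CSS file(s)
#   otherwise   -> emit a bare <head> with only the charset meta tag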
if isinstance(file, str):
with open(file, 'w', encoding='utf-8') as f:
return weekly_page(subreddit, file=f, css=css)
r = requests.get("https://www.reddit.com/r/{}/top/?sort=top&t=week".format(subreddit),
headers=HEADERS)
if r.status_code != 200:
raise RuntimeError("Request status code is {}.".format(r.status_code))
if (r.encoding or '').lower() != 'utf-8':  # encoding may be None
raise RuntimeError("Request didn't return a UTF-8 output.")
sel = parsel.Selector(text=r.text)
file.write('<!DOCTYPE html>')
file.write('<html>')
if css == 1: # Download External
file.write('<head>')
file.write('<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">')
for stylesheet in _extract_external_css(sel):
file.write('\n<style>\n')
file.write(stylesheet)
file.write('\n</style>\n')
file.write('</head>')
elif css == 2: # Keep External
head = sel.xpath("/html/head").extract_first()
head = re.sub(r'="//', '="https://', head)
file.write(head)
elif isinstance(css, str):
file.write('<head>')
file.write('<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">')
_concat_css(css, file)
file.write('</head>')
elif isinstance(css, list):
file.write('<head>')
file.write('<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">')
for c in css:
_concat_css(c, file)
file.write('</head>')
else:
file.write('<head>')
file.write('<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">')
file.write('</head>')
file.write('<body class="">')
file.write('<div class="content" role="main">')
for spacer in sel.xpath("/html/body/div[@class='content']/div[@class='spacer' and @style]"):
content = spacer.extract()
content = re.sub(r'="//', r'="https://', content)
file.write(content)
file.write('</div>')
file.write('</body>')
file.write('</html>')
def send_email(subject, to, message):
fromaddr = os.environ['REWE_SENDER']
frompass = os.environ['REWE_PASS']
msg = MIMEMultipart('alternative')
msg['Subject'] = Header(subject, 'utf-8')
msg['From'] = fromaddr
msg['To'] = to
msg.attach(MIMEText('Weekly Subreddit', 'plain'))
msg.attach(MIMEText(message, 'html'))
with smtplib.SMTP(host='smtp.gmail.com', port=587) as server:
server.ehlo()
server.starttls()
server.ehlo()
server.login(fromaddr, frompass)
server.sendmail(fromaddr, [to], msg.as_string())
def user_subreddits(token):
reddit = praw.Reddit(client_id=os.environ['REWE_APP_ID'],
client_secret=os.environ['REWE_APP_SECRET'],
user_agent='Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:52.0) Gecko/20100101 Firefox/52.0',
refresh_token=token)
return reddit.user.subreddits()
def send_newsletter(token, email):
for subreddit in user_subreddits(token):
subreddit = subreddit.display_name
with io.StringIO() as body:
print("Sending {} weekly for {}...".format(subreddit, email))
weekly_page(subreddit, body, css=REDDIT_CSS)
email_body = Premailer(body.getvalue(),
base_url='https://www.reddit.com',
disable_leftover_css=True).transform()
send_email(subject='Reddit weekly r/{}'.format(subreddit),
to=email, message=email_body)
def main(filepath):
with io.open(filepath, 'r') as file:
users = json.load(file)
for email in users:
token = users[email]
send_newsletter(token, email)
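# The users file is a flat JSON object mapping each recipient's email to
# their Reddit OAuth refresh token; a hypothetical example:
#   {"alice@example.com": "1234-refresh-token"}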
# usage: ./rewe.py -u <users.json>  (long form: --users=<users.json>)
if __name__ == '__main__':
parser = ArgumentParser()
parser.add_argument('-u', '--users', required=True, help='load users and their tokens from a JSON file')
opt = parser.parse_args()
main(opt.users)
| thelostt/reddit-weekly | rewe.py | Python | mit | 5,215 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib import admin
from blog.models import Post
class PostAdmin(admin.ModelAdmin):
list_display = ['id', 'title', 'created', 'status']
list_filter = ('status', )
admin.site.register(Post, PostAdmin) | pythonvlc/PyConES-2015 | pycones/blog/admin.py | Python | mit | 336 |
#!/usr/bin/env python
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
"""
Test for BitBucket PR 126:
SConf doesn't work well with 'io' module on pre-3.0 Python. This is because
io.StringIO (used by SCons.SConf.Streamer) accepts only unicode strings.
Non-unicode input causes it to raise an exception.
"""
import TestSCons
test = TestSCons.TestSCons()
test.write('SConstruct', """
# SConstruct
#
# The CheckHello should return 'yes' if everything works fine. Otherwise it
# returns 'failed'.
#
def hello(target, source, env):
import traceback
try:
print 'hello!\\n' # this breaks the script
with open(env.subst('$TARGET', target = target),'w') as f:
f.write('yes')
except:
# write to file, as stdout/stderr is broken
traceback.print_exc(file=open('traceback','w'))
return 0
def CheckHello(context):
import sys
context.Display('Checking whether hello works... ')
stat,out = context.TryAction(hello,'','.in')
if stat and out:
context.Result(out)
else:
context.Result('failed')
return out
env = Environment()
cfg = Configure(env)
cfg.AddTest('CheckHello', CheckHello)
cfg.CheckHello()
env = cfg.Finish()
""")
test.run(arguments = '.')
test.must_contain_all_lines(test.stdout(), ['Checking whether hello works... yes'])
test.must_not_exist('traceback')
test.pass_test()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| andrewyoung1991/scons | test/Configure/Streamer1.py | Python | mit | 2,529 |
import os
import inspect
import vcr
def build_path(function):
return os.path.join(os.path.dirname(inspect.getfile(function)),
'cassettes',
function.__module__.split('.')[1],
function.__name__ + '.yml')
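# For a hypothetical test function tests.test_api.test_ticker defined in
# tests/test_api.py, this resolves to:
#   tests/cassettes/test_api/test_ticker.yml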
vcr = vcr.config.VCR(
func_path_generator=build_path,
cassette_library_dir='tests/cassettes',
match_on=['uri', 'method'],
decode_compressed_response=True,
record_mode='once'
)
| alfakini/python-mercadobitcoin | tests/__init__.py | Python | mit | 466 |
import os
class Config(object):
SPOTIPY_REDIRECT_URI = os.environ['SPOTIPY_REDIRECT_URI']
SPOTIPY_CLIENT_ID = os.environ['SPOTIPY_CLIENT_ID']
SPOTIPY_CLIENT_SECRET = os.environ['SPOTIPY_CLIENT_SECRET']
SPOTIFY_ACCESS_SCOPE = 'playlist-modify-public playlist-modify-private playlist-read-private user-library-read'
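# These scopes allow reading the user's saved tracks and private
# playlists, and creating/modifying playlists on their behalf.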
###########
# Options #
###########
# TRACKS_PER_ARTIST #
# Number of tracks per artist to add to the playlist.
# I recommend 5 or less. Max is 10.
TRACKS_PER_ARTIST = 3
# COLLATE #
# By default, the playlist will be ordered like:
# - ARTIST A TRACK 1
# - ARTIST A TRACK 2
# - ARTIST A TRACK 3
# - ARTIST A TRACK 4
# - ARTIST A TRACK 5
# - ARTIST B TRACK 1
# - ARTIST B TRACK 2
# - ARTIST B TRACK 3
# ...
# if COLLATE is set to True, it will instead be ordered like so:
# - ARTIST A TRACK 1
# - ARTIST B TRACK 1
# - ARTIST C TRACK 1
# ...
# - ARTIST Z TRACK 1
# - ARTIST A TRACK 2
# - ARTIST B TRACK 2
# ...
COLLATE = False
# PUBLIC #
# Default False. Set True to make your generated playlist public.
PUBLIC = False
| jzimbel/artist-expander | config.py | Python | mit | 1,176 |
# -*- coding: utf-8 -*-
#-------------------------------------------------------------------------------
# Name: testStream.py
# Purpose: tests for stream.py
#
# Authors: Michael Scott Cuthbert
# Christopher Ariza
#
# Copyright: Copyright © 2009-2014 Michael Scott Cuthbert and the music21 Project
# License: LGPL or BSD, see license.txt
#-------------------------------------------------------------------------------
import random
import unittest
import copy
from music21.stream import Stream
from music21.stream import Voice
from music21.stream import Measure
from music21.stream import Score
from music21.stream import Part
from music21 import bar
from music21 import chord
from music21 import clef
from music21 import common
from music21 import duration
from music21 import interval
from music21 import key
from music21 import metadata
from music21 import meter
from music21 import note
from music21 import pitch
from music21.musicxml import m21ToXml
from music21.midi import translate as midiTranslate
from music21 import environment
_MOD = "testStream.py"
environLocal = environment.Environment(_MOD)
#-------------------------------------------------------------------------------
class TestExternal(unittest.TestCase):
def runTest(self):
pass
def testLilySimple(self):
a = Stream()
ts = meter.TimeSignature("3/4")
b = Stream()
q = note.Note(type='quarter')
q.octave = 5
b.repeatInsert(q, [0,1,2,3])
bestC = b.bestClef(allowTreble8vb = True)
a.insert(0, bestC)
a.insert(0, ts)
a.insert(0, b)
a.show('lily.png')
def testLilySemiComplex(self):
a = Stream()
ts = meter.TimeSignature("3/8")
b = Stream()
q = note.Note(type='eighth')
dur1 = duration.Duration()
dur1.type = "eighth"
tup1 = duration.Tuplet()
tup1.tupletActual = [5, dur1]
tup1.tupletNormal = [3, dur1]
q.octave = 2
q.duration.appendTuplet(tup1)
for i in range(0,5):
b.append(copy.deepcopy(q))
b.elements[i].accidental = pitch.Accidental(i - 2)
b.elements[0].duration.tuplets[0].type = "start"
b.elements[-1].duration.tuplets[0].type = "stop"
b.elements[2].editorial.comment.text = "a real C"
bestC = b.bestClef(allowTreble8vb = True)
a.insert(0, bestC)
a.insert(0, ts)
a.insert(0, b)
a.show('lily.png')
def testScoreLily(self):
'''
Test the lilypond output of various score operations.
'''
c = note.Note("C4")
d = note.Note("D4")
ts = meter.TimeSignature("2/4")
s1 = Part()
s1.append(copy.deepcopy(c))
s1.append(copy.deepcopy(d))
s2 = Part()
s2.append(copy.deepcopy(d))
s2.append(copy.deepcopy(c))
score1 = Score()
score1.insert(ts)
score1.insert(s1)
score1.insert(s2)
score1.show('lily.png')
def testMXOutput(self):
'''A simple test of adding notes to measures in a stream.
'''
c = Stream()
for dummy in range(4):
b = Measure()
for p in ['a', 'g', 'c#', 'a#']:
a = note.Note(p)
b.append(a)
c.append(b)
c.show()
def testMxMeasures(self):
'''A test of the automatic partitioning of notes in a measure and the creation of ties.
'''
n = note.Note()
n.quarterLength = 3
a = Stream()
a.repeatInsert(n, list(range(0,120,3)))
#a.show() # default time signature used
a.insert( 0, meter.TimeSignature("5/4") )
a.insert(10, meter.TimeSignature("2/4") )
a.insert( 3, meter.TimeSignature("3/16") )
a.insert(20, meter.TimeSignature("9/8") )
a.insert(40, meter.TimeSignature("10/4") )
a.show()
def testMultipartStreams(self):
'''Test the creation of multi-part streams by simply having streams within streams.
'''
q = Stream()
r = Stream()
for x in ['c3','a3','g#4','d2'] * 10:
n = note.Note(x)
n.quarterLength = .25
q.append(n)
m = note.Note(x)
m.quarterLength = 1.125
r.append(m)
s = Stream() # container
s.insert(q)
s.insert(r)
s.insert(0, meter.TimeSignature("3/4") )
s.insert(3, meter.TimeSignature("5/4") )
s.insert(8, meter.TimeSignature("3/4") )
s.show()
def testMultipartMeasures(self):
'''This demonstrates obtaining slices from a stream and layering
them into individual parts.
OMIT_FROM_DOCS
TODO: this should show instruments; it presently does not,
probably b/c appending elements to Stream s sets their activeSite
to that stream
'''
from music21 import corpus, converter
a = converter.parse(corpus.getWork(['mozart', 'k155','movement2.xml']))
b = a[8][4:8]
c = a[8][8:12]
d = a[8][12:16]
s = Stream()
s.insert(b)
s.insert(c)
s.insert(d)
s.show()
def testCanons(self):
'''
A test of creating a canon with shifted presentations of a source melody.
This also demonstrates
the addition of rests to parts that start late or end early.
The addition of rests happens with makeRests(), which is called in
musicxml generation of a Stream.
'''
a = ['c', 'g#', 'd-', 'f#', 'e', 'f' ] * 4
s = Stream()
partOffsetShift = 1.25
partOffset = 0
for junk in range(6):
p = Stream()
for pitchName in a:
n = note.Note(pitchName)
n.quarterLength = 1.5
p.append(n)
p.offset = partOffset
s.insert(p)
partOffset += partOffsetShift
s.show()
def testBeamsPartial(self):
'''This demonstrates a partial beam; a beam that is not connected between more than one note.
'''
q = Stream()
for x in [.125, .25, .25, .125, .125, .125] * 30:
n = note.Note('c')
n.quarterLength = x
q.append(n)
s = Stream() # container
s.insert(q)
s.insert(0, meter.TimeSignature("3/4") )
s.insert(3, meter.TimeSignature("5/4") )
s.insert(8, meter.TimeSignature("4/4") )
s.show()
def testBeamsStream(self):
'''A test of beams applied to different time signatures.
'''
q = Stream()
r = Stream()
p = Stream()
for x in ['c3','a3','c#4','d3'] * 30:
n = note.Note(x)
#n.quarterLength = random.choice([.25, .125, .5])
n.quarterLength = random.choice([.25])
q.append(n)
m = note.Note(x)
m.quarterLength = .5
r.append(m)
o = note.Note(x)
o.quarterLength = .125
p.append(o)
s = Stream() # container
s.append(q)
s.append(r)
s.append(p)
s.insert(0, meter.TimeSignature("3/4") )
s.insert(3, meter.TimeSignature("5/4") )
s.insert(8, meter.TimeSignature("4/4") )
self.assertEqual(len(s.flat.notes), 360)
s.show()
def testBeamsMeasure(self):
aMeasure = Measure()
aMeasure.timeSignature = meter.TimeSignature('4/4')
aNote = note.Note()
aNote.quarterLength = .25
aMeasure.repeatAppend(aNote,16)
bMeasure = aMeasure.makeBeams()
bMeasure.show()
#-------------------------------------------------------------------------------
class Test(unittest.TestCase):
def runTest(self):
pass
def testAdd(self):
import music21 # needed to do fully-qualified isinstance name checking
a = Stream()
for dummy in range(5):
a.insert(0, music21.Music21Object())
self.assertTrue(a.isFlat)
a[2] = note.Note("C#")
self.assertTrue(a.isFlat)
a[3] = Stream()
self.assertFalse(a.isFlat)
def testSort(self):
s = Stream()
s.repeatInsert(note.Note("C#"), [0.0, 2.0, 4.0])
s.repeatInsert(note.Note("D-"), [1.0, 3.0, 5.0])
self.assertFalse(s.isSorted)
y = s.sorted
self.assertTrue(y.isSorted)
g = ""
for myElement in y:
g += "%s: %s; " % (myElement.offset, myElement.name)
self.assertEqual(g, '0.0: C#; 1.0: D-; 2.0: C#; 3.0: D-; 4.0: C#; 5.0: D-; ')
def testFlatSimple(self):
s1 = Score()
s1.id = "s1"
p1 = Part()
p1.id = "p1"
p2 = Part()
p2.id = "p2"
n1 = note.Note('C', type='half')
n2 = note.Note('D', type='quarter')
n3 = note.Note('E', type='quarter')
n4 = note.Note('F', type='half')
n1.id = "n1"
n2.id = "n2"
n3.id = "n3"
n4.id = "n4"
p1.append(n1)
p1.append(n2)
p2.append(n3)
p2.append(n4)
p2.offset = 20.0
s1.insert(p1)
s1.insert(p2)
sf1 = s1.flat
sf1.id = "flat s1"
# for site in n4.sites.getSites():
# print site.id,
# print n4.sites.getOffsetBySite(site)
self.assertEqual(len(sf1), 4)
assert(sf1[1] is n2)
def testActiveSiteCopiedStreams(self):
srcStream = Stream()
srcStream.insert(3, note.Note())
# the note's activeSite is srcStream now
self.assertEqual(srcStream[0].activeSite, srcStream)
midStream = Stream()
for x in range(2):
srcNew = copy.deepcopy(srcStream)
# for n in srcNew:
# offset = n.getOffsetBySite(srcStream)
#got = srcNew[0].getOffsetBySite(srcStream)
#for n in srcNew: pass
srcNew.offset = x * 10
midStream.insert(srcNew)
self.assertEqual(srcNew.offset, x * 10)
# no offset is set yet
self.assertEqual(midStream.offset, 0)
# component streams have offsets
self.assertEqual(midStream[0].getOffsetBySite(midStream), 0)
self.assertEqual(midStream[1].getOffsetBySite(midStream), 10.0)
# component notes still have a location set to srcStream
#self.assertEqual(midStream[1][0].getOffsetBySite(srcStream), 3.0)
# component notes still have a location set to midStream[1]
self.assertEqual(midStream[1][0].getOffsetBySite(midStream[1]), 3.0)
# one location in midstream
self.assertEqual(len(midStream.sites), 1)
#environLocal.printDebug(['srcStream', srcStream])
#environLocal.printDebug(['midStream', midStream])
x = midStream.flat
def testSimpleRecurse(self):
st1 = Stream()
st2 = Stream()
n1 = note.Note()
st2.insert(10, n1)
st1.insert(12, st2)
self.assertTrue(st1.flat.sorted[0] is n1)
self.assertEqual(st1.flat.sorted[0].offset, 22.0)
def testStreamRecursion(self):
srcStream = Stream()
for x in range(6):
n = note.Note('G#')
n.duration = duration.Duration('quarter')
n.offset = x * 1
srcStream.insert(n)
self.assertEqual(len(srcStream), 6)
self.assertEqual(len(srcStream.flat), 6)
self.assertEqual(srcStream.flat[1].offset, 1.0)
# self.assertEqual(len(srcStream.getOverlaps()), 0)
midStream = Stream()
for x in range(4):
srcNew = copy.deepcopy(srcStream)
srcNew.offset = x * 10
midStream.insert(srcNew)
self.assertEqual(len(midStream), 4)
#environLocal.printDebug(['pre flat of mid stream'])
self.assertEqual(len(midStream.flat), 24)
# self.assertEqual(len(midStream.getOverlaps()), 0)
mfs = midStream.flat.sorted
self.assertEqual(mfs[7].getOffsetBySite(mfs), 11.0)
farStream = Stream()
for x in range(7):
midNew = copy.deepcopy(midStream)
midNew.offset = x * 100
farStream.insert(midNew)
self.assertEqual(len(farStream), 7)
self.assertEqual(len(farStream.flat), 168)
# self.assertEqual(len(farStream.getOverlaps()), 0)
#
# get just offset times
# elementsSorted returns offset, dur, element
offsets = [a.offset for a in farStream.flat]
# create what we expect the offsets to be
offsetsMatch = list(range(0, 6))
offsetsMatch += [x + 10 for x in range(0, 6)]
offsetsMatch += [x + 20 for x in range(0, 6)]
offsetsMatch += [x + 30 for x in range(0, 6)]
offsetsMatch += [x + 100 for x in range(0, 6)]
offsetsMatch += [x + 110 for x in range(0, 6)]
self.assertEqual(offsets[:len(offsetsMatch)], offsetsMatch)
def testStreamSortRecursion(self):
farStream = Stream()
for x in range(4):
midStream = Stream()
for y in range(4):
nearStream = Stream()
for z in range(4):
n = note.Note("G#")
n.duration = duration.Duration('quarter')
nearStream.insert(z * 2, n) # 0, 2, 4, 6
midStream.insert(y * 5, nearStream) # 0, 5, 10, 15
farStream.insert(x * 13, midStream) # 0, 13, 26, 39
# get just offset times
# elementsSorted returns offset, dur, element
fsfs = farStream.flat.sorted
offsets = [a.offset for a in fsfs] # safer is a.getOffsetBySite(fsfs)
offsetsBrief = offsets[:20]
self.assertEqual(offsetsBrief, [0, 2, 4, 5, 6, 7, 9, 10, 11, 12, 13, 14, 15, 15, 16, 17, 17, 18, 19, 19])
def testOverlapsA(self):
a = Stream()
# here, the third item overlaps with the first
for offset, dur in [(0,12), (3,2), (11,3)]:
n = note.Note('G#')
n.duration = duration.Duration()
n.duration.quarterLength = dur
n.offset = offset
a.insert(n)
includeDurationless = True
includeEndBoundary = False
simultaneityMap, overlapMap = a._findLayering(a.flat,
includeDurationless, includeEndBoundary)
self.assertEqual(simultaneityMap, [[], [], []])
self.assertEqual(overlapMap, [[1,2], [0], [0]])
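# overlapMap[i] lists the indices of elements overlapping element i:
# the 12-QL note spanning 0-12 overlaps both shorter notes, and each
# of them overlaps only it.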
dummy = a._consolidateLayering(a.flat, overlapMap)
# print dummy
#found = a.getOverlaps(includeDurationless, includeEndBoundary)
# there should be one overlap group
#self.assertEqual(len(found.keys()), 1)
# there should be three items in this overlap group
#self.assertEqual(len(found[0]), 3)
a = Stream()
# here, each item touches or overlaps the next one
for offset, dur in [(0,1), (1,2), (2,3)]:
n = note.Note('G#')
n.duration = duration.Duration()
n.duration.quarterLength = dur
n.offset = offset
a.insert(n)
includeDurationless = True
includeEndBoundary = True
simultaneityMap, overlapMap = a._findLayering(a.flat,
includeDurationless, includeEndBoundary)
self.assertEqual(simultaneityMap, [[], [], []])
self.assertEqual(overlapMap, [[1], [0,2], [1]])
dummy = a._consolidateLayering(a.flat, overlapMap)
def testOverlapsB(self):
a = Stream()
for x in range(4):
n = note.Note('G#')
n.duration = duration.Duration('quarter')
n.offset = x * 1
a.insert(n)
d = a.getOverlaps(True, False)
# no overlaps
self.assertEqual(len(d), 0)
# including coincident boundaries
d = a.getOverlaps(includeDurationless=True, includeEndBoundary=True)
environLocal.printDebug(['testOverlapsB', d])
# return one dictionary that has a reference to each note that
# is in the same overlap group
self.assertEqual(len(d), 1)
self.assertEqual(len(d[0]), 4)
# a = Stream()
# for x in [0,0,0,0,13,13,13]:
# n = note.Note('G#')
# n.duration = duration.Duration('half')
# n.offset = x
# a.insert(n)
# d = a.getOverlaps()
# len(d[0])
# 4
# len(d[13])
# 3
# a = Stream()
# for x in [0,0,0,0,3,3,3]:
# n = note.Note('G#')
# n.duration = duration.Duration('whole')
# n.offset = x
# a.insert(n)
#
# # default is to not include coincident boundaries
# d = a.getOverlaps()
# len(d[0])
# 7
def testStreamDuration(self):
a = Stream()
q = note.Note(type='quarter')
a.repeatInsert(q, [0,1,2,3])
self.assertEqual(a.highestOffset, 3)
self.assertEqual(a.highestTime, 4)
self.assertEqual(a.duration.quarterLength, 4.0)
newDuration = duration.Duration("half")
self.assertEqual(newDuration.quarterLength, 2.0)
a.duration = newDuration
self.assertEqual(a.duration.quarterLength, 2.0)
self.assertEqual(a.highestTime, 4)
def testMeasureStream(self):
'''An approach to setting TimeSignature measures in offsets and durations
'''
a = meter.TimeSignature('3/4')
b = meter.TimeSignature('5/4')
c = meter.TimeSignature('2/4')
a.duration = duration.Duration()
b.duration = duration.Duration()
c.duration = duration.Duration()
# 20 measures of 3/4
a.duration.quarterLength = 20 * a.barDuration.quarterLength
# 10 measures of 5/4
b.duration.quarterLength = 10 * b.barDuration.quarterLength
# 5 measures of 2/4
c.duration.quarterLength = 5 * c.barDuration.quarterLength
m = Stream()
m.append(a)
m.append(b)
m.append(c)
self.assertEqual(m[1].offset, (20 * a.barDuration.quarterLength))
self.assertEqual(m[2].offset, ((20 * a.barDuration.quarterLength) +
(10 * b.barDuration.quarterLength)))
def testMultipartStream(self):
'''Test the creation of streams with multiple parts. See versions
of this test in TestExternal for more details
'''
q = Stream()
r = Stream()
for x in ['c3','a3','g#4','d2'] * 10:
n = note.Note(x)
n.quarterLength = .25
q.append(n)
m = note.Note(x)
m.quarterLength = 1
r.append(m)
s = Stream() # container
s.insert(q)
s.insert(r)
s.insert(0, meter.TimeSignature("3/4") )
s.insert(3, meter.TimeSignature("5/4") )
s.insert(8, meter.TimeSignature("3/4") )
self.assertEqual(len(s.flat.notes), 80)
from music21 import corpus, converter
thisWork = corpus.getWork('corelli/opus3no1/1grave')
a = converter.parse(thisWork)
b = a[7][5:10]
environLocal.printDebug(['b', b, b.sites.getSiteIds()])
c = a[7][10:15]
environLocal.printDebug(['c', c, c.sites.getSiteIds()])
d = a[7][15:20]
environLocal.printDebug(['d', d, d.sites.getSiteIds()])
s2 = Stream()
environLocal.printDebug(['s2', s2, id(s2)])
s2.insert(b)
s2.insert(c)
s2.insert(d)
def testActiveSites(self):
'''Test activeSite relationships.
Note that here we see why sometimes qualified class names are needed.
This test passes fine with class names Part and Measure when run interactively,
creating a Test instance. When run from the command line
Part and Measure do not match, and music21.stream.Part has to be
employed instead.
'''
import music21.stream # needed to do fully-qualified isinstance name checking
from music21 import corpus
a = corpus.parse('corelli/opus3no1/1grave')
# test basic activeSite relationships
b = a[8]
self.assertEqual(isinstance(b, music21.stream.Part), True)
self.assertEqual(b.activeSite, a)
# this, if called, actively destroys the activeSite relationship!
# on the measures (as new Elements are not created)
#m = b.getElementsByClass('Measure')[5]
#self.assertEqual(isinstance(m, Measure), True)
# this is false b/c, when getting the measures, activeSites are lost
#self.assertEqual(m.activeSite, b) #measures activeSite should be part
# NOTE: this is dependent on raw element order, and might change
# due to importing changes
#b.show('t')
self.assertEqual(isinstance(b[15], music21.stream.Measure), True)
self.assertEqual(b[8].activeSite, b) #measures activeSite should be part
# a different test derived from a TestExternal
q = Stream()
r = Stream()
for x in ['c3','a3','c#4','d3'] * 30:
n = note.Note(x)
n.quarterLength = random.choice([.25])
q.append(n)
m = note.Note(x)
m.quarterLength = .5
r.append(m)
s = Stream() # container
s.insert(q)
s.insert(r)
self.assertEqual(q.activeSite, s)
self.assertEqual(r.activeSite, s)
def testActiveSitesMultiple(self):
'''Test an object having multiple activeSites.
'''
a = Stream()
b = Stream()
n = note.Note("G#")
n.offset = 10
a.insert(n)
b.insert(n)
# the object's offset has been transferred to each activeSite
# stream in the same way
self.assertEqual(n.getOffsetBySite(a), n.getOffsetBySite(b))
self.assertEqual(n.getOffsetBySite(a), 10)
def testExtractedNoteAssignLyric(self):
from music21 import converter, corpus, text
a = converter.parse(corpus.getWork('corelli/opus3no1/1grave'))
b = a.parts[1]
c = b.flat
for thisNote in c.getElementsByClass('Note'):
thisNote.lyric = thisNote.name
textStr = text.assembleLyrics(b)
self.assertEqual(textStr.startswith('A A G F E'),
True)
def testGetInstrumentFromMxl(self):
'''Test getting an instrument from an mxl file
'''
from music21 import corpus, converter
# manually set activeSite to associate
a = converter.parse(corpus.getWork(['corelli', 'opus3no1',
'1grave.xml']))
b = a.parts[2]
# by calling the .part property, we create a new stream; thus, the
# activeSite of b is no longer a
# self.assertEqual(b.activeSite, None)
instObj = b.getInstrument()
self.assertEqual(instObj.partName, u'Violone e Organo')
def testGetInstrumentManual(self):
from music21 import defaults
#import pdb; pdb.set_trace()
# search activeSite from a measure within
# a different test derived from a TestExternal
q = Stream()
r = Stream()
for x in ['c3','a3','c#4','d3'] * 15:
n = note.Note(x)
n.quarterLength = random.choice([.25])
q.append(n)
m = note.Note(x)
m.quarterLength = .5
r.append(m)
s = Stream() # container
s.insert(q)
s.insert(r)
instObj = q.getInstrument()
self.assertEqual(instObj.partName, defaults.partName)
instObj = r.getInstrument()
self.assertEqual(instObj.partName, defaults.partName)
instObj = s.getInstrument()
self.assertEqual(instObj.partName, defaults.partName)
# test mx generation of parts
GEX = m21ToXml.GeneralObjectExporter()
unused_mx = GEX.parse(q).decode('utf-8')
unused_mx = GEX.parse(r).decode('utf-8')
# test mx generation of score
unused_mx = GEX.parse(s).decode('utf-8')
def testMeasureAndTieCreation(self):
'''A test of the automatic partitioning of notes in a measure and the creation of ties.
'''
n = note.Note()
n.quarterLength = 3
a = Stream()
a.repeatInsert(n, list(range(0,120,3)))
a.insert( 0, meter.TimeSignature("5/4") )
a.insert(10, meter.TimeSignature("2/4") )
a.insert( 3, meter.TimeSignature("3/16") )
a.insert(20, meter.TimeSignature("9/8") )
a.insert(40, meter.TimeSignature("10/4") )
GEX = m21ToXml.GeneralObjectExporter()
unused_mx = GEX.parse(a).decode('utf-8')
def testStreamCopy(self):
'''Test copying a stream
'''
#import pdb; pdb.set_trace()
# search activeSite from a measure within
# a different test derived from a TestExternal
q = Stream()
r = Stream()
for x in ['c3','a3','c#4','d3'] * 30:
n = note.Note(x)
n.quarterLength = random.choice([.25])
q.append(n)
m = note.Note(x)
m.quarterLength = .5
r.append(m)
s = Stream() # container
s.insert(q)
s.insert(r)
# copying the whole: this works
unused_w = copy.deepcopy(s)
post = Stream()
# copying while looping: this gets increasingly slow
for aElement in s:
environLocal.printDebug(['copying and inserting an element',
aElement, len(aElement.sites)])
bElement = copy.deepcopy(aElement)
post.insert(aElement.offset, bElement)
def testIteration(self):
'''This test was designed to illustrate a past problem with stream
Iterations.
'''
q = Stream()
r = Stream()
for x in ['c3','a3','c#4','d3'] * 5:
n = note.Note(x)
n.quarterLength = random.choice([.25])
q.append(n)
m = note.Note(x)
m.quarterLength = .5
r.append(m)
src = Stream() # container
src.insert(q)
src.insert(r)
a = Stream()
for obj in src.getElementsByClass('Stream'):
a.insert(obj)
environLocal.printDebug(['expected length', len(a)])
counter = 0
for x in a:
if counter >= 4:
environLocal.printDebug(['infinite loop', counter])
break
environLocal.printDebug([x])
junk = x.getInstrument(searchActiveSite=True)
del junk
counter += 1
def testGetTimeSignatures(self):
#getTimeSignatures
n = note.Note()
n.quarterLength = 3
a = Stream()
a.autoSort = False
a.insert( 0, meter.TimeSignature("5/4") )
a.insert(10, meter.TimeSignature("2/4") )
a.insert( 3, meter.TimeSignature("3/16") )
a.insert(20, meter.TimeSignature("9/8") )
a.insert(40, meter.TimeSignature("10/4") )
offsets = [x.offset for x in a]
self.assertEqual(offsets, [0.0, 10.0, 3.0, 20.0, 40.0])
# fill with notes
a.repeatInsert(n, list(range(0,120,3)))
b = a.getTimeSignatures(sortByCreationTime=False)
self.assertEqual(len(b), 5)
self.assertEqual(b[0].numerator, 5)
self.assertEqual(b[4].numerator, 10)
self.assertEqual(b[4].activeSite, b)
# none of the offsets are being copied
offsets = [x.offset for x in b]
# with autoSort is passed on from elements search
#self.assertEqual(offsets, [0.0, 3.0, 10.0, 20.0, 40.0])
self.assertEqual(offsets, [0.0, 10.0, 3.0, 20.0, 40.0])
def testElements(self):
'''Test basic Elements wrapping non music21 objects
'''
import music21 # needed to do fully-qualified isinstance name checking
a = Stream()
a.insert(50, music21.Music21Object())
self.assertEqual(len(a), 1)
# there are two locations, default and the one just added
self.assertEqual(len(a[0].sites), 2)
# this works
# self.assertEqual(a[0].sites.getOffsetByIndex(-1), 50.0)
# self.assertEqual(a[0].sites.getSiteByIndex(-1), a)
self.assertEqual(a[0].getOffsetBySite(a), 50.0)
self.assertEqual(a[0].offset, 50.0)
def testClefs(self):
s = Stream()
for x in ['c3','a3','c#4','d3'] * 5:
n = note.Note(x)
s.append(n)
clefObj = s.bestClef()
self.assertEqual(clefObj.sign, 'F')
measureStream = s.makeMeasures()
clefObj = measureStream[0].clef
self.assertEqual(clefObj.sign, 'F')
def testFindConsecutiveNotes(self):
s = Stream()
n1 = note.Note("c3")
n1.quarterLength = 1
n2 = chord.Chord(["c4", "e4", "g4"])
n2.quarterLength = 4
s.insert(0, n1)
s.insert(1, n2)
l1 = s.findConsecutiveNotes()
self.assertTrue(l1[0] is n1)
self.assertTrue(l1[1] is n2)
l2 = s.findConsecutiveNotes(skipChords = True)
self.assertTrue(len(l2) == 1)
self.assertTrue(l2[0] is n1)
r1 = note.Rest()
s2 = Stream()
s2.insert([0.0, n1,
1.0, r1,
2.0, n2])
l3 = s2.findConsecutiveNotes()
self.assertTrue(l3[1] is None)
l4 = s2.findConsecutiveNotes(skipRests = True)
self.assertTrue(len(l4) == 2)
s3 = Stream()
s3.insert([0.0, n1,
1.0, r1,
10.0, n2])
l5 = s3.findConsecutiveNotes(skipRests = False)
self.assertTrue(len(l5) == 3) # not 4 because two Nones allowed in a row!
l6 = s3.findConsecutiveNotes(skipRests = True, skipGaps = True)
self.assertTrue(len(l6) == 2)
n1.quarterLength = 10
n3 = note.Note("B-")
s4 = Stream()
s4.insert([0.0, n1,
1.0, n2,
10.0, n3])
l7 = s4.findConsecutiveNotes()
self.assertTrue(len(l7) == 2) # n2 is hidden because it is in an overlap
l8 = s4.findConsecutiveNotes(getOverlaps = True)
self.assertTrue(len(l8) == 3)
self.assertTrue(l8[1] is n2)
l9 = s4.findConsecutiveNotes(getOverlaps = True, skipChords = True)
self.assertTrue(len(l9) == 3)
self.assertTrue(l9[1] is None)
n4 = note.Note("A#")
n1.quarterLength = 1
n2.quarterLength = 1
s5 = Stream()
s5.insert([0.0, n1,
1.0, n2,
2.0, n3,
3.0, n4])
l10 = s5.findConsecutiveNotes()
self.assertTrue(len(l10) == 4)
l11 = s5.findConsecutiveNotes(skipUnisons = True)
self.assertTrue(len(l11) == 3)
self.assertTrue(l11[2] is n3)
n5 = note.Note("c4")
s6 = Stream()
s6.insert([0.0, n1,
1.0, n5,
2.0, n2])
l12 = s6.findConsecutiveNotes(noNone = True)
self.assertTrue(len(l12) == 3)
l13 = s6.findConsecutiveNotes(noNone = True, skipUnisons = True)
self.assertTrue(len(l13) == 3)
l14 = s6.findConsecutiveNotes(noNone = True, skipOctaves = True)
self.assertTrue(len(l14) == 2)
self.assertTrue(l14[0] is n1)
self.assertTrue(l14[1] is n2)
def testMelodicIntervals(self):
c4 = note.Note("C4")
d5 = note.Note("D5")
r1 = note.Rest()
b4 = note.Note("B4")
s1 = Stream()
s1.append([c4, d5, r1, b4])
intS1 = s1.melodicIntervals(skipRests=True)
self.assertTrue(len(intS1) == 2)
M9 = intS1[0]
self.assertEqual(M9.niceName, "Major Ninth")
## TODO: Many more tests
def testStripTiesBuiltA(self):
s1 = Stream()
n1 = note.Note("D#2")
n1.quarterLength = 6
s1.append(n1)
self.assertEqual(len(s1.notes), 1)
s1 = s1.makeMeasures()
s1.makeTies() # makes ties but no end tie positions!
# flat version has 2 notes
self.assertEqual(len(s1.flat.notes), 2)
sUntied = s1.stripTies()
self.assertEqual(len(sUntied.notes), 1)
self.assertEqual(sUntied.notes[0].quarterLength, 6)
n = note.Note()
n.quarterLength = 3
a = Stream()
a.repeatInsert(n, list(range(0,120,3)))
self.assertEqual(len(a), 40)
a.insert( 0, meter.TimeSignature("5/4") )
a.insert(10, meter.TimeSignature("2/4") )
a.insert( 3, meter.TimeSignature("3/16") )
a.insert(20, meter.TimeSignature("9/8") )
a.insert(40, meter.TimeSignature("10/4") )
b = a.makeMeasures()
b.makeTies()
# we now have 65 notes, as ties have been created
self.assertEqual(len(b.flat.notes), 65)
c = b.stripTies() # gets flat, removes measures
self.assertEqual(len(c.notes), 40)
def testStripTiesImportedA(self):
from music21 import converter
from music21.musicxml import testPrimitive
a = converter.parse(testPrimitive.multiMeasureTies)
p1 = a.parts[0]
self.assertEqual(len(p1.flat.notesAndRests), 16)
p1.stripTies(inPlace=True, retainContainers=True)
self.assertEqual(len(p1.flat.notesAndRests), 6)
p2 = a.parts[1]
self.assertEqual(len(p2.flat.notesAndRests), 16)
p2Stripped = p2.stripTies(inPlace=False, retainContainers=True)
self.assertEqual(len(p2Stripped.flat.notesAndRests), 5)
# original part should not be changed
self.assertEqual(len(p2.flat.notesAndRests), 16)
p3 = a.parts[2]
self.assertEqual(len(p3.flat.notesAndRests), 16)
p3.stripTies(inPlace=True, retainContainers=True)
self.assertEqual(len(p3.flat.notesAndRests), 3)
p4 = a.parts[3]
self.assertEqual(len(p4.flat.notesAndRests), 16)
p4Notes = p4.stripTies(retainContainers=False)
# original should be unchanged
self.assertEqual(len(p4.flat.notesAndRests), 16)
# fewer notes remain after stripping ties
self.assertEqual(len(p4Notes.notesAndRests), 10)
def testGetElementsByOffsetZeroLength(self):
'''
Testing multiple zero-length elements with mustBeginInSpan:
'''
c = clef.TrebleClef()
ts = meter.TimeSignature('4/4')
ks = key.KeySignature(2)
s = Stream()
s.insert(0.0, c)
s.insert(0.0, ts)
s.insert(0.0, ks)
l1 = len(s.getElementsByOffset(0.0, mustBeginInSpan=True))
l2 = len(s.getElementsByOffset(0.0, mustBeginInSpan=False))
self.assertEqual(l1, 3)
self.assertEqual(l2, 3)
def testStripTiesScore(self):
'''Test stripTies using the Score method
'''
from music21 import corpus, converter
from music21.musicxml import testPrimitive
# This score has 4 parts, each with eight measures, and 2 half-notes
# per measure, equaling 16 half notes, but with differing tie type.
# 1: . .~|~. .~|~.~~.~|~. .~|~. .~|~.~~. | .~~.~|~.~~. ||
# 2: .~~.~|~.~~. | .~~.~|~.~~. | .~~.~|~.~~. | .~~.~|~. . ||
# 3: .~~.~|~. .~|~.~~. | .~~.~|~.~~.~|~.~~.~|~.~~.~|~.~~. ||
# 4: . . | .~~. | . .~|~.~~. | . .~|~. .~|~. .~|~. . ||
s = converter.parse(testPrimitive.multiMeasureTies)
self.assertEqual(len(s.parts), 4)
self.assertEqual(len(s.parts[0].flat.notesAndRests), 16)
self.assertEqual(len(s.parts[1].flat.notesAndRests), 16)
self.assertEqual(len(s.parts[2].flat.notesAndRests), 16)
self.assertEqual(len(s.parts[3].flat.notesAndRests), 16)
# first, in place false
sPost = s.stripTies(inPlace=False)
self.assertEqual(len(sPost.parts[0].flat.notesAndRests), 6)
self.assertEqual(len(sPost.parts[1].flat.notesAndRests), 5)
self.assertEqual(len(sPost.parts[2].flat.notesAndRests), 3)
self.assertEqual(len(sPost.parts[3].flat.notesAndRests), 10)
# make sure original is unchanged
self.assertEqual(len(s.parts[0].flat.notesAndRests), 16)
self.assertEqual(len(s.parts[1].flat.notesAndRests), 16)
self.assertEqual(len(s.parts[2].flat.notesAndRests), 16)
self.assertEqual(len(s.parts[3].flat.notesAndRests), 16)
# second, in place true
sPost = s.stripTies(inPlace=True)
self.assertEqual(len(s.parts[0].flat.notesAndRests), 6)
self.assertEqual(len(s.parts[1].flat.notesAndRests), 5)
self.assertEqual(len(s.parts[2].flat.notesAndRests), 3)
self.assertEqual(len(s.parts[3].flat.notesAndRests), 10)
# just two ties here
s = corpus.parse('bach/bwv66.6')
self.assertEqual(len(s.parts), 4)
self.assertEqual(len(s.parts[0].flat.notesAndRests), 37)
self.assertEqual(len(s.parts[1].flat.notesAndRests), 42)
self.assertEqual(len(s.parts[2].flat.notesAndRests), 45)
self.assertEqual(len(s.parts[3].flat.notesAndRests), 41)
# perform strip ties in place
s.stripTies(inPlace=True)
self.assertEqual(len(s.parts[0].flat.notesAndRests), 36)
self.assertEqual(len(s.parts[1].flat.notesAndRests), 42)
self.assertEqual(len(s.parts[2].flat.notesAndRests), 44)
self.assertEqual(len(s.parts[3].flat.notesAndRests), 41)
def testTwoStreamMethods(self):
from music21.note import Note
(n11,n12,n13,n14) = (Note(), Note(), Note(), Note())
(n21,n22,n23,n24) = (Note(), Note(), Note(), Note())
n11.step = "C"
n12.step = "D"
n13.step = "E"
n14.step = "F"
n21.step = "G"
n22.step = "A"
n23.step = "B"
n24.step = "C"
n24.octave = 5
n11.duration.type = "half"
n12.duration.type = "whole"
n13.duration.type = "eighth"
n14.duration.type = "half"
n21.duration.type = "half"
n22.duration.type = "eighth"
n23.duration.type = "whole"
n24.duration.type = "eighth"
stream1 = Stream()
stream1.append([n11,n12,n13,n14])
stream2 = Stream()
stream2.append([n21,n22,n23,n24])
attackedTogether = stream1.simultaneousAttacks(stream2)
self.assertEqual(len(attackedTogether), 3) # nx1, nx2, nx4
thisNote = stream2.getElementsByOffset(attackedTogether[1])[0]
self.assertTrue(thisNote is n22)
playingWhenAttacked = stream1.playingWhenAttacked(n23)
self.assertTrue(playingWhenAttacked is n12)
allPlayingWhileSounding = stream2.allPlayingWhileSounding(n14)
self.assertEqual(len(allPlayingWhileSounding), 1)
self.assertTrue(allPlayingWhileSounding[0] is n24)
# trimPlayingWhileSounding = \
# stream2.trimPlayingWhileSounding(n12)
# assert trimPlayingWhileSounding[0] == n22
# assert trimPlayingWhileSounding[1].duration.quarterLength == 3.5
def testMeasureRange(self):
from music21 import corpus
a = corpus.parse('bach/bwv324.xml')
b = a.parts[3].measures(4,6)
self.assertEqual(len(b.getElementsByClass('Measure')), 3)
#b.show('t')
# first measure now has key sig
unused_bMeasureFirst = b.getElementsByClass('Measure')[0]
self.assertEqual(len(b.flat.getElementsByClass(
key.KeySignature)), 1)
# first measure now has meter
self.assertEqual(len(b.flat.getElementsByClass(
meter.TimeSignature)), 1)
# first measure now has clef
self.assertEqual(len(b.flat.getElementsByClass(clef.Clef)), 1)
#b.show()
# get first part
p1 = a.parts[0]
# get measure by class; this will not manipulate the measure
mExRaw = p1.getElementsByClass('Measure')[5]
self.assertEqual(str([n for n in mExRaw.notes]), '[<music21.note.Note B>, <music21.note.Note D>]')
self.assertEqual(len(mExRaw.flat), 3)
# get measure by using method; this will add elements
mEx = p1.measure(6)
self.assertEqual(str([n for n in mEx.notes]), '[<music21.note.Note B>, <music21.note.Note D>]')
self.assertEqual(len(mEx.flat), 3)
# make sure source has not changed
mExRaw = p1.getElementsByClass('Measure')[5]
self.assertEqual(str([n for n in mExRaw.notes]), '[<music21.note.Note B>, <music21.note.Note D>]')
self.assertEqual(len(mExRaw.flat), 3)
# test measures with no measure numbers
c = Stream()
for dummy in range(4):
m = Measure()
n = note.Note()
m.repeatAppend(n, 4)
c.append(m)
#c.show()
d = c.measures(2,3)
self.assertEqual(len(d), 2)
#d.show()
# try the score method
a = corpus.parse('bach/bwv324.xml')
b = a.measures(2,4)
self.assertEqual(len(b[0].flat.getElementsByClass(clef.Clef)), 1)
self.assertEqual(len(b[1].flat.getElementsByClass(clef.Clef)), 1)
self.assertEqual(len(b[2].flat.getElementsByClass(clef.Clef)), 1)
self.assertEqual(len(b[3].flat.getElementsByClass(clef.Clef)), 1)
self.assertEqual(len(b[0].flat.getElementsByClass(key.KeySignature)), 1)
self.assertEqual(len(b[1].flat.getElementsByClass(key.KeySignature)), 1)
self.assertEqual(len(b[2].flat.getElementsByClass(key.KeySignature)), 1)
self.assertEqual(len(b[3].flat.getElementsByClass(key.KeySignature)), 1)
#b.show()
def testMeasureOffsetMap(self):
from music21 import corpus
a = corpus.parse('bach/bwv324.xml')
mOffsetMap = a.parts[0].measureOffsetMap()
self.assertEqual(sorted(list(mOffsetMap.keys())),
[0.0, 4.0, 8.0, 12.0, 16.0, 20.0, 24.0, 34.0, 38.0] )
# try on a complete score
a = corpus.parse('bach/bwv324.xml')
mOffsetMap = a.measureOffsetMap()
#environLocal.printDebug([mOffsetMap])
self.assertEqual(sorted(list(mOffsetMap.keys())),
[0.0, 4.0, 8.0, 12.0, 16.0, 20.0, 24.0, 34.0, 38.0] )
for unused_key, value in mOffsetMap.items():
# each key contains 4 measures, one for each part
self.assertEqual(len(value), 4)
# we can get this information from Notes too!
a = corpus.parse('bach/bwv324.xml')
# get notes from one measure
mOffsetMap = a.parts[0].flat.measureOffsetMap(note.Note)
self.assertEqual(sorted(list(mOffsetMap.keys())), [0.0, 4.0, 8.0, 12.0, 16.0, 20.0, 24.0, 34.0, 38.0] )
self.assertEqual(str(mOffsetMap[0.0]), '[<music21.stream.Measure 1 offset=0.0>]')
self.assertEqual(str(mOffsetMap[4.0]), '[<music21.stream.Measure 2 offset=4.0>]')
# TODO: getting inconsistent results with these
# instead of storing a time value for locations, use an index
# count
m1 = a.parts[0].getElementsByClass('Measure')[1]
#m1.show('text')
mOffsetMap = m1.measureOffsetMap(note.Note)
# offset here is that of measure that originally contained this note
#environLocal.printDebug(['m1', m1, 'mOffsetMap', mOffsetMap])
self.assertEqual(sorted(list(mOffsetMap.keys())), [4.0] )
m2 = a.parts[0].getElementsByClass('Measure')[2]
mOffsetMap = m2.measureOffsetMap(note.Note)
# offset here is that of measure that originally contained this note
self.assertEqual(sorted(list(mOffsetMap.keys())), [8.0] )
# this should work but does not yet
# it seems that the flat score does not work as the flat part
# mOffsetMap = a.flat.measureOffsetMap('Note')
# self.assertEqual(sorted(mOffsetMap.keys()), [0.0, 4.0, 8.0, 12.0, 16.0, 20.0, 24.0, 28.0, 32.0] )
def testMeasureOffsetMapPostTie(self):
from music21 import corpus, stream
a = corpus.parse('bach/bwv4.8.xml')
# alto line syncopated/tied notes across bars
#a.show()
alto = a.parts[1]
self.assertEqual(len(alto.flat.notesAndRests), 73)
# offset map for measures looking at the part's Measures
# note that pickup bar is taken into account
post = alto.measureOffsetMap()
self.assertEqual(sorted(list(post.keys())), [0.0, 1.0, 5.0, 9.0, 13.0, 17.0, 21.0, 25.0, 29.0, 33.0, 37.0, 41.0, 45.0, 49.0, 53.0, 57.0, 61.0] )
# looking at Measure and Notes: no problem
post = alto.flat.measureOffsetMap([Measure, note.Note])
self.assertEqual(sorted(list(post.keys())), [0.0, 1.0, 5.0, 9.0, 13.0, 17.0, 21.0, 25.0, 29.0, 33.0, 37.0, 41.0, 45.0, 49.0, 53.0, 57.0, 61.0] )
# after stripping ties, we have a stream with fewer notes
altoPostTie = a.parts[1].stripTies()
# we can get the length of this directly b/c we just have a stream of
# notes, no Measures
self.assertEqual(len(altoPostTie.notesAndRests), 69)
# we can still get measure numbers:
mNo = altoPostTie.notesAndRests[3].getContextByClass(stream.Measure).number
self.assertEqual(mNo, 1)
mNo = altoPostTie.notesAndRests[8].getContextByClass(stream.Measure).number
self.assertEqual(mNo, 2)
mNo = altoPostTie.notesAndRests[15].getContextByClass(stream.Measure).number
self.assertEqual(mNo, 4)
# can we get an offset Measure map by looking for measures
post = altoPostTie.measureOffsetMap(stream.Measure)
# nothing: no Measures:
self.assertEqual(list(post.keys()), [])
# but, we can get an offset Measure map by looking at Notes
post = altoPostTie.measureOffsetMap(note.Note)
# now keys are populated from the Notes' original measure offsets:
self.assertEqual(sorted(list(post.keys())), [0.0, 1.0, 5.0, 9.0, 13.0, 17.0, 21.0, 25.0, 29.0, 33.0, 37.0, 41.0, 45.0, 49.0, 53.0, 57.0, 61.0])
#from music21 import graph
#graph.plotStream(altoPostTie, 'scatter', values=['pitchclass','offset'])
def testMusicXMLGenerationViaPropertyA(self):
'''Test output tests above just by calling the musicxml attribute
'''
a = ['c', 'g#', 'd-', 'f#', 'e', 'f' ] * 4
partOffset = 7.5
p = Stream()
for pitchName in a:
n = note.Note(pitchName)
n.quarterLength = 1.5
p.append(n)
p.offset = partOffset
p.transferOffsetToElements()
junk = p.getTimeSignatures(searchContext=True, sortByCreationTime=True)
p.makeRests(refStreamOrTimeRange=[0, 100],
inPlace=True)
self.assertEqual(p.lowestOffset, 0)
self.assertEqual(p.highestTime, 100.0)
GEX = m21ToXml.GeneralObjectExporter()
unused_mx = GEX.parse(p).decode('utf-8')
# can only recreate problem in the context of two Streams
s = Stream()
partOffsetShift = 1.25
partOffset = 7.5
for unused_x in range(2):
p = Stream()
for pitchName in a:
n = note.Note(pitchName)
n.quarterLength = 1.5
p.append(n)
p.offset = partOffset
s.insert(p)
partOffset += partOffsetShift
#s.show()
unused_mx = GEX.parse(p).decode('utf-8')
def testMusicXMLGenerationViaPropertyB(self):
'''Test output tests above just by calling the musicxml attribute
'''
n = note.Note()
n.quarterLength = 3
a = Stream()
a.repeatInsert(n, list(range(0,120,3)))
#a.show() # default time signature used
a.insert( 0, meter.TimeSignature("5/4") )
a.insert(10, meter.TimeSignature("2/4") )
a.insert( 3, meter.TimeSignature("3/16") )
a.insert(20, meter.TimeSignature("9/8") )
a.insert(40, meter.TimeSignature("10/4") )
GEX = m21ToXml.GeneralObjectExporter()
unused_mx = GEX.parse(a).decode('utf-8')
def testMusicXMLGenerationViaPropertyC(self):
'''Test output tests above just by calling the musicxml attribute
'''
a = ['c', 'g#', 'd-', 'f#', 'e', 'f' ] * 4
s = Stream()
partOffsetShift = 1.25
partOffset = 0
for unused_part in range(6):
p = Stream()
for pitchName in a:
n = note.Note(pitchName)
n.quarterLength = 1.5
p.append(n)
p.offset = partOffset
s.insert(p)
partOffset += partOffsetShift
#s.show()
GEX = m21ToXml.GeneralObjectExporter()
unused_mx = GEX.parse(p).decode('utf-8')
def testContextNestedA(self):
'''Testing getting clefs from higher-level streams
'''
s1 = Stream()
s2 = Stream()
n1 = note.Note()
c1 = clef.AltoClef()
s1.append(n1) # this is the model of a stream with a single part
s2.append(s1)
s2.insert(0, c1)
# from the lower level stream, we should be able to get to the
# higher level clef
post = s1.getContextByClass(clef.Clef)
self.assertEqual(isinstance(post, clef.AltoClef), True)
# we can also use getClefs to get this from s1 or s2
post = s1.getClefs()[0]
self.assertEqual(isinstance(post, clef.AltoClef), True)
post = s2.getClefs()[0]
self.assertEqual(isinstance(post, clef.AltoClef), True)
#environLocal.printDebug(['sites.get() of s1', s1.sites.get()])
# attempting to move the substream into a new stream
s3 = Stream()
s3.insert(s1) # insert at same offset as s2
# we cannot get the alto clef from s3; this makes sense
post = s3.getClefs()[0]
self.assertEqual(isinstance(post, clef.TrebleClef), True)
# s1 has both streams as sites
self.assertEqual(s1.hasSite(s3), True)
self.assertEqual(s1.hasSite(s2), True)
# but if we search s1, should it not find an alto clef?
post = s1.getClefs()
#environLocal.printDebug(['should not be treble clef:', post])
self.assertEqual(isinstance(post[0], clef.AltoClef), True)
# this all works fine
sMeasures = s2.makeMeasures(finalBarline='regular')
self.assertEqual(len(sMeasures), 1)
self.assertEqual(len(sMeasures.getElementsByClass('Measure')), 1) # one measure
self.assertEqual(len(sMeasures[0]), 3)
# first is clef
self.assertEqual(isinstance(sMeasures[0][0], clef.AltoClef), True)
# second is sig
self.assertEqual(str(sMeasures[0][1]), '<music21.meter.TimeSignature 4/4>')
#environLocal.printDebug(['here', sMeasures[0][2]])
#sMeasures.show('t')
# the third element is a Note; we get it from flattening during
# makeMeasures
self.assertEqual(isinstance(sMeasures[0][2], note.Note), True)
# this shows the proper output with the proper clef.
#sMeasures.show()
# we cannot get clefs from sMeasures b/c that is the topmost
# stream container; there are no clefs here, only at a lower level
post = sMeasures.getElementsByClass(clef.Clef)
self.assertEqual(len(post), 0)
def testContextNestedB(self):
'''Testing getting clefs from higher-level streams
'''
sInner = Stream()
sInner.id = 'innerStream'
n1 = note.Note()
sInner.append(n1) # this is the model of a stream with a single part
sOuter = Stream()
sOuter.id = 'outerStream'
sOuter.append(sInner)
c1 = clef.AltoClef()
sOuter.insert(0, c1)
# this works fine
post = sInner.getContextByClass(clef.Clef)
self.assertEqual(isinstance(post, clef.AltoClef), True)
# if we flatten sInner, we cannot still get the clef: why?
sInnerFlat = sInner.flat
sInnerFlat.id = 'sInnerFlat'
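# sInnerFlat is a derived Stream; a context search should be able to
# follow its derivation chain back to sInner and from there to sOuter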
# # but it has sOuter as a context
# self.assertEqual(sInnerFlat.hasSite(sOuter), True)
# #environLocal.printDebug(['sites.get() of sInnerFlat', sInnerFlat.sites.get()])
# #environLocal.printDebug(['sites.siteDict of sInnerFlat', sInnerFlat.sites.siteDict])
#
#
# self.assertEqual(sInnerFlat.hasSite(sOuter), True)
#
# # this returns the proper dictionary entry
# #environLocal.printDebug(
# # ['sInnerFlat.sites.siteDict[id(sInner)', sInnerFlat.sites.siteDict[id(sOuter)]])
# # we can extract out the same reference
# unused_sOuterOut = sInnerFlat.sites.getById(id(sOuter))
# this works
post = sInnerFlat.getContextByClass(clef.Clef)
self.assertEqual(isinstance(post, clef.AltoClef), True, "post %r is not an AltoClef" % post)
# 2014 April -- timeSpans version -- not needed...
## this will only work if the callerFirst is manually set to sInnerFlat
## otherwise, this interprets the DefinedContext object as the first
## caller
#post = sInnerFlat.sites.getObjByClass(clef.Clef, callerFirst=sInnerFlat)
#self.assertEqual(isinstance(post, clef.AltoClef), True)
def testContextNestedC(self):
'''Testing getting clefs from higher-level streams
'''
from music21 import sites
s1 = Stream()
s1.id = 's1'
s2 = Stream()
s2.id = 's2'
n1 = note.Note()
c1 = clef.AltoClef()
s1.append(n1) # this is the model of a stream with a single part
s2.append(s1)
s2.insert(0, c1)
# this works fine
post = s1.getContextByClass(clef.Clef)
self.assertEqual(isinstance(post, clef.AltoClef), True)
# this is a key tool of the serial reverse search
post = s2.getElementAtOrBefore(0, [clef.Clef])
self.assertEqual(isinstance(post, clef.AltoClef), True)
# this is a key tool of the serial reverse search
post = s2.flat.getElementAtOrBefore(0, [clef.Clef])
self.assertEqual(isinstance(post, clef.AltoClef), True)
# s1 is in s2; but s1.flat is not in s2! -- not true if isFlat is true
self.assertEqual(s2.elementOffset(s1), 0.0)
self.assertRaises(sites.SitesException, s2.elementOffset, s1.flat)
# this did not work before; the clef is in s2; it's not in a context of s2
post = s2.getContextByClass(clef.Clef)
self.assertEqual(isinstance(post, clef.AltoClef), True)
# we can find the clef from the flat version of s1
post = s1.flat.getContextByClass(clef.Clef)
self.assertEqual(isinstance(post, clef.AltoClef), True)
def testContextNestedD(self):
'''
Testing getting clefs from higher-level streams
'''
n1 = note.Note()
n2 = note.Note()
s1 = Part()
s1.id = 's1'
s2 = Part()
s2.id = 's2'
sOuter = Score()
sOuter.id = 'sOuter'
s1.append(n1)
s2.append(n2)
sOuter.insert(0, s1)
sOuter.insert(0, s2)
self.assertEqual(s1.activeSite, sOuter)
ac = clef.AltoClef()
ac.priority = -1
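# a priority of -1 sorts the clef before the two Parts at offset 0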
sOuter.insert(0, ac)
# both output parts have alto clefs
# get clef from higher-level stream; only option
self.assertEqual(s1.activeSite, sOuter)
post = s1.getClefs()[0]
self.assertTrue(isinstance(post, clef.AltoClef))
self.assertEqual(s1.activeSite, sOuter)
post = s2.getClefs()[0]
self.assertTrue(isinstance(post, clef.AltoClef))
# now we insert a clef in s2; s2 will get this clef first
s2.insert(0, clef.TenorClef())
# only second part should have tenor clef
post = s2.getClefs()[0]
self.assertTrue(isinstance(post, clef.TenorClef))
# but stream s1 should get the alto clef still
#print list(s1.contextSites())
post = s1.getContextByClass('Clef')
#print post
self.assertTrue(isinstance(post, clef.AltoClef))
# s2 flat gets the tenor clef; it was inserted in it
post = s2.flat.getClefs()[0]
self.assertTrue(isinstance(post, clef.TenorClef))
# a copy copies the clef; so we still get the same clef
s2FlatCopy = copy.deepcopy(s2.flat)
post = s2FlatCopy.getClefs()[0]
self.assertTrue(isinstance(post, clef.TenorClef))
# s1 flat will get the alto clef; it still has a pathway
post = s1.flat.getClefs()[0]
self.assertTrue(isinstance(post, clef.AltoClef))
# once we create a deepcopy of s1, it is no longer connected to
# its parent if we purge orphans and it is not in sOuter
s1Flat = s1.flat
s1Flat.id = 's1Flat'
s1FlatCopy = copy.deepcopy(s1Flat)
s1FlatCopy.id = 's1FlatCopy'
self.assertEqual(len(s1FlatCopy.getClefs(returnDefault=False)), 1)
post = s1FlatCopy.getClefs(returnDefault=False)[0]
self.assertTrue(isinstance(post, clef.AltoClef), "post %r is not an AltoClef" % post)
post = s1Flat.getClefs()[0]
self.assertTrue(isinstance(post, clef.AltoClef), post)
#environLocal.printDebug(['s1.activeSite', s1.activeSite])
self.assertTrue(sOuter in s1.sites.getSites())
s1Measures = s1.makeMeasures()
#print s1Measures[0].clef
# this used to be True, but I think it's better as False now...
#self.assertTrue(isinstance(s1Measures[0].clef, clef.AltoClef), s1Measures[0].clef)
self.assertTrue(isinstance(s1Measures[0].clef, clef.TrebleClef), s1Measures[0].clef)
s2Measures = s2.makeMeasures()
self.assertTrue(isinstance(s2Measures[0].clef, clef.TenorClef))
# try making a deep copy of s3
s3copy = copy.deepcopy(sOuter)
#s1Measures = s3copy[0].makeMeasures()
# TODO: had to comment out with changes to getElementAtOrBefore
# problem is sort order of found elements at or before
# if two elements of the same class are found at the same offset
# they cannot be distinguished
# perhaps need to return more than one;
# or getElementAtOrBefore needs to return a list
s2Measures = s3copy.getElementsByClass('Stream')[1].makeMeasures()
self.assertEqual(isinstance(s2Measures[0].clef, clef.TenorClef), True)
#s2Measures.show() # this shows the proper clef
#TODO: this still returns tenor clef for both parts
# need to examine
# now we insert a clef in s1; s1 will get this clef first
s1.insert(0, clef.BassClef())
post = s1.getClefs()[0]
self.assertEqual(isinstance(post, clef.BassClef), True)
#s3.show()
def testMakeRestsA(self):
a = ['c', 'g#', 'd-', 'f#', 'e', 'f' ] * 4
partOffsetShift = 1.25
partOffset = 2 # start at non zero
for unused_part in range(6):
p = Stream()
for pitchName in a:
n = note.Note(pitchName)
n.quarterLength = 1.5
p.append(n)
p.offset = partOffset
self.assertEqual(p.lowestOffset, 0)
p.transferOffsetToElements()
self.assertEqual(p.lowestOffset, partOffset)
p.makeRests()
#environLocal.printDebug(['first element', p[0], p[0].duration])
# by default, initial rest should be made
sub = p.getElementsByClass(note.Rest)
self.assertEqual(len(sub), 1)
self.assertEqual(sub.duration.quarterLength, partOffset)
# first element should have offset of first dur
self.assertEqual(p[1].offset, sub.duration.quarterLength)
partOffset += partOffsetShift
def testMakeRestsB(self):
# test makeRests fillGaps
from music21 import stream
s = stream.Stream()
m1 = stream.Measure()
m1.timeSignature = meter.TimeSignature('4/4')
m1.insert(2, note.Note())
m2 = stream.Measure()
m2.insert(1, note.Note())
self.assertEqual(m2.isSorted, True)
s.insert(0, m1)
s.insert(4, m2)
# must connect Measures to Streams before filling gaps
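# timeRangeFromBarDuration presumably takes the fill range from each
# Measure's bar duration (its TimeSignature, found via context), here 0-4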
m1.makeRests(fillGaps=True, timeRangeFromBarDuration=True)
m2.makeRests(fillGaps=True, timeRangeFromBarDuration=True)
self.assertEqual(m2.isSorted, True)
#m2.sort()
match = str([(n.offset, n, n.duration) for n in m2.flat.notesAndRests])
self.assertEqual(match, '[(0.0, <music21.note.Rest rest>, <music21.duration.Duration 1.0>), (1.0, <music21.note.Note C>, <music21.duration.Duration 1.0>), (2.0, <music21.note.Rest rest>, <music21.duration.Duration 2.0>)]')
match = str([(n.offset, n, n.duration) for n in m2.flat])
self.assertEqual(match, '[(0.0, <music21.note.Rest rest>, <music21.duration.Duration 1.0>), (1.0, <music21.note.Note C>, <music21.duration.Duration 1.0>), (2.0, <music21.note.Rest rest>, <music21.duration.Duration 2.0>)]')
#m2.show()
match = str([n for n in s.flat.notesAndRests])
self.assertEqual(match, '[<music21.note.Rest rest>, <music21.note.Note C>, <music21.note.Rest rest>, <music21.note.Rest rest>, <music21.note.Note C>, <music21.note.Rest rest>]')
match = str([(n, n.duration) for n in s.flat.notesAndRests])
self.assertEqual(match, '[(<music21.note.Rest rest>, <music21.duration.Duration 2.0>), (<music21.note.Note C>, <music21.duration.Duration 1.0>), (<music21.note.Rest rest>, <music21.duration.Duration 1.0>), (<music21.note.Rest rest>, <music21.duration.Duration 1.0>), (<music21.note.Note C>, <music21.duration.Duration 1.0>), (<music21.note.Rest rest>, <music21.duration.Duration 2.0>)]')
GEX = m21ToXml.GeneralObjectExporter()
unused_mx = GEX.parse(s).decode('utf-8')
#s.show('text')
#s.show()
def testMakeMeasuresInPlace(self):
sScr = Stream()
sScr.insert(0, clef.TrebleClef())
sScr.insert(0, meter.TimeSignature('3/4'))
sScr.append(note.Note('C4', quarterLength = 3.0))
sScr.append(note.Note('D4', quarterLength = 3.0))
sScr.makeMeasures(inPlace = True)
self.assertEqual(len(sScr.getElementsByClass('Measure')), 2)
self.assertEqual(sScr.measure(1).notes[0].name, 'C')
self.assertEqual(sScr.measure(2).notes[0].name, 'D')
def testMakeMeasuresMeterStream(self):
'''Testing making measures of various sizes with a supplied single-element meter stream. This illustrates an approach to partitioning elements by windows of various sizes.
'''
from music21 import corpus
sBach = corpus.parse('bach/bwv324.xml')
meterStream = Stream()
meterStream.insert(0, meter.TimeSignature('2/4'))
# need to call make ties to allocate notes
sPartitioned = sBach.flat.makeMeasures(meterStream).makeTies(
inPlace=False)
self.assertEqual(len(sPartitioned.getElementsByClass('Measure')), 21)
meterStream = Stream()
meterStream.insert(0, meter.TimeSignature('1/4'))
# need to call make ties to allocate notes
sPartitioned = sBach.flat.makeMeasures(meterStream).makeTies(
inPlace=False)
self.assertEqual(len(sPartitioned.getElementsByClass('Measure')), 42)
meterStream = Stream()
meterStream.insert(0, meter.TimeSignature('3/4'))
# need to call make ties to allocate notes
sPartitioned = sBach.flat.makeMeasures(meterStream).makeTies(
inPlace=False)
self.assertEqual(len(sPartitioned.getElementsByClass('Measure')), 14)
meterStream = Stream()
meterStream.insert(0, meter.TimeSignature('12/4'))
# need to call make ties to allocate notes
sPartitioned = sBach.flat.makeMeasures(meterStream).makeTies(
inPlace=False)
self.assertEqual(len(sPartitioned.getElementsByClass('Measure')), 4)
meterStream = Stream()
meterStream.insert(0, meter.TimeSignature('48/4'))
# need to call make ties to allocate notes
sPartitioned = sBach.flat.makeMeasures(meterStream).makeTies(
inPlace=False)
self.assertEqual(len(sPartitioned.getElementsByClass('Measure')), 1)
def testMakeMeasuresWithBarlines(self):
'''Test makeMeasures with optional barline parameters.
'''
from music21 import stream
s = stream.Stream()
s.repeatAppend(note.Note(quarterLength=.5), 20)
s.insert(0, meter.TimeSignature('5/8'))
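# twenty eighth notes (10 quarters) in 5/8 fill exactly four measures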
# default is no normal barlines, but a final barline
barred1 = s.makeMeasures()
self.assertEqual(
str(barred1.getElementsByClass('Measure')[-1].rightBarline),
'<music21.bar.Barline style=final>')
#barred1.show()
barred2 = s.makeMeasures(innerBarline='dashed', finalBarline='double')
match = [str(m.rightBarline) for m in
barred2.getElementsByClass('Measure')]
self.assertEqual(match, ['<music21.bar.Barline style=dashed>', '<music21.bar.Barline style=dashed>', '<music21.bar.Barline style=dashed>', '<music21.bar.Barline style=double>'])
#barred2.show()
# try using bar objects
bar1 = bar.Barline('none')
bar2 = bar.Barline('short')
barred3 = s.makeMeasures(innerBarline=bar1, finalBarline=bar2)
#barred3.show()
match = [str(m.rightBarline) for m in
barred3.getElementsByClass('Measure')]
self.assertEqual(match, ['<music21.bar.Barline style=none>', '<music21.bar.Barline style=none>', '<music21.bar.Barline style=none>', '<music21.bar.Barline style=short>'])
# setting to None will not set a barline object at all
barred4 = s.makeMeasures(innerBarline=None, finalBarline=None)
match = [str(m.rightBarline) for m in
barred4.getElementsByClass('Measure')]
self.assertEqual(match, ['None', 'None', 'None', 'None'] )
def testRemove(self):
'''Test removing components from a Stream.
'''
s = Stream()
n1 = note.Note('g')
n2 = note.Note('g#')
n3 = note.Note('a')
s.insert(0, n1)
s.insert(10, n3)
s.insert(5, n2)
self.assertEqual(len(s), 3)
self.assertEqual(n1.activeSite, s)
s.remove(n1)
self.assertEqual(len(s), 2)
# activeSite is now set to None
self.assertEqual(n1.activeSite, None)
def testRemoveByClass(self):
from music21 import stream
s = stream.Stream()
s.repeatAppend(clef.BassClef(), 2)
s.repeatAppend(note.Note(), 2)
s.repeatAppend(clef.TrebleClef(), 2)
self.assertEqual(len(s), 6)
s.removeByClass('BassClef')
self.assertEqual(len(s), 4)
self.assertEqual(len(s.notes), 2)
s.removeByClass(clef.Clef)
self.assertEqual(len(s), 2)
self.assertEqual(len(s.notes), 2)
s.removeByClass(['Music21Object'])
self.assertEqual(len(s.notes), 0)
def testReplace(self):
'''Test replacing components from a Stream.
'''
s = Stream()
n1 = note.Note('g')
n2 = note.Note('g#')
n3 = note.Note('a')
n4 = note.Note('c')
s.insert(0, n1)
s.insert(5, n2)
self.assertEqual(len(s), 2)
s.replace(n1, n3)
self.assertEqual([s[0], s[1]], [n3, n2])
s.replace(n2, n4)
self.assertEqual([s[0], s[1]], [n3, n4])
s.replace(n4, n1)
self.assertEqual([s[0], s[1]], [n3, n1])
def testReplaceA1(self):
from music21 import corpus
sBach = corpus.parse('bach/bwv324.xml')
partSoprano = sBach.parts[0]
c1 = partSoprano.flat.getElementsByClass('Clef')[0]
self.assertEqual(isinstance(c1, clef.TrebleClef), True)
# now, replace with a different clef
c2 = clef.AltoClef()
partSoprano.flat.replace(c1, c2, allDerived=True)
# all views of the Stream have been updated
cTest = sBach.parts[0].flat.getElementsByClass('Clef')[0]
self.assertEqual(isinstance(cTest, clef.AltoClef), True)
def testReplaceB(self):
n1 = note.Note('g')
n2 = note.Note('g#')
s0 = Stream()
s1 = copy.deepcopy(s0)
s2 = copy.deepcopy(s1)
s3 = Stream()
s0.insert( 0, n1)
s1.insert(10, n1)
s2.insert(20, n1)
s3.insert(30, n1)
s1.replace(n1, n2, allDerived=True)
# s1 is derived from s0 so n1 is replaced
self.assertIs(s0[0], n2)
self.assertEqual(s0[0].getOffsetBySite(s0), 0)
# s1 was the replacement stream, so definitely n1 becomes n2
self.assertIs(s1[0], n2)
self.assertEqual(s1[0].getOffsetBySite(s1), 10)
# s2 was derived from s0, not vice versa, so n1 is left alone.
self.assertIs(s2[0], n1)
self.assertEqual(s2[0].getOffsetBySite(s2), 20)
# s3 is completely out of any derivation chain, so left alone
self.assertIs(s3[0], n1)
self.assertEqual(s3[0].getOffsetBySite(s3), 30)
def testReplaceDerivated(self):
from music21 import corpus
qj = corpus.parse('ciconia/quod_jactatur').parts[0].measures(1,2)
qj.id = 'measureExcerpt'
qjflat = qj.flat
dc = list(qjflat.derivation.chain())
self.assertIs(dc[0], qj)
k1 = qjflat.getElementsByClass(key.KeySignature)[0]
self.assertEqual(k1.sharps, -1)
k3flats = key.KeySignature(-3)
# put k1 in an unrelated site:
mUnrelated = Measure()
mUnrelated.insert(0, k1)
# here's the big one
qjflat.replace(k1, k3flats, allDerived=True)
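# allDerived=True should propagate the replacement through the derivation
# chain (back to qj) while leaving unrelated sites like mUnrelated untouched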
kWhich = qjflat.getElementsByClass(key.KeySignature)[0]
self.assertIs(kWhich, k3flats)
self.assertEqual(kWhich.sharps, -3)
kWhich2 = qj.recurse().getElementsByClass(key.KeySignature)[0]
self.assertIs(kWhich2, k3flats)
self.assertEqual(kWhich2.sharps, -3)
# check that unrelated is untouched
self.assertIs(mUnrelated[0], k1)
def testDoubleStreamPlacement(self):
n1 = note.Note()
s1 = Stream()
s1.insert(n1)
#environLocal.printDebug(['n1.siteIds after one insertion', n1, n1.getSites(), n1.sites.getSiteIds()])
s2 = Stream()
s2.insert(s1)
#environLocal.printDebug(['n1.siteIds after container insertion', n1, n1.getSites(), n1.sites.getSiteIds()])
s2Flat = s2.flat
#environLocal.printDebug(['s1', s1, id(s1)])
#environLocal.printDebug(['s2', s2, id(s2)])
#environLocal.printDebug(['s2flat', s2Flat, id(s2Flat)])
#environLocal.printDebug(['n1.siteIds', n1, n1.getSites(), n1.sites.getSiteIds()])
# previously, one of these raised an error
unused_s3 = copy.deepcopy(s2Flat)
s3 = copy.deepcopy(s2.flat)
unused_s3Measures = s3.makeMeasures()
def testBestTimeSignature(self):
'''Get a time signature based on components in a measure.
'''
m = Measure()
for ql in [2,3,2]:
n = note.Note()
n.quarterLength = ql
m.append(n)
ts = m.bestTimeSignature()
self.assertEqual(ts.numerator, 7)
self.assertEqual(ts.denominator, 4)
m = Measure()
for ql in [1.5, 1.5]:
n = note.Note()
n.quarterLength = ql
m.append(n)
ts = m.bestTimeSignature()
self.assertEqual(ts.numerator, 6)
self.assertEqual(ts.denominator, 8)
m = Measure()
for ql in [.25, 1.5]:
n = note.Note()
n.quarterLength = ql
m.append(n)
ts = m.bestTimeSignature()
self.assertEqual(ts.numerator, 7)
self.assertEqual(ts.denominator, 16)
def testGetKeySignatures(self):
'''Searching contexts for key signatures
'''
s = Stream()
ks1 = key.KeySignature(3)
ks2 = key.KeySignature(-3)
s.append(ks1)
s.append(ks2)
post = s.getKeySignatures()
self.assertEqual(post[0], ks1)
self.assertEqual(post[1], ks2)
# try creating a key signature in one of two measures
# try to get last active key signature
ks1 = key.KeySignature(3)
m1 = Measure()
n1 = note.Note()
n1.quarterLength = 4
m1.append(n1)
m1.keySignature = ks1 # assign to measure via property
m2 = Measure()
n2 = note.Note()
n2.quarterLength = 4
m2.append(n2)
s = Stream()
s.append(m1)
s.append(m2)
# can get from measure
post = m1.getKeySignatures()
self.assertEqual(post[0], ks1)
# we can get from the Stream by flattening
post = s.flat.getKeySignatures()
self.assertEqual(post[0], ks1)
# we can get the key signature in m1 from m2
post = m2.getKeySignatures()
self.assertEqual(post[0], ks1)
def testGetKeySignaturesThreeMeasures(self):
'''Searching contexts for key signatures
'''
ks1 = key.KeySignature(3)
ks3 = key.KeySignature(5)
m1 = Measure()
n1 = note.Note()
n1.quarterLength = 4
m1.append(n1)
m1.keySignature = ks1 # assign to measure via property
m2 = Measure()
n2 = note.Note()
n2.quarterLength = 4
m2.append(n2)
m3 = Measure()
n3 = note.Note()
n3.quarterLength = 4
m3.append(n3)
m3.keySignature = ks3 # assign to measure via property
s = Stream()
s.append(m1)
s.append(m2)
s.append(m3)
# can get from measure
post = m1.getKeySignatures()
self.assertEqual(post[0], ks1)
# we can get the key signature in m1 from m2
post = m2.getKeySignatures()
self.assertEqual(post[0], ks1)
# if we search m3, we get the key signature in m3
post = m3.getKeySignatures()
self.assertEqual(post[0], ks3)
def testMakeAccidentalsA(self):
'''Test accidental display setting
'''
s = Stream()
n1 = note.Note('a#')
n2 = note.Note('a4')
r1 = note.Rest()
c1 = chord.Chord(['a#2', 'a4', 'a5'])
n3 = note.Note('a4')
s.append(n1)
s.append(r1)
s.append(n2)
s.append(c1)
s.append(n3)
s.makeAccidentals()
self.assertEqual(n2.pitch.accidental.displayStatus, True)
# both a's in the chord now have naturals but are hidden
self.assertEqual(c1.pitches[1].accidental, None)
#self.assertEqual(c1.pitches[2].accidental.displayStatus, True)
# not getting a natural here because of chord tones
#self.assertEqual(n3.pitch.accidental.displayStatus, True)
#self.assertEqual(n3.pitch.accidental, None)
#s.show()
s = Stream()
n1 = note.Note('a#')
n2 = note.Note('a')
r1 = note.Rest()
c1 = chord.Chord(['a#2', 'a4', 'a5'])
s.append(n1)
s.append(r1)
s.append(n2)
s.append(c1)
s.makeAccidentals(cautionaryPitchClass=False)
# a's in the chord do not have naturals
self.assertEqual(c1.pitches[1].accidental, None)
self.assertEqual(c1.pitches[2].accidental, None)
def testMakeAccidentalsB(self):
from music21 import corpus
s = corpus.parse('monteverdi/madrigal.5.3.rntxt')
m34 = s.parts[0].getElementsByClass('Measure')[33]
c = m34.getElementsByClass('Chord')
# assuming not showing accidental b/c of key
self.assertEqual(str(c[1].pitches), '(<music21.pitch.Pitch B-4>, <music21.pitch.Pitch D5>, <music21.pitch.Pitch F5>)')
# because of key
self.assertEqual(str(c[1].pitches[0].accidental.displayStatus), 'False')
s = corpus.parse('monteverdi/madrigal.5.4.rntxt')
m74 = s.parts[0].getElementsByClass('Measure')[73]
c = m74.getElementsByClass('Chord')
# has correct pitches but natural not showing on C
self.assertEqual(str(c[0].pitches), '(<music21.pitch.Pitch C5>, <music21.pitch.Pitch E5>, <music21.pitch.Pitch G5>)')
self.assertEqual(str(c[0].pitches[0].accidental), 'None')
def testMakeAccidentalsC(self):
from music21 import stream
# this isolates the case where a new measure uses an accidental
# that was used in a past measure
m1 = stream.Measure()
m1.repeatAppend(note.Note('f4'), 2)
m1.repeatAppend(note.Note('f#4'), 2)
m2 = stream.Measure()
m2.repeatAppend(note.Note('f#4'), 4)
ex = stream.Part()
ex.append([m1, m2])
# without applying make accidentals, all sharps are shown
self.assertEqual(len(ex.flat.notes), 8)
self.assertEqual(len(ex.flat.notes[2:]), 6)
#ex.flat.notes[2:].show()
# all sharps, unknown display status (displayStatus == None)
acc = [str(n.pitch.accidental) for n in ex.flat.notes[2:]]
self.assertEqual(acc, ['<accidental sharp>', '<accidental sharp>', '<accidental sharp>', '<accidental sharp>', '<accidental sharp>', '<accidental sharp>'])
display = [n.pitch.accidental.displayStatus for n in ex.flat.notes[2:]]
self.assertEqual(display, [None, None, None, None, None, None])
# call make accidentals
# cautionaryNotImmediateRepeat=True is default
# cautionaryPitchClass=True is default
ex.makeAccidentals(inPlace=True)
display = [n.pitch.accidental.displayStatus for n in ex.flat.notes[2:]]
# need the second true b/c it is the start of a new measure
self.assertEqual(display, [True, False, True, False, False, False])
p = stream.Part()
p.insert(0, meter.TimeSignature('2/4'))
tuplet1 = note.Note("E-4", quarterLength=1.0/3.0)
tuplet2 = note.Note("F#4", quarterLength=2.0/3.0)
p.repeatAppend(tuplet1, 10)
p.repeatAppend(tuplet2, 7)
ex = p.makeNotation()
#ex.show('text')
display = [n.pitch.accidental.displayStatus for n in ex.flat.notes]
self.assertEqual(display, [1,0,0, 0,0,0, 1,0,0, 0, 1, 1, 0, 0, 1, 0, 0])
def testMakeAccidentalsD(self):
from music21 import stream
p1 = stream.Part()
m1 = stream.Measure()
m1.append(meter.TimeSignature('4/4'))
m1.append(note.Note('C#', type='half'))
m1.append(note.Note('C#', type='half'))
m1.rightBarline = 'final'
p1.append(m1)
p1.makeNotation(inPlace=True)
match = [p.accidental.displayStatus for p in p1.pitches]
self.assertEqual(match, [True, False])
m = p1.measure(1)
self.assertEqual(str(m.rightBarline), '<music21.bar.Barline style=final>')
def testMakeAccidentalsWithKeysInMeasures(self):
scale1 = ['c4', 'd4', 'e4', 'f4', 'g4', 'a4', 'b4', 'c5']
scale2 = ['c', 'd', 'e-', 'f', 'g', 'a-', 'b-', 'c5']
scale3 = ['c#', 'd#', 'e#', 'f#', 'g#', 'a#', 'b#', 'c#5']
s = Stream()
for scale in [scale1, scale2, scale3]:
for ks in [key.KeySignature(0), key.KeySignature(2),
key.KeySignature(4), key.KeySignature(7), key.KeySignature(-1),
key.KeySignature(-3)]:
m = Measure()
m.timeSignature = meter.TimeSignature('4/4')
m.keySignature = ks
for p in scale*2:
n = note.Note(p)
n.quarterLength = .25
n.addLyric(n.pitch.name)
m.append(n)
m.makeBeams(inPlace=True)
m.makeAccidentals(inPlace=True)
s.append(m)
# TODO: add tests
#s.show()
def testMakeAccidentalsTies(self):
'''
tests to make sure that Accidental display status is correct after a tie.
'''
from music21 import converter
bm = converter.parse(
"tinynotation: 4/4 c#'2 b-2~ b-8 c#'8~ c#'8 b-8 c#'8 b-8~ b-8~ b-8",
makeNotation=False)
bm.makeNotation(inPlace = True, cautionaryNotImmediateRepeat = False)
allNotes = bm.flat.notes
# 0C# 1B-~ | 2B- 3C#~ 4C# 6B- 7C# 8B-~ 9B-~ 10B-
ds = [True, True, False, True, False, True, False, False, False, False]
for i in range(len(allNotes)):
self.assertEqual(allNotes[i].pitch.accidental.displayStatus,
ds[i],
"%d failed, %s != %s" %
(i, allNotes[i].pitch.accidental.displayStatus, ds[i]))
# add another B-flat just after the tied one...
bm = converter.parse(
"tinynotation: 4/4 c#'2 b-2~ b-8 b-8 c#'8~ c#'8 b-8 c#'8 b-8~ b-8~ b-8",
makeNotation=False)
bm.makeNotation(inPlace = True, cautionaryNotImmediateRepeat = False)
allNotes = bm.flat.notes
# 0C# 1B-~ | 2B- 3B- 4C#~ 5C# 6B- 7C# 8B-~ 9B-~ | 10B-
ds = [True, True, False, True, True, False, False, False, False, False, False]
for i in range(len(allNotes)):
self.assertEqual(allNotes[i].pitch.accidental.displayStatus,
ds[i],
"%d failed, %s != %s" %
(i, allNotes[i].pitch.accidental.displayStatus, ds[i]))
def testMakeAccidentalsOctaveKS(self):
s = Stream()
k = key.KeySignature(-3)
s.append(k)
s.append(note.Note('B-2'))
s.append(note.Note('B-1'))
for n in s.notes:
self.assertEqual(n.pitch.accidental.displayStatus, None)
s.makeAccidentals(inPlace = True)
for n in s.notes:
self.assertEqual(n.pitch.accidental.displayStatus, False)
def testScaleOffsetsBasic(self):
'''
'''
from music21 import stream
def procCompare(s, scalar, match):
oListSrc = [e.offset for e in s]
oListSrc.sort()
sNew = s.scaleOffsets(scalar, inPlace=False)
oListPost = [e.offset for e in sNew]
oListPost.sort()
#environLocal.printDebug(['scaleOffsets', oListSrc, '\npost scaled by:', scalar, oListPost])
self.assertEqual(oListPost[:len(match)], match)
# test equally spaced half notes starting at zero
n = note.Note()
n.quarterLength = 2
s = stream.Stream()
s.repeatAppend(n, 10)
# provide start of resulting values
# half-note spacing becomes whole-note spacing
procCompare(s, 2, [0.0, 4.0, 8.0])
procCompare(s, 4, [0.0, 8.0, 16.0, 24.0])
procCompare(s, 3, [0.0, 6.0, 12.0, 18.0])
procCompare(s, .5, [0.0, 1.0, 2.0, 3.0])
procCompare(s, .25, [0.0, 0.5, 1.0, 1.5])
# test equally spaced quarter notes start at non-zero
n = note.Note()
n.quarterLength = 1
s = stream.Stream()
s.repeatInsert(n, list(range(100, 110)))
procCompare(s, 1, [100, 101, 102, 103])
procCompare(s, 2, [100, 102, 104, 106])
procCompare(s, 4, [100, 104, 108, 112])
procCompare(s, 1.5, [100, 101.5, 103.0, 104.5])
procCompare(s, .5, [100, 100.5, 101.0, 101.5])
procCompare(s, .25, [100, 100.25, 100.5, 100.75])
# test non equally spaced notes starting at zero
s = stream.Stream()
n1 = note.Note()
n1.quarterLength = 1
s.repeatInsert(n1, list(range(0, 30, 3)))
n2 = note.Note()
n2.quarterLength = 2
s.repeatInsert(n2, list(range(1, 30, 3)))
# procCompare will sort offsets; this tests non-sorted operation
procCompare(s, 1, [0.0, 1.0, 3.0, 4.0, 6.0, 7.0])
procCompare(s, .5, [0.0, 0.5, 1.5, 2.0, 3.0, 3.5])
procCompare(s, 2, [0.0, 2.0, 6.0, 8.0, 12.0, 14.0])
# test non equally spaced notes starting at non-zero
s = stream.Stream()
n1 = note.Note()
n1.quarterLength = 1
s.repeatInsert(n1, list(range(100, 130, 3)))
n2 = note.Note()
n2.quarterLength = 2
s.repeatInsert(n2, list(range(101, 130, 3)))
# procCompare will sort offsets; this tests non-sorted operation
procCompare(s, 1, [100.0, 101.0, 103.0, 104.0, 106.0, 107.0])
procCompare(s, .5, [100.0, 100.5, 101.5, 102.0, 103.0, 103.5])
procCompare(s, 2, [100.0, 102.0, 106.0, 108.0, 112.0, 114.0])
procCompare(s, 6, [100.0, 106.0, 118.0, 124.0, 136.0, 142.0])
def testScaleOffsetsBasicInPlaceA(self):
'''
'''
from music21 import stream
def procCompare(s, scalar, match):
# test equally spaced half notes starting at zero
n = note.Note()
n.quarterLength = 2
s = stream.Stream()
s.repeatAppend(n, 10)
oListSrc = [e.offset for e in s]
oListSrc.sort()
s.scaleOffsets(scalar, inPlace=True)
oListPost = [e.offset for e in s]
oListPost.sort()
#environLocal.printDebug(['scaleOffsets', oListSrc, '\npost scaled by:', scalar, oListPost])
self.assertEqual(oListPost[:len(match)], match)
s = None # placeholder
# provide start of resulting values
# half-note spacing becomes whole-note spacing
procCompare(s, 2, [0.0, 4.0, 8.0])
procCompare(s, 4, [0.0, 8.0, 16.0, 24.0])
procCompare(s, 3, [0.0, 6.0, 12.0, 18.0])
procCompare(s, .5, [0.0, 1.0, 2.0, 3.0])
procCompare(s, .25, [0.0, 0.5, 1.0, 1.5])
def testScaleOffsetsBasicInPlaceB(self):
'''
'''
from music21 import stream
def procCompare(s, scalar, match):
# test equally spaced quarter notes start at non-zero
n = note.Note()
n.quarterLength = 1
s = stream.Stream()
s.repeatInsert(n, list(range(100, 110)))
oListSrc = [e.offset for e in s]
oListSrc.sort()
s.scaleOffsets(scalar, inPlace=True)
oListPost = [e.offset for e in s]
oListPost.sort()
#environLocal.printDebug(['scaleOffsets', oListSrc, '\npost scaled by:', scalar, oListPost])
self.assertEqual(oListPost[:len(match)], match)
s = None # placeholder
procCompare(s, 1, [100, 101, 102, 103])
procCompare(s, 2, [100, 102, 104, 106])
procCompare(s, 4, [100, 104, 108, 112])
procCompare(s, 1.5, [100, 101.5, 103.0, 104.5])
procCompare(s, .5, [100, 100.5, 101.0, 101.5])
procCompare(s, .25, [100, 100.25, 100.5, 100.75])
def testScaleOffsetsBasicInPlaceC(self):
'''
'''
from music21 import stream
def procCompare(s, scalar, match):
# test non equally spaced notes starting at zero
s = stream.Stream()
n1 = note.Note()
n1.quarterLength = 1
s.repeatInsert(n1, list(range(0, 30, 3)))
n2 = note.Note()
n2.quarterLength = 2
s.repeatInsert(n2, list(range(1, 30, 3)))
oListSrc = [e.offset for e in s]
oListSrc.sort()
s.scaleOffsets(scalar, inPlace=True)
oListPost = [e.offset for e in s]
oListPost.sort()
#environLocal.printDebug(['scaleOffsets', oListSrc, '\npost scaled by:', scalar, oListPost])
self.assertEqual(oListPost[:len(match)], match)
# procCompare will sort offsets; this tests non-sorted operation
s = None # placeholder
procCompare(s, 1, [0.0, 1.0, 3.0, 4.0, 6.0, 7.0])
procCompare(s, .5, [0.0, 0.5, 1.5, 2.0, 3.0, 3.5])
procCompare(s, 2, [0.0, 2.0, 6.0, 8.0, 12.0, 14.0])
def testScaleOffsetsBasicInPlaceD(self):
'''
'''
from music21 import stream
def procCompare(s, scalar, match):
# test non equally spaced notes starting at non-zero
s = stream.Stream()
n1 = note.Note()
n1.quarterLength = 1
s.repeatInsert(n1, list(range(100, 130, 3)))
n2 = note.Note()
n2.quarterLength = 2
s.repeatInsert(n2, list(range(101, 130, 3)))
oListSrc = [e.offset for e in s]
oListSrc.sort()
s.scaleOffsets(scalar, inPlace=True)
oListPost = [e.offset for e in s]
oListPost.sort()
#environLocal.printDebug(['scaleOffsets', oListSrc, '\npost scaled by:', scalar, oListPost])
self.assertEqual(oListPost[:len(match)], match)
# procCompare will sort offsets; this tests non-sorted operation
s = None # placeholder
procCompare(s, 1, [100.0, 101.0, 103.0, 104.0, 106.0, 107.0])
procCompare(s, .5, [100.0, 100.5, 101.5, 102.0, 103.0, 103.5])
procCompare(s, 2, [100.0, 102.0, 106.0, 108.0, 112.0, 114.0])
procCompare(s, 6, [100.0, 106.0, 118.0, 124.0, 136.0, 142.0])
def testScaleOffsetsNested(self):
'''
'''
from music21 import stream
def offsetMap(s): # lists of offsets, with lists of lists
post = []
for e in s:
sub = []
sub.append(e.offset)
#if hasattr(e, 'elements'):
if e.isStream:
sub.append(offsetMap(e))
post.append(sub)
return post
def procCompare(s, scalar, anchorZeroRecurse, match):
oListSrc = offsetMap(s)
oListSrc.sort()
sNew = s.scaleOffsets(scalar, anchorZeroRecurse=anchorZeroRecurse,
inPlace=False)
oListPost = offsetMap(sNew)
oListPost.sort()
#environLocal.printDebug(['scaleOffsets', oListSrc, '\npost scaled by:', scalar, oListPost])
self.assertEqual(oListPost[:len(match)], match)
# test equally spaced half notes starting at zero
n1 = note.Note()
n1.quarterLength = 2
s1 = stream.Stream()
s1.repeatAppend(n1, 4)
n2 = note.Note()
n2.quarterLength = .5
s2 = stream.Stream()
s2.repeatAppend(n2, 4)
s1.append(s2)
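# s2 is appended after the four half notes, so it lands at offset 8.0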
# offset map gives us a nested list presentation of all offsets
# useful for testing
self.assertEqual(offsetMap(s1),
[[0.0], [2.0], [4.0], [6.0], [8.0, [[0.0], [0.5], [1.0], [1.5]]]])
# provide start of resulting values
# half-note spacing becomes whole-note spacing
procCompare(s1, 2, 'lowest',
[[0.0], [4.0], [8.0], [12.0], [16.0, [[0.0], [1.0], [2.0], [3.0]]]]
)
procCompare(s1, 4, 'lowest',
[[0.0], [8.0], [16.0], [24.0], [32.0, [[0.0], [2.0], [4.0], [6.0]]]]
)
procCompare(s1, .25, 'lowest',
[[0.0], [0.5], [1.0], [1.5], [2.0, [[0.0], [0.125], [0.25], [0.375]]]]
)
# test unequally spaced notes starting at non-zero
n1 = note.Note()
n1.quarterLength = 1
s1 = stream.Stream()
s1.repeatInsert(n1, [10,14,15,17])
n2 = note.Note()
n2.quarterLength = .5
s2 = stream.Stream()
s2.repeatInsert(n2, [40,40.5,41,41.5])
s1.append(s2)
s1.append(copy.deepcopy(s2))
s1.append(copy.deepcopy(s2))
# note that, with these nested streams,
# the first value of an embedded stream stays in the same
# position relative to that stream.
# it might be necessary, in this case, to scale the start
# time of the first element;
# that is, it should have no shift
# provide anchorZeroRecurse value
self.assertEqual(offsetMap(s1),
[[10.0], [14.0], [15.0], [17.0],
[18.0, [[40.0], [40.5], [41.0], [41.5]]],
[60.0, [[40.0], [40.5], [41.0], [41.5]]],
[102.0, [[40.0], [40.5], [41.0], [41.5]]]]
)
procCompare(s1, 2, 'lowest',
[[10.0], [18.0], [20.0], [24.0],
[26.0, [[40.0], [41.0], [42.0], [43.0]]],
[110.0, [[40.0], [41.0], [42.0], [43.0]]],
[194.0, [[40.0], [41.0], [42.0], [43.0]]]]
)
# if anchorZeroRecurse is None, embedded streams that do not
# start at zero are scaled proportionally
procCompare(s1, 2, None,
[[10.0], [18.0], [20.0], [24.0],
[26.0, [[80.0], [81.0], [82.0], [83.0]]],
[110.0, [[80.0], [81.0], [82.0], [83.0]]],
[194.0, [[80.0], [81.0], [82.0], [83.0]]]]
)
procCompare(s1, .25, 'lowest',
[[10.0], [11.0], [11.25], [11.75],
[12.0, [[40.0], [40.125], [40.25], [40.375]]],
[22.5, [[40.0], [40.125], [40.25], [40.375]]],
[33.0, [[40.0], [40.125], [40.25], [40.375]]]]
)
# if anchorZeroRecurse is None, embedded streams that do not
# start at zero are scaled proportionally
procCompare(s1, .25, None,
[[10.0], [11.0], [11.25], [11.75],
[12.0, [[10.0], [10.125], [10.25], [10.375]]],
[22.5, [[10.0], [10.125], [10.25], [10.375]]],
[33.0, [[10.0], [10.125], [10.25], [10.375]]]]
)
def testScaleDurationsBasic(self):
'''Scale some durations, independent of offsets.
'''
def procCompare(s, scalar, match):
#oListSrc = [e.quarterLength for e in s]
sNew = s.scaleDurations(scalar, inPlace=False)
oListPost = [e.quarterLength for e in sNew]
self.assertEqual(oListPost[:len(match)], match)
n1 = note.Note()
n1.quarterLength = .5
s1 = Stream()
s1.repeatInsert(n1, list(range(6)))
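# scaleDurations multiplies each quarterLength by the scalar but, unlike
# augmentOrDiminish, leaves offsets untouched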
# test inPlace v/ not inPlace
sNew = s1.scaleDurations(2, inPlace=False)
self.assertEqual([e.duration.quarterLength for e in s1], [0.5, 0.5, 0.5, 0.5, 0.5, 0.5])
self.assertEqual([e.duration.quarterLength for e in sNew], [1.0, 1.0, 1.0, 1.0, 1.0, 1.0])
# basic test
procCompare(s1, .5, [0.25, 0.25, 0.25])
procCompare(s1, 3, [1.5, 1.5, 1.5])
# a sequence of Durations of different values
s1 = Stream()
for ql in [.5, 1.5, 2, 3, .25, .25, .5]:
n = note.Note('g')
n.quarterLength = ql
s1.append(n)
procCompare(s1, .5, [0.25, 0.75, 1.0, 1.5, 0.125, 0.125, 0.25] )
procCompare(s1, .25, [0.125, 0.375, 0.5, 0.75, 0.0625, 0.0625, 0.125] )
procCompare(s1, 4, [2.0, 6.0, 8, 12, 1.0, 1.0, 2.0])
def testAugmentOrDiminishBasic(self):
def procCompare(s, scalar, matchOffset, matchDuration):
#oListSrc = [e.offset for e in s]
#qlListSrc = [e.quarterLength for e in s]
sNew = s.augmentOrDiminish(scalar, inPlace=False)
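# augmentOrDiminish scales offsets and durations by the same scalar,
# combining the effects of scaleOffsets and scaleDurations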
oListPost = [e.offset for e in sNew]
qlListPost = [e.quarterLength for e in sNew]
self.assertEqual(oListPost[:len(matchOffset)], matchOffset)
self.assertEqual(qlListPost[:len(matchDuration)], matchDuration)
# test that the last offset is the highest offset
self.assertEqual(matchOffset[-1], sNew.highestOffset)
self.assertEqual(matchOffset[-1]+matchDuration[-1],
sNew.highestTime)
# test making measures on this
unused_post = sNew.makeMeasures()
#sNew.show()
# a sequence of Durations of different values
s1 = Stream()
for ql in [.5, 1.5, 2, 3, .25, .25, .5]:
n = note.Note('g')
n.quarterLength = ql
s1.append(n)
# provide offsets, then durations
procCompare(s1, .5,
[0.0, 0.25, 1.0, 2.0, 3.5, 3.625, 3.75] ,
[0.25, 0.75, 1.0, 1.5, 0.125, 0.125, 0.25] )
procCompare(s1, 1.5,
[0.0, 0.75, 3.0, 6.0, 10.5, 10.875, 11.25] ,
[0.75, 2.25, 3.0, 4.5, 0.375, 0.375, 0.75] )
procCompare(s1, 3,
[0.0, 1.5, 6.0, 12.0, 21.0, 21.75, 22.5] ,
[1.5, 4.5, 6, 9, 0.75, 0.75, 1.5] )
def testAugmentOrDiminishHighestTimes(self):
'''Need to make sure that highest offset and time are properly updated
'''
from music21 import corpus
src = corpus.parse('bach/bwv324.xml')
# get some measures of the soprano; just get the notes
ex = src.parts[0].flat.notesAndRests[0:30]
self.assertEqual(ex.highestOffset, 38.0)
self.assertEqual(ex.highestTime, 42.0)
# try first when doing this not in place
newEx = ex.augmentOrDiminish(2, inPlace=False)
self.assertEqual(newEx.notesAndRests[0].offset, 0.0)
self.assertEqual(newEx.notesAndRests[1].offset, 4.0)
self.assertEqual(newEx.highestOffset, 76.0)
self.assertEqual(newEx.highestTime, 84.0)
# try in place
ex.augmentOrDiminish(2, inPlace=True)
self.assertEqual(ex.notesAndRests[1].getOffsetBySite(ex), 4.0)
self.assertEqual(ex.notesAndRests[1].offset, 4.0)
self.assertEqual(ex.highestOffset, 76.0)
self.assertEqual(ex.highestTime, 84.0)
def testAugmentOrDiminishCorpus(self):
'''Extract phrases from the corpus and use for testing
'''
from music21 import corpus
# first method: iterating through notes
src = corpus.parse('bach/bwv324.xml')
# get some measures of the soprano; just get the notes
#environLocal.printDebug(['testAugmentOrDiminishCorpus()', 'extracting notes:'])
ex = src.parts[0].flat.notesAndRests[0:30]
# attach a couple of transformations
s = Score()
for scalar in [.5, 1.5, 2, .25]:
#n = note.Note()
part = Part()
#environLocal.printDebug(['testAugmentOrDiminishCorpus()', 'pre augment or diminish', 'ex', ex, 'id(ex)', id(ex)])
for n in ex.augmentOrDiminish(scalar, inPlace=False):
part.append(n)
s.insert(0, part)
GEX = m21ToXml.GeneralObjectExporter()
unused_mx = GEX.parse(s).decode('utf-8')
# second method: getting flattened stream
src = corpus.parse('bach/bwv323.xml')
# get notes from one part
ex = src.parts[0].flat.notesAndRests
s = Score()
for scalar in [1, 2, .5, 1.5]:
part = ex.augmentOrDiminish(scalar, inPlace=False)
s.insert(0, part)
unused_mx = GEX.parse(s).decode('utf-8')
#s.show()
def testMeasureBarDurationProportion(self):
from fractions import Fraction
from music21 import stream
m = stream.Measure()
m.timeSignature = meter.TimeSignature('3/4')
n = note.Note("B--2")
n.quarterLength = 1
m.append(copy.deepcopy(n))
self.assertEqual(m.notes[0].offset, 0)
self.assertEqual(m.barDurationProportion(), Fraction(1, 3))
self.assertEqual(m.barDuration.quarterLength, 3.0)
# temporarily commented out
# m.shiftElementsAsAnacrusis()
# self.assertEqual(m.notesAndRests[0].hasSite(m), True)
# self.assertEqual(m.notesAndRests[0].offset, 2.0)
# # now the duration is full
# self.assertAlmostEqual(m.barDurationProportion(), 1.0, 4)
# self.assertAlmostEqual(m.highestOffset, 2.0, 4)
m = stream.Measure()
m.timeSignature = meter.TimeSignature('5/4')
n1 = note.Note()
n1.quarterLength = .5
n2 = note.Note()
n2.quarterLength = 1.5
m.append(n1)
m.append(n2)
self.assertEqual(m.barDurationProportion(), Fraction(2, 5))
self.assertEqual(m.barDuration.quarterLength, 5.0)
# m.shiftElementsAsAnacrusis()
# self.assertEqual(m.notesAndRests[0].offset, 3.0)
# self.assertEqual(n1.offset, 3.0)
# self.assertEqual(n2.offset, 3.5)
# self.assertAlmostEqual(m.barDurationProportion(), 1.0, 4)
def testInsertAndShiftBasic(self):
offsets = [0, 2, 4, 6, 8, 10, 12]
n = note.Note()
n.quarterLength = 2
s = Stream()
s.repeatInsert(n, offsets)
# qL, insertOffset, newHighOffset, newHighTime
data = [
(.25, 0, 12.25, 14.25),
(3, 0, 15, 17),
(6.5, 0, 18.5, 20.5),
# shifting at a position where another element starts
(.25, 4, 12.25, 14.25),
(3, 4, 15, 17),
(6.5, 4, 18.5, 20.5),
# shift the same duration at different insert points
(1, 2, 13, 15),
(2, 2, 14, 16),
# this is overlapping element at 2 by 1, ending at 4
# results in no change in new high values
(1, 3, 12, 14),
# since duration is here 2, extend new starts to 5
(2, 3, 13, 15),
(1, 4, 13, 15),
(2, 4, 14, 16),
# here, we do not shift the element at 4, only the event at 6
(2, 4.5, 12.5, 14.5),
# here, we insert the start of an element and can shift it
(2.5, 4, 14.5, 16.5),
]
for qL, insertOffset, newHighOffset, newHighTime in data:
sProc = copy.deepcopy(s)
self.assertEqual(sProc.highestOffset, 12)
self.assertEqual(sProc.highestTime, 14)
nAlter = note.Note()
nAlter.quarterLength = qL
sProc.insertAndShift(insertOffset, nAlter)
self.assertEqual(sProc.highestOffset, newHighOffset)
self.assertEqual(sProc.highestTime, newHighTime)
self.assertEqual(len(sProc), len(s)+1)
# try the same with scrambled elements
sProc = copy.deepcopy(s)
random.shuffle(sProc._elements)
sProc.elementsChanged()
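# after mutating _elements directly, elementsChanged() clears cached
# sorting and timing data so properties like highestTime are recomputed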
self.assertEqual(sProc.highestOffset, 12)
self.assertEqual(sProc.highestTime, 14)
nAlter = note.Note()
nAlter.quarterLength = qL
sProc.insertAndShift(insertOffset, nAlter)
self.assertEqual(sProc.highestOffset, newHighOffset)
self.assertEqual(sProc.highestTime, newHighTime)
self.assertEqual(len(sProc), len(s)+1)
def testInsertAndShiftNoDuration(self):
offsets = [0, 2, 4, 6, 8, 10, 12]
n = note.Note()
n.quarterLength = 2
s = Stream()
s.repeatInsert(n, offsets)
# qL, insertOffset, newHighOffset, newHighTime
data = [
(0, 12, 14),
(0, 12, 14),
(0, 12, 14),
(4, 12, 14),
(4, 12, 14),
(4, 12, 14),
(2, 12, 14),
(2, 12, 14),
(3, 12, 14),
]
for insertOffset, newHighOffset, newHighTime in data:
sProc = copy.deepcopy(s)
self.assertEqual(sProc.highestOffset, 12)
self.assertEqual(sProc.highestTime, 14)
c = clef.Clef()
sProc.insertAndShift(insertOffset, c)
self.assertEqual(sProc.highestOffset, newHighOffset)
self.assertEqual(sProc.highestTime, newHighTime)
self.assertEqual(len(sProc), len(s)+1)
def testInsertAndShiftMultipleElements(self):
offsets = [0, 2, 4, 6, 8, 10, 12]
n = note.Note()
n.quarterLength = 2
s = Stream()
s.repeatInsert(n, offsets)
# qL, insertOffset, newHighOffset, newHighTime
data = [
(.25, 0, 12.25, 14.25),
(3, 0, 15, 17),
(6.5, 0, 18.5, 20.5),
# shifting at a position where another element starts
(.25, 4, 12.25, 14.25),
(3, 4, 15, 17),
(6.5, 4, 18.5, 20.5),
# shift the same duration at different insert points
(1, 2, 13, 15),
(2, 2, 14, 16),
# this is overlapping element at 2 by 1, ending at 4
# results in no change in new high values
(1, 3, 12, 14),
# since duration is here 2, extend new starts to 5
(2, 3, 13, 15),
(1, 4, 13, 15),
(2, 4, 14, 16),
# here, we do not shift the element at 4, only the event at 6
(2, 4.5, 12.5, 14.5),
# here, we insert the start of an element and can shift it
(2.5, 4, 14.5, 16.5),
]
for qL, insertOffset, newHighOffset, newHighTime in data:
sProc = copy.deepcopy(s)
self.assertEqual(sProc.highestOffset, 12)
self.assertEqual(sProc.highestTime, 14)
# fill with sixteenth notes
nAlter = note.Note()
nAlter.quarterLength = .25
itemList = []
o = insertOffset
while o < insertOffset + qL:
itemList.append(o)
itemList.append(copy.deepcopy(nAlter))
o += .25
#environLocal.printDebug(['itemList', itemList])
sProc.insertAndShift(itemList)
self.assertEqual(sProc.highestOffset, newHighOffset)
self.assertEqual(sProc.highestTime, newHighTime)
self.assertEqual(len(sProc), len(s) + len(itemList) // 2)
def testMetadataOnStream(self):
s = Stream()
n1 = note.Note()
s.append(n1)
s.metadata = metadata.Metadata()
s.metadata.composer = 'Frank the Composer'
s.metadata.title = 'work title' # used as movement name if movementName is not set
#s.metadata.movementName = 'movement name'
GEX = m21ToXml.GeneralObjectExporter()
unused_mx = GEX.parse(s).decode('utf-8')
#s.show()
def testMeasureBarline(self):
m1 = Measure()
m1.timeSignature = meter.TimeSignature('3/4')
self.assertEqual(len(m1), 1)
b1 = bar.Barline('heavy')
# this adds to elements list
m1.leftBarline = b1
self.assertEqual(len(m1), 2)
self.assertEqual(m1[0], b1) # this is on elements
self.assertEqual(m1.rightBarline, None) # this is on elements
b2 = bar.Barline('heavy')
self.assertEqual(m1.barDuration.quarterLength, 3.0)
m1.rightBarline = b2
# now have barline, ts, and barline
self.assertEqual(len(m1), 3)
b3 = bar.Barline('double')
b4 = bar.Barline('heavy')
m1.leftBarline = b3
# length should be the same, as we replaced
self.assertEqual(len(m1), 3)
self.assertEqual(m1.leftBarline, b3)
m1.rightBarline = b4
self.assertEqual(len(m1), 3)
self.assertEqual(m1.rightBarline, b4)
p = Part()
p.append(copy.deepcopy(m1))
p.append(copy.deepcopy(m1))
#p.show()
# add right barline first, w/o a time signature
m2 = Measure()
self.assertEqual(len(m2), 0)
m2.rightBarline = b4
self.assertEqual(len(m2), 1)
self.assertEqual(m2.leftBarline, None) # this is on elements
self.assertEqual(m2.rightBarline, b4) # this is on elements
def testMeasureLayout(self):
# test both system layout and measure width
# Note: Measure.layoutWidth is not currently read by musicxml
from music21 import layout
s = Stream()
for i in range(1,10):
n = note.Note()
m = Measure()
m.append(n)
m.layoutWidth = i*100
if i % 2 == 0:
sl = layout.SystemLayout(isNew=True)
m.insert(0, sl)
s.append(m)
#s.show()
GEX = m21ToXml.GeneralObjectExporter()
unused_mx = GEX.parse(s).decode('utf-8')
def testYieldContainers(self):
from music21 import stream
n1 = note.Note()
n1.id = 'n(1a)'
n2 = note.Note()
n2.id = 'n2(2b)'
n3 = note.Note()
n3.id = 'n3(3b)'
n4 = note.Note()
n4.id = 'n4(3b)'
s1 = stream.Stream()
s1.id = '1a'
s1.append(n1)
s2 = stream.Stream()
s2.id = '2a'
s3 = stream.Stream()
s3.id = '2b'
s3.append(n2)
s4 = stream.Stream()
s4.id = '2c'
s5 = stream.Stream()
s5.id = '3a'
s6 = stream.Stream()
s6.id = '3b'
s6.append(n3)
s6.append(n4)
s7 = stream.Stream()
s7.id = '3c'
s8 = stream.Stream()
s8.id = '3d'
s9 = stream.Stream()
s9.id = '3e'
s10 = stream.Stream()
s10.id = '3f'
#environLocal.printDebug(['s1, s2, s3, s4', s1, s2, s3, s4])
s2.append(s5)
s2.append(s6)
s2.append(s7)
s3.append(s8)
s3.append(s9)
s4.append(s10)
s1.append(s2)
s1.append(s3)
s1.append(s4)
#environLocal.printDebug(['downward:'])
match = []
for x in s1.recurse(streamsOnly=True):
match.append(x.id)
#environLocal.printDebug([x, x.id, 'activeSite', x.activeSite])
self.assertEqual(match, ['1a', '2a', '3a', '3b', '3c', '2b', '3d', '3e', '2c', '3f'])
#environLocal.printDebug(['downward with elements:'])
match = []
for x in s1.recurse(streamsOnly=False):
match.append(x.id)
#environLocal.printDebug([x, x.id, 'activeSite', x.activeSite])
self.assertEqual(match, ['1a', 'n(1a)', '2a', '3a', '3b', 'n3(3b)', 'n4(3b)', '3c', '2b', 'n2(2b)', '3d', '3e', '2c', '3f'])
#environLocal.printDebug(['downward from non-topmost element:'])
match = []
for x in s2.recurse(streamsOnly=False):
match.append(x.id)
#environLocal.printDebug([x, x.id, 'activeSite', x.activeSite])
# test downward
self.assertEqual(match, ['2a', '3a', '3b', 'n3(3b)', 'n4(3b)', '3c'])
#environLocal.printDebug(['upward, with skipDuplicates:'])
match = []
# must provide empty list for memo
for x in s7._yieldReverseUpwardsSearch([], streamsOnly=True, skipDuplicates=True):
match.append(x.id)
#environLocal.printDebug([x, x.id, 'activeSite', x.activeSite])
self.assertEqual(match, ['3c', '2a', '1a', '2b', '2c', '3a', '3b'] )
#environLocal.printDebug(['upward from a single node, with skipDuplicates'])
match = []
for x in s10._yieldReverseUpwardsSearch([], streamsOnly=True):
match.append(x.id)
#environLocal.printDebug([x, x.id, 'activeSite', x.activeSite])
self.assertEqual(match, ['3f', '2c', '1a', '2a', '2b'] )
#environLocal.printDebug(['upward with skipDuplicates=False:'])
match = []
for x in s10._yieldReverseUpwardsSearch([], streamsOnly=True, skipDuplicates=False):
match.append(x.id)
#environLocal.printDebug([x, x.id, 'activeSite', x.activeSite])
self.assertEqual(match, ['3f', '2c', '1a', '2a', '1a', '2b', '1a'] )
#environLocal.printDebug(['upward, with skipDuplicates, streamsOnly=False:'])
match = []
# must provide empty list for memo
for x in s8._yieldReverseUpwardsSearch([], streamsOnly=False,
skipDuplicates=True):
match.append(x.id)
#environLocal.printDebug([x, x.id, 'activeSite', x.activeSite])
self.assertEqual(match, ['3d', 'n2(2b)', '2b', 'n(1a)', '1a', '2a', '2c', '3e'] )
#environLocal.printDebug(['upward, with skipDuplicates, streamsOnly=False:'])
match = []
# must provide empty list for memo
for x in s4._yieldReverseUpwardsSearch([], streamsOnly=False,
skipDuplicates=True):
match.append(x.id)
#environLocal.printDebug([x, x.id, 'activeSite', x.activeSite])
# notice that this does not get the nonContainers for 2b
self.assertEqual(match, ['2c', 'n(1a)', '1a', '2a', '2b'] )
def testMidiEventsBuilt(self):
def procCompare(mf, match):
triples = []
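# track events alternate DeltaTime and MidiEvent objects, hence the step of 2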
for i in range(0, len(mf.tracks[0].events), 2):
d = mf.tracks[0].events[i] # delta
e = mf.tracks[0].events[i+1] # events
triples.append((d.time, e.type, e.pitch))
# TODO: temporarily removed
#self.assertEqual(triples, match)
s = Stream()
n = note.Note('g#3')
n.quarterLength = .5
s.repeatAppend(n, 6)
#post = s.midiTracks # get a list
post = midiTranslate.streamHierarchyToMidiTracks(s)
self.assertEqual(len(post[0].events), 30)
# must be an even number
self.assertEqual(len(post[0].events) % 2, 0)
mf = midiTranslate.streamToMidiFile(s)
match = [(0, 'SEQUENCE_TRACK_NAME', None), (0, 'PITCH_BEND', None), (0, 'NOTE_ON', 56), (512, 'NOTE_OFF', 56), (0, 'NOTE_ON', 56), (512, 'NOTE_OFF', 56), (0, 'NOTE_ON', 56), (512, 'NOTE_OFF', 56), (0, 'NOTE_ON', 56), (512, 'NOTE_OFF', 56), (0, 'NOTE_ON', 56), (512, 'NOTE_OFF', 56), (0, 'NOTE_ON', 56), (512, 'NOTE_OFF', 56), (0, 'END_OF_TRACK', None)]
procCompare(mf, match)
s = Stream()
n = note.Note('g#3')
n.quarterLength = 1.5
s.repeatAppend(n, 3)
mf = midiTranslate.streamToMidiFile(s)
match = [(0, 'SEQUENCE_TRACK_NAME', None), (0, 'PITCH_BEND', None), (0, 'NOTE_ON', 56), (1536, 'NOTE_OFF', 56), (0, 'NOTE_ON', 56), (1536, 'NOTE_OFF', 56), (0, 'NOTE_ON', 56), (1536, 'NOTE_OFF', 56), (0, 'END_OF_TRACK', None)]
procCompare(mf, match)
# combinations of different pitches and durs
s = Stream()
data = [('c2', .25), ('c#3', .5), ('g#3', 1.5), ('a#2', 1), ('a4', 2)]
for p, d in data:
n = note.Note(p)
n.quarterLength = d
s.append(n)
mf = midiTranslate.streamToMidiFile(s)
match = [(0, 'SEQUENCE_TRACK_NAME', None), (0, 'NOTE_ON', 36), (256, 'NOTE_OFF', 36), (0, 'NOTE_ON', 49), (512, 'NOTE_OFF', 49), (0, 'NOTE_ON', 56), (1536, 'NOTE_OFF', 56), (0, 'NOTE_ON', 46), (1024, 'NOTE_OFF', 46), (0, 'NOTE_ON', 69), (2048, 'NOTE_OFF', 69), (0, 'END_OF_TRACK', None)]
procCompare(mf, match)
# rests, basic
#environLocal.printDebug(['rests'])
s = Stream()
data = [('c2', 1), (None, .5), ('c#3', 1), (None, .5), ('a#2', 1), (None, .5), ('a4', 1)]
for p, d in data:
if p is None:
n = note.Rest()
else:
n = note.Note(p)
n.quarterLength = d
s.append(n)
#s.show('midi')
mf = midiTranslate.streamToMidiFile(s)
match = [(0, 'SEQUENCE_TRACK_NAME', None),
(0, 'NOTE_ON', 36), (1024, 'NOTE_OFF', 36),
(512, 'NOTE_ON', 49), (1024, 'NOTE_OFF', 49),
(512, 'NOTE_ON', 46), (1024, 'NOTE_OFF', 46),
(512, 'NOTE_ON', 69), (1024, 'NOTE_OFF', 69),
(0, 'END_OF_TRACK', None)]
procCompare(mf, match)
#environLocal.printDebug(['rests, varied sizes'])
s = Stream()
data = [('c2', 1), (None, .25), ('c#3', 1), (None, 1.5), ('a#2', 1), (None, 2), ('a4', 1)]
for p, d in data:
if p is None:
n = note.Rest()
else:
n = note.Note(p)
n.quarterLength = d
s.append(n)
#s.show('midi')
mf = midiTranslate.streamToMidiFile(s)
match = [(0, 'SEQUENCE_TRACK_NAME', None),
(0, 'NOTE_ON', 36), (1024, 'NOTE_OFF', 36),
(256, 'NOTE_ON', 49), (1024, 'NOTE_OFF', 49),
(1536, 'NOTE_ON', 46), (1024, 'NOTE_OFF', 46),
(2048, 'NOTE_ON', 69), (1024, 'NOTE_OFF', 69),
(0, 'END_OF_TRACK', None)]
procCompare(mf, match)
#environLocal.printDebug(['rests, multiple in a row'])
s = Stream()
data = [('c2', 1), (None, 1), (None, 1), ('c#3', 1), ('c#3', 1), (None, .5), (None, .5), (None, .5), (None, .5), ('a#2', 1), (None, 2), ('a4', 1)]
for p, d in data:
if p is None:
n = note.Rest()
else:
n = note.Note(p)
n.quarterLength = d
s.append(n)
#s.show('midi')
mf = midiTranslate.streamToMidiFile(s)
match = [(0, 'SEQUENCE_TRACK_NAME', None),
(0, 'NOTE_ON', 36), (1024, 'NOTE_OFF', 36),
(2048, 'NOTE_ON', 49), (1024, 'NOTE_OFF', 49),
(0, 'NOTE_ON', 49), (1024, 'NOTE_OFF', 49),
(2048, 'NOTE_ON', 46), (1024, 'NOTE_OFF', 46),
(2048, 'NOTE_ON', 69), (1024, 'NOTE_OFF', 69),
(0, 'END_OF_TRACK', None)]
procCompare(mf, match)
#environLocal.printDebug(['w/ chords'])
s = Stream()
data = [('c2', 1), (None, 1), (['f3', 'a-4', 'c5'], 1), (None, .5), ('a#2', 1), (None, 2), (['d2', 'a4'], .5), (['d-2', 'a#3', 'g#6'], .5), (None, 1), (['f#3', 'a4', 'c#5'], 4)]
for p, d in data:
if p is None:
n = note.Rest()
elif isinstance(p, list):
n = chord.Chord(p)
else:
n = note.Note(p)
n.quarterLength = d
s.append(n)
#s.show('midi')
mf = midiTranslate.streamToMidiFile(s)
match = [(0, 'SEQUENCE_TRACK_NAME', None), (0, 'NOTE_ON', 36), (1024, 'NOTE_OFF', 36), (1024, 'NOTE_ON', 53), (0, 'NOTE_ON', 68), (0, 'NOTE_ON', 72), (1024, 'NOTE_OFF', 53), (0, 'NOTE_OFF', 68), (0, 'NOTE_OFF', 72), (512, 'NOTE_ON', 46), (1024, 'NOTE_OFF', 46), (2048, 'NOTE_ON', 38), (0, 'NOTE_ON', 69), (512, 'NOTE_OFF', 38), (0, 'NOTE_OFF', 69), (0, 'NOTE_ON', 37), (0, 'NOTE_ON', 58), (0, 'NOTE_ON', 92), (512, 'NOTE_OFF', 37), (0, 'NOTE_OFF', 58), (0, 'NOTE_OFF', 92), (1024, 'NOTE_ON', 54), (0, 'NOTE_ON', 69), (0, 'NOTE_ON', 73), (4096, 'NOTE_OFF', 54), (0, 'NOTE_OFF', 69), (0, 'NOTE_OFF', 73), (0, 'END_OF_TRACK', None)]
procCompare(mf, match)
def testMidiEventsImported(self):
from music21 import corpus
def procCompare(mf, match):
triples = []
for i in range(0, len(mf.tracks[0].events), 2):
d = mf.tracks[0].events[i] # delta
e = mf.tracks[0].events[i+1] # events
triples.append((d.time, e.type, e.pitch))
self.assertEqual(triples, match)
s = corpus.parse('bach/bwv66.6')
part = s.parts[0].measures(6,9) # last measures
#part.show('musicxml')
#part.show('midi')
mf = midiTranslate.streamToMidiFile(part)
match = [(0, 'SEQUENCE_TRACK_NAME', None), (0, 'PROGRAM_CHANGE', None), (0, 'PITCH_BEND', None), (0, 'PROGRAM_CHANGE', None), (0, 'KEY_SIGNATURE', None), (0, 'TIME_SIGNATURE', None), (0, 'NOTE_ON', 69), (1024, 'NOTE_OFF', 69), (0, 'NOTE_ON', 71), (1024, 'NOTE_OFF', 71), (0, 'NOTE_ON', 73), (1024, 'NOTE_OFF', 73), (0, 'NOTE_ON', 69), (1024, 'NOTE_OFF', 69), (0, 'NOTE_ON', 68), (1024, 'NOTE_OFF', 68), (0, 'NOTE_ON', 66), (1024, 'NOTE_OFF', 66), (0, 'NOTE_ON', 68), (2048, 'NOTE_OFF', 68), (0, 'NOTE_ON', 66), (2048, 'NOTE_OFF', 66), (0, 'NOTE_ON', 66), (1024, 'NOTE_OFF', 66), (0, 'NOTE_ON', 66), (2048, 'NOTE_OFF', 66), (0, 'NOTE_ON', 66), (512, 'NOTE_OFF', 66), (0, 'NOTE_ON', 65), (512, 'NOTE_OFF', 65), (0, 'NOTE_ON', 66), (1024, 'NOTE_OFF', 66), (0, 'END_OF_TRACK', None)]
procCompare(mf, match)
def testFindGaps(self):
s = Stream()
n = note.Note()
s.repeatInsert(n, [0, 1.5, 2.5, 4, 8])
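# quarter notes at these offsets leave gaps at 1.0-1.5, 3.5-4.0, and 5.0-8.0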
post = s.findGaps()
test = [(e.offset, e.offset+e.duration.quarterLength) for e in post]
match = [(1.0, 1.5), (3.5, 4.0), (5.0, 8.0)]
self.assertEqual(test, match)
self.assertEqual(len(s), 5)
s.makeRests(fillGaps=True)
self.assertEqual(len(s), 8)
self.assertEqual(len(s.getElementsByClass(note.Rest)), 3)
def testQuantize(self):
def procCompare(srcOffset, srcDur, dstOffset, dstDur, divList):
s = Stream()
for i in range(len(srcDur)):
n = note.Note()
n.quarterLength = srcDur[i]
s.insert(srcOffset[i], n)
s.quantize(divList, processOffsets=True, processDurations=True, inPlace=True)
targetOffset = [e.offset for e in s]
targetDur = [e.duration.quarterLength for e in s]
self.assertEqual(targetOffset, dstOffset)
self.assertEqual(targetDur, dstDur)
#environLocal.printDebug(['quantization results:', targetOffset, targetDur])
from fractions import Fraction as F
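# each divisor in divList is a number of grid divisions per quarter note;
# quantize appears to snap each value to whichever grid fits with least error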
procCompare([0.01, .24, .57, .78], [0.25, 0.25, 0.25, 0.25],
[0.0, .25, .5, .75], [0.25, 0.25, 0.25, 0.25],
[4]) # snap to .25
procCompare([0.01, .24, .52, .78], [0.25, 0.25, 0.25, 0.25],
[0.0, .25, .5, .75], [0.25, 0.25, 0.25, 0.25],
[8]) # snap to .125
procCompare([0.01, .345, .597, 1.02, 1.22],
[0.31, 0.32, 0.33, 0.25, 0.25],
[0.0, F('1/3'), F('2/3'), 1.0, 1.25],
[F('1/3'), F('1/3'), F('1/3'), 0.25, 0.25],
[4, 3]) # snap to .125 and .3333
procCompare([0.01, .345, .687, 0.99, 1.28],
[0.31, 0.32, 0.33, 0.22, 0.21],
[0.0, F('1/3'), F('2/3'), 1.0, 1.25],
[F('1/3'), F('1/3'), F('1/3'), 0.25, 0.25],
[8, 3]) # snap to .125 and .3333
procCompare([0.03, .335, .677, 1.02, 1.28],
[0.32, 0.35, 0.33, 0.22, 0.21],
[0.0, F('1/3'), F('2/3'), 1.0, 1.25],
[F('1/3'), F('1/3'), F('1/3'), 0.25, 0.25],
[8, 6]) # snap to .125 and .1666666
def testAnalyze(self):
from music21 import corpus
s = corpus.parse('bach/bwv66.6')
sub = [s.parts[0], s.parts[1], s.measures(4,5),
s.parts[2].measures(4,5)]
matchAmbitus = [interval.Interval(12),
interval.Interval(15),
interval.Interval(26),
interval.Interval(10)]
for i in range(len(sub)):
sTest = sub[i]
post = sTest.analyze('ambitus')
self.assertEqual(str(post), str(matchAmbitus[i]))
# match values for different analysis strings
for idStr in ['range', 'ambitus', 'span']:
for i in range(len(sub)):
sTest = sub[i]
post = sTest.analyze(idStr)
self.assertEqual(str(post), str(matchAmbitus[i]))
# only match first two values
matchKrumhansl = [(pitch.Pitch('F#'), 'minor'),
(pitch.Pitch('C#'), 'minor'),
(pitch.Pitch('E'), 'major') ,
(pitch.Pitch('E'), 'major') ]
for i in range(len(sub)):
sTest = sub[i]
post = sTest.analyze('KrumhanslSchmuckler')
# returns three values; match 2
self.assertEqual(post.tonic.name, matchKrumhansl[i][0].name)
self.assertEqual(post.mode, matchKrumhansl[i][1])
# match values under different strings provided to analyze
for idStr in ['krumhansl']:
for i in range(len(sub)):
sTest = sub[i]
post = sTest.analyze(idStr)
# returns three values; match 2
self.assertEqual(post.tonic.name, matchKrumhansl[i][0].name)
self.assertEqual(post.mode, matchKrumhansl[i][1])
matchArden = [(pitch.Pitch('F#'), 'minor'),
(pitch.Pitch('C#'), 'minor'),
(pitch.Pitch('F#'), 'minor') ,
(pitch.Pitch('E'), 'major') ]
for idStr in ['arden']:
for i in range(len(sub)):
sTest = sub[i]
post = sTest.analyze(idStr)
# returns three values; match 2
self.assertEqual(post.tonic.name, matchArden[i][0].name)
self.assertEqual(post.mode, matchArden[i][1])
def testMakeTupletBracketsA(self):
'''Creating brackets
'''
from music21.stream import makeNotation
def collectType(s):
post = []
for e in s:
if e.duration.tuplets:
post.append(e.duration.tuplets[0].type)
else:
post.append(None)
return post
def collectBracket(s):
post = []
for e in s:
if e.duration.tuplets:
post.append(e.duration.tuplets[0].bracket)
else:
post.append(None)
return post
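# in short: a tuplet's type is 'start' at the first note of a bracket,
# 'stop' at the last, None in the middle, and 'startStop' for a lone
# tuplet; bracket False means the bracket is suppressed in notation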
# case of incomplete, single tuplet ending the Stream
# remove bracket
s = Stream()
qlList = [1, 2, .5, 1/6.]
for ql in qlList:
n = note.Note()
n.quarterLength = ql
s.append(n)
makeNotation.makeTupletBrackets(s, inPlace=True)
self.assertEqual(collectType(s), [None, None, None, 'startStop'])
self.assertEqual(collectBracket(s), [None, None, None, False])
#s.show()
def testMakeTupletBracketsB(self):
'''Creating brackets
'''
from music21.stream import makeNotation
def collectType(s):
post = []
for e in s:
if e.duration.tuplets:
post.append(e.duration.tuplets[0].type)
else:
post.append(None)
return post
def collectBracket(s):
post = []
for e in s:
if e.duration.tuplets:
post.append(e.duration.tuplets[0].bracket)
else:
post.append(None)
return post
s = Stream()
qlList = [1, 1/3., 1/3., 1/3., 1, 1]
for ql in qlList:
n = note.Note()
n.quarterLength = ql
s.append(n)
makeNotation.makeTupletBrackets(s, inPlace=True)
self.assertEqual(collectType(s), [None, 'start', None, 'stop', None, None])
#s.show()
s = Stream()
qlList = [1, 1/6., 1/6., 1/6., 1/6., 1/6., 1/6., 1, 1]
for ql in qlList:
n = note.Note()
n.quarterLength = ql
s.append(n)
makeNotation.makeTupletBrackets(s, inPlace=True)
# these are the correct type settings, but notation displays this by
# dividing into two brackets
self.assertEqual(collectType(s), [None, 'start', None, 'stop', 'start', None, 'stop', None, None] )
#s.show()
# case of tuplet ending the Stream
s = Stream()
qlList = [1, 2, .5, 1/6., 1/6., 1/6., ]
for ql in qlList:
n = note.Note()
n.quarterLength = ql
s.append(n)
makeNotation.makeTupletBrackets(s, inPlace=True)
self.assertEqual(collectType(s), [None, None, None, 'start', None, 'stop'] )
#s.show()
# case of incomplete, single tuplets in the middle of a Stream
s = Stream()
qlList = [1, 1/3., 1, 1/3., 1, 1/3.]
for ql in qlList:
n = note.Note()
n.quarterLength = ql
s.append(n)
makeNotation.makeTupletBrackets(s, inPlace=True)
self.assertEqual(collectType(s), [None, 'startStop', None, 'startStop', None, 'startStop'])
self.assertEqual(collectBracket(s), [None, False, None, False, None, False])
#s.show()
# diverse groups that sum to a whole
s = Stream()
qlList = [1, 1/3., 2/3., 2/3., 1/6., 1/6., 1]
for ql in qlList:
n = note.Note()
n.quarterLength = ql
s.append(n)
makeNotation.makeTupletBrackets(s, inPlace=True)
self.assertEqual(collectType(s), [None, 'start', 'stop','start', None, 'stop', None])
#s.show()
# diverse groups that sum to a whole
s = Stream()
qlList = [1, 1/3., 2/3., 1, 1/6., 1/3., 1/3., 1/6. ]
for ql in qlList:
n = note.Note()
n.quarterLength = ql
s.append(n)
makeNotation.makeTupletBrackets(s, inPlace=True)
self.assertEqual(collectType(s), [None, 'start', 'stop', None, 'start', 'stop', 'start', 'stop'] )
self.assertEqual(collectBracket(s), [None, True, True, None, True, True, True, True])
#s.show()
# quintuplets
s = Stream()
qlList = [1, 1/5., 1/5., 1/10., 1/10., 1/5., 1/5., 2. ]
for ql in qlList:
n = note.Note()
n.quarterLength = ql
s.append(n)
makeNotation.makeTupletBrackets(s, inPlace=True)
self.assertEqual(collectType(s), [None, 'start', None, None, None, None, 'stop', None] )
self.assertEqual(collectBracket(s), [None, True, True, True, True, True, True, None] )
#s.show()
def testMakeNotationA(self):
'''This is a test of many make procedures
'''
def collectTupletType(s):
post = []
for e in s:
if e.duration.tuplets:
post.append(e.duration.tuplets[0].type)
else:
post.append(None)
return post
def collectTupletBracket(s):
post = []
for e in s:
if e.duration.tuplets:
post.append(e.duration.tuplets[0].bracket)
else:
post.append(None)
return post
# s = Stream()
# qlList = [1, 1/3., 1/3., 1/3., 1, 1, 1/3., 1/3., 1/3., 1, 1]
# for ql in qlList:
# n = note.Note()
# n.quarterLength = ql
# s.append(n)
# postMake = s.makeNotation()
# self.assertEqual(collectTupletType(postMake.flat.notesAndRests), [None, 'start', None, 'stop', None, None, 'start', None, 'stop', None, None])
# #s.show()
s = Stream()
qlList = [1/3.,]
for ql in qlList:
n = note.Note()
n.quarterLength = ql
s.append(n)
postMake = s.makeNotation()
self.assertEqual(collectTupletType(postMake.flat.notes), ['startStop'])
self.assertEqual(collectTupletBracket(postMake.flat.notes), [False])
#s.show()
def testMakeNotationB(self):
'''Testing voices making routines within make notation
'''
from music21 import stream
s = stream.Stream()
s.insert(0, note.Note('C4', quarterLength=8))
s.repeatInsert(note.Note('b-4', quarterLength=.5), [x*.5 for x in range(0,16)])
s.repeatInsert(note.Note('f#5', quarterLength=2), [0, 2, 4, 6])
sPost = s.makeNotation()
#sPost.show()
# make sure original is not changed
self.assertEqual(len(s.voices), 0)
self.assertEqual(len(s.notes), 21)
# we have generated measures, beams, and voices
self.assertEqual(len(sPost.getElementsByClass('Measure')), 2)
self.assertEqual(len(sPost.getElementsByClass('Measure')[0].voices), 3)
self.assertEqual(len(sPost.getElementsByClass('Measure')[1].voices), 3)
# check beaming
for m in sPost.getElementsByClass('Measure'):
for n in m.voices[1].notes: # middle voice has beams
self.assertEqual(len(n.beams) > 0, True)
def testMakeNotationC(self):
'''Test creating diverse, overlapping durations and notes
'''
# TODO: the output of this is missing a tie to the last dotted half
from music21 import stream
s = stream.Stream()
for duration in [.5, 1.5, 3]:
for offset in [0, 1.5, 4, 6]:
# create a midi pitch value from duration
s.insert(offset, note.Note(50+(duration*2)+(offset*2),
quarterLength=duration))
#s.show()
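# the 3 durations x 4 offsets grid overlaps itself up to four deep
# (e.g. around offset 6), so makeNotation should need four voices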
sPost = s.makeNotation()
self.assertEqual(len(sPost.getElementsByClass('Measure')), 3)
self.assertEqual(len(sPost.getElementsByClass('Measure')[0].voices), 4)
self.assertEqual(len(sPost.getElementsByClass('Measure')[1].voices), 4)
def testMakeNotationScoreA(self):
'''Test makeNotation on Score objects
'''
from music21 import stream
s = stream.Score()
p1 = stream.Stream()
p2 = stream.Stream()
for p in [p1, p2]:
p.repeatAppend(note.Note(), 12)
s.insert(0, p)
# this is true as the sub-streams contain notes
self.assertEqual(s.hasPartLikeStreams(), True)
self.assertEqual(s.getElementsByClass('Stream')[0].hasMeasures(), False)
self.assertEqual(s.getElementsByClass('Stream')[1].hasMeasures(), False)
post = s.makeNotation(inPlace=False)
self.assertEqual(post.hasPartLikeStreams(), True)
# three measures are made by default
self.assertEqual(len(post.getElementsByClass(
'Stream')[0].getElementsByClass('Measure')), 3)
self.assertEqual(len(post.getElementsByClass(
'Stream')[1].getElementsByClass('Measure')), 3)
self.assertEqual(len(post.flat.getElementsByClass('TimeSignature')), 2)
self.assertEqual(len(post.flat.getElementsByClass('Clef')), 2)
def testMakeNotationScoreB(self):
'''Test makeNotation on Score objects
'''
from music21 import stream
s = stream.Score()
p1 = stream.Stream()
p2 = stream.Stream()
for p in [p1, p2]:
p.repeatAppend(note.Note(), 12)
s.insert(0, p)
# this is true as the sub-streams contain notes
self.assertEqual(s.hasPartLikeStreams(), True)
self.assertEqual(s.getElementsByClass('Stream')[0].hasMeasures(), False)
self.assertEqual(s.getElementsByClass('Stream')[1].hasMeasures(), False)
# supply a meter stream
post = s.makeNotation(inPlace=False, meterStream=stream.Stream(
[meter.TimeSignature('3/4')]))
self.assertEqual(post.hasPartLikeStreams(), True)
# four measures are made due to passed-in time signature
self.assertEqual(len(post.getElementsByClass(
'Stream')[0].getElementsByClass('Measure')), 4)
self.assertEqual(len(post.getElementsByClass(
'Stream')[1].getElementsByClass('Measure')), 4)
self.assertEqual(len(post.flat.getElementsByClass('TimeSignature')), 2)
self.assertEqual(len(post.flat.getElementsByClass('Clef')), 2)
def testMakeNotationScoreC(self):
'''Test makeNotation on Score objects
'''
from music21 import stream
s = stream.Score()
p1 = stream.Stream()
p2 = stream.Stream()
for p in [p1, p2]:
p.repeatAppend(note.Note(), 12)
s.insert(0, p)
# create measures in the first part
s.getElementsByClass('Stream')[0].makeNotation(inPlace=True,
meterStream=stream.Stream([meter.TimeSignature('3/4')]))
self.assertEqual(s.getElementsByClass('Stream')[0].hasMeasures(), True)
self.assertEqual(s.getElementsByClass('Stream')[1].hasMeasures(), False)
post = s.makeNotation(inPlace=False)
self.assertEqual(len(post.getElementsByClass(
'Stream')[0].getElementsByClass('Measure')), 4)
self.assertEqual(len(post.getElementsByClass(
'Stream')[1].getElementsByClass('Measure')), 3)
self.assertEqual(len(post.flat.getElementsByClass('TimeSignature')), 2)
self.assertEqual(len(post.flat.getElementsByClass('Clef')), 2)
def testMakeTies(self):
from music21 import corpus
def collectAccidentalDisplayStatus(s):
post = []
for e in s.flat.notesAndRests:
if e.pitch.accidental is not None:
post.append((e.pitch.name, e.pitch.accidental.displayStatus))
else: # mark as not having an accidental
post.append('x')
return post
s = corpus.parse('bach/bwv66.6')
# this has accidentals in measures 2 and 6
sSub = s.parts[3].measures(2,6)
#sSub.show()
# only notes that deviate from key signature are True
self.assertEqual(collectAccidentalDisplayStatus(sSub), ['x', (u'C#', False), 'x', 'x', (u'E#', True), (u'F#', False), 'x', (u'C#', False), (u'F#', False), (u'F#', False), (u'G#', False), (u'F#', False), (u'G#', False), 'x', 'x', 'x', (u'C#', False), (u'F#', False), (u'G#', False), 'x', 'x', 'x', 'x', (u'E#', True), (u'F#', False)] )
# flattening to notesAndRests removes the key signature
sSub = sSub.flat.notesAndRests
self.assertEqual(len(sSub), 25)
sSub.insert(0, meter.TimeSignature('3/8'))
sSub.augmentOrDiminish(2, inPlace=True)
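# doubling every duration pushes notes across the new 3/8 barlines,
# so makeTies below has to split them and add ties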
# explicitly call make measures and make ties
mStream = sSub.makeMeasures(finalBarline=None)
mStream.makeTies(inPlace=True)
self.assertEqual(len(mStream.flat), 45)
#mStream.show()
# this is as expected: the only True accidental display statuses are
# those that were in the original. In Finale display, however, sharps
# are displayed when they should not be.
self.assertEqual(collectAccidentalDisplayStatus(mStream), ['x', (u'C#', False), (u'C#', False), 'x', 'x', 'x', 'x', (u'E#', True), (u'E#', False), (u'F#', False), 'x', (u'C#', False), (u'C#', False), (u'F#', False), (u'F#', False), (u'F#', False), (u'F#', False), (u'G#', False), (u'G#', False), (u'F#', False), (u'G#', False), 'x', 'x', 'x', 'x', (u'C#', False), (u'C#', False), (u'F#', False), (u'F#', False), (u'G#', False), (u'G#', False), 'x', 'x', 'x', 'x', 'x', 'x', 'x', 'x', (u'E#', True), (u'E#', False), (u'F#', False), (u'F#', False)]
)
# transposing should reset all transposed accidentals
mStream.flat.transpose('p5', inPlace=True)
#mStream.show()
# after transposition all accidentals are reset
# note: the last D# is not showing in Finale, but this seems to be a
# Finale error, as the MusicXML is the same in all D# cases
self.assertEqual(collectAccidentalDisplayStatus(mStream), ['x', ('G#', None), ('G#', None), 'x', 'x', 'x', 'x', ('B#', None), ('B#', None), ('C#', None), ('F#', None), ('G#', None), ('G#', None), ('C#', None), ('C#', None), ('C#', None), ('C#', None), ('D#', None), ('D#', None), ('C#', None), ('D#', None), 'x', 'x', ('F#', None), ('F#', None), ('G#', None), ('G#', None), ('C#', None), ('C#', None), ('D#', None), ('D#', None), 'x', 'x', 'x', 'x', 'x', 'x', ('F#', None), ('F#', None), ('B#', None), ('B#', None), ('C#', None), ('C#', None)]
)
def testMeasuresAndMakeMeasures(self):
from music21 import converter
s = converter.parse('tinynotation: 2/8 g8 e f g e f g a')
sSub = s.measures(3,3)
self.assertEqual(str(sSub.pitches), "[<music21.pitch.Pitch E4>, <music21.pitch.Pitch F4>]")
#sSub.show()
def testSortAndAutoSort(self):
s = Stream()
s.autoSort = False
n1 = note.Note('A')
n2 = note.Note('B')
s.insert(100, n2) # add 'b' first
s.insert(0, n1) # now n1 has a higher index than n2
self.assertEqual([x.name for x in s], ['B', 'A'])
# try getting sorted
sSorted = s.sorted
# original unchanged
self.assertEqual([x.name for x in s], ['B', 'A'])
# the new Stream is changed
self.assertEqual([x.name for x in sSorted], ['A', 'B'])
# sort in place
s.sort()
self.assertEqual([x.name for x in s], ['A', 'B'])
# test getElements sorting through .notesAndRests w/ autoSort
s = Stream()
s.autoSort = True
n1 = note.Note('A')
n2 = note.Note('B')
s.insert(100, n2) # add 'b' first
s.insert(0, n1) # now n1 (A) has a higher index than n2 (B)
# if we get .notesAndRests, we are getting elements by class, and thus getting
# sorted version
self.assertEqual([x.name for x in s.notesAndRests], ['A', 'B'])
# test getElements sorting through .notesAndRests w/o autoSort
s = Stream()
s.autoSort = False
n1 = note.Note('a')
n2 = note.Note('b')
s.insert(100, n2) # add 'b' first
s.insert(0, n1) # now n1 (A) has a higher index than n2 (B)
self.assertEqual([x.name for x in s.notesAndRests], ['B', 'A'])
# test __getitem__ calls w/ autoSort
s = Stream()
s.autoSort = False
n1 = note.Note('A')
n2 = note.Note('B')
s.insert(100, n2) # add 'b' first
s.insert(0, n1) # now n1 (A) has a higher index than n2 (B)
self.assertEqual(s[0].name, 'B')
self.assertEqual(s[1].name, 'A')
# test __getitem__ calls w autoSort
s = Stream()
s.autoSort = True
n1 = note.Note('a')
n2 = note.Note('b')
s.insert(100, n2) # add 'b' first
s.insert(0, n1) # now n1 (A) has a higher index than n2 (B)
self.assertEqual(s[0].name, 'A')
self.assertEqual(s[1].name, 'B')
# test .elements calls w/ autoSort
s = Stream()
s.autoSort = False
n1 = note.Note('a')
n2 = note.Note('b')
s.insert(100, n2) # add 'b' first
s.insert(0, n1) # now n1 (A) has a higher index than n2 (B)
self.assertEqual(s.elements[0].name, 'B')
self.assertEqual(s.elements[1].name, 'A')
# test .elements calls w autoSort
s = Stream()
s.autoSort = True
n1 = note.Note('a')
n2 = note.Note('b')
s.insert(100, n2) # add 'b' first
s.insert(0, n1) # now n1 (A) has a higher index than n2 (B)
self.assertEqual(s.elements[0].name, 'A')
self.assertEqual(s.elements[1].name, 'B')
# test possibly problematic cases of overlapping parts
# store start time, dur
pairs = [(20, 2), (15, 10), (22,1), (10, 2), (5, 25), (8, 10), (0, 2), (0, 30)]
# with autoSort False
s = Stream()
s.autoSort = False
for o, d in pairs:
n = note.Note()
n.quarterLength = d
s.insert(o, n)
match = []
for n in s.notesAndRests:
match.append((n.offset, n.quarterLength))
self.assertEqual(pairs, match)
# with autoSort True
s = Stream()
s.autoSort = True
for o, d in pairs:
n = note.Note()
n.quarterLength = d
s.insert(o, n)
match = []
for n in s.notesAndRests:
match.append((n.offset, n.quarterLength))
self.assertEqual([(0.0, 2), (0.0, 30), (5.0, 25), (8.0, 10), (10.0, 2), (15.0, 10), (20.0, 2), (22.0, 1.0)], match)
def testMakeChordsBuiltA(self):
from music21 import stream
# test with equal durations
pitchCol = [('A2', 'C2'),
('A#1', 'C-3', 'G5'),
('D3', 'B-1', 'C4', 'D#2')]
# try with different duration assignments; should always get
# the same results
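# this works because makeChords groups notes by shared offset, not by
# duration, so the three insertion points always yield three chords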
for durCol in [[1, 1, 1], [.5, 2, 3], [.25, .25, .5], [6, 6, 8]]:
s = stream.Stream()
o = 0
for i in range(len(pitchCol)):
ql = durCol[i]
for pStr in pitchCol[i]:
n = note.Note(pStr)
n.quarterLength = ql
s.insert(o, n)
o += ql
self.assertEqual(len(s), 9)
self.assertEqual(len(s.getElementsByClass('Chord')), 0)
# do both in place and not in place, compare results
sMod = s.makeChords(inPlace=False)
s.makeChords(inPlace=True)
for sEval in [s, sMod]:
self.assertEqual(len(sEval.getElementsByClass('Chord')), 3)
# make sure we have all the original pitches
for i in range(len(pitchCol)):
match = [p.nameWithOctave for p in
sEval.getElementsByClass('Chord')[i].pitches]
self.assertEqual(match, list(pitchCol[i]))
# print 'post makeChords'
# s.show('t')
#sMod.show('t')
#s.show()
def testMakeChordsBuiltB(self):
from music21 import stream
n1 = note.Note('c2')
n1.quarterLength = 2
n2 = note.Note('d3')
n2.quarterLength = .5
n3 = note.Note('e4')
n3.quarterLength = 2
n4 = note.Note('f5')
n4.quarterLength = .5
s = stream.Stream()
s.insert(0, n1)
s.insert(1, n2) # overlapping, starting after n1 but finishing before
s.insert(2, n3)
s.insert(3, n4) # overlapping, starting after n3 but finishing before
self.assertEqual([e.offset for e in s], [0.0, 1.0, 2.0, 3.0])
# this results in two chords; n2 and n4 are effectively shifted
# to the start of n1 and n3
sMod = s.makeChords(inPlace=False)
s.makeChords(inPlace=True)
for sEval in [s, sMod]:
self.assertEqual(len(sEval.getElementsByClass('Chord')), 2)
self.assertEqual([c.offset for c in sEval], [0.0, 2.0])
# do the same, but reverse the short/long duration relation;
# because the default minimum window is .25, the first and last
# notes are not gathered into chords
n1 = note.Note('c2')
n1.quarterLength = .5
n2 = note.Note('d3')
n2.quarterLength = 1.5
n3 = note.Note('e4')
n3.quarterLength = .5
n4 = note.Note('f5')
n4.quarterLength = 1.5
s = stream.Stream()
s.insert(0, n1)
s.insert(1, n2) # overlapping, starting after n1 but finishing before
s.insert(2, n3)
s.insert(3, n4) # overlapping, starting after n3 but finishing before
#s.makeRests(fillGaps=True)
# this results in two chords; n2 and n4 are effectively shifted
# to the start of n1 and n3
sMod = s.makeChords(inPlace=False)
#sMod.show()
s.makeChords(inPlace=True)
for sEval in [s, sMod]:
# we get three chords, even though only one has more than one pitch
# might change this?
self.assertEqual(len(sEval.getElementsByClass('Chord')), 3)
self.assertEqual([c.offset for c in sEval], [0.0, 0.5, 1.0, 2.5, 3.0] )
def testMakeChordsBuiltC(self):
# test removal of redundant pitches
from music21 import stream
n1 = note.Note('c2')
n1.quarterLength = .5
n2 = note.Note('c2')
n2.quarterLength = .5
n3 = note.Note('g2')
n3.quarterLength = .5
n4 = note.Note('e4')
n4.quarterLength = .5
n5 = note.Note('e4')
n5.quarterLength = .5
n6 = note.Note('f#4')
n6.quarterLength = .5
s1 = stream.Stream()
s1.insert(0, n1)
s1.insert(0, n2)
s1.insert(0, n3)
s1.insert(.5, n4)
s1.insert(.5, n5)
s1.insert(.5, n6)
sMod = s1.makeChords(inPlace=False, removeRedundantPitches=True)
self.assertEqual([p.nameWithOctave for p in sMod.getElementsByClass('Chord')[0].pitches], ['C2', 'G2'])
self.assertEqual([p.nameWithOctave for p in sMod.getElementsByClass('Chord')[1].pitches], ['E4', 'F#4'])
# without redundant pitch gathering
sMod = s1.makeChords(inPlace=False, removeRedundantPitches=False)
self.assertEqual([p.nameWithOctave for p in sMod.getElementsByClass('Chord')[0].pitches], ['C2', 'C2', 'G2'])
self.assertEqual([p.nameWithOctave for p in sMod.getElementsByClass('Chord')[1].pitches], ['E4', 'E4', 'F#4'])
def testMakeChordsBuiltD(self):
# attempt to isolate case
from music21 import stream
p1 = stream.Part()
p1.append([note.Note('G4', quarterLength=2),
note.Note('B4', quarterLength=2),
note.Note('C4', quarterLength=4),
note.Rest(quarterLength=1),
note.Note('C4', quarterLength=1),
note.Note('B4', quarterLength=1),
note.Note('A4', quarterLength=1),
])
p2 = stream.Part()
p2.append([note.Note('A3', quarterLength=4),
note.Note('F3', quarterLength=4),])
p3 = stream.Part()
p3.append([note.Rest(quarterLength=8),
note.Rest(quarterLength=4),
])
s = stream.Score()
s.insert([0, p1])
s.insert([0, p2])
s.insert([0, p3])
post = s.flat.makeChords()
#post.show('t')
self.assertEqual(len(post.getElementsByClass('Rest')), 1)
self.assertEqual(len(post.getElementsByClass('Chord')), 5)
#post.show()
def testMakeChordsImported(self):
from music21 import corpus
s = corpus.parse('bach/bwv66.6')
#s.show()
# using in place to get the stored flat version
sMod = s.flat.makeChords(includePostWindow=False)
self.assertEqual(len(sMod.getElementsByClass('Chord')), 35)
#sMod.show()
self.assertEqual(
[len(c.pitches) for c in sMod.getElementsByClass('Chord')],
[3, 4, 4, 3, 4, 3, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 3, 4, 3, 4, 3, 4, 3, 4, 4, 4, 4, 3, 4, 4, 2, 4, 3, 4, 4])
# when we include the post-window, we get more tones per chord
# but the same number of chords
sMod = s.flat.makeChords(includePostWindow=True)
self.assertEqual(len(sMod.getElementsByClass('Chord')), 35)
self.assertEqual(
[len(c.pitches) for c in sMod.getElementsByClass('Chord')],
[6, 4, 4, 3, 4, 5, 5, 4, 4, 4, 4, 5, 4, 4, 5, 5, 5, 4, 5, 5, 3, 4, 3, 4, 4, 4, 7, 5, 4, 6, 2, 6, 4, 5, 4] )
#sMod.show()
def testGetElementAtOrBeforeBarline(self):
'''
problems with getting elements at or before
when triplets are involved...
'''
from music21 import converter
import os
bugtestFile = os.path.join(common.getSourceFilePath(), 'stream', 'tripletOffsetBugtest.xml')
s = converter.parse(bugtestFile)
p = s.parts[0]
m = p.getElementAtOrBefore(2)
self.assertEqual(m.number, 2)
def testElementsHighestTimeA(self):
'''Test adding elements at the highest time position
'''
n1 = note.Note()
n1.quarterLength = 30
n2 = note.Note()
n2.quarterLength = 20
b1 = bar.Barline()
s = Stream()
s.append(n1)
self.assertEqual(s.highestTime, 30)
self.assertEqual(len(s), 1)
self.assertEqual(s[0], n1)
self.assertEqual(s.index(n1), 0)
self.assertEqual(s[0].activeSite, s)
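# storeAtEnd keeps an element floating at the stream's highestTime, so
# its reported offset should track the end as more notes are appended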
# insert bar in highest time position
s.storeAtEnd(b1)
self.assertEqual(len(s), 2)
self.assertEqual(s[1], b1)
self.assertEqual(s.index(b1), 1)
self.assertEqual(s[1].activeSite, s)
# offset of b1 is at the highest time
self.assertEqual([e.offset for e in s], [0.0, 30.0])
s.append(n2)
self.assertEqual(len(s), 3)
self.assertEqual(s[1], n2)
self.assertEqual(s.index(n2), 1)
self.assertEqual(s[2], b1)
self.assertEqual(s.index(b1), 2)
self.assertEqual(s.highestTime, 50)
# there are now three elements, and the third is the bar
self.assertEqual([e.offset for e in s], [0.0, 30, 50.0])
# get offset by elements
self.assertEqual(s.elementOffset(n1), 0.0)
self.assertEqual(s.elementOffset(b1), 50)
# get elements by offset
found1 = s.getElementsByOffset(0, 40)
self.assertEqual(len(found1.notesAndRests), 2)
# check a window past the notes but within the stream's full span
found2 = s.getElementsByOffset(40, 60)
self.assertEqual(len(found2.notesAndRests), 0)
# found the barline
self.assertEqual(found2[0], b1)
# should get the barline
self.assertEqual(s.getElementAtOrBefore(50), b1)
self.assertEqual(s.getElementAtOrBefore(49), n2)
# can get element after element
self.assertEqual(s.getElementAfterElement(n1), n2)
self.assertEqual(s.getElementAfterElement(n2), b1)
# try to get elements by class
sub1 = s.getElementsByClass('Barline')
self.assertEqual(len(sub1), 1)
# only found item is barline
self.assertEqual(sub1[0], b1)
self.assertEqual([e.offset for e in sub1], [0.0])
# if we append a new element, the end-stored barline should report
# an offset at the end of that element
n3 = note.Note()
n3.quarterLength = 10
sub1.append(n3) # places this before barline
self.assertEqual(sub1[sub1.index(b1)].offset, 10.0)
self.assertEqual([e.offset for e in sub1], [0.0, 10.0])
# try to get elements not of class; only have notes
sub2 = s.getElementsNotOfClass(bar.Barline)
self.assertEqual(len(sub2), 2)
self.assertEqual(len(sub2.notesAndRests), 2)
sub3 = s.getElementsNotOfClass(note.Note)
self.assertEqual(len(sub3), 1)
self.assertEqual(len(sub3.notesAndRests), 0)
# make a copy:
sCopy = copy.deepcopy(s)
self.assertEqual([e.offset for e in sCopy], [0.0, 30, 50.0])
# not equal b/c a deepcopy was made
self.assertEqual(id(sCopy[2]) == id(b1), False)
# can still match class
self.assertEqual(isinstance(sCopy[2], bar.Barline), True)
# create another barline and try to replace
b2 = bar.Barline()
s.replace(b1, b2)
self.assertEqual(id(s[2]), id(b2))
# try to remove elements; the second index is the barline
self.assertEqual(s.pop(2), b2)
self.assertEqual(len(s), 2)
self.assertEqual([e.offset for e in s], [0.0, 30])
# add back again.
s.storeAtEnd(b1)
self.assertEqual([e.offset for e in s], [0.0, 30, 50.0])
# try to remove intermediary elements
self.assertEqual(s.pop(1), n2)
# offset of highest time element has shifted
self.assertEqual([e.offset for e in s], [0.0, 30.0])
# index is now 1
self.assertEqual(s.index(b1), 1)
def testElementsHighestTimeB(self):
'''Test adding elements at the highest time position
'''
n1 = note.Note()
n1.quarterLength = 30
n2 = note.Note()
n2.quarterLength = 20
b1 = bar.Barline()
s = Stream()
s.append(n1)
s.append(n2)
s.storeAtEnd(b1)
self.assertEqual([e.offset for e in s], [0.0, 30.0, 50.0])
# can shift elements, altering all, but only really shifting
# standard elements
s.shiftElements(5)
self.assertEqual([e.offset for e in s], [5.0, 35.0, 55.0])
# got all
found1 = s.extractContext(n2, 30)
self.assertEqual([e.offset for e in found1], [5.0, 35.0, 55.0])
# just after, none before
found1 = s.extractContext(n2, 0, 30)
self.assertEqual([e.offset for e in found1], [35.0, 55.0])
def testElementsHighestTimeC(self):
n1 = note.Note()
n1.quarterLength = 30
n2 = note.Note()
n2.quarterLength = 20
ts1 = meter.TimeSignature('6/8')
b1 = bar.Barline()
c1 = clef.Treble8vaClef()
s = Stream()
s.append(n1)
self.assertEqual([e.offset for e in s], [0.0])
s.storeAtEnd(b1)
s.storeAtEnd(c1)
s.storeAtEnd(ts1)
self.assertEqual([e.offset for e in s], [0.0, 30.0, 30.0, 30.0] )
s.append(n2)
self.assertEqual([e.offset for e in s], [0.0, 30.0, 50.0, 50.0, 50.0] )
# sorting of objects is by class
self.assertEqual([e.classes[0] for e in s], ['Note', 'Note', 'Barline', 'Treble8vaClef', 'TimeSignature'] )
b2 = bar.Barline()
s.storeAtEnd(b2)
self.assertEqual([e.classes[0] for e in s], ['Note', 'Note', 'Barline', 'Barline', 'Treble8vaClef', 'TimeSignature'] )
def testSliceByQuarterLengthsBuilt(self):
from music21 import stream
s = Stream()
n1 = note.Note()
n1.quarterLength = 1
n2 = note.Note()
n2.quarterLength = 2
n3 = note.Note()
n3.quarterLength = .5
n4 = note.Note()
n4.quarterLength = 1.5
for n in [n1,n2,n3,n4]:
s.append(n)
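# total duration is 5.0 QL, so slicing at 0.125 should yield
# 8 + 16 + 4 + 12 = 40 tied notes, as checked below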
post = s.sliceByQuarterLengths(.125, inPlace=False)
self.assertEqual([n.tie.type for n in post.notesAndRests], ['start', 'continue', 'continue', 'continue', 'continue', 'continue', 'continue', 'stop', 'start', 'continue', 'continue', 'continue', 'continue', 'continue', 'continue', 'continue', 'continue', 'continue', 'continue', 'continue', 'continue', 'continue', 'continue', 'stop', 'start', 'continue', 'continue', 'stop', 'start', 'continue', 'continue', 'continue', 'continue', 'continue', 'continue', 'continue', 'continue', 'continue', 'continue', 'stop'] )
post = s.sliceByQuarterLengths(.25, inPlace=False)
self.assertEqual([n.tie.type for n in post.notesAndRests], ['start', 'continue', 'continue', 'stop', 'start', 'continue', 'continue', 'continue', 'continue', 'continue', 'continue', 'stop', 'start', 'stop', 'start', 'continue', 'continue', 'continue', 'continue', 'stop'] )
post = s.sliceByQuarterLengths(.5, inPlace=False)
self.assertEqual([n.tie == None for n in post.notesAndRests], [False, False, False, False, False, False, True, False, False, False] )
# cannot map .3333 into .5, so this raises an exception
self.assertRaises(stream.StreamException, lambda: s.sliceByQuarterLengths(1/3., inPlace=False))
post = s.sliceByQuarterLengths(1/6., inPlace=False)
self.assertEqual([n.tie.type for n in post.notesAndRests], ['start', 'continue', 'continue', 'continue', 'continue', 'stop', 'start', 'continue', 'continue', 'continue', 'continue', 'continue', 'continue', 'continue', 'continue', 'continue', 'continue', 'stop', 'start', 'continue', 'stop', 'start', 'continue', 'continue', 'continue', 'continue', 'continue', 'continue', 'continue', 'stop'])
#post.show()
# try to slice just a target
post = s.sliceByQuarterLengths(.125, target=n2, inPlace=False)
self.assertEqual([n.tie == None for n in post.notesAndRests], [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, True, True] )
#post.show()
# test case where we have an existing tied note in a multi-Measure structure that we do not want to break
s = Stream()
n1 = note.Note()
n1.quarterLength = 8
n2 = note.Note()
n2.quarterLength = 8
n3 = note.Note()
n3.quarterLength = 8
s.append(n1)
s.append(n2)
s.append(n3)
self.assertEqual(s.highestTime, 24)
sMeasures = s.makeMeasures()
sMeasures.makeTies(inPlace=True)
self.assertEqual([n.tie.type for n in sMeasures.flat.notesAndRests],
['start', 'stop', 'start', 'stop', 'start', 'stop'] )
# this shows that the previous ties across the bar line are maintained
# even after slicing
sMeasures.sliceByQuarterLengths([.5], inPlace=True)
self.assertEqual([n.tie.type for n in sMeasures.flat.notesAndRests],
['start', 'continue', 'continue', 'continue', 'continue', 'continue', 'continue', 'continue', 'continue', 'continue', 'continue', 'continue', 'continue', 'continue', 'continue', 'stop', 'start', 'continue', 'continue', 'continue', 'continue', 'continue', 'continue', 'continue', 'continue', 'continue', 'continue', 'continue', 'continue', 'continue', 'continue', 'stop', 'start', 'continue', 'continue', 'continue', 'continue', 'continue', 'continue', 'continue', 'continue', 'continue', 'continue', 'continue', 'continue', 'continue', 'continue', 'stop'] )
#sMeasures.show()
s = Stream()
n1 = note.Note('c#')
n1.quarterLength = 1
n2 = note.Note('d-')
n2.quarterLength = 2
n3 = note.Note('f#')
n3.quarterLength = .5
n4 = note.Note('g#')
n4.quarterLength = 1.5
for n in [n1,n2,n3,n4]:
s.append(n)
post = s.sliceByQuarterLengths(.125, inPlace=False)
#post.show()
self.assertEqual([n.tie == None for n in post.notesAndRests], [False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False])
s = Stream()
n1 = note.Note()
n1.quarterLength = .25
n2 = note.Note()
n2.quarterLength = .5
n3 = note.Note()
n3.quarterLength = 1
n4 = note.Note()
n4.quarterLength = 1.5
for n in [n1,n2,n3,n4]:
s.append(n)
post = s.sliceByQuarterLengths(.5, inPlace=False)
self.assertEqual([n.tie == None for n in post.notesAndRests], [True, True, False, False, False, False, False])
def testSliceByQuarterLengthsImported(self):
from music21 import corpus
sSrc = corpus.parse('bwv66.6')
s = copy.deepcopy(sSrc)
for p in s.parts:
p.sliceByQuarterLengths(.5, inPlace=True, addTies=False)
p.makeBeams(inPlace=True)
self.assertEqual(len(s.parts[0].flat.notesAndRests), 72)
self.assertEqual(len(s.parts[1].flat.notesAndRests), 72)
self.assertEqual(len(s.parts[2].flat.notesAndRests), 72)
self.assertEqual(len(s.parts[3].flat.notesAndRests), 72)
s = copy.deepcopy(sSrc)
for p in s.parts:
p.sliceByQuarterLengths(.25, inPlace=True, addTies=False)
p.makeBeams(inPlace=True)
self.assertEqual(len(s.parts[0].flat.notesAndRests), 144)
self.assertEqual(len(s.parts[1].flat.notesAndRests), 144)
self.assertEqual(len(s.parts[2].flat.notesAndRests), 144)
self.assertEqual(len(s.parts[3].flat.notesAndRests), 144)
# test applying to a complete score; works fine
s = copy.deepcopy(sSrc)
s.sliceByQuarterLengths(.5, inPlace=True, addTies=False)
#s.show()
self.assertEqual(len(s.parts[0].flat.notesAndRests), 72)
self.assertEqual(len(s.parts[1].flat.notesAndRests), 72)
self.assertEqual(len(s.parts[2].flat.notesAndRests), 72)
self.assertEqual(len(s.parts[3].flat.notesAndRests), 72)
def testSliceByGreatestDivisorBuilt(self):
s = Stream()
n1 = note.Note()
n1.quarterLength = 1.75
n2 = note.Note()
n2.quarterLength = 2
n3 = note.Note()
n3.quarterLength = .5
n4 = note.Note()
n4.quarterLength = 1.5
for n in [n1,n2,n3,n4]:
s.append(n)
post = s.sliceByGreatestDivisor(inPlace=False)
self.assertEqual(len(post.flat.notesAndRests), 23)
self.assertEqual([n.tie.type for n in post.notesAndRests], ['start', 'continue', 'continue', 'continue', 'continue', 'continue', 'stop', 'start', 'continue', 'continue', 'continue', 'continue', 'continue', 'continue', 'stop', 'start', 'stop', 'start', 'continue', 'continue', 'continue', 'continue', 'stop'])
s = Stream()
n1 = note.Note()
n1.quarterLength = 2
n2 = note.Note()
n2.quarterLength = 1/3.
n3 = note.Note()
n3.quarterLength = .5
n4 = note.Note()
n4.quarterLength = 1.5
for n in [n1,n2,n3,n4]:
s.append(n)
post = s.sliceByGreatestDivisor(inPlace=False)
self.assertEqual(len(post.flat.notesAndRests), 26)
self.assertEqual([n.tie.type for n in post.notesAndRests], ['start', 'continue', 'continue', 'continue', 'continue', 'continue', 'continue', 'continue', 'continue', 'continue', 'continue', 'stop', 'start', 'stop', 'start', 'continue', 'stop', 'start', 'continue', 'continue', 'continue', 'continue', 'continue', 'continue', 'continue', 'stop'] )
def testSliceByGreatestDivisorImported(self):
from music21 import corpus
sSrc = corpus.parse('bwv66.6')
s = copy.deepcopy(sSrc)
for p in s.parts:
p.sliceByGreatestDivisor(inPlace=True, addTies=True)
#p.makeBeams(inPlace=True) # uncomment when debugging, otherwise just slows down the test
#s.show()
# parts have different numbers of notes, as splitting is done on
# a note-by-note basis
self.assertEqual(len(s.parts[0].flat.notesAndRests), 44)
self.assertEqual(len(s.parts[1].flat.notesAndRests), 59)
self.assertEqual(len(s.parts[2].flat.notesAndRests), 61)
self.assertEqual(len(s.parts[3].flat.notesAndRests), 53)
s = copy.deepcopy(sSrc)
s.sliceByGreatestDivisor(inPlace=True, addTies=True)
#s.flat.makeChords().show()
#s.show()
def testSliceAtOffsetsSimple(self):
s = Stream()
n = note.Note()
n.quarterLength = 4
s.append(n)
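# slicing a single 4.0 QL note at offsets 1, 2, and 3 should leave
# four 1.0 QL segments starting on successive beats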
unused_post = s.sliceAtOffsets([1, 2, 3], inPlace=True)
a = [(e.offset, e.quarterLength) for e in s]
b = [(0.0, 1.0), (1.0, 1.0), (2.0, 1.0), (3.0, 1.0)]
self.assertEqual(a, b)
def testSliceAtOffsetsBuilt(self):
from music21 import stream
s = stream.Stream()
for p, ql in [('d2',4)]:
n = note.Note(p)
n.quarterLength = ql
s.append(n)
self.assertEqual([e.offset for e in s], [0.0])
s1 = s.sliceAtOffsets([0.5, 1, 1.5, 2, 2.5, 3, 3.5], inPlace=False)
self.assertEqual([(e.offset, e.quarterLength) for e in s1], [(0.0, 0.5), (0.5, 0.5), (1.0, 0.5), (1.5, 0.5), (2.0, 0.5), (2.5, 0.5), (3.0, 0.5), (3.5, 0.5)] )
s1 = s.sliceAtOffsets([.5], inPlace=False)
self.assertEqual([(e.offset, e.quarterLength) for e in s1], [(0.0, 0.5), (0.5, 3.5)])
s = stream.Stream()
for p, ql in [('a2',1.5), ('a2',1.5), ('a2',1.5)]:
n = note.Note(p)
n.quarterLength = ql
s.append(n)
self.assertEqual([e.offset for e in s], [0.0, 1.5, 3.0])
s1 = s.sliceAtOffsets([.5], inPlace=False)
self.assertEqual([e.offset for e in s1], [0.0, 0.5, 1.5, 3.0])
s1.sliceAtOffsets([1.0, 2.5], inPlace=True)
self.assertEqual([e.offset for e in s1], [0.0, 0.5, 1.0, 1.5, 2.5, 3.0])
s1.sliceAtOffsets([3.0, 2.0, 3.5, 4.0], inPlace=True)
self.assertEqual([e.offset for e in s1], [0.0, 0.5, 1.0, 1.5, 2.0, 2.5, 3.0, 3.5, 4.0])
self.assertEqual([e.quarterLength for e in s1], [0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5])
def testSliceAtOffsetsImported(self):
from music21 import corpus
sSrc = corpus.parse('bwv66.6')
post = sSrc.parts[0].flat.sliceAtOffsets([.25, 1.25, 3.25])
self.assertEqual([e.offset for e in post], [0.0, 0.0, 0.0, 0.0, 0.0, 0.25, 0.5, 1.0, 1.25, 2.0, 3.0, 3.25, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 9.0, 9.5, 10.0, 11.0, 12.0, 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 19.0, 20.0, 21.0, 21.0, 22.0, 23.0, 24.0, 25.0, 26.0, 27.0, 29.0, 31.0, 32.0, 33.0, 34.0, 34.5, 35.0, 36.0] )
# will also work on a measured part
post = sSrc.parts[0].sliceAtOffsets([.25, 1.25, 3.25, 35.125])
self.assertEqual([e.offset for e in
post.getElementsByClass('Measure')[0].notesAndRests], [0.0, 0.25, 0.5])
self.assertEqual([e.offset for e in
post.getElementsByClass('Measure')[1].notesAndRests], [0.0, 0.25, 1.0, 2.0, 2.25, 3.0])
# check for alteration in last measure
self.assertEqual([e.offset for e in
post.getElementsByClass('Measure')[-1].notesAndRests], [0.0, 1.0, 1.5, 2.0, 2.125] )
def testSliceByBeatBuilt(self):
from music21 import stream
s = stream.Stream()
ts1 = meter.TimeSignature('3/4')
s.insert(0, ts1)
for p, ql in [('d2',3)]:
n = note.Note(p)
n.quarterLength = ql
s.append(n)
# have time signature and one note
self.assertEqual([e.offset for e in s], [0.0, 0.0])
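# under 3/4 the 3.0 QL note spans three quarter-note beats, so
# sliceByBeat should cut it into three 1.0 QL pieces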
s1 = s.sliceByBeat()
self.assertEqual([(e.offset, e.quarterLength) for e in s1.notesAndRests], [(0.0, 1.0), (1.0, 1.0), (2.0, 1.0)] )
# replace old ts with a new
s.remove(ts1)
ts2 = meter.TimeSignature('6/8')
s.insert(0, ts2)
s1 = s.sliceByBeat()
self.assertEqual([(e.offset, e.quarterLength) for e in s1.notesAndRests], [(0.0, 1.5), (1.5, 1.5)] )
def testSliceByBeatImported(self):
from music21 import corpus
sSrc = corpus.parse('bwv66.6')
post = sSrc.parts[0].sliceByBeat()
self.assertEqual([e.offset for e in post.flat.notesAndRests], [0.0, 0.5, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 9.5, 10.0, 11.0, 12.0, 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 19.0, 20.0, 21.0, 22.0, 23.0, 24.0, 25.0, 26.0, 27.0, 28.0, 29.0, 30.0, 31.0, 32.0, 33.0, 34.0, 34.5, 35.0])
#post.show()
def testChordifyImported(self):
from music21 import corpus
s = corpus.parse('luca/gloria')
#s.show()
post = s.measures(0, 20, gatherSpanners=False)
# somehow, this is doubling measures
#post.show()
self.assertEqual([e.offset for e in post.parts[0].flat.notesAndRests], [0.0, 3.0, 3.5, 4.5, 5.0, 6.0, 6.5, 7.5, 8.5, 9.0, 10.5, 12.0, 15.0, 16.5, 17.5, 18.0, 18.5, 19.0, 19.5, 20.0, 20.5, 21.0, 22.0, 23.0, 24.0, 25.0, 26.0, 27.0, 30.0, 33.0, 34.5, 35.5, 36.0, 37.5, 38.0, 39.0, 40.0, 41.0, 42.0, 43.5, 45.0, 45.5, 46.5, 47.0, 48.0, 49.5, 51.0, 51.5, 52.0, 52.5, 53.0, 53.5, 54.0, 55.5, 57.0, 58.5])
post = post.chordify()
#post.show('t')
#post.show()
self.assertEqual([e.offset for e in post.flat.notes], [0.0, 3.0, 3.5, 4.5, 5.0, 5.5, 6.0, 6.5, 7.5, 8.5, 9.0, 10.5, 12.0, 15.0, 16.5, 17.5, 18.0, 18.5, 19.0, 19.5, 20.0, 20.5, 21.0, 21.5, 22.0, 22.5, 23.0, 23.5, 24.0, 24.5, 25.0, 25.5, 26.0, 26.5, 27.0, 30.0, 33.0, 34.5, 35.5, 36.0, 37.5, 38.0, 39.0, 40.0, 40.5, 41.0, 42.0, 43.5, 45.0, 45.5, 46.0, 46.5, 47.0, 47.5, 48.0, 49.5, 51.0, 51.5, 52.0, 52.5, 53.0, 53.5, 54.0, 54.5, 55.0, 55.5, 56.0, 56.5, 57.0, 58.5, 59.5])
self.assertEqual(len(post.flat.getElementsByClass('Chord')), 71) # Careful! one version of the caching is screwing up m. 20 which definitely should not have rests in it -- was creating 69 notes, not 71.
def testChordifyRests(self):
# test that chordify does not choke on rests
from music21 import stream
p1 = stream.Part()
for p, ql in [(None, 2), ('d2',2), (None, 2), ('e3',2), ('f3', 2)]:
if p is None:
n = note.Rest()
else:
n = note.Note(p)
n.quarterLength = ql
p1.append(n)
p2 = stream.Part()
for p, ql in [(None, 2), ('c#3',1), ('d#3',1), (None, 2), ('e-5',2), (None, 2)]:
if p is None:
n = note.Rest()
else:
n = note.Note(p)
n.quarterLength = ql
p2.append(n)
self.assertEqual([e.offset for e in p1], [0.0, 2.0, 4.0, 6.0, 8.0])
self.assertEqual([e.offset for e in p2], [0.0, 2.0, 3.0, 4.0, 6.0, 8.0])
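# note the overlap at offsets 2.0-4.0: p1 holds d2 for 2 QL while p2
# sounds c#3 then d#3, which is what produces the two mixed chords
# asserted at the end of this test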
score = stream.Score()
score.insert(0, p1)
score.insert(0, p2)
# parts retain their characteristics
# rests are recast
scoreChords = score.makeChords()
#scoreChords.show()
self.assertEqual(len(scoreChords.parts[0].flat), 5)
self.assertEqual(len(scoreChords.parts[0].flat.getElementsByClass(
'Chord')), 3)
self.assertEqual(len(scoreChords.parts[0].flat.getElementsByClass(
'Rest')), 2)
self.assertEqual(len(scoreChords.parts[1].flat), 6)
self.assertEqual(len(scoreChords.parts[1].flat.getElementsByClass(
'Chord')), 3)
self.assertEqual(len(scoreChords.parts[1].flat.getElementsByClass(
'Rest')), 3)
# calling this on a flattened version
scoreFlat = score.flat
scoreChords = scoreFlat.makeChords()
self.assertEqual(len(scoreChords.flat.getElementsByClass(
'Chord')), 3)
self.assertEqual(len(scoreChords.flat.getElementsByClass(
'Rest')), 2)
scoreChordify = score.chordify()
self.assertEqual(len(scoreChordify.flat.getElementsByClass(
'Chord')), 4)
self.assertEqual(len(scoreChordify.flat.getElementsByClass(
'Rest')), 2)
self.assertEqual(str(scoreChordify.getElementsByClass(
'Chord')[0].pitches), '(<music21.pitch.Pitch D2>, <music21.pitch.Pitch C#3>)')
self.assertEqual(str(scoreChordify.getElementsByClass(
'Chord')[1].pitches), '(<music21.pitch.Pitch D2>, <music21.pitch.Pitch D#3>)')
def testChordifyA(self):
from music21 import stream, expressions
p1 = stream.Part()
p1.insert(0, note.Note(quarterLength=12.0))
p1.insert(0.25, expressions.TextExpression('test'))
self.assertEqual(p1.highestTime, 12.0)
p2 = stream.Part()
p2.repeatAppend(note.Note('g4'), 12)
s = stream.Score()
s.insert(0, p1)
s.insert(0, p2)
post = s.chordify()
self.assertEqual(len(post.getElementsByClass('Chord')), 12)
self.assertEqual(str(post.getElementsByClass('Chord')[0].pitches),
'(<music21.pitch.Pitch C4>, <music21.pitch.Pitch G4>)')
p1 = stream.Part()
p1.insert(0, note.Note(quarterLength=12.0))
p1.insert(0.25, expressions.TextExpression('test'))
self.assertEqual(p1.highestTime, 12.0)
p2 = stream.Part()
p2.repeatAppend(note.Note('g4', quarterLength=6.0), 2)
#p2.repeatAppend(note.Note('g4'), 12)
s = stream.Score()
s.insert(0, p1)
s.insert(0, p2)
post = s.chordify()
self.assertEqual(len(post.getElementsByClass('Chord')), 2)
self.assertEqual(str(post.getElementsByClass('Chord')[0].pitches),
'(<music21.pitch.Pitch C4>, <music21.pitch.Pitch G4>)')
#post.show()
#s.show()
def testChordifyB(self):
from music21 import stream
p1 = stream.Part()
m1a = stream.Measure()
m1a.timeSignature = meter.TimeSignature('4/4')
m1a.insert(0, note.Note())
m1a.padAsAnacrusis()
self.assertEqual(m1a.paddingLeft, 3.0)
#m1a.paddingLeft = 3.0 # a quarter pickup
m2a = stream.Measure()
m2a.repeatAppend(note.Note(), 4)
p1.append([m1a, m2a])
p2 = stream.Part()
m1b = stream.Measure()
m1b.timeSignature = meter.TimeSignature('4/4')
m1b.repeatAppend(note.Rest(), 1)
m1b.padAsAnacrusis()
self.assertEqual(m1b.paddingLeft, 3.0)
m2b = stream.Measure()
m2b.repeatAppend(note.Note('g4'), 4)
p2.append([m1b, m2b])
s = stream.Score()
s.insert(0, p1)
s.insert(0, p2)
#s.show()
post = s.chordify()
self.assertEqual(len(post.getElementsByClass('Measure')), 2)
m1 = post.getElementsByClass('Measure')[0]
# test that padding has been maintained
self.assertEqual(m1.paddingLeft, 3.0)
#post.show()
def testChordifyC(self):
from music21 import corpus
s = corpus.parse('schoenberg/opus19/movement6')
#s.show()
m1 = s.parts[0].getElementsByClass('Measure')[0]
self.assertEqual(m1.highestTime, 1.0)
self.assertEqual(m1.paddingLeft, 3.0)
self.assertEqual(m1.duration.quarterLength, 1.0)
self.assertEqual([e.offset for e in m1.notes], [0.0])
#s.parts[0].show()
post = s.chordify()
self.assertEqual(post.getElementsByClass('Measure')[0].paddingLeft, 3.0)
#self.assertEqual(len(post.flat), 3)
#post.show()
# make sure we do not have any voices after chordifying
match = []
for m in post.getElementsByClass('Measure'):
self.assertEqual(m.hasVoices(), False)
match.append(len(m.pitches))
self.assertEqual(match, [3, 9, 9, 25, 25, 21, 12, 6, 21, 29])
self.assertEqual(len(post.flat.getElementsByClass('Rest')), 4)
def testChordifyD(self):
from music21 import stream
# test on a Stream of Streams.
s1 = stream.Stream()
s1.repeatAppend(note.Note(quarterLength=3), 4)
s2 = stream.Stream()
s2.repeatAppend(note.Note('g4', quarterLength=2), 6)
s3 = stream.Stream()
s3.insert(0, s1)
s3.insert(0, s2)
post = s3.chordify()
self.assertEqual(len(post.getElementsByClass('Chord')), 8)
def testChordifyE(self):
from music21 import stream
s1 = stream.Stream()
m1 = stream.Measure()
v1 = stream.Voice()
v1.repeatAppend(note.Note('g4', quarterLength=1.5), 3)
v2 = stream.Voice()
v2.repeatAppend(note.Note(quarterLength=1), 6)
m1.insert(0, v1)
m1.insert(0, v2)
#m1.timeSignature = m1.flat.bestTimeSignature()
#self.assertEqual(str(m1.timeSignature), '')
s1.append(m1)
#s1.show()
post = s1.chordify()
#post.show()
self.assertEqual(len(post.flat.getElementsByClass('Chord')), 8)
def testOpusSearch(self):
from music21 import corpus
import re
o = corpus.parse('essenFolksong/erk5')
s = o.getScoreByTitle('blauen')
self.assertEqual(s.metadata.title, 'Ich sach mir einen blauen Storchen')
s = o.getScoreByTitle('pfal.gr.f')
self.assertEqual(s.metadata.title, 'Es fuhr sich ein Pfalzgraf')
s = o.getScoreByTitle(re.compile('Pfal(.*)'))
self.assertEqual(s.metadata.title, 'Es fuhr sich ein Pfalzgraf')
def testActiveSiteMangling(self):
s1 = Stream()
s2 = Stream()
s2.append(s1)
self.assertEqual(s1.activeSite, s2)
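# semiFlat is a flattened view that also keeps the container Streams
# themselves as elements; building it should not rebind activeSite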
junk = s1.semiFlat
self.assertEqual(s1.activeSite, s2)
junk = s1.flat # the order of these two calls ensures that _getFlatFromSemiflat is called
self.assertEqual(s1.activeSite, s2)
# this works fine
junk = s2.flat
self.assertEqual(s1.activeSite, s2)
# this was the key problem: getting the semiFlat of the activeSite
# loses the activeSite of the sub-stream; this is fixed by inserting
# the sub-Stream with setActiveSite=False
junk = s2.semiFlat
self.assertEqual(s1.activeSite, s2)
# these tests prove that getting a semiFlat stream does not change the
# activeSite
junk = s1.sites.getObjByClass(meter.TimeSignature)
self.assertEqual(s1.activeSite, s2)
junk = s1.sites.getObjByClass(clef.Clef)
self.assertEqual(s1.activeSite, s2)
junk = s1.getContextByClass('Clef')
self.assertEqual(s1.activeSite, s2)
def testGetElementsByContextStream(self):
from music21 import corpus
s = corpus.parse('bwv66.6')
for p in s.parts:
for m in p.getElementsByClass('Measure'):
post = m.getContextByClass(clef.Clef)
self.assertEqual(isinstance(post, clef.Clef), True)
post = m.getContextByClass(meter.TimeSignature)
self.assertEqual(isinstance(post, meter.TimeSignature), True)
post = m.getContextByClass(key.KeySignature)
self.assertEqual(isinstance(post, key.KeySignature), True)
def testVoicesA(self):
v1 = Voice()
n1 = note.Note('d5')
n1.quarterLength = .5
v1.repeatAppend(n1, 8)
v2 = Voice()
n2 = note.Note('c4')
n2.quarterLength = 1
v2.repeatAppend(n2, 4)
s = Measure()
s.insert(0, v1)
s.insert(0, v2)
# test allocating streams and assigning indices
oMap = s.offsetMap
oMapStr = "[\n" # construct string from dict in fixed order...
for ob in oMap:
oMapStr += "{'voiceIndex': " + str(ob.voiceIndex) + ", 'element': " + str(ob.element) + ", 'endTime': " + str(ob.endTime) + ", 'offset': " + str(ob.offset) + "},\n"
oMapStr += "]\n"
#print oMapStr
self.assertEqual(oMapStr,
'''[
{'voiceIndex': 0, 'element': <music21.note.Note D>, 'endTime': 0.5, 'offset': 0.0},
{'voiceIndex': 0, 'element': <music21.note.Note D>, 'endTime': 1.0, 'offset': 0.5},
{'voiceIndex': 0, 'element': <music21.note.Note D>, 'endTime': 1.5, 'offset': 1.0},
{'voiceIndex': 0, 'element': <music21.note.Note D>, 'endTime': 2.0, 'offset': 1.5},
{'voiceIndex': 0, 'element': <music21.note.Note D>, 'endTime': 2.5, 'offset': 2.0},
{'voiceIndex': 0, 'element': <music21.note.Note D>, 'endTime': 3.0, 'offset': 2.5},
{'voiceIndex': 0, 'element': <music21.note.Note D>, 'endTime': 3.5, 'offset': 3.0},
{'voiceIndex': 0, 'element': <music21.note.Note D>, 'endTime': 4.0, 'offset': 3.5},
{'voiceIndex': 1, 'element': <music21.note.Note C>, 'endTime': 1.0, 'offset': 0.0},
{'voiceIndex': 1, 'element': <music21.note.Note C>, 'endTime': 2.0, 'offset': 1.0},
{'voiceIndex': 1, 'element': <music21.note.Note C>, 'endTime': 3.0, 'offset': 2.0},
{'voiceIndex': 1, 'element': <music21.note.Note C>, 'endTime': 4.0, 'offset': 3.0},
]
''')
oMeasures = Part()
oMeasures.insert(0, s)
self.assertEqual(len(oMeasures[0].voices), 2)
self.assertEqual([e.offset for e in oMeasures[0].voices[0]], [0.0, 0.5, 1.0, 1.5, 2.0, 2.5, 3.0, 3.5])
self.assertEqual([e.offset for e in oMeasures[0].voices[1]], [0.0, 1.0, 2.0, 3.0])
GEX = m21ToXml.GeneralObjectExporter()
unused_mx = GEX.parse(s).decode('utf-8')
# try version longer than 1 measure, more than 2 voices
v1 = Voice()
n1 = note.Note('c5')
n1.quarterLength = .5
v1.repeatAppend(n1, 32)
v2 = Voice()
n2 = note.Note('c4')
n2.quarterLength = 1
v2.repeatAppend(n2, 16)
v3 = Voice()
n3 = note.Note('c3')
n3.quarterLength = .25
v3.repeatAppend(n3, 64)
v4 = Voice()
n4 = note.Note('c2')
n4.quarterLength = 4
v4.repeatAppend(n4, 4)
s = Part()
s.insert(0, v1)
s.insert(0, v2)
s.insert(0, v3)
s.insert(0, v4)
oMeasures = s.makeMeasures()
# each measure has the same number of voices
for i in range(3):
self.assertEqual(len(oMeasures[i].voices), 4)
# each measure has the same total number of notes and rests
for i in range(3):
self.assertEqual(len(oMeasures[i].flat.notesAndRests), 29)
# each measure has the same number of notes in each voice
for i in range(3):
self.assertEqual(len(oMeasures[i].voices[0].notesAndRests), 8)
self.assertEqual(len(oMeasures[i].voices[1].notesAndRests), 4)
self.assertEqual(len(oMeasures[i].voices[2].notesAndRests), 16)
self.assertEqual(len(oMeasures[i].voices[3].notesAndRests), 1)
GEX = m21ToXml.GeneralObjectExporter()
unused_mx = GEX.parse(oMeasures).decode('utf-8')
#s.show()
def testVoicesB(self):
# make sure makeNotation retains voices in every generated measure
from music21 import stream
v1 = stream.Voice()
n1 = note.Note('c5')
n1.quarterLength = .5
v1.repeatAppend(n1, 27)
v2 = stream.Voice()
n2 = note.Note('c4')
n2.quarterLength = 3
v2.repeatAppend(n2, 6)
v3 = stream.Voice()
n3 = note.Note('c3')
n3.quarterLength = 8
v3.repeatAppend(n3, 4)
s = stream.Stream()
s.insert(0, v1)
s.insert(0, v2)
s.insert(0, v3)
sPost = s.makeNotation()
# voices are retained for all measures after make notation
self.assertEqual(len(sPost.getElementsByClass('Measure')), 8)
self.assertEqual(len(sPost.getElementsByClass('Measure')[0].voices), 3)
self.assertEqual(len(sPost.getElementsByClass('Measure')[1].voices), 3)
self.assertEqual(len(sPost.getElementsByClass('Measure')[5].voices), 3)
self.assertEqual(len(sPost.getElementsByClass('Measure')[7].voices), 3)
#s.show()
def testVoicesC(self):
from music21 import stream
v1 = stream.Voice()
n1 = note.Note('c5')
n1.quarterLength = .25
v1.repeatInsert(n1, [2, 4.5, 7.25, 11.75])
v2 = stream.Voice()
n2 = note.Note('c4')
n2.quarterLength = .25
v2.repeatInsert(n2, [.25, 3.75, 5.5, 13.75])
s = stream.Stream()
s.insert(0, v1)
s.insert(0, v2)
sPost = s.makeRests(fillGaps=True, inPlace=False)
self.assertEqual(str([n for n in sPost.voices[0].notesAndRests]), '[<music21.note.Rest rest>, <music21.note.Note C>, <music21.note.Rest rest>, <music21.note.Note C>, <music21.note.Rest rest>, <music21.note.Note C>, <music21.note.Rest rest>, <music21.note.Note C>, <music21.note.Rest rest>]')
self.assertEqual(str([n for n in sPost.voices[1].notesAndRests]), '[<music21.note.Rest rest>, <music21.note.Note C>, <music21.note.Rest rest>, <music21.note.Note C>, <music21.note.Rest rest>, <music21.note.Note C>, <music21.note.Rest rest>, <music21.note.Note C>]')
#sPost.show()
def testPartsToVoicesA(self):
from music21 import corpus
s0 = corpus.parse('bwv66.6')
#s.show()
s1 = s0.partsToVoices(2)
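# partsToVoices(2) merges each consecutive pair of the chorale's four
# parts into a single part holding two voices per measure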
#s1.show()
#s1.show('t')
self.assertEqual(len(s1.parts), 2)
p1 = s1.parts[0]
self.assertEqual(len(p1.flat.getElementsByClass('Clef')), 1)
#p1.show('t')
# look at individual measure; check counts; these should not
# change after measure extraction
m1Raw = p1.getElementsByClass('Measure')[1]
#environLocal.printDebug(['m1Raw', m1Raw])
self.assertEqual(len(m1Raw.flat), 8)
#m1Raw.show('t')
m2Raw = p1.getElementsByClass('Measure')[2]
#environLocal.printDebug(['m2Raw', m2Raw])
self.assertEqual(len(m2Raw.flat), 9)
# get a measure from this part
# NOTE: we no longer get Clef here, as we return clefs in the
# Part outside of a Measure when using measures()
#m1 = p1.measure(2)
#self.assertEqual(len(m1.flat.getElementsByClass('Clef')), 1)
# look at individual measure; check counts; these should not
# change after measure extraction
m1Raw = p1.getElementsByClass('Measure')[1]
#environLocal.printDebug(['m1Raw', m1Raw])
self.assertEqual(len(m1Raw.flat), 8)
#m1Raw.show('t')
m2Raw = p1.getElementsByClass('Measure')[2]
#environLocal.printDebug(['m2Raw', m2Raw])
self.assertEqual(len(m2Raw.flat), 9)
#m2Raw.show('t')
#self.assertEqual(len(m1.flat.getElementsByClass('Clef')), 1)
ex1 = p1.measures(1,3)
self.assertEqual(len(ex1.flat.getElementsByClass('Clef')), 1)
#ex1.show()
for p in s1.parts:
# need to look in measures to get at voices
self.assertEqual(len(p.getElementsByClass('Measure')[0].voices), 2)
self.assertEqual(len(p.measure(2).voices), 2)
self.assertEqual(len(p.measures(
1,3).getElementsByClass('Measure')[2].voices), 2)
#s1.show()
#p1.show()
def testPartsToVoicesB(self):
from music21 import corpus
# this work parses into three parts
s0 = corpus.parse('corelli/opus3no1/1grave')
self.assertEqual(len(s0.parts), 3)
s1 = s0.partsToVoices(2, permitOneVoicePerPart=True)
self.assertEqual(len(s1.parts), 2)
self.assertEqual(len(s1.parts[0].getElementsByClass(
'Measure')[0].voices), 2)
self.assertEqual(len(s1.parts[1].getElementsByClass(
'Measure')[0].voices), 1)
#s1.show()
# s0 = corpus.parse('hwv56', '1-05')
# # can use index values
# s2 = s0.partsToVoices(([0,1], [2,4], 3), permitOneVoicePerPart=True)
# self.assertEqual(len(s2.parts), 3)
# self.assertEqual(len(s2.parts[0].getElementsByClass(
# 'Measure')[0].voices), 2)
# self.assertEqual(len(s2.parts[1].getElementsByClass(
# 'Measure')[0].voices), 2)
# self.assertEqual(len(s2.parts[2].getElementsByClass(
# 'Measure')[0].voices), 1)
#
# s2 = s0.partsToVoices((['Violino I','Violino II'], ['Viola','Bassi'], ['Basso']), permitOneVoicePerPart=True)
# self.assertEqual(len(s2.parts), 3)
# self.assertEqual(len(s2.parts[0].getElementsByClass(
# 'Measure')[0].voices), 2)
# self.assertEqual(len(s2.parts[1].getElementsByClass(
# 'Measure')[0].voices), 2)
# self.assertEqual(len(s2.parts[2].getElementsByClass(
# 'Measure')[0].voices), 1)
#
#
# # this will keep the voice part unaltered
# s2 = s0.partsToVoices((['Violino I','Violino II'], ['Viola','Bassi'], 'Basso'), permitOneVoicePerPart=False)
# self.assertEqual(len(s2.parts), 3)
# self.assertEqual(len(s2.parts[0].getElementsByClass(
# 'Measure')[0].voices), 2)
# self.assertEqual(len(s2.parts[1].getElementsByClass(
# 'Measure')[0].voices), 2)
# self.assertEqual(s2.parts[2].getElementsByClass(
# 'Measure')[0].hasVoices(), False)
#
#
# # mm 16-19 are a good examples
# s1 = corpus.parse('hwv56', '1-05').measures(16, 19)
# s2 = s1.partsToVoices((['Violino I','Violino II'], ['Viola','Bassi'], 'Basso'))
# #s2.show()
#
# self.assertEqual(len(s2.parts), 3)
# self.assertEqual(len(s2.parts[0].getElementsByClass(
# 'Measure')[0].voices), 2)
# self.assertEqual(len(s2.parts[1].getElementsByClass(
# 'Measure')[0].voices), 2)
# self.assertEqual(s2.parts[2].getElementsByClass(
# 'Measure')[0].hasVoices(), False)
def testVoicesToPartsA(self):
from music21 import corpus
s0 = corpus.parse('bwv66.6')
#s.show()
s1 = s0.partsToVoices(2) # produce two parts each with two voices
s2 = s1.parts[0].voicesToParts()
# now a two part score
self.assertEqual(len(s2.parts), 2)
# make sure we have what we started with
self.assertEqual(len(s2.parts[0].flat.notesAndRests), len(s0.parts[0].flat.notesAndRests))
s1 = s0.partsToVoices(4) # create one staff with all parts
self.assertEqual(s1.classes[0], 'Score') # we get a Score back
# we have a Score with one part and measures, each with 4 voices
self.assertEqual(len(s1.parts[0].getElementsByClass(
'Measure')[0].voices), 4)
# need to access part
s2 = s1.voicesToParts() # return to four parts in a score;
# make sure we have what we started with
self.assertEqual(len(s2.parts[0].flat.notesAndRests),
len(s0.parts[0].flat.notesAndRests))
self.assertEqual(str([n for n in s2.parts[0].flat.notesAndRests]),
str([n for n in s0.parts[0].flat.notesAndRests]))
self.assertEqual(len(s2.parts[1].flat.notesAndRests),
len(s0.parts[1].flat.notesAndRests))
self.assertEqual(str([n for n in s2.parts[1].flat.notesAndRests]),
str([n for n in s0.parts[1].flat.notesAndRests]))
self.assertEqual(len(s2.parts[2].flat.notesAndRests),
len(s0.parts[2].flat.notesAndRests))
self.assertEqual(str([n for n in s2.parts[2].flat.notesAndRests]),
str([n for n in s0.parts[2].flat.notesAndRests]))
self.assertEqual(len(s2.parts[3].flat.notesAndRests),
len(s0.parts[3].flat.notesAndRests))
self.assertEqual(str([n for n in s2.parts[3].flat.notesAndRests]),
str([n for n in s0.parts[3].flat.notesAndRests]))
# try on a built Stream that has no Measures
# build a stream
s0 = Stream()
v1 = Voice()
v1.repeatAppend(note.Note('c3'), 4)
v2 = Voice()
v2.repeatAppend(note.Note('g4'), 4)
v3 = Voice()
v3.repeatAppend(note.Note('b5'), 4)
s0.insert(0, v1)
s0.insert(0, v2)
s0.insert(0, v3)
#s2.show()
s1 = s0.voicesToParts()
self.assertEqual(len(s1.parts), 3)
#self.assertEqual(len(s1.parts[0].flat), len(v1.flat))
self.assertEqual([e for e in s1.parts[0].flat], [e for e in v1.flat])
self.assertEqual(len(s1.parts[1].flat), len(v2.flat))
self.assertEqual([e for e in s1.parts[1].flat], [e for e in v2.flat])
self.assertEqual(len(s1.parts[2].flat), len(v3.flat))
self.assertEqual([e for e in s1.parts[2].flat], [e for e in v3.flat])
#s1.show()
def testMergeElements(self):
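        '''Test that mergeElements() shares element instances and respects classFilterList.'''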
from music21 import stream
s1 = stream.Stream()
s2 = stream.Stream()
s3 = stream.Stream()
n1 = note.Note('f#')
n2 = note.Note('g')
s1.append(n1)
s1.append(n2)
s2.mergeElements(s1)
self.assertEqual(len(s2), 2)
self.assertEqual(id(s1[0]) == id(s2[0]), True)
self.assertEqual(id(s1[1]) == id(s2[1]), True)
s3.mergeElements(s1, classFilterList=['Rest'])
self.assertEqual(len(s3), 0)
s3.mergeElements(s1, classFilterList=['GeneralNote'])
self.assertEqual(len(s3), 2)
def testInternalize(self):
s = Stream()
n1 = note.Note()
s.repeatAppend(n1, 4)
self.assertEqual(len(s), 4)
s.internalize()
# now have one component
self.assertEqual(len(s), 1)
self.assertEqual(s[0].classes[0], 'Voice') # default is a Voice
self.assertEqual(len(s[0]), 4)
self.assertEqual(str([n for n in s.voices[0].notesAndRests]), '[<music21.note.Note C>, <music21.note.Note C>, <music21.note.Note C>, <music21.note.Note C>]')
def testDeepcopySpanners(self):
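        '''Test that Slur spanners and their spanned elements remain linked after deepcopy.'''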
from music21 import spanner, stream
n1 = note.Note()
n2 = note.Note('a4')
n3 = note.Note('g#4')
n3.quarterLength = .25
su1 = spanner.Slur(n1, n2)
s1 = stream.Stream()
s1.append(n1)
s1.repeatAppend(n3, 4)
s1.append(n2)
s1.insert(su1)
self.assertEqual(s1.notesAndRests[0] in s1.spanners[0].getSpannedElements(), True)
self.assertEqual(s1.notesAndRests[-1] in s1.spanners[0].getSpannedElements(), True)
s2 = copy.deepcopy(s1)
# old relations are still valid
self.assertEqual(len(s1.spanners), 1)
self.assertEqual(s1.notesAndRests[0] in s1.spanners[0].getSpannedElements(), True)
self.assertEqual(s1.notesAndRests[-1] in s1.spanners[0].getSpannedElements(), True)
# new relations exist in new stream.
self.assertEqual(len(s2.spanners), 1)
self.assertEqual(s2.notesAndRests[0] in s2.spanners[0].getSpannedElements(), True)
self.assertEqual(s2.notesAndRests[-1] in s2.spanners[0].getSpannedElements(), True)
self.assertEqual(s2.spanners[0].getSpannedElements(), [s2.notesAndRests[0], s2.notesAndRests[-1]])
GEX = m21ToXml.GeneralObjectExporter()
unused_mx = GEX.parse(s2).decode('utf-8')
#s2.show('t')
#s2.show()
def testAddSlurByMelisma(self):
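        '''Test adding Slur spanners over melismas, located via lyrics, in an imported work.'''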
from music21 import corpus, spanner
s = corpus.parse('luca/gloria')
ex = s.parts[0]
nStart = None
nEnd = None
exFlatNotes = ex.flat.notesAndRests
nLast = exFlatNotes[-1]
for i, n in enumerate(exFlatNotes):
if i < len(exFlatNotes) - 1:
nNext = exFlatNotes[i+1]
else:
continue
if n.lyrics:
nStart = n
# if next is a begin, then this is an end
elif nStart is not None and nNext.lyrics and n.tie is None:
nEnd = n
elif nNext is nLast:
nEnd = n
if nStart is not None and nEnd is not None:
# insert in top-most container
ex.insert(spanner.Slur(nStart, nEnd))
nStart = None
nEnd = None
#ex.show()
exFlat = ex.flat
melismaByBeat = {}
for sp in ex.spanners:
n = sp.getFirst()
oMin, oMax = sp.getDurationSpanBySite(exFlat)
dur = oMax - oMin
beatStr = n.beatStr
if beatStr not in melismaByBeat:
melismaByBeat[beatStr] = []
melismaByBeat[beatStr].append(dur)
#environLocal.printDebug(['start note:', n, 'beat:', beatStr, 'slured duration:', dur])
for beatStr in sorted(list(melismaByBeat.keys())):
unused_avg = sum(melismaByBeat[beatStr]) / len(melismaByBeat[beatStr])
#environLocal.printDebug(['melisma beat:', beatStr.ljust(6), 'average duration:', avg])
def testTwoZeroOffset(self):
from music21 import stream
p = stream.Part()
#p.append(instrument.Voice())
p.append(note.Note("D#4"))
#environLocal.printDebug([p.offsetMap])
def testStripTiesBuiltB(self):
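        '''Test stripTies(retainContainers=True) on notes tied across a barline by makeNotation().'''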
from music21 import stream
s1 = stream.Stream()
s1.append(meter.TimeSignature('4/4'))
s1.append(note.Note(type='quarter'))
s1.append(note.Note(type='half'))
s1.append(note.Note(type='half'))
s1.append(note.Note(type='half'))
s1.append(note.Note(type='quarter'))
s2 = s1.makeNotation()
self.assertEqual(len(s2.flat.notesAndRests), 6)
self.assertEqual(str([n.tie for n in s2.flat.notesAndRests]), '[None, None, <music21.tie.Tie start>, <music21.tie.Tie stop>, None, None]')
self.assertEqual([n.quarterLength for n in s2.flat.notesAndRests], [1.0, 2.0, 1.0, 1.0, 2.0, 1.0])
s3 = s2.stripTies(retainContainers=True)
self.assertEqual(str([n.tie for n in s3.flat.notesAndRests]), '[None, None, None, None, None]')
self.assertEqual([n.quarterLength for n in s3.flat.notesAndRests], [1.0, 2.0, 2.0, 2.0, 1.0])
self.assertEqual([n.offset for n in s3.getElementsByClass('Measure')[0].notesAndRests], [0.0, 1.0, 3.0])
self.assertEqual([n.quarterLength for n in s3.getElementsByClass('Measure')[0].notesAndRests], [1.0, 2.0, 2.0])
self.assertEqual([n.beatStr for n in s3.getElementsByClass('Measure')[0].notesAndRests], ['1', '2', '4'])
self.assertEqual([n.offset for n in s3.getElementsByClass('Measure')[1].notesAndRests], [1.0, 3.0])
self.assertEqual([n.quarterLength for n in s3.getElementsByClass('Measure')[1].notesAndRests], [2.0, 1.0])
self.assertEqual([n.beatStr for n in s3.getElementsByClass('Measure')[1].notesAndRests], ['2', '4'])
#s3.show()
def testStripTiesImportedB(self):
from music21 import corpus
        # this file was imported by Sibelius and does not have complete ties
sMonte = corpus.parse('monteverdi/madrigal.4.2.xml')
s1 = sMonte.parts['Alto']
mStream = s1.getElementsByClass('Measure')
self.assertEqual([n.offset for n in mStream[3].notesAndRests], [0.0])
self.assertEqual(str([n.tie for n in mStream[3].notesAndRests]), '[<music21.tie.Tie start>]')
self.assertEqual([n.offset for n in mStream[4].notesAndRests], [0.0, 2.0])
self.assertEqual(str([n.tie for n in mStream[4].notesAndRests]), '[None, None]')
# post strip ties; must use matchByPitch
s2 = s1.stripTies(retainContainers=True, matchByPitch=True)
mStream = s2.getElementsByClass('Measure')
self.assertEqual([n.offset for n in mStream[3].notesAndRests], [0.0])
self.assertEqual(str([n.tie for n in mStream[3].notesAndRests]), '[None]')
self.assertEqual([n.offset for n in mStream[4].notesAndRests], [2.0])
self.assertEqual(str([n.tie for n in mStream[4].notesAndRests]), '[None]')
self.assertEqual([n.offset for n in mStream[5].notesAndRests], [0.0, 0.5, 1.0, 1.5, 2.0, 3.0])
def testDerivationA(self):
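        '''Test .derivation.origin and .derivation.rootDerivation on built and imported streams.'''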
from music21 import stream, corpus
s1 = stream.Stream()
s1.repeatAppend(note.Note(), 10)
s1.repeatAppend(chord.Chord(), 10)
# for testing against
s2 = stream.Stream()
s3 = s1.getElementsByClass('GeneralNote')
self.assertEqual(len(s3), 20)
#environLocal.printDebug(['s3.derivation.origin', s3.derivation.origin])
self.assertEqual(s3.derivation.origin is s1, True)
self.assertEqual(s3.derivation.origin is not s2, True)
s4 = s3.getElementsByClass('Chord')
self.assertEqual(len(s4), 10)
self.assertEqual(s4.derivation.origin is s3, True)
# test imported and flat
s = corpus.parse('bach/bwv66.6')
p1 = s.parts[0]
# the part is not derived from anything yet
self.assertEqual(p1.derivation.origin, None)
p1Flat = p1.flat
self.assertEqual(p1.flat.derivation.origin is p1, True)
self.assertEqual(p1.flat.derivation.origin is s, False)
p1FlatNotes = p1Flat.notesAndRests
self.assertEqual(p1FlatNotes.derivation.origin is p1Flat, True)
self.assertEqual(p1FlatNotes.derivation.origin is p1, False)
self.assertEqual(list(p1FlatNotes.derivation.chain()), [p1Flat, p1])
# we cannot do this, as each call to flat produces a new Stream
self.assertEqual(p1.flat.notesAndRests.derivation.origin is p1.flat, False)
        # chained calls to .derivation.origin can be used
self.assertEqual(p1.flat.notesAndRests.derivation.origin.derivation.origin is p1, True)
# can use rootDerivation to get there faster
self.assertEqual(p1.flat.notesAndRests.derivation.rootDerivation is p1, True)
        # this does not work because we are taking an item via an index
# value, and this Measure is not derived from a Part
self.assertEqual(p1.getElementsByClass(
'Measure')[3].flat.notesAndRests.derivation.rootDerivation is p1, False)
# the root here is the Measure
self.assertEqual(p1.getElementsByClass(
'Measure')[3].flat.notesAndRests.derivation.rootDerivation is p1.getElementsByClass(
'Measure')[3], True)
m4 = p1.measure(4)
self.assertTrue(m4.flat.notesAndRests.derivation.rootDerivation is m4, list(m4.flat.notesAndRests.derivation.chain()))
# part is the root derivation of a measures() call
mRange = p1.measures(4, 6)
self.assertEqual(mRange.derivation.rootDerivation, p1)
self.assertEqual(mRange.flat.notesAndRests.derivation.rootDerivation, p1)
self.assertEqual(s.flat.getElementsByClass(
'Rest').derivation.rootDerivation is s, True)
# we cannot use the activeSite to get the Part from the Measure, as
# the activeSite was set when doing the getElementsByClass operation
self.assertEqual(p1.getElementsByClass(
'Measure')[3].activeSite is p1, False)
def testDerivationB(self):
from music21 import stream
s1 = stream.Stream()
s1.repeatAppend(note.Note(), 10)
s1Flat = s1.flat
self.assertEqual(s1Flat.derivation.origin is s1, True)
# check what the derivation object thinks its container is
self.assertEqual(s1Flat._derivation.client is s1Flat, True)
s2 = copy.deepcopy(s1Flat)
self.assertEqual(s2.derivation.origin is s1Flat, True)
self.assertEqual(s2.derivation.origin.derivation.origin is s1, True)
        # check low-level attributes
self.assertEqual(s2._derivation.client is s2, True)
def testDerivationC(self):
from music21 import corpus
s = corpus.parse('bach/bwv66.6')
p1 = s.parts['Soprano']
pMeasures = p1.measures(3, 10)
pMeasuresFlat = pMeasures.flat
pMeasuresFlatNotes = pMeasuresFlat.notesAndRests
self.assertEqual(list(pMeasuresFlatNotes.derivation.chain()), [pMeasuresFlat, pMeasures, p1])
def testDerivationMethodA(self):
from music21 import stream, converter
s1 = stream.Stream()
s1.repeatAppend(note.Note(), 10)
s1Flat = s1.flat
self.assertEqual(s1Flat.derivation.origin is s1, True)
        self.assertEqual(s1Flat.derivation.method, 'flat')
s1Elements = s1Flat.getElementsByClass('Note')
        self.assertEqual(s1Elements.derivation.method, 'getElementsByClass')
s1 = converter.parse("tinyNotation: 4/4 C2 D2")
s1m = s1.makeMeasures()
self.assertEqual(s1m.derivation.method, 'makeMeasures')
s1m1 = s1m.measure(1)
self.assertEqual(s1m1.derivation.origin, None)
    def testContainerHierarchyA(self):
from music21 import corpus
s = corpus.parse('bach/bwv66.6')
        # the containerHierarchy of an embedded element walks up Measure -> Part -> Score
self.assertEqual([str(e.__class__) for e in s[1][2][3].containerHierarchy], ["<class 'music21.stream.Measure'>", "<class 'music21.stream.Part'>", "<class 'music21.stream.Score'>"])
        # after extraction and changing activeSite, the original hierarchy cannot be found
n = s.flat.notesAndRests[0]
self.assertEqual([common.classToClassStr(e.__class__) for e in n.containerHierarchy], ['Score', 'Score'] )
# still cannot get hierarchy
#self.assertEqual([str(e.__class__) for e in s.parts[0].containerHierarchy], [])
def testMakeMeasuresTimeSignatures(self):
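        '''Test that makeMeasures() supplies a default 4/4 and honors an inserted TimeSignature.'''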
from music21 import stream
sSrc = stream.Stream()
sSrc.append(note.Note('C4', type='quarter'))
sSrc.append(note.Note('D4', type='quarter'))
sSrc.append(note.Note('E4', type='quarter'))
sMeasures = sSrc.makeMeasures()
# added 4/4 here as default
self.assertEqual(str(sMeasures[0].timeSignature), '<music21.meter.TimeSignature 4/4>')
        # no time signatures are in the source
self.assertEqual(len(sSrc.flat.getElementsByClass('TimeSignature')), 0)
# we add one time signature
sSrc.insert(0.0, meter.TimeSignature('2/4'))
self.assertEqual(len(sSrc.flat.getElementsByClass('TimeSignature')), 1)
sMeasuresTwoFour = sSrc.makeMeasures()
self.assertEqual(str(sMeasuresTwoFour[0].timeSignature), '<music21.meter.TimeSignature 2/4>')
self.assertEqual(sMeasuresTwoFour.isSorted, True)
        # check how many time signatures we have:
# we should have 1
self.assertEqual(len(
sMeasuresTwoFour.flat.getElementsByClass('TimeSignature')), 1)
def testDeepcopyActiveSite(self):
# test that active sites make sense after deepcopying
from music21 import stream, corpus
s = stream.Stream()
n = note.Note()
s.append(n)
self.assertEqual(id(n.activeSite), id(s))
# test that elements in stream get their active site properly copied
s1 = copy.deepcopy(s)
n1 = s1[0]
self.assertEqual(id(n1.activeSite), id(s1))
s = stream.Stream()
m = stream.Measure()
n = note.Note()
m.append(n)
s.append(m)
self.assertEqual(id(n.activeSite), id(m))
self.assertEqual(id(m.activeSite), id(s))
s1 = copy.deepcopy(s)
m1 = s1[0]
n1 = m1[0]
self.assertEqual(id(n1.activeSite), id(m1))
self.assertEqual(id(m1.activeSite), id(s1))
# try imported
s = corpus.parse('madrigal.5.8.rntxt')
p = s[1] # for test, not .parts
m = p[2] # for test, not .getElementsByClass('Measure')
rn = m[2]
self.assertEqual(id(rn.activeSite), id(m))
self.assertEqual(id(m.activeSite), id(p))
self.assertEqual(id(p.activeSite), id(s))
s1 = copy.deepcopy(s)
p1 = s1[1]
m1 = p1[2]
rn1 = m1[2]
self.assertEqual(id(rn1.activeSite), id(m1))
self.assertEqual(id(m1.activeSite), id(p1))
self.assertEqual(id(p1.activeSite), id(s1))
def testRecurseA(self):
from music21 import corpus
s = corpus.parse('bwv66.6')
# default
rElements = list(s.recurse()) # NOTE: list(s.recurse())
# removes self, while [x for x in s.recurse()] does not.
self.assertTrue(s in rElements)
self.assertEqual(len(rElements), 240)
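        # NOTE: the early return below skips the remaining assertions in this test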
return
rElements = list(s.recurse(streamsOnly=True))
self.assertEqual(len(rElements), 45)
p1 = rElements[1]
m1 = rElements[2]
#m2 = rElements[3]
m2 = rElements[4]
self.assertIs(p1.activeSite, s)
self.assertIs(m1.activeSite, p1)
self.assertIs(m2.activeSite, p1)
rElements = list(s.recurse(classFilter='KeySignature'))
self.assertEqual(len(rElements), 4)
# the first elements active site is the measure
self.assertEqual(id(rElements[0].activeSite), id(m1))
rElements = list(s.recurse(classFilter=['TimeSignature']))
self.assertEqual(len(rElements), 4)
# s = corpus.parse('bwv66.6')
# m1 = s[2][1] # cannot use parts here as breaks active site
# rElements = list(m1.recurse(direction='upward'))
# self.assertEqual([str(e.classes[0]) for e in rElements], ['Measure',
# 'Instrument',
# 'Part',
# 'Metadata',
# 'Part',
# 'Score',
# 'Part',
# 'Part',
# 'StaffGroup',
# 'Measure',
# 'Measure',
# 'Measure',
# 'Measure',
# 'Measure',
# 'Measure',
# 'Measure',
# 'Measure',
# 'Measure'])
# self.assertEqual(len(rElements), 18)
def testRecurseB(self):
from music21 import corpus
s = corpus.parse('madrigal.5.8.rntxt')
self.assertEqual(len(s.flat.getElementsByClass('KeySignature')), 1)
for e in s.recurse(classFilter='KeySignature'):
e.activeSite.remove(e)
self.assertEqual(len(s.flat.getElementsByClass('KeySignature')), 0)
def testTransposeScore(self):
from music21 import corpus
s = corpus.parse('bwv66.6')
p1 = s.parts[0]
pitch1 = p1.flat.notesAndRests[0]
pitch2 = pitch1.transpose('P4', inPlace=False)
self.assertEqual(str(pitch1), '<music21.note.Note C#>')
self.assertEqual(str(pitch2), '<music21.note.Note F#>')
# can now transpose a part alone as is recursive
p2 = p1.transpose('P4', inPlace=False)
self.assertEqual(str(p1.flat.notesAndRests[0]), '<music21.note.Note C#>')
self.assertEqual(str(p2.flat.notesAndRests[0]), '<music21.note.Note F#>')
p2 = p1.flat.transpose('P4', inPlace=False)
self.assertEqual(str(p1.flat.notesAndRests[0]), '<music21.note.Note C#>')
self.assertEqual(str(p2.flat.notesAndRests[0]), '<music21.note.Note F#>')
def testExtendDurationA(self):
# spanners in this were causing some problems
from music21.musicxml import testFiles
from music21 import converter
        # testing a file with dynamics
a = converter.parse(testFiles.schumannOp48No1) # @UndefinedVariable
unused_b = a.flat
#b = a.flat.extendDuration(dynamics.Dynamic)
def testSpannerTransferA(self):
from music21 import corpus
# test getting spanners after .measures extraction
s = corpus.parse('corelli/opus3no1/1grave')
post = s.parts[0].measures(5, 10)
# two per part
rbSpanners = post.getElementsByClass('Slur')
self.assertEqual(len(rbSpanners), 6)
#post.parts[0].show()
unused_firstSpannedElementIds = [id(x) for x in rbSpanners[0].getSpannedElements()]
unused_secondSpannedElementIds = [id(x) for x in rbSpanners[1].getSpannedElements()]
#self.assertEqual()
# TODO: compare ids of new measures
def testMeasureGrouping(self):
from music21 import corpus
def parseMeasures(piece):
            # the measures of the piece, extracted only once per part
voicesMeasures = []
for part in piece.parts:
# not all things in a Part are Measure objects; you might
# also find Instruments and Spanners, for example.
# thus, filter by Measure first to get the highest measure number
mMax = part.getElementsByClass('Measure')[-1].number
                # the measures() method returns more than just measures;
                # the Part it returns also includes Slurs, which may reside
                # at the Part level
voicesMeasures.append(part.measures(0, mMax))
            # the problem itself: check each measure to verify len(notes) != 0
for voice in voicesMeasures:
# only get the Measures, not everything in the Part
for meas in voice.getElementsByClass('Measure'):
# some Measures contain Voices, some do not
                    # to get all notes regardless of Voices, take a flat measure
self.assertEqual(len(meas.flat.notesAndRests) != 0, True)
piece = corpus.parse('corelli/opus3no1/1grave')
parseMeasures(piece)
piece = corpus.parse('bach/bwv7.7')
parseMeasures(piece)
def testMakeNotationByMeasuresA(self):
from music21 import stream
m = stream.Measure()
m.repeatAppend(note.Note('c#', quarterLength=.5), 4)
m.repeatAppend(note.Note('c', quarterLength=1/3.), 6)
# calls makeAccidentals, makeBeams, makeTuplets
m.makeNotation(inPlace=True)
# after running, there should only be two displayed accidentals
self.assertEqual([str(n.pitch.accidental) for n in m.notes],
['<accidental sharp>', '<accidental sharp>', '<accidental sharp>', '<accidental sharp>', '<accidental natural>', 'None', 'None', 'None', 'None', 'None'])
self.assertEqual([n.pitch.accidental.displayStatus for n in m.notes[:5]], [True, False, False, False, True])
GEX = m21ToXml.GeneralObjectExporter()
raw = GEX.parse(m).decode('utf-8')
self.assertTrue(raw.find('<tuplet bracket="yes" placement="above"') > 0, raw)
self.assertTrue(raw.find('<beam number="1">begin</beam>') > 0, raw)
def testMakeNotationByMeasuresB(self):
from music21 import stream
m = stream.Measure()
m.repeatAppend(note.Note('c#', quarterLength=.5), 4)
m.repeatAppend(note.Note('c', quarterLength=1/3.), 6)
GEX = m21ToXml.GeneralObjectExporter()
raw = GEX.parse(m).decode('utf-8')
self.assertEqual(raw.find('<beam number="1">begin</beam>') > 0, True)
self.assertEqual(raw.find('<tuplet bracket="yes" placement="above"') > 0, True)
def testHaveAccidentalsBeenMadeA(self):
from music21 import stream
m = stream.Measure()
m.append(note.Note('c#'))
m.append(note.Note('c'))
m.append(note.Note('c#'))
m.append(note.Note('c'))
#m.show() on musicxml output, accidentals will be made
self.assertEqual(m.haveAccidentalsBeenMade(), False)
m.makeAccidentals()
self.assertEqual(m.haveAccidentalsBeenMade(), True)
def testHaveAccidentalsBeenMadeB(self):
from music21 import stream
m1 = stream.Measure()
m1.repeatAppend(note.Note('c#'), 4)
m2 = stream.Measure()
m2.repeatAppend(note.Note('c'), 4)
p = stream.Part()
p.append([m1, m2])
#p.show()
        # test result of xml output to make sure a natural has been added
GEX = m21ToXml.GeneralObjectExporter()
raw = GEX.parse(p).decode('utf-8')
self.assertEqual(raw.find('<accidental>natural</accidental>') > 0, True)
        # make sure the original is not changed
self.assertEqual(p.haveAccidentalsBeenMade(), False)
def testHaveBeamsBeenMadeA(self):
from music21 import stream
m1 = stream.Measure()
m1.timeSignature = meter.TimeSignature('4/4')
m1.repeatAppend(note.Note('c#', quarterLength=.5), 8)
m2 = stream.Measure()
m2.repeatAppend(note.Note('c', quarterLength=.5), 8)
p = stream.Part()
p.append([m1, m2])
self.assertEqual(p.streamStatus.haveBeamsBeenMade(), False)
p.makeBeams(inPlace=True)
self.assertEqual(p.streamStatus.haveBeamsBeenMade(), True)
def testHaveBeamsBeenMadeB(self):
from music21 import stream
m1 = stream.Measure()
m1.timeSignature = meter.TimeSignature('4/4')
m1.repeatAppend(note.Note('c#', quarterLength=.5), 8)
m2 = stream.Measure()
m2.repeatAppend(note.Note('c', quarterLength=.5), 8)
p = stream.Part()
p.append([m1, m2])
self.assertEqual(p.streamStatus.haveBeamsBeenMade(), False)
GEX = m21ToXml.GeneralObjectExporter()
raw = GEX.parse(p).decode('utf-8')
# after getting musicxml, make sure that we have not changed the source
#p.show()
self.assertEqual(p.streamStatus.haveBeamsBeenMade(), False)
self.assertEqual(raw.find('<beam number="1">end</beam>') > 0, True)
def testFlatCachingA(self):
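        '''Test that .flat is cached and that the cache is invalidated after modification.'''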
from music21 import corpus
s = corpus.parse('bwv66.6')
flat1 = s.flat
flat2 = s.flat
self.assertEqual(id(flat1), id(flat2))
flat1.insert(0, note.Note('g'))
        self.assertNotEqual(id(flat1), id(s.flat))
def testFlatCachingB(self):
from music21 import corpus
sSrc = corpus.parse('bach/bwv13.6.xml')
sPart = sSrc.getElementById('Alto')
ts = meter.TimeSignature('6/8')
# for n in sPart.flat.notesAndRests:
# bs = n.beatStr
#environLocal.printDebug(['calling makeMeasures'])
sPartFlat = sPart.flat
unused_notesAndRests = sPartFlat.notesAndRests
# test cache...
sMeasures = sPart.flat.notesAndRests.makeMeasures(ts)
target = []
for n in sMeasures.flat.notesAndRests:
target.append(n.beatStr)
self.assertEqual(target, ['1', '1 2/3', '2 1/3', '1', '1 2/3', '2 1/3', '1', '1 2/3', '2 1/3', '2 2/3', '1', '1 1/3', '1 2/3', '2', '2 1/3', '1', '1 2/3', '1', '1 2/3', '2 1/3', '1', '1 2/3', '2 1/3', '1', '1', '1 2/3', '2 1/3', '1', '1 2/3', '2 1/3', '1 2/3', '2 1/3', '1', '1 1/3', '1 2/3', '2', '2 1/3', '2 2/3', '1', '1 1/3', '1 2/3', '2 1/3', '1', '1 2/3', '2 1/3', '1', '1 1/3', '1 2/3', '2 1/3', '2 2/3', '1', '1 2/3', '2', '2 1/3'])
def testFlatCachingC(self):
from music21 import corpus, stream
qj = corpus.parse('ciconia/quod_jactatur').parts[0]
unused_idFlat1 = id(qj.flat)
#environLocal.printDebug(['idFlat1', idFlat1])
k1 = qj.flat.getElementsByClass(key.KeySignature)[0]
qj.flat.replace(k1, key.KeySignature(-3))
unused_idFlat2 = id(qj.flat)
#environLocal.printDebug(['idFlat2', idFlat2])
unused_m1 = qj.getElementsByClass(stream.Measure)[1]
#m1.show('t')
#m1.insert(0, key.KeySignature(5))
qj[1].insert(0, key.KeySignature(5))
#qj.elementsChanged()
unused_keySigSearch = qj.flat.getElementsByClass(key.KeySignature)
for n in qj.flat.notes:
junk = n.getContextByClass(key.KeySignature)
#print junk
unused_qj2 = qj.invertDiatonic(note.Note('F4'), inPlace = False)
#qj2.measures(1,2).show('text')
def testSemiFlatCachingA(self):
from music21 import corpus
s = corpus.parse('bwv66.6')
ssf1 = s.semiFlat
ssf2 = s.semiFlat
self.assertEqual(id(ssf1), id(ssf2))
ts = s.parts[0].getElementsByClass(
'Measure')[3].getContextByClass('TimeSignature')
self.assertEqual(str(ts), '<music21.meter.TimeSignature 4/4>')
#environLocal.printDebug(['ts', ts])
beatStr = s.parts[0].getElementsByClass(
'Measure')[3].notes[3].beatStr
self.assertEqual(beatStr, '3')
#environLocal.printDebug(['beatStr', beatStr])
# def testDeepCopyLocations(self):
# from music21 import stream, note
# s1 = stream.Stream()
# n1 = note.Note()
# s1.append(n1)
# print [id(x) for x in n1.getSites()]
# s2 = copy.deepcopy(s1)
# #print s2[0].getSites()
# print [id(x) for x in s2[0].getSites()]
def testFlattenUnnecessaryVoicesA(self):
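        '''Test flattenUnnecessaryVoices() with empty voices, one populated voice, and two populated voices.'''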
from music21 import stream
s = stream.Stream()
v1 = stream.Voice()
v2 = stream.Voice()
s.insert(0, v1)
s.insert(0, v2)
self.assertEqual(len(s.voices), 2)
s.flattenUnnecessaryVoices(inPlace=True)
        # as both are empty, both are removed
self.assertEqual(len(s.voices), 0)
# next case: one voice empty, other with notes
s = stream.Stream()
v1 = stream.Voice()
v2 = stream.Voice()
n1 = note.Note()
n2 = note.Note()
v1.insert(10, n1)
v1.insert(20, n2)
s.insert(50, v1) # need to test inclusion of this offset
s.insert(50, v2)
self.assertEqual(len(s.voices), 2)
s.flattenUnnecessaryVoices(inPlace=True)
        # the empty voice is removed and the remaining voice is flattened
self.assertEqual(len(s.voices), 0)
self.assertEqual(len(s.notes), 2)
self.assertEqual(n1.getOffsetBySite(s), 60)
self.assertEqual(n2.getOffsetBySite(s), 70)
# last case: two voices with notes
s = stream.Stream()
v1 = stream.Voice()
v2 = stream.Voice()
n1 = note.Note()
n2 = note.Note()
n3 = note.Note()
v1.insert(10, n1)
v1.insert(20, n2)
v2.insert(20, n3)
s.insert(50, v1) # need to test inclusion of this offset
s.insert(50, v2)
self.assertEqual(len(s.voices), 2)
s.flattenUnnecessaryVoices(inPlace=True)
# none are removed by default
self.assertEqual(len(s.voices), 2)
# can force
s.flattenUnnecessaryVoices(force=True, inPlace=True)
self.assertEqual(len(s.voices), 0)
self.assertEqual(len(s.notes), 3)
def testGetElementBeforeOffsetA(self):
from music21 import stream
s = stream.Stream()
n1 = note.Note()
n2 = note.Note()
n3 = note.Note()
s.insert(0, n1)
s.insert(3, n2)
s.insert(5, n3)
self.assertEqual(s.getElementBeforeOffset(5), n2)
self.assertEqual(s.getElementBeforeOffset(5.1), n3)
self.assertEqual(s.getElementBeforeOffset(3), n1)
self.assertEqual(s.getElementBeforeOffset(3.2), n2)
self.assertEqual(s.getElementBeforeOffset(0), None)
self.assertEqual(s.getElementBeforeOffset(0.3), n1)
self.assertEqual(s.getElementBeforeOffset(5, ['Note']), n2)
self.assertEqual(s.getElementBeforeOffset(0.3, ['GeneralNote']), n1)
def testGetElementBeforeOffsetB(self):
from music21 import stream
s = stream.Stream()
# fill with clefs to test class matching
n1 = note.Note()
n2 = note.Note()
n3 = note.Note()
s.insert(0, n1)
s.insert(0, clef.SopranoClef())
s.insert(2, clef.BassClef())
s.insert(3, n2)
s.insert(3, clef.TrebleClef())
s.insert(3.1, clef.TenorClef())
s.insert(5, n3)
self.assertEqual(s.getElementBeforeOffset(5, ['Note']), n2)
self.assertEqual(s.getElementBeforeOffset(5.1, ['Note']), n3)
self.assertEqual(s.getElementBeforeOffset(3, ['Note']), n1)
self.assertEqual(s.getElementBeforeOffset(3.2, ['Note']), n2)
self.assertEqual(s.getElementBeforeOffset(0, ['Note']), None)
self.assertEqual(s.getElementBeforeOffset(0.3, ['Note']), n1)
def testFinalBarlinePropertyA(self):
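        '''Test setting the finalBarline property on a built stream of Measures.'''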
from music21 import stream
s = stream.Stream()
m1 = stream.Measure()
m1.repeatAppend(note.Note(quarterLength=2.0), 2)
m2 = stream.Measure()
m2.repeatAppend(note.Note(quarterLength=2.0), 2)
s.append([m1, m2])
s.finalBarline = 'dotted'
self.assertEqual(str(s.getElementsByClass('Measure')[-1].rightBarline), '<music21.bar.Barline style=dotted>')
self.assertEqual(str(s.finalBarline), '<music21.bar.Barline style=dotted>')
s.finalBarline = 'final'
self.assertEqual(str(s.getElementsByClass('Measure')[-1].rightBarline), '<music21.bar.Barline style=final>')
self.assertEqual(str(s.finalBarline), '<music21.bar.Barline style=final>')
#s.show()
def testFinalBarlinePropertyB(self):
from music21 import corpus
s = corpus.parse('bwv66.6')
sop = s.parts[0]
self.assertEqual(str(sop.finalBarline), '<music21.bar.Barline style=final>')
sop.finalBarline = 'double'
self.assertEqual(str(sop.finalBarline), '<music21.bar.Barline style=double>')
# process entire Score
s.finalBarline = 'tick'
self.assertEqual(str(s.finalBarline), '[<music21.bar.Barline style=tick>, <music21.bar.Barline style=tick>, <music21.bar.Barline style=tick>, <music21.bar.Barline style=tick>]')
        # can set heterogeneous final barlines
s.finalBarline = ['final', 'none']
self.assertEqual(str(s.finalBarline), '[<music21.bar.Barline style=final>, <music21.bar.Barline style=none>, <music21.bar.Barline style=final>, <music21.bar.Barline style=none>]')
# def testGraceNoteSortingA(self):
# from music21 import stream
#
# n1 = note.Note('C', type='16th')
# n2 = note.Note('D', type='16th')
# n3 = note.Note('E', type='16th')
# n4 = note.Note('F', type='16th')
# n5 = note.Note('G', type='16th')
#
# s = stream.Stream()
#
# n1.makeGrace()
# s.append(n1)
# n2.makeGrace()
# s.append(n2)
#
# s.append(n3)
#
# n4.makeGrace()
# s.append(n4)
# s.append(n5)
#
# self.assertEqual(s._getGracesAtOffset(0), [n1, n2])
# self.assertEqual(s._getGracesAtOffset(.25), [n4])
#
# match = [(n.name, n.offset, n.quarterLength, n.priority) for n in s]
# self.assertEqual(match,
# [('C', 0.0, 0.0, -100),
# ('D', 0.0, 0.0, -99),
# ('E', 0.0, 0.25, 0),
# ('F', 0.25, 0.0, -100),
# ('G', 0.25, 0.25, 0)])
# def testGraceNoteSortingB(self):
# from music21 import stream
#
# n1 = note.Note('C', type='16th')
# n2 = note.Note('D', type='16th')
# n3 = note.Note('E', type='16th')
# n4 = note.Note('F', type='16th')
# n5 = note.Note('G', type='16th')
# s = stream.Stream()
#
# n1.makeGrace()
# s.append(n1)
# n2.makeGrace()
# s.append(n2)
# n3.makeGrace()
# s.append(n3)
#
# s.append(n4)
# n5.makeGrace() # grace at end
# s.append(n5)
#
# #s.show('t')
#
# self.assertEqual(s._getGracesAtOffset(0), [n1, n2, n3])
# self.assertEqual(s._getGracesAtOffset(.25), [n5])
#
# match = [(n.name, n.offset, n.quarterLength, n.priority) for n in s]
# self.assertEqual(match,
# [('C', 0.0, 0.0, -100),
# ('D', 0.0, 0.0, -99),
# ('E', 0.0, 0.0, -98),
# ('F', 0.0, 0.25, 0),
# ('G', 0.25, 0.0, -100)])
# add a clef; test sorting
# problem: this sorts priority before class
# c1 = clef.AltoClef()
# s.insert(0, c1)
# s.show('t')
# self.assertEqual(c1, s[0]) # should be first
def testStreamElementsComparison(self):
from music21 import stream
s1 = stream.Stream()
s1.repeatAppend(note.Note(), 7)
n1 = note.Note()
s1.append(n1)
s2 = stream.Stream()
s2.elements = s1
match = []
for e in s2.elements:
match.append(e.getOffsetBySite(s2))
self.assertEqual(match, [0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0])
# have the same object in each stream
self.assertEqual(id(s2[-1]), id(s1[-1]))
s3 = stream.Stream()
s4 = stream.Stream()
s4.insert(25, n1) # active site is now changed
s3.elements = s1.elements
match = []
for e in s3.elements:
match.append(e.getOffsetBySite(s3))
# this is not desirable but results from setting of last active site
# before elements assignment
self.assertEqual(match, [0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 25.0])
#s3.elements = s1
s3 = s1[:]
match = []
for e in s3.elements:
match.append(e.getOffsetBySite(s3))
self.assertEqual(match, [0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0])
# this resets active site, so we get the right offsets on element
# assignment
s3.elements = s1
match = []
for e in s3.elements:
match.append(e.getOffsetBySite(s3))
self.assertEqual(match, [0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0])
s5 = stream.Stream()
s5.elements = s1
match = []
for e in s5.elements:
match.append(e.getOffsetBySite(s5))
self.assertEqual(match, [0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0])
def testSecondsPropertyA(self):
from music21 import stream, tempo
# simple case of one tempo
s = stream.Stream()
s.insert(0, tempo.MetronomeMark(number=60))
s.repeatAppend(note.Note(), 60)
self.assertEqual(s.seconds, 60.0)
s = stream.Stream()
s.insert(0, tempo.MetronomeMark(number=90))
s.repeatAppend(note.Note(), 60)
self.assertEqual(s.seconds, 40.0)
s = stream.Stream()
s.insert(0, tempo.MetronomeMark(number=120))
s.repeatAppend(note.Note(), 60)
self.assertEqual(s.seconds, 30.0)
# changing tempo mid-stream
s = stream.Stream()
s.insert(0, tempo.MetronomeMark(number=60))
s.repeatAppend(note.Note(), 60)
s.insert(30, tempo.MetronomeMark(number=120))
# 30 notes at 60, 30 notes at 120
self.assertEqual(s.seconds, 30.0 + 15.0)
s = stream.Stream()
s.insert(0, tempo.MetronomeMark(number=60))
s.repeatAppend(note.Note(), 60)
s.insert(15, tempo.MetronomeMark(number=120))
s.insert(30, tempo.MetronomeMark(number=240))
s.insert(45, tempo.MetronomeMark(number=480))
# 15 notes at 60, 15 notes at 120, 15 at 240, 15 at 480
self.assertEqual(s.seconds, 15.0 + 7.5 + 3.75 + 1.875)
def testSecondsPropertyB(self):
from music21 import corpus, tempo
s = corpus.parse('bwv66.6')
sFlat = s.flat
        # we have no tempo indications yet
self.assertEqual(len(sFlat.getElementsByClass('TempoIndication')), 0)
sFlat.insert(0, tempo.MetronomeMark('adagio'))
        self.assertAlmostEqual(sFlat.seconds, 38.57142857)
        sFlat.removeByClass('TempoIndication')
        sFlat.insert(0, tempo.MetronomeMark('presto'))
        self.assertAlmostEqual(sFlat.seconds, 11.73913043)
        sFlat.removeByClass('TempoIndication')
        sFlat.insert(0, tempo.MetronomeMark('prestissimo'))
        self.assertAlmostEqual(sFlat.seconds, 10.38461538)
def testSecondsPropertyC(self):
from music21 import stream, tempo
s = stream.Stream()
m1 = stream.Measure()
m1.timeSignature = meter.TimeSignature('3/4')
mm = tempo.MetronomeMark(number=60)
m1.insert(0, mm)
m1.insert(note.Note(quarterLength=3))
s.append(m1)
m2 = stream.Measure()
m2.timeSignature = meter.TimeSignature('5/4')
m2.insert(note.Note(quarterLength=5))
s.append(m2)
m3 = stream.Measure()
m3.timeSignature = meter.TimeSignature('2/4')
m3.insert(note.Note(quarterLength=2))
s.append(m3)
self.assertEqual([m.seconds for m in s.getElementsByClass('Measure')], [3.0, 5.0, 2.0])
mm.number = 120
self.assertEqual([m.seconds for m in s.getElementsByClass('Measure')], [1.5, 2.5, 1.0])
mm.number = 30
self.assertEqual([m.seconds for m in s.getElementsByClass('Measure')], [6.0, 10.0, 4.0])
# TODO: New piece with Metronome Mark Boundaries
# def testMetronomeMarkBoundaries(self):
# from music21 import corpus
# s = corpus.parse('hwv56/movement2-09.md')
# mmBoundaries = s.metronomeMarkBoundaries()
# self.assertEqual(str(mmBoundaries), '[(0.0, 20.0, <music21.tempo.MetronomeMark Largo e piano Quarter=46>)]')
def testAccumulatedTimeA(self):
from music21 import stream, tempo
s = stream.Stream()
s.repeatAppend(note.Note(), 8)
s.insert([0, tempo.MetronomeMark(number=60)])
mmBoundaries = s.metronomeMarkBoundaries()
self.assertEqual(s._accumulatedSeconds(mmBoundaries, 0, 1), 1.0)
self.assertEqual(s._accumulatedSeconds(mmBoundaries, 0, 2), 2.0)
self.assertEqual(s._accumulatedSeconds(mmBoundaries, 0, 8), 8.0)
# changing in the middle of boundary
s = stream.Stream()
s.repeatAppend(note.Note(), 8)
s.insert([0, tempo.MetronomeMark(number=60),
4, tempo.MetronomeMark(number=120)])
mmBoundaries = s.metronomeMarkBoundaries()
self.assertEqual(s._accumulatedSeconds(mmBoundaries, 0, 4), 4.0)
self.assertEqual(s._accumulatedSeconds(mmBoundaries, 4, 8), 2.0)
self.assertEqual(s._accumulatedSeconds(mmBoundaries, 0, 8), 6.0)
def testAccumulatedTimeB(self):
from music21 import stream, tempo
# changing in the middle of boundary
s = stream.Stream()
s.repeatAppend(note.Note(), 8)
s.insert([0, tempo.MetronomeMark(number=60),
4, tempo.MetronomeMark(number=120),
6, tempo.MetronomeMark(number=240)])
mmBoundaries = s.metronomeMarkBoundaries()
self.assertEqual(s._accumulatedSeconds(mmBoundaries, 0, 4), 4.0)
self.assertEqual(s._accumulatedSeconds(mmBoundaries, 4, 6), 1.0)
self.assertEqual(s._accumulatedSeconds(mmBoundaries, 6, 8), 0.5)
self.assertEqual(s._accumulatedSeconds(mmBoundaries, 0, 8), 5.5)
def testSecondsMapA(self):
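        '''Test metronomeMarkBoundaries() for various placements of MetronomeMarks.'''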
from music21 import stream, tempo
s = stream.Stream()
s.repeatAppend(note.Note(), 8)
s.insert([0, tempo.MetronomeMark(number=90),
4, tempo.MetronomeMark(number=120),
6, tempo.MetronomeMark(number=240)])
self.assertEqual(str(s.metronomeMarkBoundaries()), '[(0.0, 4.0, <music21.tempo.MetronomeMark maestoso Quarter=90>), (4.0, 6.0, <music21.tempo.MetronomeMark animato Quarter=120>), (6.0, 8.0, <music21.tempo.MetronomeMark Quarter=240>)]')
        # tempo indications not starting at offset 0
s = stream.Stream()
s.repeatAppend(note.Note(), 8)
s.insert([4, tempo.MetronomeMark(number=120),
6, tempo.MetronomeMark(number=240)])
self.assertEqual(str(s.metronomeMarkBoundaries()), '[(0.0, 4.0, <music21.tempo.MetronomeMark animato Quarter=120>), (4.0, 6.0, <music21.tempo.MetronomeMark animato Quarter=120>), (6.0, 8.0, <music21.tempo.MetronomeMark Quarter=240>)]')
# none
s = stream.Stream()
s.repeatAppend(note.Note(), 8)
self.assertEqual(str(s.metronomeMarkBoundaries()), '[(0.0, 8.0, <music21.tempo.MetronomeMark animato Quarter=120>)]')
        # one mid-stream
s = stream.Stream()
s.repeatAppend(note.Note(), 8)
s.insert([6, tempo.MetronomeMark(number=240)])
self.assertEqual(str(s.metronomeMarkBoundaries()), '[(0.0, 6.0, <music21.tempo.MetronomeMark animato Quarter=120>), (6.0, 8.0, <music21.tempo.MetronomeMark Quarter=240>)]')
        # one tempo at the start of the stream
s = stream.Stream()
s.repeatAppend(note.Note(), 8)
s.insert([0, tempo.MetronomeMark(number=240)])
self.assertEqual(str(s.metronomeMarkBoundaries()), '[(0.0, 8.0, <music21.tempo.MetronomeMark Quarter=240>)]')
def testSecondsMapB(self):
from music21 import stream, tempo
        # one tempo at the start of the stream
s = stream.Stream()
s.repeatAppend(note.Note(), 2)
s.insert([0, tempo.MetronomeMark(number=60)])
sMap = s._getSecondsMap()
sMapStr = "[" # construct string from dict in fixed order...
for ob in sMap:
sMapStr += "{'durationSeconds': " + str(ob['durationSeconds']) + ", 'voiceIndex': " + str(ob['voiceIndex']) + ", 'element': " + str(ob['element']) + ", 'offsetSeconds': " + str(ob['offsetSeconds']) + ", 'endTimeSeconds': " + str(ob['endTimeSeconds']) + "}, "
sMapStr = sMapStr[0:-2]
sMapStr += "]"
self.assertEqual(sMapStr, """[{'durationSeconds': 0.0, 'voiceIndex': None, 'element': <music21.tempo.MetronomeMark larghetto Quarter=60>, 'offsetSeconds': 0.0, 'endTimeSeconds': 0.0}, {'durationSeconds': 1.0, 'voiceIndex': None, 'element': <music21.note.Note C>, 'offsetSeconds': 0.0, 'endTimeSeconds': 1.0}, {'durationSeconds': 1.0, 'voiceIndex': None, 'element': <music21.note.Note C>, 'offsetSeconds': 1.0, 'endTimeSeconds': 2.0}]""")
s = stream.Stream()
s.repeatAppend(note.Note(), 2)
s.insert([0, tempo.MetronomeMark(number=15)])
sMap = s._getSecondsMap()
sMapStr = "[" # construct string from dict in fixed order...
for ob in sMap:
sMapStr += "{'durationSeconds': " + str(ob['durationSeconds']) + ", 'voiceIndex': " + str(ob['voiceIndex']) + ", 'element': " + str(ob['element']) + ", 'offsetSeconds': " + str(ob['offsetSeconds']) + ", 'endTimeSeconds': " + str(ob['endTimeSeconds']) + "}, "
sMapStr = sMapStr[0:-2]
sMapStr += "]"
self.assertEqual(str(sMapStr), """[{'durationSeconds': 0.0, 'voiceIndex': None, 'element': <music21.tempo.MetronomeMark larghissimo Quarter=15>, 'offsetSeconds': 0.0, 'endTimeSeconds': 0.0}, {'durationSeconds': 4.0, 'voiceIndex': None, 'element': <music21.note.Note C>, 'offsetSeconds': 0.0, 'endTimeSeconds': 4.0}, {'durationSeconds': 4.0, 'voiceIndex': None, 'element': <music21.note.Note C>, 'offsetSeconds': 4.0, 'endTimeSeconds': 8.0}]""")
s = stream.Stream()
s.repeatAppend(note.Note(), 2)
s.insert([0, tempo.MetronomeMark(number=15),
1, tempo.MetronomeMark(number=60)])
sMap = s._getSecondsMap()
sMapStr = "[" # construct string from dict in fixed order...
for ob in sMap:
sMapStr += "{'durationSeconds': " + str(ob['durationSeconds']) + ", 'voiceIndex': " + str(ob['voiceIndex']) + ", 'element': " + str(ob['element']) + ", 'offsetSeconds': " + str(ob['offsetSeconds']) + ", 'endTimeSeconds': " + str(ob['endTimeSeconds']) + "}, "
sMapStr = sMapStr[0:-2]
sMapStr += "]"
self.assertEqual(sMapStr, """[{'durationSeconds': 0.0, 'voiceIndex': None, 'element': <music21.tempo.MetronomeMark larghissimo Quarter=15>, 'offsetSeconds': 0.0, 'endTimeSeconds': 0.0}, {'durationSeconds': 4.0, 'voiceIndex': None, 'element': <music21.note.Note C>, 'offsetSeconds': 0.0, 'endTimeSeconds': 4.0}, {'durationSeconds': 0.0, 'voiceIndex': None, 'element': <music21.tempo.MetronomeMark larghetto Quarter=60>, 'offsetSeconds': 4.0, 'endTimeSeconds': 4.0}, {'durationSeconds': 1.0, 'voiceIndex': None, 'element': <music21.note.Note C>, 'offsetSeconds': 4.0, 'endTimeSeconds': 5.0}]""")
s = stream.Stream()
s.repeatAppend(note.Note(quarterLength=2.0), 1)
s.insert([0, tempo.MetronomeMark(number=15),
1, tempo.MetronomeMark(number=60)])
sMap = s._getSecondsMap()
sMapStr = "[" # construct string from dict in fixed order...
for ob in sMap:
sMapStr += "{'durationSeconds': " + str(ob['durationSeconds']) + ", 'voiceIndex': " + str(ob['voiceIndex']) + ", 'element': " + str(ob['element']) + ", 'offsetSeconds': " + str(ob['offsetSeconds']) + ", 'endTimeSeconds': " + str(ob['endTimeSeconds']) + "}, "
sMapStr = sMapStr[0:-2]
sMapStr += "]"
self.assertEqual(sMapStr, """[{'durationSeconds': 0.0, 'voiceIndex': None, 'element': <music21.tempo.MetronomeMark larghissimo Quarter=15>, 'offsetSeconds': 0.0, 'endTimeSeconds': 0.0}, {'durationSeconds': 5.0, 'voiceIndex': None, 'element': <music21.note.Note C>, 'offsetSeconds': 0.0, 'endTimeSeconds': 5.0}, {'durationSeconds': 0.0, 'voiceIndex': None, 'element': <music21.tempo.MetronomeMark larghetto Quarter=60>, 'offsetSeconds': 4.0, 'endTimeSeconds': 4.0}]""")
def testPartDurationA(self):
from music21 import stream
#s = corpus.parse('bach/bwv7.7')
p1 = stream.Part()
p1.append(note.Note(quarterLength=72))
p2 = stream.Part()
p2.append(note.Note(quarterLength=72))
sNew = stream.Score()
sNew.append(p1)
self.assertEqual(str(sNew.duration), '<music21.duration.Duration 72.0>')
self.assertEqual(sNew.duration.quarterLength, 72.0)
sNew.append(p2)
self.assertEqual(sNew.duration.quarterLength, 144.0)
#sPost = sNew.chordify()
#sPost.show()
def testPartDurationB(self):
from music21 import stream, corpus
s = corpus.parse('bach/bwv66.6')
sNew = stream.Score()
sNew.append(s.parts[0])
self.assertEqual(str(s.parts[0].duration), '<music21.duration.Duration 36.0>')
self.assertEqual(str(sNew.duration), '<music21.duration.Duration 36.0>')
self.assertEqual(sNew.duration.quarterLength, 36.0)
sNew.append(s.parts[1])
self.assertEqual(sNew.duration.quarterLength, 72.0)
def testChordifyTagPartA(self):
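        '''Test that chordify(addPartIdAsGroup=True) tags each pitch with its source part id.'''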
from music21 import stream
p1 = stream.Stream()
p1.id = 'a'
p1.repeatAppend(note.Note('g4', quarterLength=2), 6)
p2 = stream.Stream()
p2.repeatAppend(note.Note('c4', quarterLength=3), 4)
p2.id = 'b'
s = stream.Score()
s.insert(0, p1)
s.insert(0, p2)
post = s.chordify(addPartIdAsGroup=True, removeRedundantPitches=False)
self.assertEqual(len(post.flat.notes), 8)
# test that each note has its original group
idA = []
idB = []
for c in post.flat.getElementsByClass('Chord'):
for p in c.pitches:
if 'a' in p.groups:
idA.append(p.name)
if 'b' in p.groups:
idB.append(p.name)
self.assertEqual(idA, ['G', 'G', 'G', 'G', 'G', 'G', 'G', 'G'])
self.assertEqual(idB, ['C', 'C', 'C', 'C', 'C', 'C', 'C', 'C'])
def testChordifyTagPartB(self):
from music21 import corpus
s = corpus.parse('bwv66.6')
idSoprano = []
idAlto = []
idTenor = []
idBass = []
post = s.chordify(addPartIdAsGroup=True, removeRedundantPitches=False)
for c in post.flat.getElementsByClass('Chord'):
for p in c.pitches:
if 'Soprano' in p.groups:
idSoprano.append(p.name)
if 'Alto' in p.groups:
idAlto.append(p.name)
if 'Tenor' in p.groups:
idTenor.append(p.name)
if 'Bass' in p.groups:
idBass.append(p.name)
self.assertEqual(idSoprano, [u'C#', u'B', u'A', u'B', u'C#', u'E', u'C#', u'C#', u'B', u'B', u'A', u'C#', u'A', u'B', u'G#', u'G#', u'F#', u'A', u'B', u'B', u'B', u'B', u'F#', u'F#', u'E', u'A', u'A', u'B', u'B', u'C#', u'C#', u'A', u'B', u'C#', u'A', u'G#', u'G#', u'F#', u'F#', u'G#', u'F#', u'F#', u'F#', u'F#', u'F#', u'F#', u'F#', u'F#', u'F#', u'E#', u'F#'])
self.assertEqual(idAlto, [u'E', u'E', u'F#', u'E', u'E', u'E', u'E', u'A', u'G#', u'G#', u'E', u'G#', u'F#', u'G#', u'E#', u'E#', u'C#', u'F#', u'F#', u'F#', u'E', u'E', u'D#', u'D#', u'C#', u'C#', u'F#', u'E', u'E', u'E', u'A', u'F#', u'F#', u'G#', u'F#', u'F#', u'E#', u'F#', u'F#', u'C#', u'C#', u'D', u'E', u'E', u'D', u'C#', u'B', u'C#', u'D', u'D', u'C#'])
# length should be the same
self.assertEqual(len(idSoprano), len(idAlto))
def testTransposeByPitchA(self):
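        '''Test _transposeByInstrument(), toSoundingPitch(), and toWrittenPitch() with transposing instruments.'''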
from music21 import stream, instrument
i1 = instrument.EnglishHorn() # -p5
i2 = instrument.Clarinet() # -M2
p1 = stream.Part()
p1.repeatAppend(note.Note('C4'), 4)
p1.insert(0, i1)
p1.insert(2, i2)
p2 = stream.Part()
p2.repeatAppend(note.Note('C4'), 4)
p2.insert(0, i2)
s = stream.Score()
s.insert(0, p1)
s.insert(0, p2)
self.assertEqual([str(p) for p in p1.pitches], ['C4', 'C4', 'C4', 'C4'])
test = p1._transposeByInstrument(inPlace=False)
self.assertEqual([str(p) for p in test.pitches], ['F3', 'F3', 'B-3', 'B-3'])
test = p1._transposeByInstrument(inPlace=False, reverse=True)
self.assertEqual([str(p) for p in test.pitches], ['G4', 'G4', 'D4', 'D4'])
# declare that at written pitch
p1.atSoundingPitch = False
test = p1.toSoundingPitch(inPlace=False)
# all transpositions should be downward
self.assertEqual([str(p) for p in test.pitches], ['F3', 'F3', 'B-3', 'B-3'])
# declare that at written pitch
p1.atSoundingPitch = False
test = p1.toWrittenPitch(inPlace=False)
# no change; already at written
self.assertEqual([str(p) for p in test.pitches], ['C4', 'C4', 'C4', 'C4'])
# declare that at sounding pitch
p1.atSoundingPitch = True
# no change happens
test = p1.toSoundingPitch(inPlace=False)
self.assertEqual([str(p) for p in test.pitches], ['C4', 'C4', 'C4', 'C4'])
# declare at sounding pitch
p1.atSoundingPitch = True
        # reverse intervals; all pitches should be transposed upward
test = p1.toWrittenPitch(inPlace=False)
self.assertEqual([str(p) for p in test.pitches], ['G4', 'G4', 'D4', 'D4'])
# test on a complete score
s.parts[0].atSoundingPitch = False
s.parts[1].atSoundingPitch = False
test = s.toSoundingPitch(inPlace=False)
self.assertEqual([str(p) for p in test.parts[0].pitches], ['F3', 'F3', 'B-3', 'B-3'])
self.assertEqual([str(p) for p in test.parts[1].pitches], ['B-3', 'B-3', 'B-3', 'B-3'])
# test same in place
s.parts[0].atSoundingPitch = False
s.parts[1].atSoundingPitch = False
s.toSoundingPitch(inPlace=True)
self.assertEqual([str(p) for p in s.parts[0].pitches], ['F3', 'F3', 'B-3', 'B-3'])
self.assertEqual([str(p) for p in test.parts[1].pitches], ['B-3', 'B-3', 'B-3', 'B-3'])
def testTransposeByPitchB(self):
from music21.musicxml import testPrimitive
from music21 import converter
s = converter.parse(testPrimitive.transposingInstruments72a)
self.assertEqual(s.parts[0].atSoundingPitch, False)
self.assertEqual(s.parts[1].atSoundingPitch, False)
self.assertEqual(str(s.parts[0].getElementsByClass(
'Instrument')[0].transposition), '<music21.interval.Interval M-2>')
self.assertEqual(str(s.parts[1].getElementsByClass(
'Instrument')[0].transposition), '<music21.interval.Interval M-6>')
self.assertEqual([str(p) for p in s.parts[0].pitches], ['D4', 'E4', 'F#4', 'G4', 'A4', 'B4', 'C#5', 'D5'])
self.assertEqual([str(p) for p in s.parts[1].pitches], ['A4', 'B4', 'C#5', 'D5', 'E5', 'F#5', 'G#5', 'A5'])
s.toSoundingPitch(inPlace=True)
self.assertEqual([str(p) for p in s.parts[0].pitches], ['C4', 'D4', 'E4', 'F4', 'G4', 'A4', 'B4', 'C5'] )
self.assertEqual([str(p) for p in s.parts[1].pitches], ['C4', 'D4', 'E4', 'F4', 'G4', 'A4', 'B4', 'C5'] )
def testExtendTiesA(self):
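        '''Test extendTies() connecting equal pitches between successive notes and chords.'''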
from music21 import stream
s = stream.Stream()
s.append(note.Note('g4'))
s.append(chord.Chord(['c3', 'g4', 'a5']))
s.append(note.Note('a5'))
s.append(chord.Chord(['c4', 'a5']))
s.extendTies()
post = []
for n in s.flat.getElementsByClass('GeneralNote'):
if 'Chord' in n.classes:
post.append([q.tie for q in n])
else:
post.append(n.tie)
self.assertEqual(str(post), '[<music21.tie.Tie start>, [None, <music21.tie.Tie stop>, <music21.tie.Tie start>], <music21.tie.Tie continue>, [None, <music21.tie.Tie stop>]]')
def testExtendTiesB(self):
from music21 import corpus
s = corpus.parse('bwv66.6')
sChords = s.measures(9, 9).chordify()
#sChords = s.chordify()
sChords.extendTies()
post = []
for chord in sChords.flat.getElementsByClass('Chord'):
post.append([n.tie for n in chord])
self.assertEqual(str(post), '[[<music21.tie.Tie continue>, <music21.tie.Tie start>, <music21.tie.Tie start>], [<music21.tie.Tie continue>, None, <music21.tie.Tie continue>, <music21.tie.Tie stop>], [<music21.tie.Tie stop>, <music21.tie.Tie start>, <music21.tie.Tie continue>, <music21.tie.Tie start>], [None, <music21.tie.Tie stop>, <music21.tie.Tie stop>, <music21.tie.Tie stop>], [None, None, None, None]]')
#sChords.show()
def testInsertIntoNoteOrChordA(self):
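        '''Test insertIntoNoteOrChord() merging new notes and chords into existing notes.'''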
from music21 import stream
s = stream.Stream()
s.repeatAppend(note.Note('d4'), 8)
s.insertIntoNoteOrChord(3, note.Note('g4'))
self.assertEqual(str([e for e in s]), '[<music21.note.Note D>, <music21.note.Note D>, <music21.note.Note D>, <music21.chord.Chord D4 G4>, <music21.note.Note D>, <music21.note.Note D>, <music21.note.Note D>, <music21.note.Note D>]')
s.insertIntoNoteOrChord(3, note.Note('b4'))
self.assertEqual(str([e for e in s]), '[<music21.note.Note D>, <music21.note.Note D>, <music21.note.Note D>, <music21.chord.Chord D4 G4 B4>, <music21.note.Note D>, <music21.note.Note D>, <music21.note.Note D>, <music21.note.Note D>]')
s.insertIntoNoteOrChord(5, note.Note('b4'))
self.assertEqual(str([e for e in s]), '[<music21.note.Note D>, <music21.note.Note D>, <music21.note.Note D>, <music21.chord.Chord D4 G4 B4>, <music21.note.Note D>, <music21.chord.Chord D4 B4>, <music21.note.Note D>, <music21.note.Note D>]')
s.insertIntoNoteOrChord(5, chord.Chord(['c5', 'e-5']))
self.assertEqual(str([e for e in s]), '[<music21.note.Note D>, <music21.note.Note D>, <music21.note.Note D>, <music21.chord.Chord D4 G4 B4>, <music21.note.Note D>, <music21.chord.Chord D4 B4 C5 E-5>, <music21.note.Note D>, <music21.note.Note D>]')
#s.show('text')
def testInsertIntoNoteOrChordB(self):
from music21 import stream
s = stream.Stream()
s.repeatAppend(chord.Chord(['c4', 'e4', 'g4']), 8)
s.insertIntoNoteOrChord(5, note.Note('b4'))
s.insertIntoNoteOrChord(3, note.Note('b4'))
s.insertIntoNoteOrChord(6, chord.Chord(['d5', 'e-5', 'b-5']))
self.assertEqual(str([e for e in s]), '[<music21.chord.Chord C4 E4 G4>, <music21.chord.Chord C4 E4 G4>, <music21.chord.Chord C4 E4 G4>, <music21.chord.Chord C4 E4 G4 B4>, <music21.chord.Chord C4 E4 G4>, <music21.chord.Chord C4 E4 G4 B4>, <music21.chord.Chord C4 E4 G4 D5 E-5 B-5>, <music21.chord.Chord C4 E4 G4>]')
def testSortingAfterInsertA(self):
from music21 import corpus
import math
s = corpus.parse('bwv66.6')
#s.show()
p = s.parts[0]
for m in p.getElementsByClass('Measure'):
for n in m.notes:
targetOffset = n.getOffsetBySite(m)
if targetOffset != math.floor(targetOffset):
                    # remove all offbeats
r = note.Rest(quarterLength=n.quarterLength)
m.remove(n)
m.insert(targetOffset, r)
# if we iterate, we get a sorted version
#self.assertEqual([str(n) for n in p.flat.notesAndRests], [])
        # when we just call show(), we were not getting a sorted version;
# this was due to making the stream immutable before sorting
# this is now fixed
# m. 3
match = """ <note>
<pitch>
<step>A</step>
<octave>4</octave>
</pitch>
<duration>5040</duration>
<type>eighth</type>
<stem>up</stem>
</note>
<note>
<rest/>
<duration>5040</duration>
<type>eighth</type>
</note>
<note>
<pitch>
<step>G</step>
<alter>1</alter>
<octave>4</octave>
</pitch>
<duration>10080</duration>
<type>quarter</type>
<stem>up</stem>
</note>
<note>"""
GEX = m21ToXml.GeneralObjectExporter()
originalRaw = GEX.parse(p).decode('utf-8')
match = match.replace(' ', '')
match = match.replace('\n', '')
raw = originalRaw.replace(' ', '')
raw = raw.replace('\n', '')
self.assertEqual(raw.find(match) > 0, True, (match, originalRaw))
def testInvertDiatonicA(self):
# TODO: Check results
from music21 import corpus, stream
qj = corpus.parse('ciconia/quod_jactatur').parts[0]
k1 = qj.flat.getElementsByClass(key.KeySignature)[0]
qj.flat.replace(k1, key.KeySignature(-3))
qj.getElementsByClass(stream.Measure)[1].insert(0, key.KeySignature(5))
unused_qj2 = qj.invertDiatonic(note.Note('F4'), inPlace = False)
def testMeasuresA(self):
from music21 import corpus
s = corpus.parse('bwv66.6')
ex = s.parts[0].measures(3,6)
self.assertEqual(str(ex.flat.getElementsByClass('Clef')[0]), '<music21.clef.TrebleClef>')
self.assertEqual(str(ex.flat.getElementsByClass('Instrument')[0]), 'P1: Soprano: Instrument 1')
# check that we have the exact same Measure instance
mTarget = s.parts[0].getElementsByClass('Measure')[3]
self.assertEqual(id(ex.getElementsByClass('Measure')[0]), id(mTarget))
for m in ex.getElementsByClass('Measure'):
for n in m.notes:
if n.name == 'B':
o = n.getOffsetBySite(m)
m.remove(n)
r = note.Rest(quarterLength=n.quarterLength)
m.insert(o, r)
#s.parts[0].show()
self.assertEqual(len(ex.flat.getElementsByClass('Rest')), 5)
def testMeasuresB(self):
from music21 import corpus
s = corpus.parse('luca/gloria')
y = s.measures(50,90)
self.assertEqual(len(
y.parts[0].flat.getElementsByClass('TimeSignature')), 2)
# make sure that ts is being found in musicxml score generation
        # as it is in the Part, and not the Measure, this requires an extra check
GEX = m21ToXml.GeneralObjectExporter()
raw = GEX.parse(y.parts[0]).decode('utf-8')
match = """ <time>
<beats>2</beats>
<beat-type>4</beat-type>
</time>
"""
raw = raw.replace(' ', '')
raw = raw.replace('\n', '')
match = match.replace(' ', '')
match = match.replace('\n', '')
self.assertEqual(raw.find(match)>0, True)
def testMeasuresC(self):
from music21 import corpus
s = corpus.parse('bwv66.6')
ex = s.parts[0].measures(3,6)
for n in list(ex.recurse(classFilter=['Note'])):
if n.name == 'B': # should do a list(recurse()) because manipulating
o = n.offset # the stream while iterating.
site = n.activeSite
n.activeSite.remove(n)
r = note.Rest(quarterLength=n.quarterLength)
site.insert(o, r)
self.assertEqual(len(ex.flat.getElementsByClass('Rest')), 5)
#ex.show()
def testChordifyF(self):
# testing chordify handling of triplets
from music21.musicxml import testPrimitive
from music21 import converter
# TODO: there are still errors in this chordify output
s = converter.parse(testPrimitive.triplets01)
#s.parts[0].show()
self.maxDiff = None
self.assertMultiLineEqual(s.parts[0].getElementsByClass('Measure')[0]._reprText(addEndTimes=True, useMixedNumerals=True),
'''{0 - 0} <music21.layout.SystemLayout>
{0 - 0} <music21.clef.TrebleClef>
{0 - 0} <music21.key.KeySignature of 2 flats, mode major>
{0 - 0} <music21.meter.TimeSignature 4/4>
{0 - 2/3} <music21.note.Note B->
{2/3 - 1 1/3} <music21.note.Note C>
{1 1/3 - 2} <music21.note.Note B->
{2 - 4} <music21.note.Note A>''')
self.assertMultiLineEqual(s.parts[1].getElementsByClass('Measure')[0]._reprText(addEndTimes=True),
'''{0.0 - 0.0} <music21.clef.BassClef>
{0.0 - 0.0} <music21.key.KeySignature of 2 flats, mode major>
{0.0 - 0.0} <music21.meter.TimeSignature 4/4>
{0.0 - 4.0} <music21.note.Note B->''')
chords = s.chordify()
m1 = chords.getElementsByClass('Measure')[0]
self.assertMultiLineEqual(m1._reprText(addEndTimes=True, useMixedNumerals=True),
'''{0 - 0} <music21.layout.SystemLayout>
{0 - 0} <music21.clef.TrebleClef>
{0 - 0} <music21.key.KeySignature of 2 flats, mode major>
{0 - 0} <music21.meter.TimeSignature 4/4>
{0 - 2/3} <music21.chord.Chord B-4 B-2>
{2/3 - 1 1/3} <music21.chord.Chord C5 B-2>
{1 1/3 - 2} <music21.chord.Chord B-4 B-2>
{2 - 4} <music21.chord.Chord A4 B-2>''')
match = [([str(p) for p in n.pitches], str(round(float(n.offset), 2)), str(round(float(n.quarterLength), 3))) for n in m1.notes]
self.assertEqual(str(match), "[(['B-4', 'B-2'], '0.0', '0.667'), " + \
"(['C5', 'B-2'], '0.67', '0.667'), " + \
"(['B-4', 'B-2'], '1.33', '0.667'), " + \
"(['A4', 'B-2'], '2.0', '2.0')]")
#chords.show()
GEX = m21ToXml.GeneralObjectExporter()
raw = GEX.parse(m1).decode('utf-8')
# there should only be 2 tuplet indications in the produced chords: start and stop...
self.assertEqual(raw.count('<tuplet'), 2, raw)
# pitch grouping in measure index 1 was not allocated properly
#for c in chords.getElementsByClass('Chord'):
# self.assertEqual(len(c), 2)
def testChordifyG(self):
from music21 import stream
# testing a problem in triplets in makeChords
s = stream.Stream()
s.repeatAppend(note.Note('G4', quarterLength=1/3.), 6)
s.insert(0, note.Note('C4', quarterLength=2))
chords = s.chordify()
#s.chordify().show('t')
for c in chords.getElementsByClass('Chord'):
self.assertEqual(len(c), 2)
# try with small divisions
s = stream.Stream()
s.repeatAppend(note.Note('G4', quarterLength=1/6.), 12)
s.insert(0, note.Note('C4', quarterLength=2))
chords = s.chordify()
#s.chordify().show('t')
for c in chords.getElementsByClass('Chord'):
self.assertEqual(len(c), 2)
s = stream.Stream()
s.repeatAppend(note.Note('G4', quarterLength=1/12.), 24)
s.insert(0, note.Note('C4', quarterLength=2))
chords = s.chordify()
#s.chordify().show('t')
for c in chords.getElementsByClass('Chord'):
self.assertEqual(len(c), 2)
s = stream.Stream()
s.repeatAppend(note.Note('G4', quarterLength=1/24.), 48)
s.insert(0, note.Note('C4', quarterLength=2))
chords = s.chordify()
#s.chordify().show('t')
for c in chords.getElementsByClass('Chord'):
self.assertEqual(len(c), 2)
def testMakeVoicesA(self):
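        '''Test makeVoices() distributing overlapping notes into separate voices.'''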
from music21 import stream
s = stream.Stream()
s.repeatAppend(note.Note('d-4', quarterLength=1), 8)
s.insert(0, note.Note('C4', quarterLength=8))
s.makeVoices(inPlace=True)
self.assertEqual(len(s.voices), 2)
self.assertEqual(len(s.voices[0]), 8)
self.assertEqual(len(s.voices[1]), 1)
#s.show()
def testMakeVoicesB(self):
from music21 import corpus
s = corpus.parse('bwv66.6')
#s.measures(6,7).show()
sMeasures = s.measures(6,7)
sFlatVoiced = sMeasures.flat.makeVoices(inPlace=False)
#sFlatVoiced.show('t')
#sFlatVoiced.show()
self.assertEqual(len(sMeasures.flat.notes), len(sFlatVoiced.flat.notes))
self.assertEqual(sMeasures.flat.highestTime,
sFlatVoiced.flat.notes.highestTime)
self.assertEqual(len(sFlatVoiced.voices), 4)
def testSplitAtQuarterLengthA(self):
from music21 import stream
s = stream.Measure()
s.append(note.Note('a', quarterLength=1))
s.append(note.Note('b', quarterLength=2))
s.append(note.Note('c', quarterLength=1))
l, r = s.splitAtQuarterLength(2, retainOrigin=True)
# if retain origin is true, l is the original
self.assertEqual(l, s)
self.assertEqual(l.highestTime, 2)
self.assertEqual(len(l.notes), 2)
self.assertEqual(r.highestTime, 2)
self.assertEqual(len(r.notes), 2)
sPost = stream.Stream()
sPost.append(l)
sPost.append(r)
def testSplitAtQuarterLengthB(self):
'''Test if recursive calls work over voices in a Measure
'''
from music21 import stream
m1 = stream.Measure()
v1 = stream.Voice()
v1.repeatAppend(note.Note('g4', quarterLength=2), 3)
v2 = stream.Voice()
v2.repeatAppend(note.Note(quarterLength=6), 1)
m1.insert(0, v1)
m1.insert(0, v2)
#m1.show()
mLeft, mRight = m1.splitAtQuarterLength(3)
self.assertEqual(len(mLeft.flat.notes), 3)
self.assertEqual(len(mLeft.voices), 2)
self.assertEqual(len(mRight.flat.notes), 3)
self.assertEqual(len(mRight.voices), 2)
sPost = stream.Stream()
sPost.append(mLeft)
sPost.append(mRight)
#sPost.show()
def testSplitAtQuarterLengthC(self):
'''Test splitting a Score
'''
from music21 import corpus
s = corpus.parse('bwv66.6')
sLeft, sRight = s.splitAtQuarterLength(6)
self.assertEqual(len(sLeft.parts), 4)
self.assertEqual(len(sRight.parts), 4)
for i in range(4):
self.assertEqual(
str(sLeft.parts[i].getElementsByClass('Measure')[0].timeSignature), str(sRight.parts[i].getElementsByClass('Measure')[0].timeSignature))
for i in range(4):
self.assertEqual(
str(sLeft.parts[i].getElementsByClass('Measure')[0].clef), str(sRight.parts[i].getElementsByClass('Measure')[0].clef))
for i in range(4):
self.assertEqual(
str(sLeft.parts[i].getElementsByClass('Measure')[0].keySignature), str(sRight.parts[i].getElementsByClass('Measure')[0].keySignature))
#sLeft.show()
#sRight.show()
# def testGraceStreamA(self):
#
# from music21 import stream, spanner
#
# # the GraceStream transforms generic notes into Notes w/ grace
# # durations; otherwise it is not necessary
# gs = stream.GraceStream()
# # the notes here are copies of the created notes
# gs.append(note.Note('c4', quarterLength=.25))
# gs.append(note.Note('d#4', quarterLength=.25))
# gs.append(note.Note('g#4', quarterLength=.5))
#
# #gs.show('t')
# #gs.show()
#
# # the total duration of the grace stream is zero
# self.assertEqual(gs.duration.quarterLength, 0.0)
#
# s = stream.Measure()
# s.append(note.Note('G3'))
# s.append(gs)
# s.append(note.Note('A4'))
#
# sp = spanner.Slur(gs[0], s[-1])
# s.append(sp)
#
# match = [str(x) for x in s.pitches]
# self.assertEqual(match, ['G3', 'C4', 'D#4', 'G#4', 'A4'])
#s.show('text')
# p = stream.Part()
# p.append(s)
# p.show()
def testGraceStreamB(self):
'''testing grace notes w/o a grace stream'''
from music21 import stream, duration, dynamics
s = stream.Measure()
s.append(note.Note('G3'))
self.assertEqual(s.highestTime, 1.0)
# shows up in the same position as the following note, not the grace
s.append(dynamics.Dynamic('mp'))
gn1 = note.Note('d#4', quarterLength=.5)
# could create a NoteRest method to get a GraceNote from a Note
gn1.duration = gn1.duration.getGraceDuration()
self.assertEqual(gn1.duration.quarterLength, 0.0)
s.append(gn1)
# highest time is the same after adding the gracenote
self.assertEqual(s.highestTime, 1.0)
s.append(note.Note('A4'))
self.assertEqual(s.highestTime, 2.0)
# this works just fine
#s.show()
match = [str(e.pitch) for e in s.notes]
self.assertEqual(match, ['G3', 'D#4', 'A4'])
#s.sort()
# this insert and shift creates an ambiguous situation
# the grace note seems to move with the note itself
s.insertAndShift(1, note.Note('c4'))
match = [str(e) for e in s.pitches]
self.assertEqual(match, ['G3', 'C4', 'D#4', 'A4'])
#s.show('t')
#s.show()
# inserting and shifting this results in it appearing before
# the note at offset 2
gn2 = note.Note('c#4', quarterLength=.25).getGrace()
gn2.duration.slash = False
s.insertAndShift(1, gn2)
#s.show('t')
#s.show()
match = [str(e) for e in s.pitches]
self.assertEqual(match, ['G3', 'C#4', 'C4', 'D#4', 'A4'])
def testGraceStreamC(self):
from music21 import stream
s = stream.Measure()
s.append(chord.Chord(['G3', 'd4']))
gc1 = chord.Chord(['d#4', 'a#4'], quarterLength=.5)
gc1.duration = gc1.duration.getGraceDuration()
s.append(gc1)
gc2 = chord.Chord(['e4', 'b4'], quarterLength=.5)
gc2.duration = gc2.duration.getGraceDuration()
s.append(gc2)
s.append(chord.Chord(['f4', 'c5'], quarterLength=2))
gc3 = chord.Chord(['f#4', 'c#5'], quarterLength=.5)
gc3.duration = gc3.duration.getGraceDuration()
s.append(gc3)
s.append(chord.Chord(['e4', 'b4'], quarterLength=1))
#s.show()
def testScoreShowA(self):
# this checks the specific handling of Score.makeNotation()
from music21 import stream
s = stream.Stream()
s.append(key.Key('G'))
GEX = m21ToXml.GeneralObjectExporter()
raw = GEX.parse(s).decode('utf-8')
self.assertTrue(raw.find('<fifths>1</fifths>') > 0, raw)
def testGetVariantsA(self):
from music21 import stream, variant
s = stream.Stream()
v1 = variant.Variant()
v2 = variant.Variant()
s.append(v1)
s.append(v2)
self.assertEqual(len(s.variants), 2)
def testActivateVariantsA(self):
'''This tests a single-measure variant
'''
from music21 import stream, variant
s = stream.Stream()
s.repeatAppend(note.Note('d2'), 12)
s.makeMeasures(inPlace=True)
v1 = variant.Variant()
m2Alt = stream.Measure()
m2Alt.repeatAppend(note.Note('G#4'), 4)
v1.append(m2Alt) # embed a complete Measure in v1
# insert the variant at the desired location
s.insert(4, v1)
self.assertEqual(str([p.name for p in s.pitches]), "['D', 'D', 'D', 'D', 'D', 'D', 'D', 'D', 'D', 'D', 'D', 'D']")
self.assertEqual(len(s.variants), 1)
s.activateVariants(matchBySpan=False, inPlace=True)
self.assertEqual(str([p.name for p in s.pitches]), "['D', 'D', 'D', 'D', 'G#', 'G#', 'G#', 'G#', 'D', 'D', 'D', 'D']")
self.assertEqual(len(s.variants), 1)
# activating again will restore the previous
s.activateVariants(matchBySpan=False, inPlace=True)
self.assertEqual(str([p.name for p in s.pitches]), "['D', 'D', 'D', 'D', 'D', 'D', 'D', 'D', 'D', 'D', 'D', 'D']")
self.assertEqual(len(s.variants), 1)
def testActivateVariantsB(self):
'''This tests two variants with different groups, each a single measure
'''
from music21 import stream, variant
s = stream.Stream()
s.repeatAppend(note.Note('d2'), 12)
s.makeMeasures(inPlace=True)
v1 = variant.Variant()
m2Alt = stream.Measure()
m2Alt.repeatAppend(note.Note('a#4'), 4)
v1.append(m2Alt) # embed a complete Measure in v1
v1.groups.append('m2-a')
v2 = variant.Variant()
m2Alt = stream.Measure()
m2Alt.repeatAppend(note.Note('b-4'), 4)
v2.append(m2Alt) # embed a complete Measure in v1
v2.groups.append('m2-b')
# insert the variant at the desired location
s.insert(4, v1)
s.insert(4, v2)
self.assertEqual(str([p.name for p in s.pitches]), "['D', 'D', 'D', 'D', 'D', 'D', 'D', 'D', 'D', 'D', 'D', 'D']")
self.assertEqual(len(s.variants), 2)
s.activateVariants(group='m2-a', matchBySpan=False, inPlace=True)
self.assertEqual(str([p.name for p in s.pitches]), "['D', 'D', 'D', 'D', 'A#', 'A#', 'A#', 'A#', 'D', 'D', 'D', 'D']")
self.assertEqual(len(s.variants), 2)
# if we try the same group twice, it is now not active, so there is no change
s.activateVariants(group='m2-a', matchBySpan=False, inPlace=True)
self.assertEqual(str([p.name for p in s.pitches]), "['D', 'D', 'D', 'D', 'A#', 'A#', 'A#', 'A#', 'D', 'D', 'D', 'D']")
self.assertEqual(len(s.variants), 2)
# activate a different variant
s.activateVariants('m2-b', matchBySpan=False, inPlace=True)
self.assertEqual(str([p.name for p in s.pitches]), "['D', 'D', 'D', 'D', 'B-', 'B-', 'B-', 'B-', 'D', 'D', 'D', 'D']")
self.assertEqual(len(s.variants), 2)
# TODO: keep groups
# we now have 2 variants that have been stripped of their groups
match = [e.groups for e in s.variants]
self.assertEqual(str(match), "[['default'], ['default']]")
def testActivateVariantsC(self):
'''This tests a two-measure variant
'''
from music21 import stream, variant
s = stream.Stream()
s.repeatAppend(note.Note('d2'), 12)
s.makeMeasures(inPlace=True)
v1 = variant.Variant()
m2Alt = stream.Measure()
m2Alt.repeatAppend(note.Note('G#4'), 4)
v1.append(m2Alt) # embed a complete Measure in v1
m3Alt = stream.Measure()
m3Alt.repeatAppend(note.Note('A#4'), 4)
v1.append(m3Alt) # embed a complete Measure in v1
# insert the variant at the desired location
s.insert(4, v1)
self.assertEqual(str([p.name for p in s.pitches]), "['D', 'D', 'D', 'D', 'D', 'D', 'D', 'D', 'D', 'D', 'D', 'D']")
self.assertEqual(len(s.variants), 1)
s.activateVariants(matchBySpan=False, inPlace=True)
self.assertEqual(str([p.name for p in s.pitches]), "['D', 'D', 'D', 'D', 'G#', 'G#', 'G#', 'G#', 'A#', 'A#', 'A#', 'A#']")
self.assertEqual(len(s.variants), 1)
#s.show('t')
# can restore the removed two measures
s.activateVariants(matchBySpan=False, inPlace=True)
self.assertEqual(str([p.name for p in s.pitches]), "['D', 'D', 'D', 'D', 'D', 'D', 'D', 'D', 'D', 'D', 'D', 'D']")
self.assertEqual(len(s.variants), 1)
def testActivateVariantsD(self):
'''This tests a note-level variant
'''
from music21 import stream, variant
s = stream.Stream()
s.repeatAppend(note.Note('d2'), 12)
v = variant.Variant()
v.append(note.Note('G#4'))
v.append(note.Note('a#4'))
v.append(note.Note('c#5'))
s.insert(5, v)
self.assertEqual(str([p.name for p in s.pitches]), "['D', 'D', 'D', 'D', 'D', 'D', 'D', 'D', 'D', 'D', 'D', 'D']")
self.assertEqual(len(s.notes), 12)
self.assertEqual(len(s.variants), 1)
s.activateVariants(matchBySpan=False, inPlace=True)
self.assertEqual(str([p.name for p in s.pitches]), "['D', 'D', 'D', 'D', 'D', 'G#', 'A#', 'C#', 'D', 'D', 'D', 'D']")
self.assertEqual(len(s.notes), 12)
self.assertEqual(len(s.variants), 1)
#s.show('t')
s.activateVariants(matchBySpan=False, inPlace=True)
self.assertEqual(str([p.name for p in s.pitches]), "['D', 'D', 'D', 'D', 'D', 'D', 'D', 'D', 'D', 'D', 'D', 'D']")
self.assertEqual(len(s.notes), 12)
self.assertEqual(len(s.variants), 1)
# note that if the start times of each component do not match, the
# variant part will not be matched
def testActivateVariantsE(self):
'''This tests a note-level variant with mismatched rhythms
'''
from music21 import stream, variant
s = stream.Stream()
s.repeatAppend(note.Note('d2'), 12)
v = variant.Variant()
v.append(note.Note('G#4', quarterLength=.5))
v.append(note.Note('a#4', quarterLength=1.5))
v.append(note.Note('c#5', quarterLength=1))
s.insert(5, v)
self.assertEqual(str([p.name for p in s.pitches]), "['D', 'D', 'D', 'D', 'D', 'D', 'D', 'D', 'D', 'D', 'D', 'D']")
self.assertEqual(len(s.notes), 12)
self.assertEqual(len(s.variants), 1)
s.activateVariants(matchBySpan=False, inPlace=True)
# TODO
# this only matches the Notes that start at the same position
self.assertEqual(str([p.name for p in s.pitches]), "['D', 'D', 'D', 'D', 'D', 'G#', 'D', 'C#', 'D', 'D', 'D', 'D']")
self.assertEqual(len(s.notes), 12)
self.assertEqual(len(s.variants), 1)
self.assertEqual(str([p for p in s.variants[0].elements]), "[<music21.note.Note D>, <music21.note.Note D>]")
def testActivateVariantsBySpanA(self):
# this tests replacing 1 note with a 3-note variant
from music21 import stream, variant, dynamics
s = stream.Stream()
s.repeatAppend(note.Note('d2'), 12)
v = variant.Variant()
v.insert(0, dynamics.Dynamic('ff'))
v.append(note.Note('G#4', quarterLength=.5))
v.append(note.Note('a#4', quarterLength=.25))
v.append(note.Note('c#5', quarterLength=.25))
s.insert(5, v)
# pre-check
self.assertEqual(len(s.flat.notes), 12)
self.assertEqual(str([p.name for p in s.pitches]),
"['D', 'D', 'D', 'D', 'D', 'D', 'D', 'D', 'D', 'D', 'D', 'D']")
self.assertEqual(len(s.getElementsByClass('Dynamic')), 0)
s.activateVariants(matchBySpan=True, inPlace=True)
self.assertEqual(len(s.flat.notes), 14) # replace 1 w/ 3, for +2
self.assertEqual(str([p.name for p in s.pitches]),
"['D', 'D', 'D', 'D', 'D', 'G#', 'A#', 'C#', 'D', 'D', 'D', 'D', 'D', 'D']")
self.assertEqual(len(s.getElementsByClass('Dynamic')), 1)
s.activateVariants(matchBySpan=True, inPlace=True)
self.assertEqual(len(s.flat.notes), 12)
self.assertEqual(str([p.name for p in s.pitches]),
"['D', 'D', 'D', 'D', 'D', 'D', 'D', 'D', 'D', 'D', 'D', 'D']")
# TODO: as we are presently matching removal by classes in the Variant
# the variant now has no dynamics, and thus leaves the dyn from the
# old variant here
self.assertEqual(len(s.getElementsByClass('Dynamic')), 1)
#s.show()
def testActivateVariantsBySpanB(self):
# this tests replacing 2 measures by a longer single measure
from music21 import stream, variant
s = stream.Stream()
s.repeatAppend(note.Note('d2'), 16)
s.makeMeasures(inPlace=True)
v1 = variant.Variant()
m2Alt = stream.Measure()
m2Alt.repeatAppend(note.Note('a#4'), 8)
m2Alt.timeSignature = meter.TimeSignature('8/4')
v1.append(m2Alt) # embed a complete Measure in v1
v1.groups.append('m2-a')
# insert the variant at the desired location
s.insert(4, v1)
self.assertEqual(len(s.flat.notes), 16)
self.assertEqual(len(s.getElementsByClass('Measure')), 4)
self.assertEqual(str([p.name for p in s.pitches]),
"['D', 'D', 'D', 'D', 'D', 'D', 'D', 'D', 'D', 'D', 'D', 'D', 'D', 'D', 'D', 'D']")
# replace 2 measures for 1
s.activateVariants(matchBySpan=True, inPlace=True)
self.assertEqual(len(s.flat.notes), 16)
self.assertEqual(len(s.getElementsByClass('Measure')), 3)
self.assertEqual(str([p.name for p in s.pitches]),
"['D', 'D', 'D', 'D', 'A#', 'A#', 'A#', 'A#', 'A#', 'A#', 'A#', 'A#', 'D', 'D', 'D', 'D']")
# replace the one for two
s.activateVariants("default", matchBySpan=True, inPlace=True)
self.assertEqual(len(s.getElementsByClass('Measure')), 4)
self.assertEqual(str([p.name for p in s.pitches]),
"['D', 'D', 'D', 'D', 'D', 'D', 'D', 'D', 'D', 'D', 'D', 'D', 'D', 'D', 'D', 'D']")
#s.show()
def testMeasureTemplateAll(self):
from music21 import corpus
b = corpus.parse('bwv66.6')
bass = b.parts[3]
bassEmpty = bass.measureTemplate(fillWithRests=False, customRemove=True)
for x in bassEmpty:
if 'Measure' in x.classes:
self.assertEqual(len(x), 0)
def testSetElements(self):
from music21 import dynamics
s = Stream()
s.append(note.Note('C', type='half'))
s.append(note.Note('D', type='half'))
s.append(note.Note('E', type='half'))
s.append(note.Note('F', type='half'))
n1 = s.notes[0]
n2 = s.notes[len(s.notes) // 2]
n3 = s.notes[-1]
sp1 = dynamics.Diminuendo(n1, n2)
sp2 = dynamics.Crescendo(n2, n3)
s.append(sp1)
s.append(sp2)
s2 = Stream()
s2.elements = s # do not set elements to s.elements, use s instead.
for el in s2:
self.assertEqual(el.getOffsetBySite(s2),
el.getOffsetBySite(s))
def testGetElementAfterElement(self):
n1 = note.Note('A3')
n2 = note.Note('B3')
n2.id = 'firstB'
n3 = note.Note('B3')
n3.id = 'secondB'
n4 = note.Note('D4')
m1 = note.Note('E4')
m2 = note.Note('F4')
m3 = note.Note('G4')
m4 = note.Note('A-5')
bass = Stream()
bass.append([n1, n2, n3, n4])
sop = Stream()
sop.append([m1, m2, m3, m4])
for i in range(len(bass.notes)-1):
note1 = bass.notes[i]
note2 = bass.getElementAfterElement(note1, ['Note'])
note3 = sop.playingWhenAttacked(note1)
note4 = sop.playingWhenAttacked(note2)
#print(note1, note2, note3, note4)
#print(note1.id, note2.id, note3.id, note4.id)
# TEST???
#------------------------------------------------------------------------------
if __name__ == "__main__":
import music21
music21.mainTest(Test, 'verbose') #, runTest='testGetElementAfterElement')
#------------------------------------------------------------------------------
# eof
| arnavd96/Cinemiezer | myvenv/lib/python3.4/site-packages/music21/test/testStream.py | Python | mit | 284,928 |
#Python 2.7
candidates = set([str(a * b) for a in range(100, 1000) for b in range(100, 1000)])
candidates = filter(lambda x: x == x[::-1], candidates)
print max([int(x) for x in candidates])
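# Sanity check for the palindrome filter above: a palindrome reads the same
# reversed, e.g. '9009' == '9009'[::-1] is True (91 * 99 = 9009, the
# two-digit analogue of this problem).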
| dooleykh/ProjectEuler | 4.py | Python | mit | 192 |
# For building youtube_downloader on windows
from distutils.core import setup
import py2exe
# Define where you want youtube_downloader to be built to below
build_dir = 'dist' # placeholder -- set this to your desired build directory
data_files = [('',['settings.ini',
'LICENSE',
'README.md']),
('sessions',[])]
options = {'py2exe': {
'dist_dir': build_dir}}
setup(
windows=['youtube_downloader.py'],
data_files=data_files,
options=options) | sammypg/youtube_downloader | setup.py | Python | mit | 470 |
# @Author: dileep
# @Last Modified by: dileep
import random
import pytest
from microbial_ai.regulation import Event, Action, Memory
@pytest.fixture
def random_action():
return Action(type='fixed', phi={'rxn1': (random.random(), '+')})
@pytest.fixture
def random_event(random_action):
return Event(state=random.randint(0, 100), action=random_action,
next_state=random.randint(0, 100), reward=random.random())
@pytest.mark.usefixtures("random_event")
class TestMemory:
"""
Tests for the Memory class
"""
def test_initialization(self):
memory = Memory(1000)
assert memory.capacity == 1000
assert memory.idx == 0
def test_add_event(self, random_event):
memory = Memory(1000)
memory.add_event(random_event)
assert len(memory.memory) == 1
assert memory.idx == 1
for _ in range(1500):
memory.add_event(random_event)
assert len(memory.memory) == memory.capacity
assert memory.idx == (1000 - 500 + 1)
def test_sample(self, random_event):
memory = Memory(1000)
with pytest.raises(ValueError):
memory.sample(100)
for _ in range(400):
memory.add_event(random_event)
assert len(memory.sample(200)) == 200
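# Usage sketch inferred from the tests above (assumed behaviour, based only
# on these assertions): Memory acts as a fixed-capacity ring buffer over
# Event objects.
#
# memory = Memory(1000) # stores at most 1000 events
# memory.add_event(event) # wraps around, overwriting oldest entries
# batch = memory.sample(200) # ValueError when too few events are stored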
| dileep-kishore/microbial-ai | tests/regulation/test_memory.py | Python | mit | 1,304 |
from .utils import do, do_ex, trace
from .version import meta
from os.path import abspath, realpath
FILES_COMMAND = 'git ls-files'
DEFAULT_DESCRIBE = 'git describe --dirty --tags --long --match *.*'
def parse(root, describe_command=DEFAULT_DESCRIBE):
real_root, _, ret = do_ex('git rev-parse --show-toplevel', root)
if ret:
return
trace('real root', real_root)
if abspath(realpath(real_root)) != abspath(realpath(root)):
return
rev_node, _, ret = do_ex('git rev-parse --verify --quiet HEAD', root)
if ret:
return meta('0.0')
rev_node = rev_node[:7]
out, err, ret = do_ex(describe_command, root)
if '-' not in out and '.' not in out:
revs = do('git rev-list HEAD', root)
count = revs.count('\n')
if ret:
out = rev_node
return meta('0.0', distance=count + 1, node=out)
if ret:
return
dirty = out.endswith('-dirty')
if dirty:
out = out.rsplit('-', 1)[0]
tag, number, node = out.rsplit('-', 2)
number = int(number)
if number:
return meta(tag, distance=number, node=node, dirty=dirty)
else:
return meta(tag, dirty=dirty, node=node)
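# Usage sketch (assumes the given root is the top level of a git checkout;
# returns a meta(...) version object, or None when root is not the repository
# top level or git fails):
#
# from setuptools_scm.git import parse
# version = parse('.')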
| esben/setuptools_scm | setuptools_scm/git.py | Python | mit | 1,196 |
import logging
import utils
import options
_Warning = logging.Warning
_Info = logging.Info
#//===========================================================================//
_site_setup = []
_user_setup = {}
_tools_setup = {}
_tools_post_setup = {}
def ResetSetup( site_setup = _site_setup,
user_setup = _user_setup,
tools_setup = _tools_setup,
tools_post_setup = _tools_post_setup ):
if __debug__:
_Info( "ResetSetup" )
del site_setup[:]
user_setup.clear()
tools_setup.clear()
tools_post_setup.clear()
#//===========================================================================//
def AddSiteSetup( setup_function, _site_setup = _site_setup, toList = utils.toList ):
_site_setup.append( setup_function )
def siteSetup( setup_function ):
AddSiteSetup( setup_function )
return setup_function
def SiteSetup( options, os_env ):
global _site_setup
for f in _site_setup:
if __debug__:
_Info( "Site setup: " + f.__name__ )
f( options = options, os_env = os_env )
UserSetup( options, os_env )
#//===========================================================================//
def AddUserSetup( setup_id, setup_function, user_setup = _user_setup ):
user_setup.setdefault( setup_id, [] ).append( setup_function )
def UserSetup( options, os_env, user_setup = _user_setup ):
for s in options.setup.Value():
if __debug__:
_Info( "User setup: " + s )
for f in user_setup.get( s, [] ):
f( options = options, os_env = os_env )
#//===========================================================================//
def AddToolSetup( tool_name, setup_function, tools_setup = _tools_setup, toList = utils.toList ):
tools_setup.setdefault( tool_name, [] ).append( setup_function )
def toolSetup( tool_name ):
def addToolSetup( setup_function ):
AddToolSetup( tool_name, setup_function )
return setup_function
return addToolSetup
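# Usage sketch (hypothetical tool name and setup step): functions registered
# via toolSetup are called by _tool_setup with keyword arguments env, options
# and os_env; returning a truthy value stops any remaining setup functions.
#
# @toolSetup('gcc')
# def setup_gcc( env, options, os_env ):
# os_env['CC'] = 'gcc' # hypothetical setup step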
#//===========================================================================//
def _tool_setup( tool_name, env, tools_setup = _tools_setup ):
options = env.get( 'AQL_OPTIONS' )
if options is None:
if __debug__:
_Warning( "Tool setup: No AQL_OPTIONS in env: " + id(env) )
return
options.SetEnv( env )
os_env = env['ENV']
setup_functions = tools_setup.get( tool_name, [] )
if __debug__:
if not setup_functions:
#~ _Info( "Setup tool: No setup for tool: " + tool_name )
return
for f in setup_functions:
if __debug__:
_Info( "Tool setup: " + tool_name + ' (' + f.__name__ + ')' )
if f( env = env, options = options, os_env = os_env ):
break
#//===========================================================================//
def AddToolPostSetup( tool_name, setup_function, tools_post_setup = _tools_post_setup ):
tools_post_setup.setdefault( tool_name, [] ).append( setup_function )
def toolPostSetup( tool_name ):
def addToolPostSetup( setup_function ):
AddToolPostSetup( tool_name, setup_function )
return setup_function
return addToolPostSetup
#//===========================================================================//
def _tool_post_setup( tool_name, env, tools_post_setup = _tools_post_setup ):
options = env.get( 'AQL_OPTIONS' )
if options is None:
return
options.SetEnv( env )
os_env = env['ENV']
setup_functions = tools_post_setup.get( tool_name, [] )
if __debug__:
if not setup_functions:
#~ _Info( "Tool post setup: No setup for tool: " + tool_name )
return
for f in setup_functions:
if __debug__:
_Info( "Tool post setup: " + tool_name + ' (' + f.__name__ + ')' )
f( env = env, options = options, os_env = os_env )
#//===========================================================================//
def _tool_exists( self, env ):
if self._aql_is_exist is None:
_tool_setup( self.name, env )
self._aql_is_exist = self._aql_exists( env )
return self._aql_is_exist
#//===========================================================================//
def _tool_generate( self, env ):
if self._aql_is_exist is None:
if not _tool_exists( self, env ):
_Warning( "Tool: '%s' has not been found, but it has been added." % (self.name) )
self._aql_generate( env )
_tool_post_setup( self.name, env )
#//===========================================================================//
def _init_tool( self, name, toolpath = [], **kw ):
_SCons_Tool_Tool_init( self, name, toolpath, **kw )
self._aql_is_exist = None
self._aql_generate = self.generate
self._aql_exists = self.exists
self.exists = lambda env, self = self: _tool_exists( self, env )
self.generate = lambda env, self = self: _tool_generate( self, env )
#//===========================================================================//
import SCons.Tool
_SCons_Tool_Tool_init = SCons.Tool.Tool.__init__
SCons.Tool.Tool.__init__ = _init_tool
| menify/sandbox | trunk/setup.py | Python | mit | 5,383 |
import os
from src.core import prep
from sgprocessor import *
def ProcessSg(p, opts):
if opts.anno == True:
if 'BEDDB' not in os.environ:
p.error('$BEDDB Not Exist. See README')
str_path_sgfq = opts.sg
str_nm = os.path.basename(os.path.splitext(opts.sg)[0])
str_proj = 'aux'
str_path_proj = os.path.join(opts.tdir, str_proj)
if not os.path.exists(str_path_proj):
os.makedirs(str_path_proj)
str_path_sgpsam = os.path.join(str_path_proj, str_nm + '.sgpsam')
str_path_sgsam = os.path.join(str_path_proj, str_nm + '.sgsam')
str_path_sg = os.path.join(opts.tdir, str_nm + '.sg')
print('Mapping sgRNA seq to ref genome with Bwa...')
prep.CallBWA(str_path_sgfq, '', opts.ref, str_path_sgpsam, False, opts.thrd)
prep.FilterSam(str_path_sgpsam, str_path_sgsam, False)
print('Done')
print('Processing sgsam...')
OrganizeSgsam(str_path_sgsam, str_path_sg)
print('Done')
if opts.anno == True:
str_path_sgbed = os.path.join(str_path_proj, str_nm + '.sgbed')
str_path_sgmap = os.path.join(str_path_proj, str_nm + '.sgmap')
str_path_sga = os.path.join(opts.tdir, str_nm + '.sga')
print('Annotating sgRNA...')
int_status = AnnotateSg(str_path_sgsam, opts.ref, str_path_sgbed, str_path_sgmap)
if int_status == 1:
print('Annotated with RefSeq')
elif int_status == 2:
print('Annotated with RefSeq and UCSC Gene')
elif int_status == 3:
print('Annotated with RefSeq, UCSC Gene and GENCODE')
elif int_status == 4:
print('Annotated with RefSeq and UCSC Gene')
print('Warning: Some are marked with None')
elif int_status == 5:
print('Annotated with RefSeq, UCSC Gene and GENCODE')
print('Warning: Some are marked with None')
print('Done')
print('Merging sg and sgmap...')
MergeSg(str_path_sg, str_path_sgmap, str_path_sga)
print('Done')
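# Usage sketch (assumes `p` is an optparse/argparse-style parser used only
# for p.error, and `opts` carries the attributes read above):
#
# opts.sg - sgRNA FASTQ file
# opts.ref - reference genome index for BWA
# opts.tdir - target/output directory
# opts.thrd - number of threads for BWA
# opts.anno - if True, annotate sgRNAs (requires $BEDDB)
#
# ProcessSg(p, opts)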
| bm2-lab/cage | src/core/sg/interface_sg.py | Python | mit | 2,004 |
import pandas as pd
import numpy as np
from dateutil.relativedelta import relativedelta
#### Utilities
def get_first_visit_date(data_patient):
''' Determines the first visit for a given patient'''
#IDEA Could be parallelized in Dask
data_patient['first_visit_date'] = min(data_patient.visit_date)
return data_patient
def subset_analysis_data(data, date_analysis):
''' Function that subsets the full dataset to only the data available for a certain analysis date'''
if isinstance(data.date_entered.iloc[0], str):
data.date_entered = pd.to_datetime(data.date_entered)
data = data[data.date_entered < date_analysis]
return data
def subset_cohort(data, horizon_date, horizon_time, bandwidth):
''' Function that subsets data from a cohort that has initiated care a year before the horizon_date, and after a year + bandwith'''
horizon_date = pd.to_datetime(horizon_date)
data['first_visit_date'] = pd.to_datetime(data['first_visit_date'])
cohort_data = data[(data['first_visit_date'] >= horizon_date - relativedelta(days=horizon_time + bandwidth)) &
(data['first_visit_date'] < horizon_date - relativedelta(days=horizon_time))]
return cohort_data
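# Example: with horizon_date='2017-01-01', horizon_time=365 and bandwidth=30,
# this keeps patients whose first visit fell in the 30-day window from 395 to
# 365 days before 2017-01-01, i.e. the cohort that initiated care about one
# year earlier.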
#### Standard reporting
def status_patient(data_patient, reference_date, grace_period):
''' Determines the status of a patient at a given reference_date, given the data available at a given analysis_date
TODO Also select the available data for Death and Transfer and other outcomes based on data entry time
'''
#IDEA Could be parallelized in Dask
data_patient = get_first_visit_date(data_patient)
date_out = pd.NaT
date_last_appointment = pd.to_datetime(max(data_patient.next_visit_date))
late_time = reference_date - date_last_appointment
if late_time.days > grace_period:
status = 'LTFU'
date_out = date_last_appointment
if late_time.days <= grace_period:
status = 'Followed'
if pd.notnull(data_patient.reasonDescEn.iloc[0]) and (pd.to_datetime(data_patient.discDate.iloc[0]) < reference_date):
status = data_patient.reasonDescEn.iloc[0]
date_out = pd.to_datetime(data_patient.discDate.iloc[0])
return pd.DataFrame([{'status': status,
'late_time': late_time,
'last_appointment': date_last_appointment,
'date_out':date_out ,
'first_visit_date':data_patient.first_visit_date.iloc[0],
'facility':data_patient.facility.iloc[0]}])
def horizon_outcome(data_cohort, reference_date, horizon_time):
# TODO Make sure dates are dates
data_cohort['first_visit_date'] = pd.to_datetime(data_cohort['first_visit_date']) #TODO This conversion should happen earlier
data_cohort.loc[:, 'horizon_date'] = data_cohort['first_visit_date'] + np.timedelta64(horizon_time, 'D')
data_cohort.loc[: , 'horizon_status'] = data_cohort['status']
# If the patient exited the cohort after his horizon date, still consider him followed
# BUG This is marginally invalid, for example if a patient was considered LTFU before he died
data_cohort.loc[~(data_cohort['status'] == 'Followed') & (data_cohort['date_out'] > data_cohort['horizon_date']), 'horizon_status'] = 'Followed'
return data_cohort
## Transversal description only
def n_visits(data, month):
reporting_month = pd.to_datetime(data['visit_date']).dt.to_period('M')
n_vis = sum(reporting_month == month)
return n_vis
def make_report(data, reference_date, date_analysis, grace_period, horizon_time, cohort_width):
assert reference_date <= date_analysis, 'You should not analyze a period before you have the data (date of analysis is before reference date)'
if isinstance(reference_date, str):
reference_date = pd.to_datetime(reference_date)
if isinstance(date_analysis, str):
date_analysis = pd.to_datetime(date_analysis)
report_data = subset_analysis_data(data, date_analysis)
if len(report_data) > 0:
month = reference_date.to_period('M') - 1
n_visits_month = report_data.groupby('facility').apply(n_visits, month)
df_status = report_data.groupby('patient_id').apply(status_patient, reference_date, grace_period)
cohort_data = subset_cohort(df_status, reference_date, horizon_time, cohort_width)
# print(df_status.head())
horizon_outcome_data = horizon_outcome(cohort_data, month, horizon_time)
transversal_reports = df_status.groupby('facility').status.value_counts()
longitudinal_reports = horizon_outcome_data.groupby('facility').status.value_counts()
out_reports = {'transversal':transversal_reports,
'longitudinal':longitudinal_reports,
'n_visits':n_visits_month}
return out_reports
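# Usage sketch (assumes `data` is a visit-level DataFrame with the columns
# used above: patient_id, facility, visit_date, next_visit_date, date_entered,
# reasonDescEn and discDate):
#
# reports = make_report(data, '2017-06-01', '2017-07-01',
# grace_period=90, horizon_time=365, cohort_width=30)
# reports['transversal'] # current status counts per facility
# reports['longitudinal'] # horizon outcomes for the one-year cohort
# reports['n_visits'] # visits per facility in the previous month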
# QUESTION What are the form_types
| grlurton/hiv_retention_metrics | src/models/cohort_analysis_function.py | Python | mit | 4,874 |
import os
import sys
from Bio.Seq import Seq
def main(*args, **kwargs):
fpath = os.path.join(os.getcwd(), args[-2])
tmp = []
with open(fpath,'r') as f:
for line in f:
txt = line.strip()
tmp.append(txt)
S1 = set(tmp)
S2 = set([str(Seq(s).reverse_complement()) for s in tmp])
S = S1.union(S2)
res = []
for s in S:
res.append((s[:-1],s[1:]))
for t1,t2 in res:
print '(%s, %s)' % (t1,t2)
out = os.path.join(os.getcwd(),args[-1])
f = open(out, 'w')
for t1,t2 in res:
txt = '(%s, %s)\n' % (t1,t2)
f.write(txt)
f.close()
if __name__ == '__main__':
main(*sys.argv)
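# Usage sketch (hypothetical file names): reads one DNA string per line, adds
# reverse complements, and emits the (k-1)-mer adjacency pairs of the
# De Bruijn graph to stdout and to the output file:
#
# python Constructing_a_De_Bruijn_Graph.py rosalind_dbru.txt output.txt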
| crf1111/Bio-Informatics-Learning | Bio-StrongHold/src/Constructing_a_De_Bruijn_Graph.py | Python | mit | 685 |
# quick_info/models.py
# Brought to you by We Vote. Be good.
# -*- coding: UTF-8 -*-
# Diagrams here: https://docs.google.com/drawings/d/1fEs_f2-4Du9knJ8FXn6PQ2BcmXL4zSkMYh-cp75EeLE/edit
from ballot.models import OFFICE, CANDIDATE, POLITICIAN, MEASURE, KIND_OF_BALLOT_ITEM_CHOICES
from django.db import models
from exception.models import handle_exception, handle_record_found_more_than_one_exception,\
handle_record_not_saved_exception
import wevote_functions.admin
from wevote_functions.functions import convert_to_int, positive_value_exists
from wevote_settings.models import fetch_next_we_vote_id_quick_info_integer, \
fetch_next_we_vote_id_quick_info_master_integer, fetch_site_unique_id_prefix
# Language Codes: http://www.mcanerin.com/en/articles/meta-language.asp
# Country Codes: http://www.mcanerin.com/en/articles/ccTLD.asp
SPANISH = 'es'
ENGLISH = 'en'
TAGALOG = 'tl'
VIETNAMESE = 'vi'
CHINESE = 'zh'
LANGUAGE_CHOICES = (
(ENGLISH, 'English'),
(SPANISH, 'Spanish'),
(TAGALOG, 'Tagalog'),
(VIETNAMESE, 'Vietnamese'),
(CHINESE, 'Chinese'),
)
NOT_SPECIFIED = 'not_specified'
BALLOTPEDIA = 'ballotpedia'
DIRECT_ENTRY = 'direct'
WIKIPEDIA = 'wikipedia'
SOURCE_SITE_CHOICES = (
(NOT_SPECIFIED, 'Not Specified'),
(BALLOTPEDIA, 'Ballotpedia'),
(DIRECT_ENTRY, 'Direct Entry'),
(WIKIPEDIA, 'Wikipedia'),
)
logger = wevote_functions.admin.get_logger(__name__)
class QuickInfo(models.Model):
"""
The information that shows when you click an info icon next to a ballot item
"""
# We are relying on built-in Python id field
# The we_vote_id identifier is unique across all We Vote sites, and allows us to share our org info with other
# organizations
# It starts with "wv" then we add on a database specific identifier like "3v" (WeVoteSetting.site_unique_id_prefix)
# then the string "info", and then a sequential integer like "123".
# We keep the last value in WeVoteSetting.we_vote_id_last_quick_info_integer
we_vote_id = models.CharField(
verbose_name="we vote permanent id", max_length=255, default=None, null=True, blank=True, unique=True)
# The language that this text is in
language = models.CharField(max_length=5, choices=LANGUAGE_CHOICES, default=ENGLISH)
info_text = models.TextField(null=True, blank=True)
info_html = models.TextField(null=True, blank=True)
ballot_item_display_name = models.CharField(verbose_name="text name for ballot item for quick display",
max_length=255, null=True, blank=True)
# See also more_info_credit_text
more_info_credit = models.CharField(max_length=15, choices=SOURCE_SITE_CHOICES, default=NOT_SPECIFIED,
null=True, blank=True)
# A link to any location with more information about this quick information
more_info_url = models.URLField(blank=True, null=True, verbose_name='url with more the full entry for this info')
last_updated = models.DateTimeField(verbose_name='date entered', null=True, auto_now=True) # TODO Convert to date_last_changed
# The unique id of the last person who edited this entry.
last_editor_we_vote_id = models.CharField(
verbose_name="last editor we vote id", max_length=255, null=True, blank=True, unique=False)
# This is the office that the quick_info refers to.
# Either contest_measure is filled, contest_office OR candidate, but not all three
contest_office_we_vote_id = models.CharField(
verbose_name="we vote permanent id for the contest_office", max_length=255, null=True, blank=True, unique=False)
# This is the candidate/politician that the quick_info refers to.
# Either candidate is filled, contest_office OR contest_measure, but not all three
candidate_campaign_we_vote_id = models.CharField(
verbose_name="we vote permanent id for the candidate", max_length=255, null=True,
blank=True, unique=False)
# Useful for queries based on Politicians
politician_we_vote_id = models.CharField(
verbose_name="we vote permanent id for politician", max_length=255, null=True,
blank=True, unique=False)
# This is the measure/initiative/proquick_info that the quick_info refers to.
# Either contest_measure is filled, contest_office OR candidate, but not all three
contest_measure_we_vote_id = models.CharField(
verbose_name="we vote permanent id for the contest_measure", max_length=255, null=True,
blank=True, unique=False)
# There are many ballot items that don't have (or need) a custom quick_info entry, and can reference a general
# entry. This field is the we_vote_id of the master quick_info entry that has the general text.
quick_info_master_we_vote_id = models.CharField(
verbose_name="we vote id of other entry which is the master", max_length=255, default=None, null=True,
blank=True, unique=True)
# The unique ID of the election containing this contest. (Provided by Google Civic)
google_civic_election_id = models.PositiveIntegerField(
verbose_name="google civic election id", default=0, null=True, blank=True)
def __unicode__(self):
return self.we_vote_id
class Meta:
ordering = ('last_updated',)
# We override the save function so we can auto-generate we_vote_id
def save(self, *args, **kwargs):
# Even if this organization came from another source we still need a unique we_vote_id
if self.we_vote_id:
self.we_vote_id = self.we_vote_id.strip().lower()
if self.we_vote_id == "" or self.we_vote_id is None: # If there isn't a value...
# ...generate a new id
site_unique_id_prefix = fetch_site_unique_id_prefix()
next_local_integer = fetch_next_we_vote_id_quick_info_integer()
# "wv" = We Vote
# site_unique_id_prefix = a generated (or assigned) unique id for one server running We Vote
# "info" = tells us this is a unique id for a quick_info entry
# next_integer = a unique, sequential integer for this server - not necessarily tied to database id
self.we_vote_id = "wv{site_unique_id_prefix}info{next_integer}".format(
site_unique_id_prefix=site_unique_id_prefix,
next_integer=next_local_integer,
)
super(QuickInfo, self).save(*args, **kwargs)
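# Example of a generated identifier (a sketch using the sample "3v" prefix
# from the comments above): "wv3vinfo123"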
def is_english(self):
if self.language == ENGLISH:
return True
return False
def is_spanish(self):
if self.language == SPANISH:
return True
return False
def is_vietnamese(self):
if self.language == VIETNAMESE:
return True
return False
def is_chinese(self):
if self.language == CHINESE:
return True
return False
def is_tagalog(self):
if self.language == TAGALOG:
return True
return False
def get_kind_of_ballot_item(self):
if positive_value_exists(self.contest_office_we_vote_id):
return OFFICE
elif positive_value_exists(self.candidate_campaign_we_vote_id):
return CANDIDATE
elif positive_value_exists(self.politician_we_vote_id):
return POLITICIAN
elif positive_value_exists(self.contest_measure_we_vote_id):
return MEASURE
return None
def get_ballot_item_we_vote_id(self):
if positive_value_exists(self.contest_office_we_vote_id):
return self.contest_office_we_vote_id
elif positive_value_exists(self.candidate_campaign_we_vote_id):
return self.candidate_campaign_we_vote_id
elif positive_value_exists(self.politician_we_vote_id):
return self.politician_we_vote_id
elif positive_value_exists(self.contest_measure_we_vote_id):
return self.contest_measure_we_vote_id
return None
def more_info_credit_text(self):
if self.more_info_credit == BALLOTPEDIA:
return "Courtesy of Ballotpedia.org"
if self.more_info_credit == WIKIPEDIA:
return "Courtesy of Wikipedia.org"
return ""
class QuickInfoManager(models.Manager):
def __unicode__(self):
return "QuickInfoManager"
def fetch_we_vote_id_from_local_id(self, quick_info_id):
if positive_value_exists(quick_info_id):
results = self.retrieve_quick_info_from_id(quick_info_id)
if results['quick_info_found']:
quick_info = results['quick_info']
return quick_info.we_vote_id
else:
return None
else:
return None
def retrieve_contest_office_quick_info(self, contest_office_we_vote_id):
quick_info_id = 0
quick_info_we_vote_id = None
candidate_we_vote_id = None
politician_we_vote_id = None
contest_measure_we_vote_id = None
quick_info_manager = QuickInfoManager()
return quick_info_manager.retrieve_quick_info(
quick_info_id, quick_info_we_vote_id,
contest_office_we_vote_id,
candidate_we_vote_id,
politician_we_vote_id,
contest_measure_we_vote_id
)
def retrieve_candidate_quick_info(self, candidate_we_vote_id):
quick_info_id = 0
quick_info_we_vote_id = None
politician_we_vote_id = None
contest_measure_we_vote_id = None
contest_office_we_vote_id = None
quick_info_manager = QuickInfoManager()
return quick_info_manager.retrieve_quick_info(
quick_info_id, quick_info_we_vote_id,
contest_office_we_vote_id,
candidate_we_vote_id,
politician_we_vote_id,
contest_measure_we_vote_id
)
def retrieve_contest_measure_quick_info(self, contest_measure_we_vote_id):
quick_info_id = 0
quick_info_we_vote_id = None
candidate_we_vote_id = None
politician_we_vote_id = None
contest_office_we_vote_id = None
quick_info_manager = QuickInfoManager()
return quick_info_manager.retrieve_quick_info(
quick_info_id, quick_info_we_vote_id,
contest_office_we_vote_id,
candidate_we_vote_id,
politician_we_vote_id,
contest_measure_we_vote_id
)
def retrieve_quick_info_from_id(self, quick_info_id):
quick_info_we_vote_id = None
candidate_we_vote_id = None
politician_we_vote_id = None
contest_office_we_vote_id = None
contest_measure_we_vote_id = None
quick_info_manager = QuickInfoManager()
return quick_info_manager.retrieve_quick_info(
quick_info_id, quick_info_we_vote_id,
contest_office_we_vote_id,
candidate_we_vote_id,
politician_we_vote_id,
contest_measure_we_vote_id
)
def retrieve_quick_info_from_we_vote_id(self, quick_info_we_vote_id):
quick_info_id = 0
candidate_we_vote_id = None
politician_we_vote_id = None
contest_office_we_vote_id = None
contest_measure_we_vote_id = None
quick_info_manager = QuickInfoManager()
return quick_info_manager.retrieve_quick_info(
quick_info_id, quick_info_we_vote_id,
contest_office_we_vote_id,
candidate_we_vote_id,
politician_we_vote_id,
contest_measure_we_vote_id
)
def retrieve_quick_info(self, quick_info_id, quick_info_we_vote_id=None,
contest_office_we_vote_id=None,
candidate_we_vote_id=None,
politician_we_vote_id=None,
contest_measure_we_vote_id=None):
error_result = False
exception_does_not_exist = False
exception_multiple_object_returned = False
quick_info_on_stage = QuickInfo()
success = False
try:
if positive_value_exists(quick_info_id):
status = "RETRIEVE_QUICK_INFO_FOUND_WITH_QUICK_INFO_ID"
quick_info_on_stage = QuickInfo.objects.get(id=quick_info_id)
quick_info_id = quick_info_on_stage.id
success = True
elif positive_value_exists(quick_info_we_vote_id):
status = "RETRIEVE_QUICK_INFO_FOUND_WITH_WE_VOTE_ID"
quick_info_on_stage = QuickInfo.objects.get(we_vote_id=quick_info_we_vote_id)
quick_info_id = quick_info_on_stage.id
success = True
elif positive_value_exists(contest_office_we_vote_id):
status = "RETRIEVE_QUICK_INFO_FOUND_WITH_OFFICE_WE_VOTE_ID"
quick_info_on_stage = QuickInfo.objects.get(
contest_office_we_vote_id=contest_office_we_vote_id)
quick_info_id = quick_info_on_stage.id
success = True
elif positive_value_exists(candidate_we_vote_id):
status = "RETRIEVE_QUICK_INFO_FOUND_WITH_CANDIDATE_WE_VOTE_ID"
quick_info_on_stage = QuickInfo.objects.get(
candidate_campaign_we_vote_id=candidate_we_vote_id)
quick_info_id = quick_info_on_stage.id
success = True
elif positive_value_exists(politician_we_vote_id):
status = "RETRIEVE_QUICK_INFO_FOUND_WITH_POLITICIAN_WE_VOTE_ID"
quick_info_on_stage = QuickInfo.objects.get(
politician_we_vote_id=politician_we_vote_id)
quick_info_id = quick_info_on_stage.id
success = True
elif positive_value_exists(contest_measure_we_vote_id):
status = "RETRIEVE_QUICK_INFO_FOUND_WITH_MEASURE_WE_VOTE_ID"
quick_info_on_stage = QuickInfo.objects.get(
contest_measure_we_vote_id=contest_measure_we_vote_id)
quick_info_id = quick_info_on_stage.id
success = True
else:
status = "RETRIEVE_QUICK_INFO_INSUFFICIENT_VARIABLES"
except QuickInfo.MultipleObjectsReturned as e:
handle_record_found_more_than_one_exception(e, logger=logger)
error_result = True
exception_multiple_object_returned = True
success = False
status = "RETRIEVE_QUICK_INFO_MULTIPLE_FOUND"
except QuickInfo.DoesNotExist:
error_result = False
exception_does_not_exist = True
success = True
status = "RETRIEVE_QUICK_INFO_NONE_FOUND"
results = {
'success': success,
'status': status,
'error_result': error_result,
'DoesNotExist': exception_does_not_exist,
'MultipleObjectsReturned': exception_multiple_object_returned,
'quick_info_found': True if quick_info_id > 0 else False,
'quick_info_id': quick_info_id,
'quick_info_we_vote_id': quick_info_on_stage.we_vote_id,
'quick_info': quick_info_on_stage,
'is_chinese': quick_info_on_stage.is_chinese(),
'is_english': quick_info_on_stage.is_english(),
'is_spanish': quick_info_on_stage.is_spanish(),
'is_tagalog': quick_info_on_stage.is_tagalog(),
'is_vietnamese': quick_info_on_stage.is_vietnamese(),
}
return results
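# Usage sketch (hypothetical we_vote_id): the retrieve_* helpers above all
# funnel into retrieve_quick_info and return this same results dict.
#
# results = QuickInfoManager().retrieve_quick_info_from_we_vote_id('wv3vinfo123')
# if results['quick_info_found']:
# quick_info = results['quick_info']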
def retrieve_quick_info_list(self, google_civic_election_id, quick_info_search_str=''):
google_civic_election_id = convert_to_int(google_civic_election_id)
quick_info_list = []
quick_info_list_found = False
try:
quick_info_queryset = QuickInfo.objects.all()
if positive_value_exists(quick_info_search_str):
filters = []
# new_filter = Q(id__iexact=quick_info_search_str)
# filters.append(new_filter)
#
# new_filter = Q(ballot_location_display_name__icontains=quick_info_search_str)
# filters.append(new_filter)
# Add the first query
if len(filters):
final_filters = filters.pop()
# ...and "OR" the remaining items in the list
for item in filters:
final_filters |= item
quick_info_queryset = quick_info_queryset.filter(final_filters)
quick_info_queryset = quick_info_queryset.filter(
google_civic_election_id=google_civic_election_id)
# if positive_value_exists(state_code):
# quick_info_queryset = quick_info_queryset.filter(normalized_state__iexact=state_code)
quick_info_list = quick_info_queryset
if len(quick_info_list):
quick_info_list_found = True
status = 'QUICK_INFO_LIST_FOUND'
else:
status = 'NO_QUICK_INFO_LIST_FOUND'
except QuickInfo.DoesNotExist:
status = 'NO_QUICK_INFO_LIST_FOUND_DOES_NOT_EXIST'
quick_info_list = []
except Exception as e:
handle_exception(e, logger=logger)
status = 'FAILED retrieve_quick_info_list_for_election ' \
'{error} [type: {error_type}]'.format(error=e, error_type=type(e))
results = {
'success': True if quick_info_list_found else False,
'status': status,
'quick_info_list_found': quick_info_list_found,
'quick_info_list': quick_info_list,
}
return results
def update_or_create_quick_info(self, quick_info_id, quick_info_we_vote_id,
ballot_item_display_name,
contest_office_we_vote_id,
candidate_we_vote_id,
politician_we_vote_id,
contest_measure_we_vote_id,
info_html,
info_text,
language,
last_editor_we_vote_id,
quick_info_master_we_vote_id,
more_info_url,
more_info_credit,
google_civic_election_id
):
# Does a quick_info entry already exist?
quick_info_manager = QuickInfoManager()
results = quick_info_manager.retrieve_quick_info(quick_info_id, quick_info_we_vote_id,
contest_office_we_vote_id,
candidate_we_vote_id,
politician_we_vote_id,
contest_measure_we_vote_id)
quick_info_on_stage_found = False
quick_info_on_stage_id = 0
quick_info_on_stage = QuickInfo()
if results['quick_info_found']:
quick_info_on_stage = results['quick_info']
# Update this quick_info entry with new values - we do not delete because we might be able to reuse the existing entry
# noinspection PyBroadException
try:
# Figure out if the update is a change to a master entry
if positive_value_exists(quick_info_master_we_vote_id):
uses_master_entry = True
elif (info_html is not False) or (info_text is not False) or (more_info_url is not False):
uses_master_entry = False
elif positive_value_exists(quick_info_on_stage.info_text) or \
positive_value_exists(quick_info_on_stage.info_html) or \
positive_value_exists(quick_info_on_stage.more_info_url):
uses_master_entry = False
elif positive_value_exists(quick_info_on_stage.quick_info_master_we_vote_id):
uses_master_entry = True
else:
uses_master_entry = True
if ballot_item_display_name is not False:
quick_info_on_stage.ballot_item_display_name = ballot_item_display_name
if language is not False:
quick_info_on_stage.language = language
if last_editor_we_vote_id is not False:
quick_info_on_stage.last_editor_we_vote_id = last_editor_we_vote_id
if contest_office_we_vote_id is not False:
quick_info_on_stage.contest_office_we_vote_id = contest_office_we_vote_id
if candidate_we_vote_id is not False:
quick_info_on_stage.candidate_campaign_we_vote_id = candidate_we_vote_id
if politician_we_vote_id is not False:
quick_info_on_stage.politician_we_vote_id = politician_we_vote_id
if contest_measure_we_vote_id is not False:
quick_info_on_stage.contest_measure_we_vote_id = contest_measure_we_vote_id
if google_civic_election_id is not False:
quick_info_on_stage.google_civic_election_id = google_civic_election_id
if uses_master_entry:
if quick_info_master_we_vote_id is not False:
quick_info_on_stage.quick_info_master_we_vote_id = quick_info_master_we_vote_id
# Clear out unique entry values
quick_info_on_stage.info_text = ""
quick_info_on_stage.info_html = ""
quick_info_on_stage.more_info_url = ""
quick_info_on_stage.more_info_credit = NOT_SPECIFIED
else:
# If here, this is NOT a master entry
if info_text is not False:
quick_info_on_stage.info_text = info_text
if info_html is not False:
quick_info_on_stage.info_html = info_html
if more_info_url is not False:
quick_info_on_stage.more_info_url = more_info_url
if more_info_credit is not False:
quick_info_on_stage.more_info_credit = more_info_credit
# Clear out master entry value
quick_info_on_stage.quick_info_master_we_vote_id = ""
if google_civic_election_id is not False:
quick_info_on_stage.google_civic_election_id = google_civic_election_id
# We don't need to update date_last_changed here because we set auto_now=True in the field
quick_info_on_stage.save()
quick_info_on_stage_id = quick_info_on_stage.id
quick_info_on_stage_found = True
status = 'QUICK_INFO_UPDATED'
except Exception as e:
status = 'FAILED_TO_UPDATE_QUICK_INFO'
elif results['MultipleObjectsReturned']:
status = 'QUICK_INFO MultipleObjectsReturned'
elif results['DoesNotExist']:
try:
# Create new quick_info entry
if ballot_item_display_name is False:
ballot_item_display_name = ""
if language is False:
language = ENGLISH
if last_editor_we_vote_id is False:
last_editor_we_vote_id = ""
if contest_office_we_vote_id is False:
contest_office_we_vote_id = ""
if candidate_we_vote_id is False:
candidate_we_vote_id = ""
if politician_we_vote_id is False:
politician_we_vote_id = ""
if contest_measure_we_vote_id is False:
contest_measure_we_vote_id = ""
if google_civic_election_id is False:
google_civic_election_id = 0
# Master related data
if quick_info_master_we_vote_id is False:
quick_info_master_we_vote_id = ""
# Unique related data
if info_html is False:
info_html = ""
if info_text is False:
info_text = ""
if more_info_url is False:
more_info_url = ""
if more_info_credit is False:
more_info_credit = None
quick_info_on_stage = QuickInfo(
ballot_item_display_name=ballot_item_display_name,
contest_office_we_vote_id=contest_office_we_vote_id,
candidate_campaign_we_vote_id=candidate_we_vote_id,
politician_we_vote_id=politician_we_vote_id,
contest_measure_we_vote_id=contest_measure_we_vote_id,
info_html=info_html,
info_text=info_text,
language=language,
last_editor_we_vote_id=last_editor_we_vote_id,
quick_info_master_we_vote_id=quick_info_master_we_vote_id,
more_info_url=more_info_url,
more_info_credit=more_info_credit,
google_civic_election_id=google_civic_election_id
# We don't need to update last_updated here because we set auto_now=True in the field
)
quick_info_on_stage.save()
quick_info_on_stage_id = quick_info_on_stage.id
quick_info_on_stage_found = True
status = 'CREATED_QUICK_INFO'
except Exception as e:
status = 'FAILED_TO_CREATE_NEW_QUICK_INFO'
handle_record_not_saved_exception(e, logger=logger, exception_message_optional=status)
else:
status = results['status']
results = {
'success': True if quick_info_on_stage_found else False,
'status': status,
'quick_info_found': quick_info_on_stage_found,
'quick_info_id': quick_info_on_stage_id,
'quick_info': quick_info_on_stage,
}
return results
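# Usage sketch (hypothetical values): per the checks above, pass False for
# any field that should be left untouched when updating an existing entry.
#
# results = QuickInfoManager().update_or_create_quick_info(
# quick_info_id=0, quick_info_we_vote_id=False,
# ballot_item_display_name='Example Measure',
# contest_office_we_vote_id=False, candidate_we_vote_id=False,
# politician_we_vote_id=False, contest_measure_we_vote_id='wv3vmeas456',
# info_html=False, info_text='Short description.', language='en',
# last_editor_we_vote_id=False, quick_info_master_we_vote_id=False,
# more_info_url=False, more_info_credit=False, google_civic_election_id=4162)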
def delete_quick_info(self, quick_info_id):
quick_info_id = convert_to_int(quick_info_id)
quick_info_deleted = False
try:
if quick_info_id:
results = self.retrieve_quick_info(quick_info_id)
if results['quick_info_found']:
quick_info = results['quick_info']
quick_info_id = quick_info.id
quick_info.delete()
quick_info_deleted = True
except Exception as e:
handle_exception(e, logger=logger)
results = {
'success': quick_info_deleted,
'quick_info_deleted': quick_info_deleted,
'quick_info_id': quick_info_id,
}
return results
class QuickInfoMaster(models.Model):
"""
Master data that can be applied to multiple ballot items
"""
# We are relying on built-in Python id field
# The we_vote_id identifier is unique across all We Vote sites, and allows us to share our org info with other
# organizations
# It starts with "wv" then we add on a database specific identifier like "3v" (WeVoteSetting.site_unique_id_prefix)
# then the string "infom" (for "info master"), and then a sequential integer like "123".
# We keep the last value in WeVoteSetting.we_vote_id_last_quick_info_master_integer
we_vote_id = models.CharField(
verbose_name="we vote permanent id", max_length=255, default=None, null=True, blank=True, unique=True)
# What kind of ballot item is this a master entry for? Mostly used so we can organize these entries
kind_of_ballot_item = models.CharField(max_length=10, choices=KIND_OF_BALLOT_ITEM_CHOICES, default=OFFICE)
# The language that this text is in
language = models.CharField(max_length=5, choices=LANGUAGE_CHOICES, default=ENGLISH)
info_text = models.TextField(null=True, blank=True)
info_html = models.TextField(null=True, blank=True)
master_entry_name = models.CharField(verbose_name="text name for quick info master entry",
max_length=255, null=True, blank=True)
more_info_credit = models.CharField(max_length=15, choices=SOURCE_SITE_CHOICES, default=BALLOTPEDIA,
null=True, blank=True)
# A link to any location with more information about this quick information
more_info_url = models.URLField(blank=True, null=True, verbose_name='url with more the full entry for this info')
last_updated = models.DateTimeField(verbose_name='date entered', null=True, auto_now=True) # TODO convert to date_last_changed
# The unique id of the last person who edited this entry.
last_editor_we_vote_id = models.CharField(
verbose_name="last editor we vote id", max_length=255, null=True, blank=True, unique=False)
def __unicode__(self):
return self.we_vote_id
class Meta:
ordering = ('last_updated',)
# We override the save function so we can auto-generate we_vote_id
def save(self, *args, **kwargs):
# Even if this organization came from another source we still need a unique we_vote_id
if self.we_vote_id:
self.we_vote_id = self.we_vote_id.strip().lower()
if self.we_vote_id == "" or self.we_vote_id is None: # If there isn't a value...
# ...generate a new id
site_unique_id_prefix = fetch_site_unique_id_prefix()
next_local_integer = fetch_next_we_vote_id_quick_info_master_integer()
# "wv" = We Vote
# site_unique_id_prefix = a generated (or assigned) unique id for one server running We Vote
# "infom" = tells us this is a unique id for a quick_info_master entry
# next_integer = a unique, sequential integer for this server - not necessarily tied to database id
self.we_vote_id = "wv{site_unique_id_prefix}infom{next_integer}".format(
site_unique_id_prefix=site_unique_id_prefix,
next_integer=next_local_integer,
)
super(QuickInfoMaster, self).save(*args, **kwargs)
def is_english(self):
if self.language == ENGLISH:
return True
return False
def is_spanish(self):
if self.language == SPANISH:
return True
return False
def is_vietnamese(self):
if self.language == VIETNAMESE:
return True
return False
def is_chinese(self):
if self.language == CHINESE:
return True
return False
def is_tagalog(self):
if self.language == TAGALOG:
return True
return False
def more_info_credit_text(self):
if self.more_info_credit == BALLOTPEDIA:
return "Courtesy of Ballotpedia.org"
if self.more_info_credit == WIKIPEDIA:
return "Courtesy of Wikipedia.org"
return ""
class QuickInfoMasterManager(models.Manager):
def __unicode__(self):
return "QuickInfoMasterManager"
def fetch_we_vote_id_from_local_id(self, quick_info_master_id):
if positive_value_exists(quick_info_master_id):
results = self.retrieve_quick_info_master_from_id(quick_info_master_id)
if results['quick_info_master_found']:
quick_info_master = results['quick_info_master']
return quick_info_master.we_vote_id
else:
return None
else:
return None
def retrieve_quick_info_master_from_id(self, quick_info_master_id):
quick_info_master_we_vote_id = None
quick_info_master_manager = QuickInfoMasterManager()
return quick_info_master_manager.retrieve_quick_info_master(quick_info_master_id, quick_info_master_we_vote_id)
def retrieve_quick_info_master_from_we_vote_id(self, quick_info_master_we_vote_id):
quick_info_master_id = 0
quick_info_master_manager = QuickInfoMasterManager()
return quick_info_master_manager.retrieve_quick_info_master(quick_info_master_id, quick_info_master_we_vote_id)
def retrieve_quick_info_master(self, quick_info_master_id, quick_info_master_we_vote_id=None):
error_result = False
exception_does_not_exist = False
exception_multiple_object_returned = False
quick_info_master = QuickInfoMaster()
success = False
try:
if positive_value_exists(quick_info_master_id):
status = "RETRIEVE_QUICK_INFO_MASTER_FOUND_WITH_ID"
quick_info_master = QuickInfoMaster.objects.get(id=quick_info_master_id)
quick_info_master_id = quick_info_master.id
success = True
elif positive_value_exists(quick_info_master_we_vote_id):
status = "RETRIEVE_QUICK_INFO_MASTER_FOUND_WITH_WE_VOTE_ID"
quick_info_master = QuickInfoMaster.objects.get(we_vote_id=quick_info_master_we_vote_id)
quick_info_master_id = quick_info_master.id
success = True
else:
status = "RETRIEVE_QUICK_INFO_MASTER_INSUFFICIENT_VARIABLES"
except QuickInfoMaster.MultipleObjectsReturned as e:
handle_record_found_more_than_one_exception(e, logger=logger)
error_result = True
exception_multiple_object_returned = True
success = False
status = "RETRIEVE_QUICK_INFO_MASTER_MULTIPLE_FOUND"
except QuickInfoMaster.DoesNotExist:
error_result = False
exception_does_not_exist = True
success = True
status = "RETRIEVE_QUICK_INFO_MASTER_NONE_FOUND"
results = {
'success': success,
'status': status,
'error_result': error_result,
'DoesNotExist': exception_does_not_exist,
'MultipleObjectsReturned': exception_multiple_object_returned,
'quick_info_master_found': True if quick_info_master_id > 0 else False,
'quick_info_master_id': quick_info_master_id,
'quick_info_master': quick_info_master,
}
return results
def update_or_create_quick_info_master(self, quick_info_master_id,
quick_info_master_we_vote_id,
master_entry_name,
info_html, info_text,
language,
kind_of_ballot_item,
last_editor_we_vote_id,
more_info_url,
more_info_credit,
):
# Does a quick_info_master entry already exist?
quick_info_master_manager = QuickInfoMasterManager()
if positive_value_exists(quick_info_master_id) or positive_value_exists(quick_info_master_we_vote_id):
results = quick_info_master_manager.retrieve_quick_info_master(quick_info_master_id,
quick_info_master_we_vote_id)
quick_info_master_found = results['quick_info_master_found']
else:
quick_info_master_found = False
if quick_info_master_found:
quick_info_master = results['quick_info_master']
# noinspection PyBroadException
try:
if master_entry_name is not False:
quick_info_master.master_entry_name = master_entry_name
if info_html is not False:
quick_info_master.info_html = info_html
if info_text is not False:
quick_info_master.info_text = info_text
if language is not False:
quick_info_master.language = language
if kind_of_ballot_item is not False:
quick_info_master.kind_of_ballot_item = kind_of_ballot_item
if last_editor_we_vote_id is not False:
quick_info_master.last_editor_we_vote_id = last_editor_we_vote_id
if more_info_url is not False:
quick_info_master.more_info_url = more_info_url
if more_info_credit is not False:
quick_info_master.more_info_credit = more_info_credit
                # We don't need to update date_last_changed here because we set auto_now=True on the field
quick_info_master.save()
quick_info_master_id = quick_info_master.id
quick_info_master_found = True
status = 'QUICK_INFO_MASTER_UPDATED'
            except Exception as e:
                status = 'FAILED_TO_UPDATE_QUICK_INFO_MASTER'
                handle_record_not_saved_exception(e, logger=logger, exception_message_optional=status)
else:
try:
                # Create a new quick_info_master entry
if master_entry_name is False:
master_entry_name = None
if info_html is False:
info_html = None
if info_text is False:
info_text = None
if language is False:
language = ENGLISH
if last_editor_we_vote_id is False:
last_editor_we_vote_id = None
if more_info_url is False:
more_info_url = None
if more_info_credit is False:
more_info_credit = None
quick_info_master = QuickInfoMaster(
master_entry_name=master_entry_name,
info_html=info_html,
info_text=info_text,
language=language,
kind_of_ballot_item=kind_of_ballot_item,
last_editor_we_vote_id=last_editor_we_vote_id,
more_info_url=more_info_url,
more_info_credit=more_info_credit,
                    # We don't need to set last_updated here because we set auto_now=True on the field
)
quick_info_master.save()
quick_info_master_id = quick_info_master.id
quick_info_master_found = True
status = 'CREATED_QUICK_INFO_MASTER'
except Exception as e:
status = 'FAILED_TO_CREATE_NEW_QUICK_INFO_MASTER'
handle_record_not_saved_exception(e, logger=logger, exception_message_optional=status)
results = {
'success': True if quick_info_master_found else False,
'status': status,
'quick_info_master_found': quick_info_master_found,
'quick_info_master_id': quick_info_master_id,
'quick_info_master': quick_info_master,
}
return results
def delete_quick_info_master(self, quick_info_master_id):
quick_info_master_id = convert_to_int(quick_info_master_id)
quick_info_master_deleted = False
try:
if quick_info_master_id:
results = self.retrieve_quick_info_master(quick_info_master_id)
if results['quick_info_master_found']:
quick_info_master = results['quick_info_master']
quick_info_master_id = quick_info_master.id
quick_info_master.delete()
quick_info_master_deleted = True
except Exception as e:
handle_exception(e, logger=logger)
results = {
'success': quick_info_master_deleted,
'quick_info_master_deleted': quick_info_master_deleted,
'quick_info_master_id': quick_info_master_id,
}
return results
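# A minimal sketch (hypothetical prefix and integer values) showing how a
# generated we_vote_id is assembled per the scheme documented in save() above:
if __name__ == '__main__':
    site_unique_id_prefix = '2'  # hypothetical per-server prefix
    next_local_integer = 123     # hypothetical sequential integer
    example_id = "wv{site_unique_id_prefix}infom{next_integer}".format(
        site_unique_id_prefix=site_unique_id_prefix,
        next_integer=next_local_integer,
    )
    assert example_id == "wv2infom123"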
| wevote/WeVoteServer | quick_info/models.py | Python | mit | 40,696 |
# anchorGenerator
from models.anchor import *
# main function
if __name__=='__main__':
# TEMP: Wipe existing anchors
# anchors = Anchor.all(size=1000)
# Anchor.delete_all(anchors)
# THIS IS TEMPORARY:
anchors = {'Vaccination', 'Vaccinations', 'Vaccine', 'Vaccines', 'Inoculation', 'Immunization', 'Shot', 'Chickenpox', 'Disease', 'Diseases', 'Hepatitis A', 'Hepatitis B', 'infection', 'infections', 'measles', 'outbreak', 'mumps', 'rabies', 'tetanus', 'virus', 'autism'}
seed = 'vaccination'
for anchor in anchors:
a = Anchor.getOrCreate(anchor)
a.findInstances()
a.save()
"""
query = {
"size": 0,
"query": {
"filtered": {
"query": {
"query_string": {
"query": "*",
"analyze_wildcard": True
}
}
}
},
"aggs": {
"2": {
"terms": {
"field": "title",
"size": 100,
"order": {
"_count": "desc"
}
}
}
}
}
    response = es.search(index="crowdynews", body=query)
retrieved = now()
anchors = {}
# go through each retrieved document
for hit in response['aggregations']['2']['buckets']:
key = hit['key']
if validKey(key):
anchors[key] = hit['doc_count']
addBulk(anchors)
""" | ControCurator/controcurator | python_code/anchorGenerator.py | Python | mit | 1,229 |
from bs4 import BeautifulSoup
import httplib, codecs, datetime
import cPickle as pickle
import time
def stan_tag(criteria, server):
tagged = []
    file_count = 47  # resume output numbering from a previous partial run
    for ix, c in enumerate(criteria[2250000:]):  # skip criteria already tagged in that run
# initialize list of sentences
sents = []
try:
# send text to server
server.request('', c)
res = BeautifulSoup(server.getresponse().read())
# loop through sentences to generate lists of tagged/lemmatized tuples
for sentence in res.findAll('sentence'):
sent_tag = []
for word in sentence.findAll('word'):
sent_tag.append((word.get_text(), word['pos'], word['lemma']))
sents.append(sent_tag)
        except Exception:
print c
print ix
server = httplib.HTTPConnection('127.0.0.1:2020')
sents.append(c)
# add sentence to tagged list
tagged.append(sents)
#save every 50,000 lines
if ix % 50000 == 0:
print 'Line: ', ix
print 'File: ', file_count
print
pickle.dump(tagged, open('data/stanford_tagged/stanford_tagged_criteria_%d.pkl' % (file_count), 'wb'))
file_count += 1
del tagged
tagged = []
pickle.dump(tagged, open('data/stanford_tagged/stanford_tagged_criteria_%d.pkl' % (file_count), 'wb'))
print 'Complete'
def main():
server = httplib.HTTPConnection('127.0.0.1:2020')
criteria = codecs.open('data/stanford_sentence_list.csv','r').readlines()
stan_tag(criteria, server)
if __name__ == '__main__':
main()
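# A minimal sketch of the per-sentence parsing done in stan_tag(), run against a
# hypothetical server response (real CoreNLP XML may differ in structure):
#   res = BeautifulSoup('<sentence><word pos="NNS" lemma="patient">patients</word></sentence>')
#   for sentence in res.findAll('sentence'):
#       print [(w.get_text(), w['pos'], w['lemma']) for w in sentence.findAll('word')]
#   # -> [(u'patients', u'NNS', u'patient')]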
| jasonost/clinicaltrials | trial_criteria/StanfordPOStagging.py | Python | mit | 1,663 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
try:
from ._models_py3 import ARMBaseModel
from ._models_py3 import Address
from ._models_py3 import Alert
from ._models_py3 import AlertErrorDetails
from ._models_py3 import AlertList
from ._models_py3 import AsymmetricEncryptedSecret
from ._models_py3 import Authentication
from ._models_py3 import AzureContainerInfo
from ._models_py3 import BandwidthSchedule
from ._models_py3 import BandwidthSchedulesList
from ._models_py3 import ClientAccessRight
from ._models_py3 import CloudErrorBody
from ._models_py3 import ContactDetails
from ._models_py3 import DataBoxEdgeDevice
from ._models_py3 import DataBoxEdgeDeviceExtendedInfo
from ._models_py3 import DataBoxEdgeDeviceList
from ._models_py3 import DataBoxEdgeDevicePatch
from ._models_py3 import FileEventTrigger
from ._models_py3 import FileSourceInfo
from ._models_py3 import IoTDeviceInfo
from ._models_py3 import IoTRole
from ._models_py3 import Ipv4Config
from ._models_py3 import Ipv6Config
from ._models_py3 import Job
from ._models_py3 import JobErrorDetails
from ._models_py3 import JobErrorItem
from ._models_py3 import MetricDimensionV1
from ._models_py3 import MetricSpecificationV1
from ._models_py3 import MountPointMap
from ._models_py3 import NetworkAdapter
from ._models_py3 import NetworkAdapterPosition
from ._models_py3 import NetworkSettings
from ._models_py3 import Node
from ._models_py3 import NodeList
from ._models_py3 import Operation
from ._models_py3 import OperationDisplay
from ._models_py3 import OperationsList
from ._models_py3 import Order
from ._models_py3 import OrderList
from ._models_py3 import OrderStatus
from ._models_py3 import PeriodicTimerEventTrigger
from ._models_py3 import PeriodicTimerSourceInfo
from ._models_py3 import RefreshDetails
from ._models_py3 import Role
from ._models_py3 import RoleList
from ._models_py3 import RoleSinkInfo
from ._models_py3 import SecuritySettings
from ._models_py3 import ServiceSpecification
from ._models_py3 import Share
from ._models_py3 import ShareAccessRight
from ._models_py3 import ShareList
from ._models_py3 import Sku
from ._models_py3 import StorageAccountCredential
from ._models_py3 import StorageAccountCredentialList
from ._models_py3 import SymmetricKey
from ._models_py3 import TrackingInfo
from ._models_py3 import Trigger
from ._models_py3 import TriggerList
from ._models_py3 import UpdateDownloadProgress
from ._models_py3 import UpdateInstallProgress
from ._models_py3 import UpdateSummary
from ._models_py3 import UploadCertificateRequest
from ._models_py3 import UploadCertificateResponse
from ._models_py3 import User
from ._models_py3 import UserAccessRight
from ._models_py3 import UserList
except (SyntaxError, ImportError):
from ._models import ARMBaseModel # type: ignore
from ._models import Address # type: ignore
from ._models import Alert # type: ignore
from ._models import AlertErrorDetails # type: ignore
from ._models import AlertList # type: ignore
from ._models import AsymmetricEncryptedSecret # type: ignore
from ._models import Authentication # type: ignore
from ._models import AzureContainerInfo # type: ignore
from ._models import BandwidthSchedule # type: ignore
from ._models import BandwidthSchedulesList # type: ignore
from ._models import ClientAccessRight # type: ignore
from ._models import CloudErrorBody # type: ignore
from ._models import ContactDetails # type: ignore
from ._models import DataBoxEdgeDevice # type: ignore
from ._models import DataBoxEdgeDeviceExtendedInfo # type: ignore
from ._models import DataBoxEdgeDeviceList # type: ignore
from ._models import DataBoxEdgeDevicePatch # type: ignore
from ._models import FileEventTrigger # type: ignore
from ._models import FileSourceInfo # type: ignore
from ._models import IoTDeviceInfo # type: ignore
from ._models import IoTRole # type: ignore
from ._models import Ipv4Config # type: ignore
from ._models import Ipv6Config # type: ignore
from ._models import Job # type: ignore
from ._models import JobErrorDetails # type: ignore
from ._models import JobErrorItem # type: ignore
from ._models import MetricDimensionV1 # type: ignore
from ._models import MetricSpecificationV1 # type: ignore
from ._models import MountPointMap # type: ignore
from ._models import NetworkAdapter # type: ignore
from ._models import NetworkAdapterPosition # type: ignore
from ._models import NetworkSettings # type: ignore
from ._models import Node # type: ignore
from ._models import NodeList # type: ignore
from ._models import Operation # type: ignore
from ._models import OperationDisplay # type: ignore
from ._models import OperationsList # type: ignore
from ._models import Order # type: ignore
from ._models import OrderList # type: ignore
from ._models import OrderStatus # type: ignore
from ._models import PeriodicTimerEventTrigger # type: ignore
from ._models import PeriodicTimerSourceInfo # type: ignore
from ._models import RefreshDetails # type: ignore
from ._models import Role # type: ignore
from ._models import RoleList # type: ignore
from ._models import RoleSinkInfo # type: ignore
from ._models import SecuritySettings # type: ignore
from ._models import ServiceSpecification # type: ignore
from ._models import Share # type: ignore
from ._models import ShareAccessRight # type: ignore
from ._models import ShareList # type: ignore
from ._models import Sku # type: ignore
from ._models import StorageAccountCredential # type: ignore
from ._models import StorageAccountCredentialList # type: ignore
from ._models import SymmetricKey # type: ignore
from ._models import TrackingInfo # type: ignore
from ._models import Trigger # type: ignore
from ._models import TriggerList # type: ignore
from ._models import UpdateDownloadProgress # type: ignore
from ._models import UpdateInstallProgress # type: ignore
from ._models import UpdateSummary # type: ignore
from ._models import UploadCertificateRequest # type: ignore
from ._models import UploadCertificateResponse # type: ignore
from ._models import User # type: ignore
from ._models import UserAccessRight # type: ignore
from ._models import UserList # type: ignore
from ._data_box_edge_management_client_enums import (
AccountType,
AlertSeverity,
AuthenticationType,
AzureContainerDataFormat,
ClientPermissionType,
DataBoxEdgeDeviceStatus,
DataPolicy,
DayOfWeek,
DeviceType,
DownloadPhase,
EncryptionAlgorithm,
InstallRebootBehavior,
JobStatus,
JobType,
MetricAggregationType,
MetricCategory,
MetricUnit,
MonitoringStatus,
NetworkAdapterDHCPStatus,
NetworkAdapterRDMAStatus,
NetworkAdapterStatus,
NetworkGroup,
NodeStatus,
OrderState,
PlatformType,
RoleStatus,
RoleTypes,
SSLStatus,
ShareAccessProtocol,
ShareAccessType,
ShareStatus,
SkuName,
SkuTier,
TimeGrain,
TriggerEventType,
UpdateOperation,
UpdateOperationStage,
)
__all__ = [
'ARMBaseModel',
'Address',
'Alert',
'AlertErrorDetails',
'AlertList',
'AsymmetricEncryptedSecret',
'Authentication',
'AzureContainerInfo',
'BandwidthSchedule',
'BandwidthSchedulesList',
'ClientAccessRight',
'CloudErrorBody',
'ContactDetails',
'DataBoxEdgeDevice',
'DataBoxEdgeDeviceExtendedInfo',
'DataBoxEdgeDeviceList',
'DataBoxEdgeDevicePatch',
'FileEventTrigger',
'FileSourceInfo',
'IoTDeviceInfo',
'IoTRole',
'Ipv4Config',
'Ipv6Config',
'Job',
'JobErrorDetails',
'JobErrorItem',
'MetricDimensionV1',
'MetricSpecificationV1',
'MountPointMap',
'NetworkAdapter',
'NetworkAdapterPosition',
'NetworkSettings',
'Node',
'NodeList',
'Operation',
'OperationDisplay',
'OperationsList',
'Order',
'OrderList',
'OrderStatus',
'PeriodicTimerEventTrigger',
'PeriodicTimerSourceInfo',
'RefreshDetails',
'Role',
'RoleList',
'RoleSinkInfo',
'SecuritySettings',
'ServiceSpecification',
'Share',
'ShareAccessRight',
'ShareList',
'Sku',
'StorageAccountCredential',
'StorageAccountCredentialList',
'SymmetricKey',
'TrackingInfo',
'Trigger',
'TriggerList',
'UpdateDownloadProgress',
'UpdateInstallProgress',
'UpdateSummary',
'UploadCertificateRequest',
'UploadCertificateResponse',
'User',
'UserAccessRight',
'UserList',
'AccountType',
'AlertSeverity',
'AuthenticationType',
'AzureContainerDataFormat',
'ClientPermissionType',
'DataBoxEdgeDeviceStatus',
'DataPolicy',
'DayOfWeek',
'DeviceType',
'DownloadPhase',
'EncryptionAlgorithm',
'InstallRebootBehavior',
'JobStatus',
'JobType',
'MetricAggregationType',
'MetricCategory',
'MetricUnit',
'MonitoringStatus',
'NetworkAdapterDHCPStatus',
'NetworkAdapterRDMAStatus',
'NetworkAdapterStatus',
'NetworkGroup',
'NodeStatus',
'OrderState',
'PlatformType',
'RoleStatus',
'RoleTypes',
'SSLStatus',
'ShareAccessProtocol',
'ShareAccessType',
'ShareStatus',
'SkuName',
'SkuTier',
'TimeGrain',
'TriggerEventType',
'UpdateOperation',
'UpdateOperationStage',
]
| Azure/azure-sdk-for-python | sdk/databoxedge/azure-mgmt-databoxedge/azure/mgmt/databoxedge/v2019_07_01/models/__init__.py | Python | mit | 10,249 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# author: J.Y Han
# start
# spawn-fcgi -d /users/hanjiyun/project/geeksoho -f /users/hanjiyun/project/geeksoho/application.py -a 127.0.0.1 -p 9001
# stop
# kill `pgrep -f "/users/hanjiyun/project/geeksoho/application.py"`
import os
import web
import rediswebpy
from web.contrib.template import render_jinja
import misc
db = web.database(dbn='mysql', db='geeksoho', user='geeksoho', passwd='geeksoho')
urls = (
'/', 'index',
'/test', 'test'
)
# controllers
# ===============
class index:
"""Home"""
def GET(self):
# return pjax('jobs.html')
jobsList = GetJobs()
return render.jobs(jobsList=jobsList)
def POST(self):
data = web.input(title='', link='', company='', company_weibo='', company_website='', city='', salary='', intro='')
CreatNewJob(data)
raise web.seeother('/')
class test:
"""test"""
def GET(self):
# return pjax('test.html')
return render.test()
# models
# =============
def CreatNewJob(data):
db.insert(
'jobs',
title = data.title,
link = data.link,
company = data.company,
company_weibo = data.company_weibo,
company_website = data.company_website,
city = data.city,
salary = data.salary,
intro = data.intro)
def GetJobs():
return db.select('jobs', limit = 100, order='id DESC')
# globals = get_all_functions(misc)
app = web.application(urls, globals())
web.config.debug = True
cache = False
session = web.session.Session(app, rediswebpy.RedisStore(), initializer={'count': 0})
render = render_jinja(
    'templates',  # template directory path.
    encoding = 'utf-8',  # encoding.
)
myFilters = {'filter_tags': misc.filter_tags,}
render._lookup.filters.update(myFilters)
if __name__ == "__main__":
web.wsgi.runwsgi = lambda func, addr=None: web.wsgi.runfcgi(func, addr)
    app.run()
 | naoyeye/geeksoho | application.py | Python | mit | 1,969 |
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
from azure.cli.testsdk import ScenarioTest, record_only, ResourceGroupPreparer
class TestClusterScenarios(ScenarioTest):
@record_only()
@ResourceGroupPreparer(name_prefix='cli_test_monitor_log_analytics_cluster_c', parameter_name='rg1', key='rg1', location='centralus')
def test_monitor_log_analytics_cluster_default(self, rg1):
new_cluster_name = self.create_random_name('clitest-cluster-', 20)
sku_capacity = 1000
self.kwargs.update({
'new_cluster_name': new_cluster_name,
'sku_capacity': sku_capacity
})
self.cmd("monitor log-analytics cluster create -g {rg1} -n {new_cluster_name} --sku-capacity {sku_capacity}",
checks=[])
self.cmd("monitor log-analytics cluster show -g {rg1} -n {new_cluster_name}", checks=[
self.check('provisioningState', 'Succeeded'),
self.check('name', new_cluster_name),
self.check('sku.capacity', sku_capacity)
])
new_sku_capacity = 2000
self.kwargs.update({
'sku_capacity': new_sku_capacity
})
self.cmd("monitor log-analytics cluster update -g {rg1} -n {new_cluster_name} "
"--sku-capacity {sku_capacity}",
checks=[
self.check('sku.capacity', new_sku_capacity)
])
self.cmd("monitor log-analytics cluster show -g {rg1} -n {new_cluster_name}", checks=[
self.check('provisioningState', 'Succeeded'),
self.check('sku.capacity', new_sku_capacity)
])
self.cmd("monitor log-analytics cluster list -g {rg1}", checks=[
self.check('length(@)', 1)
])
self.cmd("monitor log-analytics cluster delete -g {rg1} -n {new_cluster_name} -y", checks=[])
with self.assertRaisesRegex(SystemExit, '3'):
self.cmd('monitor log-analytics cluster show -g {rg1} -n {new_cluster_name}')
@record_only()
def test_monitor_log_analytics_cluster_update_key(self):
new_key_name = 'key2'
new_key_version = 'dc814576e6b34de69a10b186a4723035'
self.kwargs.update({
'rg': 'azure-cli-test-scus',
'key_name': new_key_name,
'key_version': new_key_version,
'key_vault_uri': 'https://yu-vault-1.vault.azure.net/',
'cluster_name': 'yu-test-cluster2'
})
self.cmd("monitor log-analytics cluster update -g {rg} -n {cluster_name} --key-name {key_name} "
"--key-vault-uri {key_vault_uri} --key-version {key_version}",
checks=[])
self.cmd("monitor log-analytics cluster wait -g {rg} -n {cluster_name} --updated", checks=[])
self.cmd("monitor log-analytics cluster show -g {rg} -n {cluster_name}", checks=[
self.check('provisioningState', 'Succeeded'),
self.check('keyVaultProperties.keyName', new_key_name),
self.check('keyVaultProperties.keyVersion', new_key_version)
])
| yugangw-msft/azure-cli | src/azure-cli/azure/cli/command_modules/monitor/tests/latest/test_monitor_log_analytics_cluster.py | Python | mit | 3,370 |
import factory
from dominion.games.models import Game
class GameFactory(factory.django.DjangoModelFactory):
class Meta:
model = Game
| jlward/dominion | dominion/games/factories.py | Python | mit | 148 |
from django.core.management.base import BaseCommand
from aspc.courses.models import Schedule
from datetime import datetime, timedelta
from django.core.mail import EmailMultiAlternatives
from django.template.loader import get_template
from django.template import Context
from aspc.settings import EMAIL_HOST_USER
# Assuming we send the emails at the end of the semester, we
# should only consider schedules that are at least 3 months old
MIN_DAYS = 90
MAX_DAYS = 300
EMAIL_TITLE = "Have you taken these classes?"
class Command(BaseCommand):
args = ''
    help = 'emails users with recent schedules, asking them to review their courses'
def handle(self, *args, **options):
plaintext = get_template('email/solicit_reviews.txt')
htmly = get_template('email/solicit_reviews.html')
schedules = Schedule.objects.filter(create_ts__lte=datetime.now()-timedelta(days=MIN_DAYS),
create_ts__gte=datetime.now()-timedelta(days=MAX_DAYS))
emails_sent = 0
for schedule in schedules:
try:
context = Context({'user': schedule.user, 'courses': schedule.sections.all()})
text_content = plaintext.render(context)
html_content = htmly.render(context)
user_data = schedule.user.user.all()
if user_data and user_data[0].subscribed_email:
msg = EmailMultiAlternatives(EMAIL_TITLE, text_content, EMAIL_HOST_USER, [schedule.user.email])
msg.attach_alternative(html_content, "text/html")
msg.send()
emails_sent += 1
except Exception as e:
self.stdout.write('Error: %s\n' % e)
        self.stdout.write('Successfully sent %s emails\n' % emails_sent)
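# A minimal sketch (hypothetical addresses/body) of the multipart pattern used above:
#   msg = EmailMultiAlternatives(EMAIL_TITLE, 'plain-text body', EMAIL_HOST_USER,
#                                ['[email protected]'])
#   msg.attach_alternative('<p>HTML body</p>', "text/html")
#   msg.send()
# Clients that render HTML use the attached alternative; others fall back to
# the plain-text body passed to the constructor.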
| aspc/mainsite | aspc/courses/management/commands/solicit_reviews.py | Python | mit | 1,761 |
"""
The utility module.
"""
import traceback
def extract_traceback(exception):
"""
Utility function for extracting the traceback from an exception.
:param exception: The exception to extract the traceback from.
:return: The extracted traceback.
"""
return ''.join(traceback.format_tb(exception.__traceback__))
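# A minimal usage sketch: capture an exception and print its formatted traceback.
if __name__ == '__main__':
    try:
        1 / 0
    except ZeroDivisionError as error:
        print(extract_traceback(error))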
| tylerlaberge/Jasper | jasper/utility.py | Python | mit | 341 |
"""
@name: Modules/Computer/Nodes/nodes.py
@author: D. Brian Kimmel
@contact: [email protected]
@copyright: (c) 2014-2030 by D. Brian Kimmel
@license: MIT License
@note: Created on Mar 6, 2014
@summary: This module does everything for nodes.
Nodes are read in from the config Xml file.
Then node local is run to update the local node
Finally, the nodes are synced between each other.
"""
__updated__ = '2020-01-25'
__version_info__ = (20, 1, 24)
__version__ = '.'.join(map(str, __version_info__))
# Import system type stuff
# Import PyMh files and modules.
from Modules.Computer.Nodes.node_local import Api as localApi
from Modules.Computer.Nodes.node_sync import Api as syncApi
from Modules.Core.Utilities.debug_tools import PrettyFormatAny
from Modules.Core.Utilities import extract_tools
from Modules.Core import logging_pyh as Logger
LOG = Logger.getLogger('PyHouse.Nodes ')
class MqttActions:
"""
"""
def __init__(self, p_pyhouse_obj):
self.m_pyhouse_obj = p_pyhouse_obj
def decode(self, p_msg):
""" Decode the computer specific portions of the message and append them to the log string.
        @param p_msg: is the partially decoded Mqtt message json
        @param p_msg.UnprocessedTopic: is a list of topic part strings (pyhouse, housename have been dropped)
        @param p_msg.Payload: is the payload that is JSON
"""
l_topic = p_msg.UnprocessedTopic
p_msg.UnprocessedTopic = p_msg.UnprocessedTopic[1:]
p_msg.LogMessage += '\tNodes:\n'
l_topic = l_topic[0].lower()
if l_topic == 'sync':
syncApi(self.m_pyhouse_obj).DecodeMqttMessage(p_msg)
else:
p_msg.LogMessage += '\tUnknown sub-topic {}'.format(PrettyFormatAny.form(p_msg.Payload, 'Computer msg'))
LOG.warning('Unknown Node sub-topic: {}\n\tMsg: {}'.format(l_topic, p_msg.Payload))
class Yaml:
def load_yaml_config(self, p_pyhouse_obj):
"""
"""
pass
def save_yaml_config(self, p_pyhouse_obj):
"""
"""
pass
class Api():
m_pyhouse_obj = None
def __init__(self, p_pyhouse_obj):
self.m_pyhouse_obj = p_pyhouse_obj
self._add_storage()
self.m_local = localApi(p_pyhouse_obj)
self.m_sync = syncApi(p_pyhouse_obj)
LOG.info('Initialized - Version:{}'.format(__version__))
def _add_storage(self):
"""
"""
def LoadConfig(self):
""" Load the Node xml info.
"""
Yaml().load_yaml_config(self.m_pyhouse_obj)
# p_pyhouse_obj.Computer.Nodes = l_nodes
LOG.info('Loaded Config - Version:{}'.format(__version__))
return
def Start(self):
self.m_local.Start()
self.m_sync.Start()
LOG.info('Started - Version:{}'.format(__version__))
def SaveConfig(self):
# l_xml, l_count = nodesXml.write_nodes_xml(self.m_pyhouse_obj)
# p_xml.append(l_xml)
Yaml().save_yaml_config(self.m_pyhouse_obj)
LOG.info("Saved Config")
return
def Stop(self):
self.m_local.Stop()
self.m_sync.Stop()
# ## END DBK
| DBrianKimmel/PyHouse | Project/src/Modules/Computer/Nodes/nodes.py | Python | mit | 3,171 |
# -*- coding: utf-8 -*-
"""
Created on Fri Oct 14 15:30:11 2016
@author: worm_rig
"""
import json
import os
import tables
from tierpsy.analysis.compress.compressVideo import compressVideo, initMasksGroups
from tierpsy.analysis.compress.selectVideoReader import selectVideoReader
from tierpsy.helper.misc import TimeCounter, print_flush
#default parameters if wormencoder.ini does not exist
DFLT_SAVE_FULL_INTERVAL = 5000
DFLT_BUFFER_SIZE = 5
DFLT_MASK_PARAMS = {'min_area' : 50,
'max_area' : 500000000,
'thresh_C' : 15,
'thresh_block_size' : 61,
'dilation_size' : 7
}
def _getWormEnconderParams(fname):
def numOrStr(x):
x = x.strip()
try:
return int(x)
except:
return x
if os.path.exists(fname):
with open(fname, 'r') as fid:
dd = fid.read().split('\n')
plugin_params = {a.strip() : numOrStr(b) for a,b in
[x.split('=') for x in dd if x and x[0].isalpha()]}
else:
plugin_params = {}
return plugin_params
def _getReformatParams(plugin_params):
if plugin_params:
save_full_interval = plugin_params['UNMASKEDFRAMES']
buffer_size = plugin_params['MASK_RECALC_RATE']
mask_params = {'min_area' : plugin_params['MINBLOBSIZE'],
'max_area' : plugin_params['MAXBLOBSIZE'],
'thresh_C' : plugin_params['THRESHOLD_C'],
'thresh_block_size' : plugin_params['THRESHOLD_BLOCK_SIZE'],
'dilation_size' : plugin_params['DILATION_KERNEL_SIZE']}
else:
#if an empty dictionary was given return default values
save_full_interval = DFLT_SAVE_FULL_INTERVAL
buffer_size = DFLT_BUFFER_SIZE
mask_params = DFLT_MASK_PARAMS
return save_full_interval, buffer_size, mask_params
def _isValidSource(original_file):
try:
with tables.File(original_file, 'r') as fid:
fid.get_node('/mask')
return True
except tables.exceptions.HDF5ExtError:
return False
def reformatRigMaskedVideo(original_file, new_file, plugin_param_file, expected_fps, microns_per_pixel):
plugin_params = _getWormEnconderParams(plugin_param_file)
base_name = original_file.rpartition('.')[0].rpartition(os.sep)[-1]
if not _isValidSource(original_file):
print_flush(new_file + ' ERROR. File might be corrupt. ' + original_file)
return
save_full_interval, buffer_size, mask_params = _getReformatParams(plugin_params)
with tables.File(original_file, 'r') as fid_old, \
tables.File(new_file, 'w') as fid_new:
mask_old = fid_old.get_node('/mask')
tot_frames, im_height, im_width = mask_old.shape
progress_timer = TimeCounter('Reformating Gecko plugin hdf5 video.', tot_frames)
attr_params = dict(
expected_fps = expected_fps,
microns_per_pixel = microns_per_pixel,
is_light_background = True
)
mask_new, full_new, _ = initMasksGroups(fid_new, tot_frames, im_height, im_width,
attr_params, save_full_interval, is_expandable=False)
mask_new.attrs['plugin_params'] = json.dumps(plugin_params)
img_buff_ini = mask_old[:buffer_size]
full_new[0] = img_buff_ini[0]
mask_new[:buffer_size] = img_buff_ini*(mask_old[buffer_size] != 0)
for frame in range(buffer_size, tot_frames):
if frame % save_full_interval != 0:
mask_new[frame] = mask_old[frame]
else:
full_frame_n = frame //save_full_interval
img = mask_old[frame]
full_new[full_frame_n] = img
mask_new[frame] = img*(mask_old[frame-1] != 0)
if frame % 500 == 0:
# calculate the progress and put it in a string
progress_str = progress_timer.get_str(frame)
print_flush(base_name + ' ' + progress_str)
print_flush(
base_name +
' Compressed video done. Total time:' +
progress_timer.get_time_str())
def isGoodVideo(video_file):
try:
vid = selectVideoReader(video_file)
        # I have had problems with corrupt videos that can create infinite loops...
        # it is better to test for this before starting a large task
vid.release()
return True
except OSError:
# corrupt file, cannot read the size
return False
def processVideo(video_file, masked_image_file, compress_vid_param):
if video_file.endswith('hdf5'):
plugin_param_file = os.path.join(os.path.dirname(video_file), 'wormencoder.ini')
expected_fps = compress_vid_param['expected_fps']
microns_per_pixel = compress_vid_param['microns_per_pixel']
reformatRigMaskedVideo(video_file, masked_image_file, plugin_param_file, expected_fps=expected_fps, microns_per_pixel=microns_per_pixel)
else:
compressVideo(video_file, masked_image_file, **compress_vid_param)
if __name__ == '__main__':
import argparse
fname_wenconder = os.path.join(os.path.dirname(__file__), 'wormencoder.ini')
parser = argparse.ArgumentParser(description='Reformat the files produced by the Gecko plugin in to the format of tierpsy.')
parser.add_argument('original_file', help='path of the original file produced by the plugin')
parser.add_argument('new_file', help='new file name')
parser.add_argument(
'--plugin_param_file',
default = fname_wenconder,
help='wormencoder file used by the Gecko plugin.')
parser.add_argument(
'--expected_fps',
default=25,
help='Expected recording rate in frame per seconds.')
args = parser.parse_args()
reformatRigMaskedVideo(**vars(args))
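# A sketch of the 'KEY = value' lines _getWormEnconderParams() expects in
# wormencoder.ini (key names come from _getReformatParams(); the values here
# simply mirror the module defaults):
#   UNMASKEDFRAMES = 5000
#   MASK_RECALC_RATE = 5
#   MINBLOBSIZE = 50
#   MAXBLOBSIZE = 500000000
#   THRESHOLD_C = 15
#   THRESHOLD_BLOCK_SIZE = 61
#   DILATION_KERNEL_SIZE = 7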
| ver228/tierpsy-tracker | tierpsy/analysis/compress/processVideo.py | Python | mit | 5,963 |
import tensorflow as tf
import hyperchamber as hc
import hypergan as hg
import numpy as np
from hypergan.losses.least_squares_loss import LeastSquaresLoss
from hypergan.ops import TensorflowOps
from unittest.mock import MagicMock
from tests.mocks import mock_gan
loss_config = {'test': True, 'reduce':'reduce_mean', 'labels': [0,1,0]}
class LeastSquaresLossTest(tf.test.TestCase):
def test_config(self):
with self.test_session():
loss = LeastSquaresLoss(mock_gan(), loss_config)
self.assertTrue(loss.config.test)
def test_create(self):
with self.test_session():
gan = mock_gan()
loss = LeastSquaresLoss(gan, loss_config)
d_loss, g_loss = loss.create()
d_shape = gan.ops.shape(d_loss)
g_shape = gan.ops.shape(g_loss)
self.assertEqual(sum(d_shape), 0)
self.assertEqual(sum(g_shape), 0)
if __name__ == "__main__":
tf.test.main()
| 255BITS/HyperGAN | tests/losses/test_least_squares_loss.py | Python | mit | 966 |
class Kifu:
def __init__(self):
self.kifu = []
def add(self, from_x, from_y, to_x, to_y, promote, koma):
self.kifu.append((from_x, from_y, to_x, to_y, promote, koma))
def pop(self):
return self.kifu.pop()
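# A minimal usage sketch (hypothetical move values): record one move, then undo it.
if __name__ == '__main__':
    kifu = Kifu()
    kifu.add(7, 7, 7, 6, False, 'FU')  # 'FU' is a hypothetical koma label
    assert kifu.pop() == (7, 7, 7, 6, False, 'FU')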
| setokinto/slack-shogi | app/kifu.py | Python | mit | 255 |
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
desk_properties = ("search_bar", "notifications", "chat", "list_sidebar",
"bulk_actions", "view_switcher", "form_sidebar", "timeline", "dashboard")
class Role(Document):
def before_rename(self, old, new, merge=False):
if old in ("Guest", "Administrator", "System Manager", "All"):
frappe.throw(frappe._("Standard roles cannot be renamed"))
def after_insert(self):
frappe.cache().hdel('roles', 'Administrator')
def validate(self):
if self.disabled:
self.disable_role()
else:
self.set_desk_properties()
def disable_role(self):
if self.name in ("Guest", "Administrator", "System Manager", "All"):
frappe.throw(frappe._("Standard roles cannot be disabled"))
else:
self.remove_roles()
def set_desk_properties(self):
# set if desk_access is not allowed, unset all desk properties
if self.name == 'Guest':
self.desk_access = 0
if not self.desk_access:
for key in desk_properties:
self.set(key, 0)
def remove_roles(self):
frappe.db.sql("delete from `tabHas Role` where role = %s", self.name)
frappe.clear_cache()
def on_update(self):
'''update system user desk access if this has changed in this update'''
if frappe.flags.in_install: return
if self.has_value_changed('desk_access'):
for user_name in get_users(self.name):
user = frappe.get_doc('User', user_name)
user_type = user.user_type
user.set_system_user()
if user_type != user.user_type:
user.save()
def get_info_based_on_role(role, field='email'):
''' Get information of all users that have been assigned this role '''
users = frappe.get_list("Has Role", filters={"role": role, "parenttype": "User"},
fields=["parent as user_name"])
return get_user_info(users, field)
def get_user_info(users, field='email'):
''' Fetch details about users for the specified field '''
info_list = []
for user in users:
user_info, enabled = frappe.db.get_value("User", user.get("user_name"), [field, "enabled"])
if enabled and user_info not in ["[email protected]", "[email protected]"]:
info_list.append(user_info)
return info_list
def get_users(role):
return [d.parent for d in frappe.get_all("Has Role", filters={"role": role, "parenttype": "User"},
fields=["parent"])]
# searches for standard (non-custom) roles
@frappe.whitelist()
@frappe.validate_and_sanitize_search_inputs
def role_query(doctype, txt, searchfield, start, page_len, filters):
report_filters = [['Role', 'name', 'like', '%{}%'.format(txt)], ['Role', 'is_custom', '=', 0]]
if filters and isinstance(filters, list):
report_filters.extend(filters)
return frappe.get_all('Role', limit_start=start, limit_page_length=page_len,
        filters=report_filters, as_list=1)
 | saurabh6790/frappe | frappe/core/doctype/role/role.py | Python | mit | 2,874 |
# -*- coding: utf-8 -*-
"""
Created on Tue Dec 13 23:10:40 2016
@author: zhouyu
"""
#%%
import pandas as pd
import numpy as np
import os
import re
import nltk
from nltk.corpus import stopwords
from bs4 import BeautifulSoup
os.chdir('/Users/zhouyu/Documents/Zhou_Yu/DS/kaggle_challenge/text processing')
#%% step1: import data
import glob
alltrainfiles = glob.glob("*.csv")
raw_text = pd.concat((pd.read_csv(f, index_col=None, header=0) for f in alltrainfiles), ignore_index=True)
#raw_text = pd.read_csv("crypto.csv",index_col = None)
#%% step2: clean data, remove HTML, symbols and stopwords
def text_to_words(rawtext):
#split into individual words, remove HTML, only keep letters and number
# convert letters to lower case
reg_c = re.compile('[^a-zA-Z0-9_\\+\\-]')
words = [word for word in reg_c.split(rawtext.lower()) if word!='']
stops = set(stopwords.words("english"))
#take out stop words
meaningful_words = [w for w in words if not w in stops]
return(" ".join(meaningful_words))
def target_to_words(rawtext):
#only return the first target word
reg_c = re.compile('[^a-zA-Z0-9_\\+\\-]')
words = [word for word in reg_c.split(rawtext.lower()) if word!='']
stops = set(stopwords.words("english"))
#take out stop words
meaningful_words = [w for w in words if not w in stops]
return(meaningful_words[0])
#%%
cleaned_post = []
cleaned_target = []
sz = raw_text.shape[0]
for i in range(0,sz):
raw_post = raw_text['title'][i]+' '+raw_text['content'][i]
raw_post = BeautifulSoup(raw_post).get_text()
cleaned_post.append(text_to_words(raw_post))
cleaned_target.append(target_to_words(raw_text['tags'][i]))
if((i+1)%1000==0):
print "Cleanning %d of %d\n" % (i+1,sz)
#print cleaned_post[1]
#%% step3: creating features from a bag of words
from sklearn.feature_extraction.text import CountVectorizer
count_vect = CountVectorizer(analyzer = "word", \
tokenizer = None, \
preprocessor = None, \
stop_words = None, \
max_features = 5000)
X_train_counts = count_vect.fit_transform(cleaned_post)
#X_target_counts = count_vect.fit_transform(cleaned_target)
from sklearn.feature_extraction.text import TfidfTransformer
tf_transformer = TfidfTransformer(use_idf = False).fit(X_train_counts)
X_train_tf = tf_transformer.transform(X_train_counts)
#%% training a linear model
# METHOD 1: BUILD randomforestclassifier...
from sklearn.ensemble import RandomForestClassifier
rf = RandomForestClassifier(n_estimators = 10)
forest = rf.fit(X_train_tf, cleaned_target)
#%% examine the result produced by METHOD 1:
pred = rf.predict(X_train_tf)
from sklearn.metrics import confusion_matrix
from sklearn.metrics import classification_report
from collections import OrderedDict
import matplotlib.pyplot as plt
import itertools
def plot_confusion_matrix(cm, classes,
normalize=False,
title='Confusion matrix',
cmap=plt.cm.Blues):
"""
This function prints and plots the confusion matrix.
Normalization can be applied by setting `normalize=True`.
"""
plt.imshow(cm, interpolation='nearest', cmap=cmap)
plt.title(title)
plt.colorbar()
tick_marks = np.arange(len(classes))
plt.xticks(tick_marks, classes, rotation=45)
plt.yticks(tick_marks, classes)
if normalize:
cm = cm.astype('float') / cm.sum(axis=1)[:, np.newaxis]
print("Normalized confusion matrix")
else:
print('Confusion matrix, without normalization')
print(cm)
thresh = cm.max() / 2.
for i, j in itertools.product(range(cm.shape[0]), range(cm.shape[1])):
plt.text(j, i, cm[i, j],
horizontalalignment="center",
color="white" if cm[i, j] > thresh else "black")
plt.tight_layout()
plt.ylabel('True label')
plt.xlabel('Predicted label')
cnf_matrix = confusion_matrix(cleaned_target,pred)
#target_names = set(cleaned_target)
#np.set_printoptions(precision = 2)
#plt.figure()
#plot_confusion_matrix(cnf_matrix,classes = target_names,normalize = True,title='Normalized confusion matrix')
#plt.show()
target_names = list(OrderedDict.fromkeys(cleaned_target))
print(classification_report(cleaned_target,pred,target_names = target_names))
#######
#%% Method 2: directly predicted as the highest frequency element
# find the highest tf-idf
#step1: select a random sample
from sklearn.metrics import confusion_matrix
from sklearn.metrics import classification_report
from collections import OrderedDict
sample = np.random.choice(87000,1000,replace = False)
tf_pred = []
tf_target = []
for i in range(0,1000):
r = sample[i];
tf_target.append(cleaned_target[r])
tf_post = X_train_tf.getrow(r).toarray()
tf_post_max = tf_post.argmax()
tf_pred.append(count_vect.get_feature_names()[tf_post_max])
tf_cnf_matrix = confusion_matrix(tf_target,tf_pred)
target_names = list(OrderedDict.fromkeys(tf_pred+tf_target))
print(classification_report(tf_target, tf_pred,target_names =target_names))
#%% evaluate test set
test = pd.read_csv('test/test.csv')
cleaned_test = []
test_sz = test.shape[0]
for i in range(0,test_sz):
test_post = test['title'][i]+' '+test['content'][i]
test_post = BeautifulSoup(test_post).get_text()
cleaned_test.append(text_to_words(test_post))
if((i+1)%1000==0):
print "Cleanning %d of %d\n" % (i+1,test_sz)
#%% use random forest
X_test_counts = count_vect.transform(cleaned_test)  # transform only, so the training vocabulary is reused
X_test_tf = tf_transformer.transform(X_test_counts)
result = forest.predict(X_test_tf)  # predict on the same tf features the forest was trained on
# use max tf-idf
#%%
test_pred = []
for i in range(0,test_sz):
tf_test = X_test_tf.getrow(i).toarray()
# just return one tag
#tf_test_max = tf_test.argmax()
#test_pred.append(count_vect.get_feature_names()[tf_test_max])
ind = np.argpartition(tf_test,-4)[:,-4:]
pred_tags = [count_vect.get_feature_names()[j] for j in ind[0,:].tolist()]
test_pred.append( " ".join(pred_tags))
if((i+1)%1000==0):
print "Predicting %d of %d\n" % (i+1,test_sz)
result = test_pred
#%% prepare submission
submission = pd.read_csv('test/sample_submission.csv')
submission.iloc[:,1] = result
submission.to_csv('test/submission.csv',index = None)
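#%% minimal sanity check of the np.argpartition top-k trick used for the tag
# prediction above: argpartition with kth=-4 moves the 4 largest scores into
# the last 4 columns (in no particular order), which is cheaper than a full sort.
demo_scores = np.array([[0.1, 0.9, 0.3, 0.7, 0.5]])
demo_top4 = np.argpartition(demo_scores, -4)[:, -4:]
assert set(demo_top4[0].tolist()) == {1, 2, 3, 4}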
#%% try to use an NMF topic model; topics cannot be mapped back to a specific question...
n_features = 5000
n_topics = 10
n_samples = test_sz
n_top_words = 4
def get_top_words(model, feature_names, n_top_words):
res = []
for topic_idx, topic in enumerate(model.components_):
tags = " ".join([feature_names[i]
for i in topic.argsort()[:-n_top_words - 1:-1]])
res.append(tags)
return res
from sklearn.feature_extraction.text import TfidfVectorizer, CountVectorizer
from sklearn.decomposition import NMF
from time import time
tfidf_vectorizer = TfidfVectorizer(max_df=0.95, min_df=2,
max_features=n_features,
stop_words='english')
tfidf = tfidf_vectorizer.fit_transform(cleaned_test)
# Fit the NMF model
print("Fitting the NMF model (Frobenius norm) with tf-idf features, "
"n_samples=%d and n_features=%d..."
% (n_samples, n_features))
t0 = time()
nmf = NMF(n_components=n_topics, random_state=1,
alpha=.1, l1_ratio=.5).fit(tfidf)
print("done in %0.3fs." % (time() - t0))
print("\nTopics in NMF model (Frobenius norm):")
tfidf_feature_names = tfidf_vectorizer.get_feature_names()
#print_top_words(nmf, tfidf_feature_names, n_top_words)
result = get_top_words(nmf,tfidf_feature_names,n_top_words)
 | sadahanu/DataScience_SideProject | Stack_Exchange/py2_text.py | Python | mit | 7,689 |
# List of modules to import when celery starts.
# CELERY_IMPORTS = ('libcloud_sandbox.tasks.code_execute', )
# Result store settings.
CELERY_RESULT_BACKEND = 'database'
CELERY_RESULT_DBURI = 'sqlite:///mydatabase.db'
# Broker settings.
BROKER_TRANSPORT = 'sqlalchemy'
BROKER_HOST = 'sqlite:///tasks.db'
BROKER_PORT = 5672
BROKER_VHOST = '/'
BROKER_USER = 'guest'
BROKER_PASSWORD = 'guest'
## Worker settings
CELERYD_CONCURRENCY = 1
CELERYD_TASK_TIME_LIMIT = 20
# CELERYD_LOG_FILE = 'celeryd.log'
CELERYD_LOG_LEVEL = 'INFO'
 | texib/bitcoin-zoo | test/celeryconfig.py | Python | mit | 525 |
def add_without_op(x, y):
    while y != 0:
carry = x & y
x = x ^ y
y = carry << 1
print(x)
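# Worked trace of add_without_op(5, 3), step by step:
#   x=0b101, y=0b011 -> carry=0b001, x=0b110, y=0b010
#   x=0b110, y=0b010 -> carry=0b010, x=0b100, y=0b100
#   x=0b100, y=0b100 -> carry=0b100, x=0b000, y=0b1000
#   x=0b000, y=0b1000 -> carry=0,    x=0b1000, y=0  (loop ends, prints 8)
# XOR adds without carrying; AND << 1 re-injects the carries until none remain.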
def main():
x, y = map(int, input().split())
add_without_op(x, y)
if __name__ == "__main__":
    main()
 | libchaos/algorithm-python | bit/add_with_op.py | Python | mit | 233 |
# -*- coding: UTF-8 -*-
__author__ = 'Sean Yu'
__mail__ = '[email protected]'
import sqlite3
def CreateTable(dbname, table,table_define):
db = sqlite3.connect(dbname)
cu=db.cursor()
cu.execute("""create table %s ( %s )"""%(table,table_define))
db.commit()
cu.close()
db.close()
def InsertRecord(dbname, table,record):
db = sqlite3.connect(dbname)
cu=db.cursor()
cu.execute('''insert into %s values(%s)'''%(table,record))
db.commit()
cu.close()
db.close()
def UpdateRecord(dbname,table, action, condition ):
#cu.execute("update tasks set status='compleded' where id = 0")
db = sqlite3.connect(dbname)
cu=db.cursor()
cu.execute('''update %s set %s where %s'''%(table,action,condition))
db.commit()
cu.close()
db.close()
def RemoveRecord(dbname,table, condition ):
#cu.execute("update tasks set status='compleded' where id = 0")
db = sqlite3.connect(dbname)
cu=db.cursor()
cu.execute('''delete from %s where %s'''%(table,condition))
db.commit()
cu.close()
db.close()
def FetchRecord(dbname,table, condition=''):
db = sqlite3.connect(dbname)
cu=db.cursor()
if condition!='':
condition="where %s"%condition
records =cu.execute('''select * from %s %s'''%(table,condition))
result =[]
for i in records:
i= list(i)
result.append(i)
db.commit()
cu.close()
db.close()
return result
def FetchOne(dbname,table, condition=''):
db = sqlite3.connect(dbname)
cu=db.cursor()
if condition!='':
condition="where %s"%condition
records =cu.execute('''select * from %s %s'''%(table,condition))
records =cu.fetchone()
if records:
result =list(records)
else:
result=None
db.commit()
cu.close()
db.close()
return result
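# A minimal sketch (hypothetical table and values) of the parameterized form
# that avoids interpolating values into SQL strings as the helpers above do:
if __name__ == '__main__':
    db = sqlite3.connect(':memory:')
    cu = db.cursor()
    cu.execute('create table demo (id integer, name text)')
    cu.execute('insert into demo values (?, ?)', (1, 'alpha'))
    print(cu.execute('select * from demo where id = ?', (1,)).fetchall())
    db.close()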
| try-dash-now/dash-ia | lib/Database.py | Python | mit | 1,885 |
import logging
from mwoauth import ConsumerToken, Handshaker, AccessToken
from mwoauth.errors import OAuthException
import urllib.parse
from django.conf import settings
from django.contrib import messages
from django.contrib.auth import login, authenticate
from django.contrib.auth.models import User
from django.core.exceptions import DisallowedHost, PermissionDenied
from django.urls import reverse_lazy
from django.http import HttpResponseRedirect
from django.http.request import QueryDict
from django.views.generic.base import View
from django.utils.translation import get_language, gettext as _
from urllib.parse import urlencode
from .models import Editor
logger = logging.getLogger(__name__)
def _localize_oauth_redirect(redirect):
"""
Given an appropriate mediawiki oauth handshake url, return one that will
present the user with a login page of their preferred language.
"""
logger.info("Localizing oauth handshake URL.")
redirect_parsed = urllib.parse.urlparse(redirect)
redirect_query = urllib.parse.parse_qs(redirect_parsed.query)
localized_redirect = redirect_parsed.scheme
localized_redirect += "://"
localized_redirect += redirect_parsed.netloc
localized_redirect += redirect_parsed.path
localized_redirect += "?title="
localized_redirect += "Special:UserLogin"
localized_redirect += "&uselang="
localized_redirect += get_language()
localized_redirect += "&returnto="
localized_redirect += str(redirect_query["title"][0])
localized_redirect += "&returntoquery="
localized_redirect += "%26oauth_consumer_key%3D"
localized_redirect += str(redirect_query["oauth_consumer_key"][0])
localized_redirect += "%26oauth_token%3D"
localized_redirect += str(redirect_query["oauth_token"][0])
return localized_redirect
def _get_handshaker():
consumer_token = ConsumerToken(
settings.TWLIGHT_OAUTH_CONSUMER_KEY, settings.TWLIGHT_OAUTH_CONSUMER_SECRET
)
handshaker = Handshaker(settings.TWLIGHT_OAUTH_PROVIDER_URL, consumer_token)
return handshaker
def _dehydrate_token(token):
"""
Convert the request token into a dict suitable for storing in the session.
"""
session_token = {}
session_token["key"] = token.key
session_token["secret"] = token.secret
return session_token
def _rehydrate_token(token):
"""
Convert the stored dict back into a request token that we can use for
getting an access grant.
"""
request_token = ConsumerToken(token["key"], token["secret"])
return request_token
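# A round-trip sketch (hypothetical key/secret) of the two helpers above:
#   token = ConsumerToken('key123', 'secret456')
#   restored = _rehydrate_token(_dehydrate_token(token))
#   assert (restored.key, restored.secret) == (token.key, token.secret)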
class OAuthBackend(object):
def _get_username(self, identity):
# The Username is globally unique, but Wikipedia allows it to
# have characters that the Django username system rejects. However,
# wiki userID should be unique, and limited to ASCII.
return "{sub}".format(sub=identity["sub"])
def _create_user(self, identity):
# This can't be super informative because we don't want to log
# identities.
logger.info("Creating user.")
# if not self._meets_minimum_requirement(identity):
# This needs to be reworked to actually check against global_userinfo.
# Don't create a User or Editor if this person does not meet the
# minimum account quality requirement. It would be nice to provide
# some user feedback here, but we can't; exception messages don't
# get passed on as template context in Django 1.8. (They do in
# 1.10, so this can be revisited in future.)
# logger.warning('User did not meet minimum requirements; not created.')
# messages.add_message (request, messages.WARNING,
# _('You do not meet the minimum requirements.'))
# raise PermissionDenied
# -------------------------- Create the user ---------------------------
try:
email = identity["email"]
except KeyError:
email = None
username = self._get_username(identity)
# Since we are not providing a password argument, this will call
# set_unusable_password, which is exactly what we want; users created
# via OAuth should only be allowed to log in via OAuth.
user = User.objects.create_user(username=username, email=email)
logger.info("User user successfully created.")
return user
def _create_editor(self, user, identity):
# ------------------------- Create the editor --------------------------
logger.info("Creating editor.")
editor = Editor()
editor.user = user
editor.wp_sub = identity["sub"]
lang = get_language()
editor.update_from_wikipedia(identity, lang) # This call also saves the editor
logger.info("Editor successfully created.")
return editor
def _create_user_and_editor(self, identity):
user = self._create_user(identity)
editor = self._create_editor(user, identity)
return user, editor
def _get_and_update_user_from_identity(self, identity):
"""
If we have an Editor and User matching the identity returned by
Wikipedia, update the editor with the identity parameters and return its
associated user. If we don't, create an Editor and User, and return that
user.
If the wikipedia account does not meet our eligibility criteria, create
a TWLight account if needed, but set it as inactive. Also deactivate
any existing accounts that have become ineligible.
Also return a boolean that is True if we created a user during this
call and False if we did not.
"""
logger.info("Attempting to update editor after OAuth login.")
try:
username = self._get_username(identity)
user = User.objects.get(username=username)
# This login path should only be used for accounts created via
# Wikipedia login, which all have editor objects.
if hasattr(user, "editor"):
editor = user.editor
lang = get_language()
editor.update_from_wikipedia(
identity, lang
) # This call also saves the editor
logger.info("Editor updated.")
created = False
else:
try:
logger.warning(
"A user tried using the Wikipedia OAuth "
"login path but does not have an attached editor."
)
editor = self._create_editor(user, identity)
created = True
                except Exception:
raise PermissionDenied
except User.DoesNotExist:
logger.info("Can't find user; creating one.")
user, editor = self._create_user_and_editor(identity)
created = True
return user, created
def authenticate(self, request=None, access_token=None, handshaker=None):
logger.info("Authenticating user...")
if not request or not access_token or not handshaker:
logger.info(
"Missing OAuth authentication elements; falling back"
"to another authentication method."
)
# You must have meant to use a different authentication backend.
# Returning None will make Django keep going down its list of
# options.
return None
try:
assert isinstance(access_token, AccessToken)
except AssertionError as e:
logger.exception(e)
return None
# Get identifying information about the user. This doubles as a way
# to authenticate the access token, which only Wikimedia can do,
# and thereby to authenticate the user (which is hard for us to do as
# we have no password.)
logger.info("Identifying user...")
try:
identity = handshaker.identify(access_token, 15)
except OAuthException as e:
logger.warning(e)
messages.add_message(
request,
messages.WARNING,
# Translators: This error message is shown when there's a problem with the authenticated login process.
_("You tried to log in but presented an invalid access token."),
)
raise PermissionDenied
# Get or create the user.
logger.info("User has been identified; getting or creating user.")
user, created = self._get_and_update_user_from_identity(identity)
if created:
try:
user.editor.save()
except AssertionError:
# This was used to handle users not setting a home wiki
# but that information is no longer collected
pass
else:
logger.info("User has been updated.")
request.session["user_created"] = created
# The authenticate() function of a Django auth backend must return
# the user.
return user
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist as e:
logger.exception(e)
return None
class OAuthInitializeView(View):
"""
Ask Wikipedia for a temporary key/secret for the user, and redirect
them to their home Wikipedia to confirm authorization.
"""
def get(self, request, *args, **kwargs):
# The site might be running under multiple URLs, so find out the current
# one (and make sure it's legit).
# The Sites framework was designed for different URLs that correspond to
# different databases or functionality - it's not a good fit here.
domain = self.request.get_host()
try:
assert domain in settings.ALLOWED_HOSTS # safety first!
except (AssertionError, DisallowedHost) as e:
logger.exception(e)
messages.add_message(
request,
messages.WARNING,
# Translators: This message is shown when the OAuth login process fails because the request came from the wrong website. Don't translate {domain}.
_("{domain} is not an allowed host.").format(domain=domain),
)
raise PermissionDenied
# Try to capture the relevant page state, including desired destination
try:
request.session["get"] = request.GET
logger.info("Found get parameters for post-login redirection.")
except Exception as e:
logger.warning(e)
pass
# If the user has already logged in, let's not spam the OAuth provider.
if self.request.user.is_authenticated:
# We're using this twice. Not very DRY.
# Send user either to the destination specified in the 'next'
# parameter or to their own editor detail page.
try:
# Create a QueryDict from the 'get' session dict.
query_dict = QueryDict(urlencode(request.session["get"]), mutable=True)
# Pop the 'next' parameter out of the QueryDict.
next = query_dict.pop("next")
# Set the return url to the value of 'next'. Basic.
return_url = next[0]
# Pop the 'from_homepage' parameter out of the QueryDict.
# We don't need it here.
query_dict.pop("from_homepage", None)
# If there is anything left in the QueryDict after popping
# 'next', append it to the return url. This preserves state
# for filtered lists and redirected form submissions like
# the partner suggestion form.
if query_dict:
return_url += "&" + urlencode(query_dict)
logger.info(
"User is already authenticated. Sending them on "
'for post-login redirection per "next" parameter.'
)
except KeyError as e:
return_url = reverse_lazy("homepage")
logger.warning(e)
return HttpResponseRedirect(return_url)
# If the user isn't logged in
else:
# Get handshaker for the configured wiki oauth URL.
handshaker = _get_handshaker()
logger.info("handshaker gotten.")
try:
redirect, request_token = handshaker.initiate()
except OAuthException as e:
logger.warning(e)
messages.add_message(
request,
messages.WARNING,
# Translators: This warning message is shown to users when OAuth handshaker can't be initiated.
_("Handshaker not initiated, please try logging in again."),
)
raise PermissionDenied
# Create a QueryDict from the 'get' session dict.
query_dict = QueryDict(urlencode(request.session["get"]), mutable=True)
# Pop the 'next' parameter out of the QueryDict.
next = query_dict.pop("next")
# Set the return url to the value of 'next'. Basic.
return_url = next[0]
# Pop the 'from_homepage' parameter out of the QueryDict.
from_homepage = query_dict.pop("from_homepage", None)
if from_homepage:
logger.info("Logging in from homepage, redirecting to Meta login")
local_redirect = _localize_oauth_redirect(redirect)
else:
logger.info(
"Trying to access a link while not logged in, redirecting to homepage"
)
messages.add_message(
request,
messages.INFO,
# fmt: off
# Translators: this message is displayed to users that don't have accounts and clicked on a proxied link.
_("To view this link you need to be an eligible library user. Please login to continue."),
# fmt: on
)
if return_url:
homepage = reverse_lazy("homepage")
local_redirect = "{homepage}?next_url={return_url}".format(
homepage=homepage, return_url=return_url
)
else:
local_redirect = reverse_lazy("homepage")
logger.info("handshaker initiated.")
self.request.session["request_token"] = _dehydrate_token(request_token)
return HttpResponseRedirect(local_redirect)
class OAuthCallbackView(View):
"""
Receive the redirect from Wikipedia and parse the response token.
"""
def get(self, request, *args, **kwargs):
request_meta_qs = request.META["QUERY_STRING"]
request_get = request.GET
response_qs = None
if request_meta_qs:
response_qs = request_meta_qs
elif "oauth_token" in request_get and "oauth_verifier" in request_get:
response_qs = request_get.urlencode()
try:
response_qs_parsed = urllib.parse.parse_qs(response_qs)
assert "oauth_token" in response_qs_parsed
assert "oauth_verifier" in response_qs_parsed
except (AssertionError, TypeError) as e:
logger.warning(e)
messages.add_message(
request,
messages.WARNING,
# Translators: This warning message is shown to users when the response received from Wikimedia OAuth servers is not a valid one.
_("Did not receive a valid oauth response."),
)
raise PermissionDenied
# Get the handshaker. It should have already been constructed by
# OAuthInitializeView.
domain = self.request.get_host()
try:
assert domain in settings.ALLOWED_HOSTS
except (AssertionError, DisallowedHost) as e:
logger.warning(e)
messages.add_message(
request,
messages.WARNING,
# Translators: This message is shown when the OAuth login process fails because the request came from the wrong website. Don't translate {domain}.
_("{domain} is not an allowed host.").format(domain=domain),
)
raise PermissionDenied
try:
handshaker = _get_handshaker()
except AssertionError as e:
# get_handshaker will throw AssertionErrors for invalid data.
logger.warning(e)
messages.add_message(
request,
messages.WARNING,
# Translators: This message is shown when the OAuth login process fails.
_("Could not find handshaker."),
)
raise PermissionDenied
# Get the session token placed by OAuthInitializeView.
session_token = request.session.pop("request_token", None)
if not session_token:
logger.info("No session token.")
messages.add_message(
request,
messages.WARNING,
# Translators: This message is shown when the OAuth login process fails.
_("No session token."),
)
raise PermissionDenied
# Rehydrate it into a request token.
request_token = _rehydrate_token(session_token)
if not request_token:
logger.warning("No request token.")
messages.add_message(
request,
messages.WARNING,
# Translators: This message is shown when the OAuth login process fails.
_("No request token."),
)
raise PermissionDenied
# See if we can complete the OAuth process.
try:
access_token = handshaker.complete(request_token, response_qs)
except OAuthException as e:
logger.warning(e)
messages.add_message(
request,
messages.WARNING,
# Translators: This message is shown when the OAuth login process fails.
_("Access token generation failed."),
)
raise PermissionDenied
user = authenticate(
request=request, access_token=access_token, handshaker=handshaker
)
created = request.session.pop("user_created", False)
if user and not user.is_active:
# Do NOT log in the user.
if created:
messages.add_message(
request,
messages.WARNING,
# fmt: off
# Translators: If the user tries to log in, but their account does not meet certain requirements, they cannot login.
_("Your Wikipedia account does not meet the eligibility criteria in the terms of use, so your Wikipedia Library Card Platform account cannot be activated."),
# fmt: on
)
else:
messages.add_message(
request,
messages.WARNING,
# fmt: off
# Translators: If the user tries to log in, but their account does not meet certain requirements, they cannot login.
_("Your Wikipedia account no longer meets the eligibility criteria in the terms of use, so you cannot be logged in. If you think you should be able to log in, please email [email protected]."),
# fmt: on
)
return_url = reverse_lazy("terms")
elif user:
login(request, user)
if created:
messages.add_message(
request,
messages.INFO,
# Translators: this message is displayed to users with brand new accounts.
_("Welcome! Please agree to the terms of use."),
)
return_url = reverse_lazy("terms")
else:
# We're using this twice. Not very DRY.
# Send user either to the destination specified in the 'next'
# parameter or to their own editor detail page.
if user.userprofile.terms_of_use:
try:
# Create a QueryDict from the 'get' session dict.
query_dict = QueryDict(
urlencode(request.session["get"]), mutable=True
)
# Pop the 'next' parameter out of the QueryDict.
next = query_dict.pop("next")
# Set the return url to the value of 'next'. Basic.
return_url = next[0]
# Pop the 'from_homepage' parameter out of the QueryDict.
# We don't need it here.
query_dict.pop("from_homepage", None)
# If there is anything left in the QueryDict after popping
# 'next', append it to the return url. This preserves state
# for filtered lists and redirected form submissions like
# the partner suggestion form.
if query_dict:
return_url += "&" + urlencode(query_dict)
logger.info(
"User authenticated. Sending them on for "
'post-login redirection per "next" parameter.'
)
except KeyError as e:
return_url = reverse_lazy("homepage")
logger.warning(e)
else:
return_url = reverse_lazy("terms")
else:
return_url = reverse_lazy("homepage")
return HttpResponseRedirect(return_url)
| WikipediaLibrary/TWLight | TWLight/users/oauth.py | Python | mit | 22,204 |
#!/usr/bin/env python
"""
@author: Tobias
"""
"""@brief List of register classes"""
_registerClasses = [
['al', 'ah', 'ax', 'eax', 'rax'],
['bl', 'bh', 'bx', 'ebx', 'rbx'],
['cl', 'ch', 'cx', 'ecx', 'rcx'],
['dl', 'dh', 'dx', 'edx', 'rdx'],
['bpl', 'bp', 'ebp', 'rbp'],
['dil', 'di', 'edi', 'rdi'],
['sil', 'si', 'esi', 'rsi'],
['spl', 'sp', 'esp', 'rsp'],
['r8l', 'r8w', 'r8d', 'r8'],
['r9l', 'r9w', 'r9d', 'r9'],
['r10l', 'r10w', 'r10d', 'r10'],
['r11l', 'r11w', 'r11d', 'r11'],
['r12l', 'r12w', 'r12d', 'r12'],
['r13l', 'r13w', 'r13d', 'r13'],
['r14l', 'r14w', 'r14d', 'r14'],
['r15l', 'r15w', 'r15d', 'r15']
]
def get_reg_class(reg):
"""
@brief Determines the register class of a given reg.
All different register names that address the same register
belong to the same register class e.g.: 'ax' and 'eax'
@param reg name of register
@return register class
"""
    lreg = reg.lower()
    ret_value = None
    for pos, reg_list in enumerate(_registerClasses):
        # 'found' is initialized before the inner loop so an empty register
        # list cannot leave it undefined; the loop variable no longer shadows
        # the 'reg' parameter.
        found = False
        for reg_name in reg_list:
            if reg_name == lreg:
                found = True
                ret_value = pos
                break
        if found:
            break
    return ret_value
def get_reg_by_size(reg_class, reg_size):
"""
@brief Determines the register by its size and class
@param reg_class The register class of the register
@param reg_size The size of the register
@return Name of the register
"""
if reg_class >= len(_registerClasses):
return None
num_regs = len(_registerClasses[reg_class])
if num_regs < 4:
return None
reg_index = -1
if reg_size > 32: # 64-bit regs
reg_index = num_regs - 1
elif reg_size > 16: # 32-bit regs
reg_index = num_regs - 2
elif reg_size > 8: # 16-bit regs
reg_index = num_regs - 3
elif reg_size > 0: # 8-bit regs
reg_index = 0
else:
return None
return _registerClasses[reg_class][reg_index]
def get_size_by_reg(reg):
"""
@brief Determines the size of the given register
@param reg Register
@return Size of register
"""
    lreg = reg.lower()
    reg_class = get_reg_class(lreg)
    if reg_class is None:
        # Unknown register name: avoid indexing _registerClasses with None.
        return None
    num_regs = len(_registerClasses[reg_class])
    for index, test_reg in enumerate(_registerClasses[reg_class]):
        if test_reg == lreg:
            break
    else: # no break
        return None
if index == (num_regs-1):
return 64
elif index == (num_regs-2):
return 32
elif index == (num_regs-3):
return 16
else:
return 8
def get_reg_class_lst(reg_class):
"""
@return Returns the whole list of a given register class
"""
return _registerClasses[reg_class]
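

# Minimal self-check of the helpers above (assumes the x86-64 names listed in
# _registerClasses); run this module directly to try it.
if __name__ == '__main__':
    assert get_reg_class('EAX') == get_reg_class('al') == 0
    assert get_reg_by_size(get_reg_class('ax'), 64) == 'rax'
    assert get_size_by_reg('r8d') == 32
    print('register helpers OK')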
| anatolikalysch/VMAttack | lib/Register.py | Python | mit | 2,772 |
"""Remove brief status column
Revision ID: 590
Revises: 580
Create Date: 2016-03-03 14:56:59.218753
"""
# revision identifiers, used by Alembic.
revision = '590'
down_revision = '580'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.drop_column('briefs', 'status')
def downgrade():
op.add_column('briefs', sa.Column('status', sa.VARCHAR(), autoincrement=False, nullable=True))
op.execute("""
UPDATE briefs SET status = (CASE WHEN published_at is not NULL THEN 'live' ELSE 'draft' END)
""")
    op.alter_column('briefs', 'status', existing_type=sa.VARCHAR(), nullable=False)
| alphagov/digitalmarketplace-api | migrations/versions/590_remove_brief_status_column.py | Python | mit | 619 |
class Penguin(object):
def __init__(self, name, mood, id=None):
self.name = name
self.mood = mood
self.id = id
def __repr__(self):
return '< %s the %s penguin >' % (self.name, self.mood)
class Goose(object):
def __init__(self, name, favorite_penguin, id=None):
self.name = name
self.favorite_penguin = favorite_penguin
self.id = id
def __repr__(self):
template = '< %s, the goose that likes %s >'
return template % (self.name, repr(self.favorite_penguin))
| natb1/query-tools | tests/examples/fixture_model.py | Python | mit | 554 |
from unittest import TestCase
from firstinbattle.deck import Card
from firstinbattle.json_util import js
class TestJson(TestCase):
def test_encode_loads(self):
cards = {
Card(5, 'diamond'),
Card(9, 'heart'),
}
encoded_str = js.encode({
'message': 'test_msg',
'cards': cards,
})
decoded_obj = js.loads(encoded_str)
self.assertEqual(decoded_obj['message'], 'test_msg')
for card in cards:
self.assertIn(
{'number': card.number, 'suit': card.suit},
decoded_obj['cards']
)
| mpharrigan/firstinbattle | firstinbattle/tests/test_json.py | Python | mit | 636 |
from collections import OrderedDict
n = int(input())
occurrences = OrderedDict()
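# Count each word in input order; OrderedDict keeps the first-seen order
# explicit (plain dicts only guarantee insertion order from Python 3.7 on).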
for _ in range(0, n):
word = input().strip()
occurrences[word] = occurrences.get(word, 0) + 1
print(len(occurrences))
print(sep=' ', *[count for _, count in occurrences.items()])
| alexander-matsievsky/HackerRank | All_Domains/Python/Collections/word-order.py | Python | mit | 269 |
from distutils.core import setup
setup(
name = 'ical_dict',
packages = ['ical_dict'],
version = '0.2',
description = 'A Python library to convert an .ics file into a Dictionary object.',
author = 'Jay Ravaliya',
author_email = '[email protected]',
url = 'https://github.com/jayrav13/ical_dict',
download_url = 'https://github.com/jayrav13/ical_dict/tarball/0.2',
keywords = ['calendar', 'ical', 'ics', 'json', 'dictionary', 'python'],
classifiers = [],
)
| jayrav13/ical_dict | setup.py | Python | mit | 497 |
import os, pygame
#create window of correct size (320x200, with some multiple)
x = 320
y = 200
size_mult = 4
bright_mult = 4
pygame.init()
os.environ['SDL_VIDEO_WINDOW_POS'] = str(0) + "," + str(40) #put window in consistent location
screen = pygame.display.set_mode((x*size_mult, y*size_mult))
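# screen2 appears to be the native 320x200 backbuffer that is later scaled up
# by size_mult onto the window (an assumption; the blit/scale code is outside
# this file).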
screen2 = pygame.Surface((x,y)) | delMar43/wcmodtoolsources | WC1_clone/room_engine/win_init.py | Python | mit | 424 |
from twisted.internet import defer
from nodeset.common import log
from nodeset.core import config
class Observer(object):
def __init__(self, callable, *args, **kwargs):
self.callable = callable
self.args = args
self.kwargs = kwargs
#print "-- %s, %s" % (self.args, self.kwargs)
self.assertfunc = lambda x: True
def setAssert(self, assertfunc):
self.assertfunc = assertfunc
def run(self, *args, **kwargs):
a = tuple(list(args) + list(self.args))
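        # Merge call-time kwargs with those captured at registration.
        # Note: dict(a.items() + b.items()) is a Python 2 idiom; under
        # Python 3 this would need e.g. dict(kwargs, **self.kwargs).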
kw = dict(kwargs.items() + self.kwargs.items())
return self.callable(*a, **kw)
class ObserverCarousel(object):
def twist(self, observers, eventDict):
defers = []
if config.Configurator['verbose']:
log.msg("twist carousel %s, %s" % (observers, eventDict))
for i in observers:
defers.append(defer.maybeDeferred(i.run, eventDict))
return defers
| selfsk/nodeset.core | src/nodeset/core/observer.py | Python | mit | 1,039 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class LocalNetworkGatewaysOperations(object):
"""LocalNetworkGatewaysOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2018_01_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def _create_or_update_initial(
self,
resource_group_name, # type: str
local_network_gateway_name, # type: str
parameters, # type: "_models.LocalNetworkGateway"
**kwargs # type: Any
):
# type: (...) -> "_models.LocalNetworkGateway"
cls = kwargs.pop('cls', None) # type: ClsType["_models.LocalNetworkGateway"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-01-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json, text/json"
# Construct URL
url = self._create_or_update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'localNetworkGatewayName': self._serialize.url("local_network_gateway_name", local_network_gateway_name, 'str', min_length=1),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'LocalNetworkGateway')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('LocalNetworkGateway', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('LocalNetworkGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/localNetworkGateways/{localNetworkGatewayName}'} # type: ignore
def begin_create_or_update(
self,
resource_group_name, # type: str
local_network_gateway_name, # type: str
parameters, # type: "_models.LocalNetworkGateway"
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.LocalNetworkGateway"]
"""Creates or updates a local network gateway in the specified resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param local_network_gateway_name: The name of the local network gateway.
:type local_network_gateway_name: str
:param parameters: Parameters supplied to the create or update local network gateway operation.
:type parameters: ~azure.mgmt.network.v2018_01_01.models.LocalNetworkGateway
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either LocalNetworkGateway or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2018_01_01.models.LocalNetworkGateway]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.LocalNetworkGateway"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._create_or_update_initial(
resource_group_name=resource_group_name,
local_network_gateway_name=local_network_gateway_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('LocalNetworkGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'localNetworkGatewayName': self._serialize.url("local_network_gateway_name", local_network_gateway_name, 'str', min_length=1),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/localNetworkGateways/{localNetworkGatewayName}'} # type: ignore
def get(
self,
resource_group_name, # type: str
local_network_gateway_name, # type: str
**kwargs # type: Any
):
# type: (...) -> "_models.LocalNetworkGateway"
"""Gets the specified local network gateway in a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param local_network_gateway_name: The name of the local network gateway.
:type local_network_gateway_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: LocalNetworkGateway, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2018_01_01.models.LocalNetworkGateway
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.LocalNetworkGateway"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-01-01"
accept = "application/json, text/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'localNetworkGatewayName': self._serialize.url("local_network_gateway_name", local_network_gateway_name, 'str', min_length=1),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('LocalNetworkGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/localNetworkGateways/{localNetworkGatewayName}'} # type: ignore
def _delete_initial(
self,
resource_group_name, # type: str
local_network_gateway_name, # type: str
**kwargs # type: Any
):
# type: (...) -> None
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-01-01"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'localNetworkGatewayName': self._serialize.url("local_network_gateway_name", local_network_gateway_name, 'str', min_length=1),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/localNetworkGateways/{localNetworkGatewayName}'} # type: ignore
def begin_delete(
self,
resource_group_name, # type: str
local_network_gateway_name, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller[None]
"""Deletes the specified local network gateway.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param local_network_gateway_name: The name of the local network gateway.
:type local_network_gateway_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._delete_initial(
resource_group_name=resource_group_name,
local_network_gateway_name=local_network_gateway_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'localNetworkGatewayName': self._serialize.url("local_network_gateway_name", local_network_gateway_name, 'str', min_length=1),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/localNetworkGateways/{localNetworkGatewayName}'} # type: ignore
def _update_tags_initial(
self,
resource_group_name, # type: str
local_network_gateway_name, # type: str
parameters, # type: "_models.TagsObject"
**kwargs # type: Any
):
# type: (...) -> "_models.LocalNetworkGateway"
cls = kwargs.pop('cls', None) # type: ClsType["_models.LocalNetworkGateway"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-01-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json, text/json"
# Construct URL
url = self._update_tags_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'localNetworkGatewayName': self._serialize.url("local_network_gateway_name", local_network_gateway_name, 'str', min_length=1),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'TagsObject')
body_content_kwargs['content'] = body_content
request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('LocalNetworkGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_update_tags_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/localNetworkGateways/{localNetworkGatewayName}'} # type: ignore
def begin_update_tags(
self,
resource_group_name, # type: str
local_network_gateway_name, # type: str
parameters, # type: "_models.TagsObject"
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.LocalNetworkGateway"]
"""Updates a local network gateway tags.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param local_network_gateway_name: The name of the local network gateway.
:type local_network_gateway_name: str
:param parameters: Parameters supplied to update local network gateway tags.
:type parameters: ~azure.mgmt.network.v2018_01_01.models.TagsObject
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either LocalNetworkGateway or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2018_01_01.models.LocalNetworkGateway]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.LocalNetworkGateway"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._update_tags_initial(
resource_group_name=resource_group_name,
local_network_gateway_name=local_network_gateway_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('LocalNetworkGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'localNetworkGatewayName': self._serialize.url("local_network_gateway_name", local_network_gateway_name, 'str', min_length=1),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_update_tags.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/localNetworkGateways/{localNetworkGatewayName}'} # type: ignore
def list(
self,
resource_group_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.LocalNetworkGatewayListResult"]
"""Gets all the local network gateways in a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either LocalNetworkGatewayListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2018_01_01.models.LocalNetworkGatewayListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.LocalNetworkGatewayListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-01-01"
accept = "application/json, text/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('LocalNetworkGatewayListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/localNetworkGateways'} # type: ignore
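
# Rough usage sketch (not part of the generated file): these operations are
# normally reached through a NetworkManagementClient rather than by
# instantiating this class directly. Names below are placeholders.
#
#     from azure.identity import DefaultAzureCredential
#     from azure.mgmt.network import NetworkManagementClient
#
#     client = NetworkManagementClient(DefaultAzureCredential(), "<subscription-id>")
#     poller = client.local_network_gateways.begin_delete("my-rg", "my-lngw")
#     poller.result()  # blocks until the long-running operation completes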
| Azure/azure-sdk-for-python | sdk/network/azure-mgmt-network/azure/mgmt/network/v2018_01_01/operations/_local_network_gateways_operations.py | Python | mit | 27,633 |
# check_full_toc.py - Unit tests for SWIG-based libcueify full TOC APIs
#
# Copyright (c) 2011 Ian Jacobi <[email protected]>
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
# files (the "Software"), to deal in the Software without
# restriction, including without limitation the rights to use, copy,
# modify, merge, publish, distribute, sublicense, and/or sell copies
# of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# KLUDGE to allow tests to work.
import sys
sys.path.insert(0, '../../build/swig/python')
import cueify
import struct
import unittest
# Create a binary track descriptor from a full TOC.
def TRACK_DESCRIPTOR(session, adr, ctrl, track,
abs_min, abs_sec, abs_frm, min, sec, frm):
return [session, (((adr & 0xF) << 4) | (ctrl & 0xF)), 0, track,
abs_min, abs_sec, abs_frm, 0, min, sec, frm]
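# Each descriptor is 11 bytes, mirroring the READ TOC/PMA/ATIP full-TOC
# response format: session, packed ADR/CTRL byte, TNO (0), POINT, running-time
# MSF, a zero byte, then the POINT's MSF address. The header below holds a
# big-endian 16-bit data length -- (13 tracks + 2 sessions * 3 pseudo-track
# entries) * 11 bytes + 2 -- followed by the first (1) and last (2) session.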
serialized_mock_full_toc = [(((13 + 2 * 3) * 11 + 2) >> 8),
(((13 + 2 * 3) * 11 + 2) & 0xFF), 1, 2]
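# For each session: pseudo-track POINTs 0xA0 (first track number + disc type),
# 0xA1 (last track number) and 0xA2 (lead-out start address) come first,
# followed by the real track entries.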
serialized_mock_full_toc.extend(
TRACK_DESCRIPTOR(1, 1, 4, 0xA0, 0, 0, 0, 1, cueify.SESSION_MODE_1, 0))
serialized_mock_full_toc.extend(
TRACK_DESCRIPTOR(1, 1, 4, 0xA1, 0, 0, 0, 12, 0, 0))
serialized_mock_full_toc.extend(
TRACK_DESCRIPTOR(1, 1, 4, 0xA2, 0, 0, 0, 51, 44, 26))
serialized_mock_full_toc.extend(
TRACK_DESCRIPTOR(1, 1, 4, 1, 0, 0, 0, 0, 2, 0))
serialized_mock_full_toc.extend(
TRACK_DESCRIPTOR(1, 1, 4, 2, 0, 0, 0, 4, 47, 70))
serialized_mock_full_toc.extend(
TRACK_DESCRIPTOR(1, 1, 4, 3, 0, 0, 0, 7, 42, 57))
serialized_mock_full_toc.extend(
TRACK_DESCRIPTOR(1, 1, 4, 4, 0, 0, 0, 13, 47, 28))
serialized_mock_full_toc.extend(
TRACK_DESCRIPTOR(1, 1, 4, 5, 0, 0, 0, 18, 28, 50))
serialized_mock_full_toc.extend(
TRACK_DESCRIPTOR(1, 1, 4, 6, 0, 0, 0, 21, 56, 70))
serialized_mock_full_toc.extend(
TRACK_DESCRIPTOR(1, 1, 4, 7, 0, 0, 0, 24, 56, 74))
serialized_mock_full_toc.extend(
TRACK_DESCRIPTOR(1, 1, 4, 8, 0, 0, 0, 30, 10, 55))
serialized_mock_full_toc.extend(
TRACK_DESCRIPTOR(1, 1, 4, 9, 0, 0, 0, 34, 17, 20))
serialized_mock_full_toc.extend(
TRACK_DESCRIPTOR(1, 1, 4, 10, 0, 0, 0, 39, 18, 66))
serialized_mock_full_toc.extend(
TRACK_DESCRIPTOR(1, 1, 4, 11, 0, 0, 0, 43, 16, 40))
serialized_mock_full_toc.extend(
TRACK_DESCRIPTOR(1, 1, 4, 12, 0, 0, 0, 47, 27, 61))
serialized_mock_full_toc.extend(
TRACK_DESCRIPTOR(2, 1, 6, 0xA0, 0, 0, 0, 13, cueify.SESSION_MODE_2, 0))
serialized_mock_full_toc.extend(
TRACK_DESCRIPTOR(2, 1, 6, 0xA1, 0, 0, 0, 13, 0, 0))
serialized_mock_full_toc.extend(
TRACK_DESCRIPTOR(2, 1, 6, 0xA2, 0, 0, 0, 57, 35, 13))
serialized_mock_full_toc.extend(
TRACK_DESCRIPTOR(2, 1, 6, 13, 1, 2, 3, 54, 16, 26))
class TestFullTOCFunctions(unittest.TestCase):
def test_serialization(self):
        # Test both deserialization and serialization (since, unlike
        # in the C code, the Python library does not support directly
        # specifying the mock TOC).
full_toc = cueify.FullTOC()
self.assertTrue(
full_toc.deserialize(
struct.pack(
"B" * len(serialized_mock_full_toc),
*serialized_mock_full_toc)))
s = full_toc.serialize()
self.assertEqual(full_toc.errorCode, cueify.OK)
self.assertEqual(len(s), len(serialized_mock_full_toc))
self.assertEqual(
s,
struct.pack(
"B" * len(serialized_mock_full_toc),
*serialized_mock_full_toc))
def test_getters(self):
full_toc = cueify.FullTOC()
self.assertTrue(
full_toc.deserialize(
struct.pack(
"B" * len(serialized_mock_full_toc),
*serialized_mock_full_toc)))
self.assertEqual(full_toc.firstSession, 1)
self.assertEqual(full_toc.lastSession, 2)
self.assertEqual(len(full_toc.tracks), 13)
self.assertEqual(full_toc.tracks[0].session, 1)
self.assertEqual(full_toc.tracks[12].session, 2)
self.assertEqual(full_toc.tracks[0].controlFlags, 4)
self.assertEqual(full_toc.tracks[12].controlFlags, 6)
self.assertEqual(full_toc.tracks[0].subQChannelFormat, 1)
self.assertEqual(full_toc.tracks[12].subQChannelFormat, 1)
self.assertEqual(len(full_toc.sessions), 2)
self.assertEqual(len(full_toc.sessions[0].pseudotracks), 3)
self.assertEqual(full_toc.sessions[0].pseudotracks[cueify.FULL_TOC_FIRST_TRACK_PSEUDOTRACK].controlFlags, 4)
self.assertEqual(full_toc.sessions[0].pseudotracks[cueify.FULL_TOC_LAST_TRACK_PSEUDOTRACK].controlFlags, 4)
self.assertEqual(full_toc.sessions[0].pseudotracks[cueify.FULL_TOC_LEAD_OUT_TRACK].controlFlags, 4)
self.assertEqual(full_toc.sessions[1].pseudotracks[cueify.FULL_TOC_LEAD_OUT_TRACK].controlFlags, 6)
self.assertEqual(full_toc.sessions[0].pseudotracks[cueify.FULL_TOC_FIRST_TRACK_PSEUDOTRACK].subQChannelFormat, 1)
self.assertEqual(full_toc.sessions[0].pseudotracks[cueify.FULL_TOC_LAST_TRACK_PSEUDOTRACK].subQChannelFormat, 1)
self.assertEqual(full_toc.sessions[0].pseudotracks[cueify.FULL_TOC_LEAD_OUT_TRACK].subQChannelFormat, 1)
self.assertEqual(full_toc.sessions[1].pseudotracks[cueify.FULL_TOC_LEAD_OUT_TRACK].subQChannelFormat, 1)
self.assertEqual(full_toc.tracks[0].pointAddress.min, 0)
self.assertEqual(full_toc.tracks[0].pointAddress.sec, 0)
self.assertEqual(full_toc.tracks[0].pointAddress.frm, 0)
self.assertEqual(full_toc.sessions[1].pseudotracks[cueify.FULL_TOC_LEAD_OUT_TRACK].pointAddress.min, 0)
self.assertEqual(full_toc.sessions[1].pseudotracks[cueify.FULL_TOC_LEAD_OUT_TRACK].pointAddress.sec, 0)
self.assertEqual(full_toc.sessions[1].pseudotracks[cueify.FULL_TOC_LEAD_OUT_TRACK].pointAddress.frm, 0)
self.assertEqual(full_toc.tracks[12].pointAddress.min, 1)
self.assertEqual(full_toc.tracks[12].pointAddress.sec, 2)
self.assertEqual(full_toc.tracks[12].pointAddress.frm, 3)
self.assertEqual(full_toc.tracks[0].address.min, 0)
self.assertEqual(full_toc.tracks[0].address.sec, 2)
self.assertEqual(full_toc.tracks[0].address.frm, 0)
self.assertEqual(full_toc.tracks[12].address.min, 54)
self.assertEqual(full_toc.tracks[12].address.sec, 16)
self.assertEqual(full_toc.tracks[12].address.frm, 26)
self.assertEqual(full_toc.sessions[0].firstTrack, 1)
self.assertEqual(full_toc.sessions[1].firstTrack, 13)
self.assertEqual(full_toc.sessions[0].lastTrack, 12)
self.assertEqual(full_toc.sessions[1].lastTrack, 13)
self.assertEqual(full_toc.firstTrack, 1)
self.assertEqual(full_toc.lastTrack, 13)
self.assertEqual(full_toc.sessions[0].type, cueify.SESSION_MODE_1)
self.assertEqual(full_toc.sessions[1].type, cueify.SESSION_MODE_2)
self.assertEqual(full_toc.sessions[1].leadoutAddress.min, 57)
self.assertEqual(full_toc.sessions[1].leadoutAddress.sec, 35)
self.assertEqual(full_toc.sessions[1].leadoutAddress.frm, 13)
self.assertEqual(full_toc.discLength.min, 57)
self.assertEqual(full_toc.discLength.sec, 35)
self.assertEqual(full_toc.discLength.frm, 13)
self.assertEqual(full_toc.tracks[11].length.min, 4)
self.assertEqual(full_toc.tracks[11].length.sec, 16)
self.assertEqual(full_toc.tracks[11].length.frm, 40)
self.assertEqual(full_toc.sessions[1].length.min, 3)
self.assertEqual(full_toc.sessions[1].length.sec, 18)
self.assertEqual(full_toc.sessions[1].length.frm, 62)
if __name__ == '__main__':
unittest.main()
| pipian/libcueify | tests/swig/check_full_toc.py | Python | mit | 8,463 |
from __future__ import absolute_import, unicode_literals
from qproject.celery import app as celery_app
__all__ = ['celery_app']
| KirovVerst/qproject | qproject/__init__.py | Python | mit | 130 |
# range_ex.py Test of asynchronous mqtt client with clean session False.
# Extended version publishes SSID
# (C) Copyright Peter Hinch 2017-2019.
# Released under the MIT licence.
# Public brokers https://github.com/mqtt/mqtt.github.io/wiki/public_brokers
# This demo is for wireless range tests. If OOR the red LED will light.
# In range the blue LED will pulse for each received message.
# Uses clean sessions to avoid backlog when OOR.
# red LED: ON == WiFi fail
# blue LED pulse == message received
# Publishes connection statistics.
from mqtt_as import MQTTClient, config
from config import wifi_led, blue_led
import uasyncio as asyncio
import network
import gc
TOPIC = 'shed' # For demo publication and last will use same topic
outages = 0
rssi = -199 # Effectively zero signal in dB.
async def pulse(): # This demo pulses blue LED each time a subscribed msg arrives.
blue_led(True)
await asyncio.sleep(1)
blue_led(False)
def sub_cb(topic, msg, retained):
print((topic, msg))
asyncio.create_task(pulse())
# The only way to measure RSSI is via scan(). Alas scan() blocks so the code
# causes the obvious uasyncio issues.
async def get_rssi():
global rssi
s = network.WLAN()
ssid = config['ssid'].encode('UTF8')
while True:
try:
rssi = [x[3] for x in s.scan() if x[0] == ssid][0]
except IndexError: # ssid not found.
rssi = -199
await asyncio.sleep(30)
async def wifi_han(state):
global outages
wifi_led(not state) # Light LED when WiFi down
if state:
print('We are connected to broker.')
else:
outages += 1
print('WiFi or broker is down.')
await asyncio.sleep(1)
async def conn_han(client):
await client.subscribe('foo_topic', 1)
async def main(client):
try:
await client.connect()
except OSError:
print('Connection failed.')
return
n = 0
s = '{} repubs: {} outages: {} rssi: {}dB free: {}bytes'
while True:
await asyncio.sleep(5)
gc.collect()
m = gc.mem_free()
print('publish', n)
# If WiFi is down the following will pause for the duration.
await client.publish(TOPIC, s.format(n, client.REPUB_COUNT, outages, rssi, m), qos = 1)
n += 1
# Define configuration
config['subs_cb'] = sub_cb
config['wifi_coro'] = wifi_han
config['will'] = (TOPIC, 'Goodbye cruel world!', False, 0)
config['connect_coro'] = conn_han
config['keepalive'] = 120
# Set up client. Enable optional debug statements.
MQTTClient.DEBUG = True
client = MQTTClient(config)
asyncio.create_task(get_rssi())
try:
asyncio.run(main(client))
finally: # Prevent LmacRxBlk:1 errors.
client.close()
blue_led(True)
asyncio.new_event_loop()
| peterhinch/micropython-mqtt | mqtt_as/range_ex.py | Python | mit | 2,773 |
import subprocess
import os
def start_service():
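    # Launch the UI Automation helper under IronPython; assumes ipy.exe is on
    # PATH and start_srv.py is in the working directory.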
subprocess.Popen("ipy start_srv.py", stdout=subprocess.PIPE)
return 0
def close_service():
os.system("taskkill /im ipy.exe /f")
| QuentinJi/pyuiautomation | initial_work.py | Python | mit | 193 |
from openslides.core.config import config
from openslides.motions.exceptions import WorkflowError
from openslides.motions.models import Motion, State, Workflow
from openslides.users.models import User
from openslides.utils.test import TestCase
class ModelTest(TestCase):
def setUp(self):
self.motion = Motion.objects.create(title='v1')
self.test_user = User.objects.create(username='blub')
# Use the simple workflow
self.workflow = Workflow.objects.get(pk=1)
def test_create_new_version(self):
motion = self.motion
self.assertEqual(motion.versions.count(), 1)
# new data, but no new version
motion.title = 'new title'
motion.save()
self.assertEqual(motion.versions.count(), 1)
# new data and new version
motion.text = 'new text'
motion.save(use_version=motion.get_new_version())
self.assertEqual(motion.versions.count(), 2)
self.assertEqual(motion.title, 'new title')
self.assertEqual(motion.text, 'new text')
def test_version_data(self):
motion = Motion()
self.assertEqual(motion.title, '')
        with self.assertRaises(AttributeError):
            motion._title
motion.title = 'title'
self.assertEqual(motion._title, 'title')
motion.text = 'text'
self.assertEqual(motion._text, 'text')
motion.reason = 'reason'
self.assertEqual(motion._reason, 'reason')
def test_version(self):
motion = self.motion
motion.title = 'v2'
motion.save(use_version=motion.get_new_version())
motion.title = 'v3'
motion.save(use_version=motion.get_new_version())
with self.assertRaises(AttributeError):
self._title
self.assertEqual(motion.title, 'v3')
def test_supporter(self):
self.assertFalse(self.motion.is_supporter(self.test_user))
self.motion.supporters.add(self.test_user)
self.assertTrue(self.motion.is_supporter(self.test_user))
self.motion.supporters.remove(self.test_user)
self.assertFalse(self.motion.is_supporter(self.test_user))
def test_state(self):
self.motion.reset_state()
self.assertEqual(self.motion.state.name, 'submitted')
self.motion.state = State.objects.get(pk=5)
self.assertEqual(self.motion.state.name, 'published')
with self.assertRaises(WorkflowError):
self.motion.create_poll()
self.motion.state = State.objects.get(pk=6)
self.assertEqual(self.motion.state.name, 'permitted')
self.assertEqual(self.motion.state.get_action_word(), 'Permit')
self.assertFalse(self.motion.get_allowed_actions(self.test_user)['support'])
self.assertFalse(self.motion.get_allowed_actions(self.test_user)['unsupport'])
def test_new_states_or_workflows(self):
workflow_1 = Workflow.objects.create(name='W1')
state_1 = State.objects.create(name='S1', workflow=workflow_1)
workflow_1.first_state = state_1
workflow_1.save()
workflow_2 = Workflow.objects.create(name='W2')
state_2 = State.objects.create(name='S2', workflow=workflow_2)
workflow_2.first_state = state_2
workflow_2.save()
state_3 = State.objects.create(name='S3', workflow=workflow_1)
with self.assertRaises(WorkflowError):
workflow_2.first_state = state_3
workflow_2.save()
with self.assertRaises(WorkflowError):
state_1.next_states.add(state_2)
state_1.save()
def test_two_empty_identifiers(self):
Motion.objects.create(title='foo', text='bar', identifier='')
Motion.objects.create(title='foo2', text='bar2', identifier='')
def test_do_not_create_new_version_when_permit_old_version(self):
motion = Motion()
motion.title = 'foo'
motion.text = 'bar'
motion.save()
first_version = motion.get_last_version()
motion = Motion.objects.get(pk=motion.pk)
motion.title = 'New Title'
motion.save(use_version=motion.get_new_version())
new_version = motion.get_last_version()
self.assertEqual(motion.versions.count(), 2)
motion.active_version = new_version
motion.save()
self.assertEqual(motion.versions.count(), 2)
motion.active_version = first_version
motion.save(use_version=False)
self.assertEqual(motion.versions.count(), 2)
def test_unicode_with_no_active_version(self):
motion = Motion.objects.create(
title='test_title_Koowoh1ISheemeey1air',
text='test_text_zieFohph0doChi1Uiyoh',
identifier='test_identifier_VohT1hu9uhiSh6ooVBFS')
motion.active_version = None
motion.save(update_fields=['active_version'])
# motion.__unicode__() raised an AttributeError
self.assertEqual(str(motion), 'test_title_Koowoh1ISheemeey1air')
def test_is_amendment(self):
config['motions_amendments_enabled'] = True
amendment = Motion.objects.create(title='amendment', parent=self.motion)
self.assertTrue(amendment.is_amendment())
self.assertFalse(self.motion.is_amendment())
def test_set_identifier_allready_set(self):
"""
If the motion already has a identifier, the method does nothing.
"""
motion = Motion(identifier='My test identifier')
motion.set_identifier()
self.assertEqual(motion.identifier, 'My test identifier')
def test_set_identifier_manually(self):
"""
If the config is set to manually, the method does nothing.
"""
config['motions_identifier'] = 'manually'
motion = Motion()
motion.set_identifier()
# If the identifier should be set manually, the method does nothing
self.assertIsNone(motion.identifier)
def test_set_identifier_amendment(self):
"""
If the motion is an amendment, the identifier is the identifier from the
parent + a suffix.
"""
config['motions_amendments_enabled'] = True
self.motion.identifier = 'Parent identifier'
self.motion.save()
motion = Motion(parent=self.motion)
motion.set_identifier()
self.assertEqual(motion.identifier, 'Parent identifier A 1')
def test_set_identifier_second_amendment(self):
"""
If a motion has already an amendment, the second motion gets another
identifier.
"""
config['motions_amendments_enabled'] = True
self.motion.identifier = 'Parent identifier'
self.motion.save()
Motion.objects.create(title='Amendment1', parent=self.motion)
motion = Motion(parent=self.motion)
motion.set_identifier()
self.assertEqual(motion.identifier, 'Parent identifier A 2')
class ConfigTest(TestCase):
def test_stop_submitting(self):
self.assertFalse(config['motions_stop_submitting'])
| rolandgeider/OpenSlides | tests/old/motions/test_models.py | Python | mit | 7,023 |
>>> myTuple = (1, 2, 3)
>>> myTuple[1]
2
>>> myTuple[1:3]
(2, 3)
| schmit/intro-python-course | lectures/code/tuples_basics.py | Python | mit | 65 |
# -*- coding: utf-8 -*-
"""
eve.methods.post
~~~~~~~~~~~~~~~~
    This module implements the POST method, supported by the resource
    endpoints.
:copyright: (c) 2015 by Nicola Iarocci.
:license: BSD, see LICENSE for more details.
"""
from datetime import datetime
from flask import current_app as app, abort
from eve.utils import config, parse_request, debug_error_message
from eve.auth import requires_auth
from eve.defaults import resolve_default_values
from eve.validation import ValidationError
from eve.methods.common import parse, payload, ratelimit, \
pre_event, store_media_files, resolve_user_restricted_access, \
resolve_embedded_fields, build_response_document, marshal_write_response, \
resolve_sub_resource_path, resolve_document_etag, oplog_push
from eve.versioning import resolve_document_version, \
insert_versioning_documents
@ratelimit()
@requires_auth('resource')
@pre_event
def post(resource, payl=None):
"""
    Default function for handling POST requests. It has decorators for
    rate limiting, authentication and for raising pre-request events. After
    the decorators are applied, the call is forwarded to :func:`post_internal`.
.. versionchanged:: 0.5
Split original post() into post/post_internal combo.
"""
return post_internal(resource, payl, skip_validation=False)
def post_internal(resource, payl=None, skip_validation=False):
"""
Intended for internal post calls, this method is not rate limited,
authentication is not checked and pre-request events are not raised.
Adds one or more documents to a resource. Each document is validated
against the domain schema. If validation passes the document is inserted
and ID_FIELD, LAST_UPDATED and DATE_CREATED along with a link to the
document are returned. If validation fails, a list of validation issues
is returned.
:param resource: name of the resource involved.
:param payl: alternative payload. When calling post() from your own code
you can provide an alternative payload. This can be useful,
for example, when you have a callback function hooked to a
certain endpoint, and want to perform additional post() calls
from there.
Please be advised that in order to successfully use this
option, a request context must be available.
See https://github.com/nicolaiarocci/eve/issues/74 for a
discussion, and a typical use case.
:param skip_validation: skip payload validation before write (bool)
.. versionchanged:: 0.6
Fix: since v0.6, skip_validation = True causes a 422 response (#726).
.. versionchanged:: 0.6
Initialize DELETED field when soft_delete is enabled.
.. versionchanged:: 0.5
       Back to resolving default values after validation, as the validator
       can now properly validate dependencies even when some have default
       values. See #353.
Push updates to the OpLog.
Original post() has been split into post() and post_internal().
ETAGS are now stored with documents (#369).
.. versionchanged:: 0.4
Resolve default values before validation is performed. See #353.
Support for document versioning.
.. versionchanged:: 0.3
Return 201 if at least one document has been successfully inserted.
Fix #231 auth field not set if resource level authentication is set.
Support for media fields.
When IF_MATCH is disabled, no etag is included in the payload.
Support for new validation format introduced with Cerberus v0.5.
.. versionchanged:: 0.2
Use the new STATUS setting.
Use the new ISSUES setting.
Raise 'on_pre_<method>' event.
       Explicitly resolve default values instead of letting them be resolved
by common.parse. This avoids a validation error when a read-only field
also has a default value.
Added ``on_inserted*`` events after the database insert
.. versionchanged:: 0.1.1
auth.request_auth_value is now used to store the auth_field value.
.. versionchanged:: 0.1.0
More robust handling of auth_field.
Support for optional HATEOAS.
    .. versionchanged:: 0.0.9
       Event hooks renamed to be more robust and consistent: 'on_posting'
       renamed to 'on_insert'.
       You can now pass a pre-defined custom payload to the function.
.. versionchanged:: 0.0.9
Storing self.app.auth.userid in auth_field when 'user-restricted
resource access' is enabled.
    .. versionchanged:: 0.0.7
Support for Rate-Limiting.
Support for 'extra_response_fields'.
'on_posting' and 'on_posting_<resource>' events are raised before the
documents are inserted into the database. This allows callback functions
to arbitrarily edit/update the documents being stored.
.. versionchanged:: 0.0.6
Support for bulk inserts.
Please note: validation constraints are checked against the database,
and not between the payload documents themselves. This causes an
interesting corner case: in the event of a multiple documents payload
where two or more documents carry the same value for a field where the
'unique' constraint is set, the payload will validate successfully, as
there are no duplicates in the database (yet). If this is an issue, the
client can always send the documents once at a time for insertion, or
validate locally before submitting the payload to the API.
.. versionchanged:: 0.0.5
Support for 'application/json' Content-Type .
Support for 'user-restricted resource access'.
.. versionchanged:: 0.0.4
Added the ``requires_auth`` decorator.
.. versionchanged:: 0.0.3
       JSON links. Superfluous ``response`` container removed.
"""
date_utc = datetime.utcnow().replace(microsecond=0)
resource_def = app.config['DOMAIN'][resource]
schema = resource_def['schema']
validator = None if skip_validation else app.validator(schema, resource)
documents = []
results = []
failures = 0
if config.BANDWIDTH_SAVER is True:
embedded_fields = []
else:
req = parse_request(resource)
embedded_fields = resolve_embedded_fields(resource, req)
# validation, and additional fields
if payl is None:
payl = payload()
# print "\n\ninside eve post\n\n***************************************"
# print embedded_fields
# print "payl "
# print payl
'''
Added by : LHearen
E-mail : [email protected]
Description: Used to construct our own RESTful interfaces - but the extra
items should not be stored in DB;
'''
if "_id" in payl:
payl["_id"] = '27167fe7-fc9d-47d5-9cd0-717106ef67be'
if "Module" in payl:
del payl["Module"]
if "Method" in payl:
del payl["Method"]
# print "payl "
# print payl
# print "resource "
# print resource
# print "\n\nend here"
if isinstance(payl, dict):
payl = [payl]
if not payl:
        # empty bulk insert
abort(400, description=debug_error_message(
'Empty bulk insert'
))
if len(payl) > 1 and not config.DOMAIN[resource]['bulk_enabled']:
abort(400, description=debug_error_message(
'Bulk insert not allowed'
))
for value in payl:
document = []
doc_issues = {}
try:
document = parse(value, resource)
resolve_sub_resource_path(document, resource)
if skip_validation:
validation = True
else:
validation = validator.validate(document)
if validation: # validation is successful
            # validator might not be available if skip_validation. #726.
if validator:
# Apply coerced values
document = validator.document
# Populate meta and default fields
document[config.LAST_UPDATED] = \
document[config.DATE_CREATED] = date_utc
if config.DOMAIN[resource]['soft_delete'] is True:
document[config.DELETED] = False
resolve_user_restricted_access(document, resource)
resolve_default_values(document, resource_def['defaults'])
store_media_files(document, resource)
resolve_document_version(document, resource, 'POST')
else:
# validation errors added to list of document issues
doc_issues = validator.errors
except ValidationError as e:
doc_issues['validation exception'] = str(e)
except Exception as e:
# most likely a problem with the incoming payload, report back to
# the client as if it was a validation issue
app.logger.exception(e)
doc_issues['exception'] = str(e)
if len(doc_issues):
document = {
config.STATUS: config.STATUS_ERR,
config.ISSUES: doc_issues,
}
failures += 1
documents.append(document)
if failures:
        # If at least one document has issues, the whole request fails and a
        # ``422 Unprocessable Entity`` status is returned.
for document in documents:
if config.STATUS in document \
and document[config.STATUS] == config.STATUS_ERR:
results.append(document)
else:
results.append({config.STATUS: config.STATUS_OK})
return_code = config.VALIDATION_ERROR_STATUS
else:
# notify callbacks
getattr(app, "on_insert")(resource, documents)
getattr(app, "on_insert_%s" % resource)(documents)
# compute etags here as documents might have been updated by callbacks.
resolve_document_etag(documents, resource)
# bulk insert
ids = app.data.insert(resource, documents)
# update oplog if needed
oplog_push(resource, documents, 'POST')
# assign document ids
for document in documents:
# either return the custom ID_FIELD or the id returned by
# data.insert().
document[resource_def['id_field']] = \
document.get(resource_def['id_field'], ids.pop(0))
# build the full response document
result = document
build_response_document(
result, resource, embedded_fields, document)
# add extra write meta data
result[config.STATUS] = config.STATUS_OK
# limit what actually gets sent to minimize bandwidth usage
result = marshal_write_response(result, resource)
results.append(result)
# insert versioning docs
insert_versioning_documents(resource, documents)
# notify callbacks
getattr(app, "on_inserted")(resource, documents)
getattr(app, "on_inserted_%s" % resource)(documents)
# request was received and accepted; at least one document passed
# validation and was accepted for insertion.
return_code = 201
if len(results) == 1:
response = results.pop(0)
else:
response = {
config.STATUS: config.STATUS_ERR if failures else config.STATUS_OK,
config.ITEMS: results,
}
if failures:
response[config.ERROR] = {
"code": return_code,
"message": "Insertion failure: %d document(s) contain(s) error(s)"
% failures,
}
print "now we're inside post.py, before customizing response"
print response
for key in response.keys():
if key != "_id":
del response[key]
print 'final response'
print response
return response, None, None, return_code
| Hearen/OnceServer | Server/Eve/post.py | Python | mit | 12,002 |
# coding: utf-8
from __future__ import unicode_literals
# created by: Han Feng (https://github.com/hanx11)
import collections
import hashlib
import logging
from random import randint
import requests
from wxpy.api.messages import Message
from wxpy.ext.talk_bot_utils import get_context_user_id, next_topic
from wxpy.utils.misc import get_text_without_at_bot
from wxpy.utils import enhance_connection
from wxpy.compatible import *

logger = logging.getLogger(__name__)
class XiaoI(object):
"""
    The XiaoI bot, deeply integrated with wxpy
"""
# noinspection SpellCheckingInspection
def __init__(self, key, secret):
"""
        | A key and secret are required; obtain them by registering
        | Free sign-up: http://cloud.xiaoi.com/
        :param key: the key you applied for
        :param secret: the secret you applied for
"""
self.key = key
self.secret = secret
self.realm = "xiaoi.com"
self.http_method = "POST"
self.uri = "/ask.do"
self.url = "http://nlp.xiaoi.com/ask.do?platform=custom"
xauth = self._make_http_header_xauth()
headers = {
"Content-type": "application/x-www-form-urlencoded",
"Accept": "text/plain",
}
headers.update(xauth)
self.session = requests.Session()
self.session.headers.update(headers)
enhance_connection(self.session)
def _make_signature(self):
"""
        Generate the request signature
"""
        # nonce: a 40-character random digit string
        nonce = "".join([str(randint(0, 9)) for _ in range(40)])
sha1 = "{0}:{1}:{2}".format(self.key, self.realm, self.secret).encode("utf-8")
sha1 = hashlib.sha1(sha1).hexdigest()
sha2 = "{0}:{1}".format(self.http_method, self.uri).encode("utf-8")
sha2 = hashlib.sha1(sha2).hexdigest()
signature = "{0}:{1}:{2}".format(sha1, nonce, sha2).encode("utf-8")
signature = hashlib.sha1(signature).hexdigest()
ret = collections.namedtuple("signature_return", "signature nonce")
ret.signature = signature
ret.nonce = nonce
return ret
def _make_http_header_xauth(self):
"""
        Generate the X-Auth request header
"""
sign = self._make_signature()
ret = {
"X-Auth": "app_key=\"{0}\",nonce=\"{1}\",signature=\"{2}\"".format(
self.key, sign.nonce, sign.signature)
}
return ret
def do_reply(self, msg):
"""
        Reply to the message, and return the reply text
        :param msg: a Message object
        :return: the reply text
"""
ret = self.reply_text(msg)
msg.reply(ret)
return ret
def reply_text(self, msg):
"""
        Return the reply text only
        :param msg: a Message object, or the message text itself
        :return: the reply text
"""
error_response = (
"主人还没给我设置这类话题的回复",
)
if isinstance(msg, Message):
user_id = get_context_user_id(msg)
question = get_text_without_at_bot(msg)
else:
user_id = "abc"
question = msg or ""
params = {
"question": question,
"format": "json",
"platform": "custom",
"userId": user_id,
}
resp = self.session.post(self.url, data=params)
text = resp.text
for err in error_response:
if err in text:
return next_topic()
return text
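# A minimal usage sketch (the key/secret below are placeholders obtained
# from http://cloud.xiaoi.com/, not real credentials):
if __name__ == '__main__':
    bot = XiaoI('my-app-key', 'my-app-secret')
    print(bot.reply_text('hello'))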
| youfou/wxpy | wxpy/ext/xiaoi.py | Python | mit | 3,546 |
# encoding: utf-8
# Copyright 2013 maker
# License
"""
Sales module URLs
"""
from django.conf.urls.defaults import *
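# Each pattern below accepts an optional extension suffix captured as
# `response_format` (e.g. `index.json`), which is passed to the views to
# select the response format.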
urlpatterns = patterns('maker.sales.views',
url(r'^(\.(?P<response_format>\w+))?$', 'index', name='sales'),
url(r'^index(\.(?P<response_format>\w+))?/?$', 'index', name='sales_index'),
url(r'^index/open(\.(?P<response_format>\w+))?/?$', 'index_open', name='sales_index_open'),
url(r'^index/assigned(\.(?P<response_format>\w+))?/?$',
'index_assigned', name='sales_index_assigned'),
# Orders
url(r'^order/add(\.(?P<response_format>\w+))?/?$',
'order_add', name='sales_order_add'),
url(r'^order/add/lead/(?P<lead_id>\w+)(\.(?P<response_format>\w+))?/?$',
'order_add', name='sales_order_add_with_lead'),
url(r'^order/add/opportunity/(?P<opportunity_id>\w+)(\.(?P<response_format>\w+))?/?$',
'order_add', name='sales_order_add_with_opportunity'),
url(r'^order/edit/(?P<order_id>\d+)(\.(?P<response_format>\w+))?/?$',
'order_edit', name='sales_order_edit'),
url(r'^order/view/(?P<order_id>\d+)(\.(?P<response_format>\w+))?/?$',
'order_view', name='sales_order_view'),
url(r'^order/invoice/(?P<order_id>\d+)(\.(?P<response_format>\w+))?/?$',
'order_invoice_view', name='sales_order_invoice_view'),
url(r'^order/delete/(?P<order_id>\d+)(\.(?P<response_format>\w+))?/?$',
'order_delete', name='sales_order_delete'),
# Products
url(r'^product/index(\.(?P<response_format>\w+))?/?$',
'product_index', name='sales_product_index'),
url(r'^product/add/(?P<parent_id>\d+)(\.(?P<response_format>\w+))?/?$',
'product_add', name='sales_product_add'),
url(r'^product/add(\.(?P<response_format>\w+))?/?$',
'product_add', name='sales_product_add'),
url(r'^product/edit/(?P<product_id>\d+)(\.(?P<response_format>\w+))?/?$',
'product_edit', name='sales_product_edit'),
url(r'^product/view/(?P<product_id>\d+)(\.(?P<response_format>\w+))?/?$',
'product_view', name='sales_product_view'),
url(r'^product/delete/(?P<product_id>\d+)(\.(?P<response_format>\w+))?/?$',
'product_delete', name='sales_product_delete'),
# Settings
url(r'^settings/view(\.(?P<response_format>\w+))?/?$', 'settings_view', name='sales_settings_view'),
url(r'^settings/edit(\.(?P<response_format>\w+))?/?$', 'settings_edit', name='sales_settings_edit'),
# Statuses
url(r'^status/add(\.(?P<response_format>\w+))?/?$',
'status_add', name='sales_status_add'),
url(r'^status/edit/(?P<status_id>\d+)(\.(?P<response_format>\w+))?/?$',
'status_edit', name='sales_status_edit'),
url(r'^status/view/(?P<status_id>\d+)(\.(?P<response_format>\w+))?/?$',
'status_view', name='sales_status_view'),
url(r'^status/delete/(?P<status_id>\d+)(\.(?P<response_format>\w+))?/?$',
'status_delete', name='sales_status_delete'),
# Subscriptions
url(r'^subscription/add(\.(?P<response_format>\w+))?/?$',
'subscription_add', name='sales_subscription_add'),
url(r'^subscription/add/order/(?P<order_id>\w+)/product/(?P<product_id>\w+)(\.(?P<response_format>\w+))?/?$',
'subscription_add', name='sales_subscription_add_with_order_and_product'),
url(r'^subscription/add/(?P<productset_id>\w+)(\.(?P<response_format>\w+))?/?$',
'subscription_add', name='sales_subscription_add_with_product'),
url(r'^subscription/edit/(?P<subscription_id>\d+)(\.(?P<response_format>\w+))?/?$',
'subscription_edit', name='sales_subscription_edit'),
url(r'^subscription/view/(?P<subscription_id>\d+)(\.(?P<response_format>\w+))?/?$',
'subscription_view', name='sales_subscription_view'),
url(r'^subscription/delete/(?P<subscription_id>\d+)(\.(?P<response_format>\w+))?/?$',
'subscription_delete', name='sales_subscription_delete'),
# Ordered Products
url(r'^ordered_product/add/(?P<order_id>\d+)(\.(?P<response_format>\w+))?/?$',
'ordered_product_add', name='sales_ordered_product_add'),
url(r'^ordered_product/edit/(?P<ordered_product_id>\d+)(\.(?P<response_format>\w+))?/?$',
'ordered_product_edit', name='sales_ordered_product_edit'),
url(r'^ordered_product/view/(?P<ordered_product_id>\d+)(\.(?P<response_format>\w+))?/?$',
'ordered_product_view', name='sales_ordered_product_view'),
url(r'^ordered_product/delete/(?P<ordered_product_id>\d+)(\.(?P<response_format>\w+))?/?$',
'ordered_product_delete', name='sales_ordered_product_delete'),
# Sources
url(r'^source/add(\.(?P<response_format>\w+))?/?$',
'source_add', name='sales_source_add'),
url(r'^source/edit/(?P<source_id>\d+)(\.(?P<response_format>\w+))?/?$',
'source_edit', name='sales_source_edit'),
url(r'^source/view/(?P<source_id>\d+)(\.(?P<response_format>\w+))?/?$',
'source_view', name='sales_source_view'),
url(r'^source/delete/(?P<source_id>\d+)(\.(?P<response_format>\w+))?/?$',
'source_delete', name='sales_source_delete'),
# Leads
url(r'^lead/index(\.(?P<response_format>\w+))?/?$',
'lead_index', name='sales_lead_index'),
url(r'^lead/index/assigned(\.(?P<response_format>\w+))?/?$',
'lead_index_assigned', name='sales_lead_index_assigned'),
url(r'^lead/add(\.(?P<response_format>\w+))?/?$',
'lead_add', name='sales_lead_add'),
url(r'^lead/edit/(?P<lead_id>\d+)(\.(?P<response_format>\w+))?/?$',
'lead_edit', name='sales_lead_edit'),
url(r'^lead/view/(?P<lead_id>\d+)(\.(?P<response_format>\w+))?/?$',
'lead_view', name='sales_lead_view'),
url(r'^lead/delete/(?P<lead_id>\d+)(\.(?P<response_format>\w+))?/?$',
'lead_delete', name='sales_lead_delete'),
# Opportunities
url(r'^opportunity/index(\.(?P<response_format>\w+))?/?$',
'opportunity_index', name='sales_opportunity_index'),
url(r'^opportunity/index/assigned(\.(?P<response_format>\w+))?/?$',
'opportunity_index_assigned', name='sales_opportunity_index_assigned'),
url(r'^opportunity/add(\.(?P<response_format>\w+))?/?$',
'opportunity_add', name='sales_opportunity_add'),
url(r'^opportunity/add/lead/(?P<lead_id>\w+)(\.(?P<response_format>\w+))?/?$',
'opportunity_add', name='sales_opportunity_add_with_lead'),
url(r'^opportunity/edit/(?P<opportunity_id>\d+)(\.(?P<response_format>\w+))?/?$',
'opportunity_edit', name='sales_opportunity_edit'),
url(r'^opportunity/view/(?P<opportunity_id>\d+)(\.(?P<response_format>\w+))?/?$',
'opportunity_view', name='sales_opportunity_view'),
url(r'^opportunity/delete/(?P<opportunity_id>\d+)(\.(?P<response_format>\w+))?/?$',
'opportunity_delete', name='sales_opportunity_delete'),
# AJAX lookups
url(r'^ajax/subscription(\.(?P<response_format>\w+))?/?$',
'ajax_subscription_lookup', name='sales_ajax_subscription_lookup'),
)
| alejo8591/maker | sales/urls.py | Python | mit | 7,456 |
#!/usr/bin/env python
# Convert line elements with overlapping endpoints into polylines in an
# SVG file.
import os
import sys
try:
from lxml import etree
except ImportError:
import xml.etree.ElementTree as etree
from collections import defaultdict
from optparse import OptionParser
SVG_NS = 'http://www.w3.org/2000/svg'
START = 1
END = 2
class Line(object):
def __init__(self, line_element):
a = line_element.attrib
self.x1 = float(a['x1'])
self.y1 = float(a['y1'])
self.x2 = float(a['x2'])
self.y2 = float(a['y2'])
self.strokeWidth = float(a['stroke-width'])
def reverse(self):
self.x1, self.x2 = self.x2, self.x1
self.y1, self.y2 = self.y2, self.y1
def start_hash(self):
return str(self.x1) + ',' + str(self.y1)
def end_hash(self):
return str(self.x2) + ',' + str(self.y2)
def endpoint(self, direction):
if direction == START:
return self.start_hash()
else:
return self.end_hash()
def get_other_hash(self, key):
h = self.start_hash()
if h == key:
h = self.end_hash()
return h
def __repr__(self):
return '((%s,%s),(%s,%s),sw:%s)' % (self.x1, self.y1,
self.x2, self.y2,
self.strokeWidth)
class EndpointHash(object):
def __init__(self, lines):
self.endpoints = defaultdict(list)
for l in lines:
self.endpoints[l.start_hash()].append(l)
self.endpoints[l.end_hash()].append(l)
def count_overlapping_points(self):
count = 0
for key, lines in self.endpoints.iteritems():
l = len(lines)
if l > 1:
count += 1
return count
def _del_line(self, key, line):
self.endpoints[key].remove(line)
if len(self.endpoints[key]) == 0:
del self.endpoints[key]
def remove_line(self, line):
key = line.start_hash()
self._del_line(key, line)
self._del_line(line.get_other_hash(key), line)
def pop_connected_line(self, line, key):
if key in self.endpoints:
line = self.endpoints[key][0]
self.remove_line(line)
return line
else:
return
def parse_svg(fname):
print "Parsing '%s'..." % (fname)
return etree.parse(fname)
def get_lines(svg):
lines = []
for l in svg.getroot().iter('{%s}line' % SVG_NS):
lines.append(Line(l))
return lines
def align_lines(l1, l2):
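    # The two lines share an endpoint; if they meet start-to-start or
    # end-to-end, flip l2 so the segments run head-to-tail.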
if ( l1.x1 == l2.x1 and l1.y1 == l2.y1
or l1.x2 == l2.x2 and l1.y2 == l2.y2):
l2.reverse()
def connect_lines(lines, endpoint_hash, line, direction, poly):
while True:
key = line.endpoint(direction)
connected_line = endpoint_hash.pop_connected_line(line, key)
if connected_line:
if direction == START:
poly.insert(0, connected_line)
else:
poly.append(connected_line)
align_lines(line, connected_line)
lines.remove(connected_line)
line = connected_line
else:
break
def find_polylines(lines, endpoint_hash):
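    # Greedily grow polylines: pop a seed segment, then extend the chain
    # from both of its endpoints while connected segments remain.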
polylines = []
while lines:
line = lines.pop()
endpoint_hash.remove_line(line)
poly = [line]
connect_lines(lines, endpoint_hash, line, START, poly)
connect_lines(lines, endpoint_hash, line, END, poly)
polylines.append(poly)
return polylines
def optimize(svg):
lines = get_lines(svg)
print '%s line segments found' % len(lines)
lines_by_width = defaultdict(list)
for l in lines:
lines_by_width[l.strokeWidth].append(l)
del lines
print '%s different stroke widths found:' % len(lines_by_width)
for width, lines in lines_by_width.iteritems():
print ' strokeWidth: %s (%s lines)' % (width, len(lines))
polylines = []
for width, lines in lines_by_width.iteritems():
print 'Finding polylines (strokeWidth: %s)... ' % width
endpoint_hash = EndpointHash(lines)
overlapping_points = endpoint_hash.count_overlapping_points()
print (' %s line segments, %s overlapping points'
% (len(lines), overlapping_points)),
p = find_polylines(lines, endpoint_hash)
print '-> %s polylines' % len(p)
polylines += p
return polylines
def write_svg(polylines, outfile):
print "Writing '%s'..." % outfile
f = open(outfile, 'w')
f.write("""<?xml version="1.0" standalone="no"?>
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN"
"http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
<svg width="100%" height="100%" xmlns="http://www.w3.org/2000/svg" version="1.1">
""")
def point_to_str(x, y):
return '%s,%s ' % (x, y)
for p in polylines:
points = []
for line in p:
if not points:
points.append(point_to_str(line.x1, line.y1))
points.append(point_to_str(line.x2, line.y2))
f.write('<polyline fill="none" stroke="#000" stroke-width="%s" points="%s"/>\n'
% (p[0].strokeWidth, ' '.join(points)))
f.write('</svg>\n')
f.close()
def get_filesize(fname):
return os.stat(fname).st_size
def print_size_stats(infile, outfile):
insize = get_filesize(infile)
outsize = get_filesize(outfile)
print ('Original file size: %.2fKiB, new file size: %.2fKiB (%.2f)'
% (insize / 1024., outsize / 1024., float(outsize) / insize * 100))
def main():
usage = 'Usage: %prog INFILE OUTFILE'
parser = OptionParser(usage=usage)
options, args = parser.parse_args()
if len(args) < 2:
parser.error('input and output files must be specified')
return 2
infile = args[0]
outfile = args[1]
svg = parse_svg(infile)
polylines = optimize(svg)
print '%s polyline(s) found in total' % len(polylines)
write_svg(polylines, outfile)
print_size_stats(infile, outfile)
return 0
if __name__ == '__main__':
try:
sys.exit(main())
except KeyboardInterrupt:
sys.exit(1)
| johnnovak/polylinize.py | polylinize.py | Python | mit | 6,224 |
# -*- coding: UTF-8 -*-
from django.conf.urls import url, include
from django.contrib import admin
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
urlpatterns = [
url(r'^', include('symcon.urls')),
url(r'^pages/', include('django.contrib.flatpages.urls')),
url(r'^admin/', include(admin.site.urls)),
]
urlpatterns += staticfiles_urlpatterns()
| lociii/symcon-index | heroku/urls.py | Python | mit | 376 |
from django.contrib import admin
from django.contrib.auth.models import User
from .models import Stock, StockHistory, StockSelection, SectorHistory, StockNews
class CommonAdmin(admin.ModelAdmin):
date_hierarchy = 'pub_date'
class SectorAdmin(CommonAdmin):
list_display = ('Symbol', 'Sector', 'pub_date')
search_fields = ['Symbol', 'Sector']
class StockAdmin(CommonAdmin):
    list_display = ('Symbol', 'Name', 'MarketCap', 'Catagory', 'pub_date')
search_fields = ['Symbol']
class StockRelativeAdmin(CommonAdmin):
def stock_info(obj):
return '{}, {}, {}, {}'.format(
obj.stock.Symbol,
obj.stock.Name,
obj.stock.MarketCap,
obj.stock.pub_date,
)
list_display = (stock_info, 'pub_date')
search_fields = ['stock__Symbol']
admin.site.register(Stock, StockAdmin)
admin.site.register(SectorHistory, SectorAdmin)
admin.site.register(StockHistory, StockRelativeAdmin)
admin.site.register(StockSelection, StockRelativeAdmin)
admin.site.register(StockNews, StockRelativeAdmin)
| yuxiang-zhou/MarketAnalysor | MADjangoProject/market/admin.py | Python | mit | 1,065 |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.4 on 2017-02-11 08:02
from __future__ import unicode_literals
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
('multiexplorer', '0006_pullhistory'),
]
operations = [
migrations.CreateModel(
name='PushHistory',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('last_pushed', models.DateTimeField(default=django.utils.timezone.now)),
],
),
migrations.AddField(
model_name='memo',
name='signature',
field=models.TextField(blank=True),
),
migrations.AlterField(
model_name='memo',
name='created',
field=models.DateTimeField(default=django.utils.timezone.now),
),
]
| priestc/MultiExplorer | multiexplorer/multiexplorer/migrations/0007_auto_20170211_0802.py | Python | mit | 967 |
"""
Jump Search
Find an element in a sorted array.
"""
import math
def jump_search(arr, target):
"""
    Worst-case complexity: O(√n)
    All items in the list must be sorted, as with binary search.
    Find the block that contains the target value, then search that block
    linearly.
    Returns the index of the first occurrence of the target value.
    reference: https://en.wikipedia.org/wiki/Jump_search
"""
length = len(arr)
block_size = int(math.sqrt(length))
block_prev = 0
    block = block_size
# return -1 means that array doesn't contain target value
# find block that contains target value
if arr[length - 1] < target:
return -1
while block <= length and arr[block - 1] < target:
block_prev = block
block += block_size
# find target value in block
    while arr[block_prev] < target:
        block_prev += 1
        if block_prev == min(block, length):
            return -1
    # if there is target value in array, return it
    if arr[block_prev] == target:
return block_prev
return -1
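# A minimal usage sketch (assumed sample data):
if __name__ == "__main__":
    data = [1, 3, 5, 7, 9, 11]
    assert jump_search(data, 7) == 3   # present -> index
    assert jump_search(data, 4) == -1  # absent  -> -1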
| keon/algorithms | algorithms/search/jump_search.py | Python | mit | 1,064 |
#! /usr/bin/env python
# -*- coding: UTF-8 -*-
from __future__ import division,print_function,absolute_import,unicode_literals
import sys
import os
os.chdir(sys.path[0])
sys.path.append('/mnt/sda2/github/TSF1KEV/TSFpy')
from TSF_io import *
#from TSF_Forth import *
from TSF_shuffle import *
from TSF_match import *
from TSF_calc import *
from TSF_time import *
TSF_Forth_init(TSF_io_argvs(),[TSF_shuffle_Initwords,TSF_match_Initwords,TSF_calc_Initwords,TSF_time_Initwords])
TSF_Forth_setTSF("TSF_Tab-Separated-Forth:",
"\t".join(["UTF-8","#TSF_encoding","replace:","#TSF_this","help:","#TSF_echothe","0","#TSF_fin."]),
TSF_style="T")
TSF_Forth_setTSF("help:",
"\t".join(["usage: ./TSF.py [command|file.tsf] [argv] ...",
"commands:",
" --help this commands view",
" --about about TSF UTF-8 text (Japanese) view\" ",
" --python TSF.tsf to Python.py view or save\" ",
" --helloworld \"Hello world 1 #TSF_echoN\" sample",
" --quine TSF_Forth_viewthey() Quine (self source) sample",
" --99beer 99 Bottles of Beer sample",
" --fizzbuzz ([0]#3Z1~0)+([0]#5Z2~0) Fizz Buzz Fizz&Buzz sample",
" --zundoko Zun Zun Zun Zun Doko VeronCho sample",
" --fibonacci Fibonacci number 0,1,1,2,3,5,8,13,21,55... sample",
" --prime prime numbers 2,3,5,7,11,13,17,19,23,29... sample",
" --calcFX fractions calculator \"1/3-m1|2\"-> p5|6 sample",
" --calcDC fractions calculator \"1/3-m1|2\"-> 0.8333... sample",
" --calcKN fractions calculator \"1/3-m1|2\"-> 6 bunno 5 sample",
" --calender \"@000y@0m@0dm@wdec@0h@0n@0s\"-> TSF_time_getdaytime() sample"]),
TSF_style="N")
TSF_Forth_setTSF("replace:",
"\t".join(["replaceN:","#TSF_carbonthe","#TSF_calender","replaceN:","0","#TSF_pokethe","help:","replaceO:","replaceN:","#TSF_replacestacks"]),
TSF_style="T")
TSF_Forth_setTSF("replaceO:",
"\t".join(["TSF_time_getdaytime()"]),
TSF_style="N")
TSF_Forth_setTSF("replaceN:",
"\t".join(["@000y@0m@0dm@wdec@0h@0n@0s"]),
TSF_style="N")
TSF_Forth_addfin(TSF_io_argvs())
TSF_Forth_argvsleftcut(TSF_io_argvs(),1)
TSF_Forth_run()
| ooblog/TSF1KEV | TSFpy/debug/sample_help.py | Python | mit | 2,181 |
# This file is autogenerated. Do not edit it manually.
# If you want change the content of this file, edit
#
# spec/fixtures/responses/whois.biz/status_available
#
# and regenerate the tests with the following script
#
# $ scripts/generate_tests.py
#
from nose.tools import *
from dateutil.parser import parse as time_parse
import yawhois
class TestWhoisBizStatusAvailable(object):
def setUp(self):
fixture_path = "spec/fixtures/responses/whois.biz/status_available.txt"
host = "whois.biz"
part = yawhois.record.Part(open(fixture_path, "r").read(), host)
self.record = yawhois.record.Record(None, [part])
def test_status(self):
eq_(self.record.status, None)
def test_available(self):
eq_(self.record.available, True)
def test_domain(self):
eq_(self.record.domain, "u34jedzcq.biz")
def test_nameservers(self):
eq_(self.record.nameservers.__class__.__name__, 'list')
eq_(self.record.nameservers, [])
def test_admin_contacts(self):
eq_(self.record.admin_contacts.__class__.__name__, 'list')
eq_(self.record.admin_contacts, [])
def test_registered(self):
eq_(self.record.registered, False)
def test_created_on(self):
eq_(self.record.created_on, None)
def test_registrar(self):
eq_(self.record.registrar, None)
def test_registrant_contacts(self):
eq_(self.record.registrant_contacts.__class__.__name__, 'list')
eq_(self.record.registrant_contacts, [])
def test_technical_contacts(self):
eq_(self.record.technical_contacts.__class__.__name__, 'list')
eq_(self.record.technical_contacts, [])
def test_updated_on(self):
eq_(self.record.updated_on, None)
def test_domain_id(self):
eq_(self.record.domain_id, None)
def test_expires_on(self):
eq_(self.record.expires_on, None)
| huyphan/pyyawhois | test/record/parser/test_response_whois_biz_status_available.py | Python | mit | 1,928 |
"""
Redis Blueprint
===============
**Fabric environment:**
.. code-block:: yaml
blueprints:
- blues.redis
settings:
redis:
# bind: 0.0.0.0 # Set the bind address specifically (Default: 127.0.0.1)
"""
import re
from fabric.decorators import task
from fabric.utils import abort
from refabric.context_managers import sudo
from refabric.contrib import blueprints
from . import debian
from refabric.operations import run
__all__ = ['start', 'stop', 'restart', 'setup', 'configure']
blueprint = blueprints.get(__name__)
start = debian.service_task('redis-server', 'start')
stop = debian.service_task('redis-server', 'stop')
restart = debian.service_task('redis-server', 'restart')
@task
def setup():
"""
Install and configure Redis
"""
install()
configure()
def install():
with sudo():
debian.apt_get('install', 'redis-server')
def get_installed_version():
"""
Get installed version as tuple.
Parsed output format:
Redis server v=2.8.4 sha=00000000:0 malloc=jemalloc-3.4.1 bits=64 build=a...
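    which this function parses to the tuple ``(2, 8, 4)``.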
"""
retval = run('redis-server --version')
m = re.match('.+v=(?P<version>[0-9\.]+).+', retval.stdout)
try:
_v = m.group('version')
v = tuple(map(int, str(_v).split('.')))
return v
    except (AttributeError, IndexError):
        # AttributeError covers a failed regex match (m is None)
abort('Failed to get installed redis version')
@task
def configure():
"""
Configure Redis
"""
context = {
'bind': blueprint.get('bind', '127.0.0.1')
}
version = get_installed_version()
if version <= (2, 4):
config = 'redis-2.4.conf'
elif version < (3, 0):
config = 'redis-2.8.conf'
else:
config = 'redis-3.conf'
uploads = blueprint.upload(config, '/etc/redis/redis.conf', context)
if uploads:
if debian.lbs_release() >= '16.04':
debian.chown(location='/etc/redis/redis.conf',
owner='redis', group='root')
restart()
| 5monkeys/blues | blues/redis.py | Python | mit | 1,983 |
# -*- coding: utf-8 -*-
import json
from flask import Flask
from flask import request
from flask import jsonify
import time
from psutil import net_io_counters
from asyncftp import __version__
import threading
from asyncftp.Logger import _LogFormatter
t = time.time()
net = net_io_counters()
formatter = _LogFormatter(color=False)
log_message = str()
def make_app(server, queue):
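    # Control-panel app: /api/info reports transfer speed and uptime,
    # /api/start and /api/stop run or close the FTP server, /api/config
    # reads settings / adds refused IPs, and /api/log drains queued
    # log records.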
app = Flask(__name__)
@app.route('/api/info', methods=['GET'])
def speed():
if request.method == 'GET':
global t
global net
temp_t = time.time()
p = net_io_counters()
result = dict()
result['speed'] = dict(
up=(p[0] - net[0]) / (temp_t - t),
down=(p[1] - net[1]) / (temp_t - t)
)
result['up_time'] = server.up_time
result['running'] = True if server.up_time else False
t = temp_t
net = p
return jsonify(result)
@app.route('/api/start', methods=['GET'])
def run_server():
if not server.running:
thread = threading.Thread(target=server.run)
thread.start()
return 'ok'
@app.route('/api/stop', methods=['GET'])
def close_server():
server.close()
return 'ok'
@app.route('/api/config', methods=['GET', 'POST'])
def config():
if request.method == 'GET':
return jsonify({
'host': server.host,
'port': str(server.port),
'version': __version__,
'refuse_ip': server.ip_refuse
})
if request.method == 'POST':
data = json.loads(request.data.decode('utf-8'))
for ip in data['refuse_ip']:
server.add_refuse_ip(ip)
return 'ok'
@app.route('/api/log', methods=['GET'])
def log():
if request.method == 'GET':
result = str()
while not queue.empty():
record = queue.get(block=False)
result += formatter.format(record) + '\n'
global log_message
log_message += result
return log_message
return app
| helloqiu/AsyncFTP | asyncftp/console/app.py | Python | mit | 2,207 |
import os
from celery import Celery
from django.apps import apps, AppConfig
from django.conf import settings
if not settings.configured:
# set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'config.settings.local') # pragma: no cover
app = Celery('lipame')
class CeleryConfig(AppConfig):
name = 'lipame.taskapp'
verbose_name = 'Celery Config'
def ready(self):
# Using a string here means the worker will not have to
# pickle the object when using Windows.
app.config_from_object('django.conf:settings')
installed_apps = [app_config.name for app_config in apps.get_app_configs()]
app.autodiscover_tasks(lambda: installed_apps, force=True)
@app.task(bind=True)
def debug_task(self):
print('Request: {0!r}'.format(self.request)) # pragma: no cover
| savioabuga/lipame | lipame/taskapp/celery.py | Python | mit | 903 |
from src.interfacing.ogs.connect import Authentication
import codecs
import sys
import os
from time import sleep
def loadList(pNameFile):
iList = []
with codecs.open(pNameFile, "r", "utf-8") as f:
for line in f:
iList.append(line)
return iList
if __name__ == "__main__":
    a = Authentication("Kuksu League", "", testing=False)
    iGroupNames = loadList("E:/Project/OGS/OGS-League/group_names.txt")
    iGroupIDs = loadList("E:/Project/OGS/OGS-League/group_ids.txt")
    nGroups = len(iGroupNames)
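    # Strip Windows/Unix line endings and convert the ids to integers.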
for i in range(nGroups):
        iGroupNames[i] = iGroupNames[i].replace("\r\n", "")
        iGroupNames[i] = iGroupNames[i].replace("\n", "")
        iGroupIDs[i] = iGroupIDs[i].replace("\r\n", "")
        iGroupIDs[i] = iGroupIDs[i].replace("\n", "")
        iGroupIDs[i] = int(iGroupIDs[i])
iDescription = """
Kuksu Main Title Tournament 9th Cycle Group %s
Title Holder: <a href='https://online-go.com/user/view/35184/vitality'>vitality (5d)</a>
Previous cycles:
<table style="text-align:center;" border='2'>
<tr><th rowspan=2>Cycle</th><td colspan=3><b>Title Match</b></td><td colspan=3><b>Title Tournament</b></td></tr>
<tr>
<th>Winner</th><th>Score</th><th>Runner-up</th>
<th>Winner<img src='https://a00ce0086bda2213e89f-570db0116da8eb5fdc3ce95006e46d28.ssl.cf1.rackcdn.com/4.2/img/trophies/gold_title_19.png' alt='Gold'></img></th>
<th>Runner-up<img src='https://a00ce0086bda2213e89f-570db0116da8eb5fdc3ce95006e46d28.ssl.cf1.rackcdn.com/4.2/img/trophies/silver_title_19.png' alt='Silver'></img></th>
<th>3rd Place<img src='https://a00ce0086bda2213e89f-570db0116da8eb5fdc3ce95006e46d28.ssl.cf1.rackcdn.com/4.2/img/trophies/bronze_title_19.png' alt='Bronze'></img></th>
</tr>
<tr>
<td><a href='https://online-go.com/tournament/2375'>1</a></td>
<td><b>luke</b></td><td></td><td></td>
<td><b>luke (2d)</b></td><td>davos</td><td>gomad361</td>
</tr>
<tr>
<td><a href='https://online-go.com/tournament/2384'>2</a></td>
<td><b>gomad361</b></td><td>3-2</td><td>luke</td>
<td><b>luke (2d)</b></td><td>gomad361</td><td>hotspur</td>
</tr>
<tr>
<td><a href='https://online-go.com/tournament/2391'>3</a></td>
<td><b>Uberdude</b></td><td>∗</td><td>gomad361</td>
<td><b>Uberdude (6d)</b></td><td>KyuT</td><td>marigo</td>
</tr>
<tr>
<td><a href='https://online-go.com/tournament/2406'>4</a></td>
<td><b>Uberdude</b></td><td>5-0</td><td>KyuT</td>
<td><b>KyuT (4d)</b></td><td>quiller</td><td>morituri</td>
</tr>
<tr>
<td><a href='https://online-go.com/tournament/2424'>5</a></td>
<td><b>Uberdude</b></td><td>5-0</td><td>gomad361</td>
<td><b>gomad361 (2d)</b></td><td>morituri</td><td>betterlife</td>
</tr>
<tr>
<td><a href='https://online-go.com/tournament/2439'>6</a></td>
<td><b>Uberdude</b></td><td>5-0</td><td>Elin</td>
<td><b>Elin (3d)</b></td><td>gomad361</td><td>morituri</td>
</tr>
<tr>
<td><a href='https://online-go.com/tournament/2460'>7</a></td>
<td><b>Uberdude</b></td><td>3-2</td><td>vitality</td>
<td><b>vitality (5d)</b></td><td>Elin</td><td>gomad361</td>
</tr>
<tr>
<td><a href='https://online-go.com/tournament/2475'>8</a></td>
<td><b>vitality</b></td><td>∗</td><td>Uberdude</td>
<td><b>vitality (5d)</b></td><td>nrx</td><td>gojohn</td>
</tr>
<tr>
<td rowspan=5><a href='#'>9</a></td>
<td rowspan=5 colspan=3></td>
<td colspan=3>
<a href='https://online-go.com/tournament/12653'>[A]</a>
</td>
</tr>
<tr>
<td colspan=3>
<a href='https://online-go.com/tournament/12654'>[B1]</a>
<a href='https://online-go.com/tournament/12655'>[B2]</a>
</td>
</tr>
<tr>
<td colspan=3>
<a href='https://online-go.com/tournament/12656'>[C1]</a>
<a href='https://online-go.com/tournament/12657'>[C2]</a>
<a href='https://online-go.com/tournament/12658'>[C3]</a>
<a href='https://online-go.com/tournament/12659'>[C4]</a>
</td>
</tr>
<tr>
<td colspan=3>
<a href='https://online-go.com/tournament/12660'>[D1]</a>
<a href='https://online-go.com/tournament/12661'>[D2]</a>
<a href='https://online-go.com/tournament/12662'>[D3]</a>
<a href='https://online-go.com/tournament/12663'>[D4]</a>
<a href='https://online-go.com/tournament/12664'>[D5]</a>
<a href='https://online-go.com/tournament/12665'>[D6]</a>
<a href='https://online-go.com/tournament/12666'>[D7]</a>
<a href='https://online-go.com/tournament/12667'>[D8]</a>
</td>
</tr>
<tr>
<td colspan=3>
<a href='https://online-go.com/tournament/12668'>[E1]</a>
<a href='https://online-go.com/tournament/12669'>[E2]</a>
<a href='https://online-go.com/tournament/12670'>[E3]</a>
<a href='https://online-go.com/tournament/12671'>[E4]</a>
<a href='https://online-go.com/tournament/12672'>[E5]</a>
<a href='https://online-go.com/tournament/12673'>[E6]</a>
</td>
</tr>
</table>
∗ means the games were finished by timeout or retiring.
Rules could be found <a href='https://forums.online-go.com/t/league-format-kuksu-title-tournament-rules-and-discussion/5191'>here</a>.
""" % iGroupNames[i];
    a.put(['tournaments', iGroupIDs[i]], {"description": iDescription})
    print("Tournament %s with id %d updated.\n" % (iGroupNames[i], iGroupIDs[i]))
    sleep(2)
# tourney id 7370
"""
iTournament = a.post(['tournaments'],{
"id":12650,
"name":"Test Tournament 2",
"group":515,
"tournament_type":"roundrobin",
"description":"<b>Test 3</b>",
"board_size":19,
"handicap":0, #default -1 for auto
"time_start": "2015-12-01T00:00:00Z",
"time_control_parameters":{
"time_control":"fischer",
"initial_time":604800,
"max_time":604800,
"time_increment":86400
},
"rules": "korean",
"exclusivity": "invite", # open, group. default
"exclude_provisional": False, # default
"auto_start_on_max": True, # default
"analysis_enabled": True, #default
"settings":{
"maximum_players":10,
},
"players_start": 6, #default
"first_pairing_method": "slide", #slaughter, random, slide, strength . default
"subsequent_pairing_method": "slide", # default
"min_ranking":0,
"max_ranking":36
});
#print("Hello");
print(iTournament["id"]);
"""
#print "Tournament %s is created." % iTournament["id"];
# r= a.post (['tournaments', 12642, 'players'], app_param= {"player_id":40318} )
# print (r)
| juanchodepisa/sbtk | SBTK_League_Helper/update_tournaments.py | Python | mit | 6,552 |
"""
Helper module for Python version 3.0 and above
- Ordered dictionaries
- Encoding/decoding urls
- Unicode/Bytes (for sending/receiving data from/to socket, base64)
- Exception handling (except Exception as e)
"""
import base64
from urllib.parse import unquote, quote
from collections import OrderedDict
def modulename():
return 'Helper module for Python version 3.0 and above'
def url_decode(uri):
return unquote(uri)
def url_encode(uri):
return quote(uri)
def new_dictionary():
return OrderedDict()
def dictionary_keys(dictionary):
return list(dictionary.keys())
def dictionary_values(dictionary):
return list(dictionary.values())
def data_read(data):
# Convert bytes to string
return data.decode('utf8')
def data_write(data):
# Convert string to bytes
return bytes(data, 'utf8')
def base64_decode(data):
# Base64 returns decoded byte string, decode to convert to UTF8 string
return base64.b64decode(data).decode('utf8')
def base64_encode(data):
# Base64 needs ascii input to encode, which returns Base64 byte string, decode to convert to UTF8 string
return base64.b64encode(data.encode('ascii')).decode('utf8')
def unicode_chr(code):
return chr(code)
def unicode_string(string):
# Python 3.* uses unicode by default
return string
def is_digit(string):
# Check if string is digit
return isinstance(string, str) and string.isdigit()
def is_number(value):
return isinstance(value, int)
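# A minimal self-check sketch (assumed sample values):
if __name__ == '__main__':
    token = base64_encode('breakpoint_set -i 1')
    assert base64_decode(token) == 'breakpoint_set -i 1'
    assert is_digit('42') and is_number(7)
    print(url_encode('a b') + ' / ' + url_decode('a%20b'))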
| martomo/SublimeTextXdebug | xdebug/helper/helper.py | Python | mit | 1,497 |
# -*- coding: utf-8 -*-
"""
Created on 27/04/2015
@author: C&C - HardSoft
"""
from util.HOFs import *
from util.CobolPatterns import *
from util.homogenize import Homogenize
def calc_length(copy):
if isinstance(copy, list):
book = copy
else:
if isinstance(copy, str):
book = copy.splitlines()
else:
book = []
lines = Homogenize(book)
havecopy = filter(isCopy, lines)
if havecopy:
bkm = ''.join(havecopy[0].split('COPY')[1].replace('.', '').split())
        msg = 'COPY {} must be expanded.'.format(bkm)
return {'retorno': False, 'msg': msg, 'lrecl': 0}
lrecl = 0
redefines = False
occurs = 0
dicoccurs = {}
level_redefines = 0
for line in lines:
match = CobolPatterns.row_pattern.match(line.strip())
if not match:
continue
match = match.groupdict()
if not match['level']:
continue
if 'REDEFINES' in line and not match['redefines']:
match['redefines'] = CobolPatterns.row_pattern_redefines.search(line).groupdict().get('redefines')
if 'OCCURS' in line and not match['occurs']:
match['occurs'] = CobolPatterns.row_pattern_occurs.search(line).groupdict().get('occurs')
level = int(match['level'])
if redefines:
if level > level_redefines:
continue
redefines = False
level_redefines = 0
if match['redefines']:
level_redefines = level
redefines = True
continue
if occurs:
if level > dicoccurs[occurs]['level']:
if match['occurs']:
occurs += 1
attrib = {}
attrib['occ'] = int(match['occurs'])
attrib['level'] = level
attrib['length'] = 0
dicoccurs[occurs] = attrib
if match['pic']:
dicoccurs[occurs]['length'] += FieldLength(match['pic'], match['usage'])
continue
while True:
if occurs == 1:
lrecl += dicoccurs[occurs]['length'] * dicoccurs[occurs]['occ']
else:
dicoccurs[occurs-1]['length'] += dicoccurs[occurs]['length'] * dicoccurs[occurs]['occ']
del dicoccurs[occurs]
occurs -= 1
if not occurs:
break
if level > dicoccurs[occurs]['level']:
break
if match['occurs']:
occurs += 1
attrib = {}
attrib['occ'] = int(match['occurs'])
attrib['level'] = level
attrib['length'] = 0
dicoccurs[occurs] = attrib
if match['pic']:
if occurs:
dicoccurs[occurs]['length'] += FieldLength(match['pic'], match['usage'])
else:
lrecl += FieldLength(match['pic'], match['usage'])
return {'retorno': True, 'msg': None, 'lrecl': lrecl}
def FieldLength(pic_str, usage):
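    # Expand repeat notation first (e.g. PIC 9(3)V99 becomes 999V99),
    # then size the field according to its USAGE clause.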
if pic_str[0] == 'S':
pic_str = pic_str[1:]
while True:
match = CobolPatterns.pic_pattern_repeats.search(pic_str)
if not match:
break
match = match.groupdict()
expanded_str = match['constant'] * int(match['repeat'])
pic_str = CobolPatterns.pic_pattern_repeats.sub(expanded_str, pic_str, 1)
len_field = len(pic_str.replace('V', ''))
if not usage:
usage = 'DISPLAY'
if 'COMP-3' in usage or 'COMPUTATIONAL-3' in usage:
len_field = len_field / 2 + 1
elif 'COMP' in usage or 'COMPUTATIONAL' in usage or 'BINARY' in usage:
len_field = len_field / 2
elif 'SIGN' in usage:
len_field += 1
return len_field
| flavio-casacurta/File-FixedS | calc_length.py | Python | mit | 3,848 |
from threading import local
from django.contrib.sites.models import Site
import os
_locals = local()
def get_current_request():
return getattr(_locals, 'request', None)
def get_current_site():
request = get_current_request()
host = request.get_host()
    try:
        return Site.objects.get(domain__iexact=host)
    except (Site.DoesNotExist, Site.MultipleObjectsReturned):
        return Site.objects.all()[0]
class GlobalRequestMiddleware(object):
def process_request(self, request):
_locals.request = request
| sergio-garcia/learning-django | polls/middleware.py | Python | mit | 492 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class GenerateArmTemplateRequest(Model):
"""Parameters for generating an ARM template for deploying artifacts.
:param virtual_machine_name: The resource name of the virtual machine.
:type virtual_machine_name: str
:param parameters: The parameters of the ARM template.
:type parameters: list[~azure.mgmt.devtestlabs.models.ParameterInfo]
:param location: The location of the virtual machine.
:type location: str
:param file_upload_options: Options for uploading the files for the
artifact. UploadFilesAndGenerateSasTokens is the default value. Possible
values include: 'UploadFilesAndGenerateSasTokens', 'None'
:type file_upload_options: str or
~azure.mgmt.devtestlabs.models.FileUploadOptions
"""
_attribute_map = {
'virtual_machine_name': {'key': 'virtualMachineName', 'type': 'str'},
'parameters': {'key': 'parameters', 'type': '[ParameterInfo]'},
'location': {'key': 'location', 'type': 'str'},
'file_upload_options': {'key': 'fileUploadOptions', 'type': 'str'},
}
def __init__(self, virtual_machine_name=None, parameters=None, location=None, file_upload_options=None):
self.virtual_machine_name = virtual_machine_name
self.parameters = parameters
self.location = location
self.file_upload_options = file_upload_options
| AutorestCI/azure-sdk-for-python | azure-mgmt-devtestlabs/azure/mgmt/devtestlabs/models/generate_arm_template_request.py | Python | mit | 1,873 |
# -*- coding: utf-8 -*-
import factory
from data.tests.factories import DepartmentFactory
from ..models import Tourist, TouristCard
class TouristFactory(factory.DjangoModelFactory):
class Meta:
model = Tourist
first_name = 'Dave'
last_name = 'Greel'
email = '[email protected]'
class TouristCardFactory(factory.DjangoModelFactory):
class Meta:
model = TouristCard
tourist = factory.SubFactory(TouristFactory)
current_department = factory.SubFactory(DepartmentFactory)
| notfier/touristique | tourists/tests/factories.py | Python | mit | 524 |
__author__ = 'kjoseph'
import itertools
import Queue
from collections import defaultdict
from dependency_parse_object import DependencyParseObject, is_noun, is_verb
def get_parse(dp_objs):
term_map = {}
map_to_head = defaultdict(list)
for parse_object in dp_objs:
if parse_object.head > 0:
map_to_head[parse_object.head].append(parse_object.id)
term_map[parse_object.id] = parse_object
# first manually combine MWE
#mwe_to_combine = get_mwe_combinations(map_to_head,term_map)
#for mwe in mwe_to_combine:
# combine_terms(mwe,term_map,map_to_head)
#conj_to_combine = get_conj_combinations(map_to_head,term_map)
#for conj in conj_to_combine:
# combine_terms(conj,term_map,map_to_head)
# now manually chunk the nouns together
nouns_to_combine = get_noun_combinations(map_to_head,term_map)
for noun_set in nouns_to_combine:
combine_terms(noun_set,term_map, map_to_head)
verbs_to_combine = get_verb_combinations(map_to_head,term_map)
for verb_set in verbs_to_combine:
combine_terms(verb_set,term_map, map_to_head)
roots =[]
non_terms = []
for parse_object in term_map.values():
if parse_object.head == 0:
roots.append(parse_object)
elif parse_object.head == -1:
non_terms.append(parse_object)
# now build the parse tree
to_parse = Queue.LifoQueue()
for root in reversed(roots):
to_parse.put([root,0])
return to_parse, term_map, map_to_head, non_terms
def get_noun_combinations(map_to_head,term_map):
to_combine = []
for head_id, children in map_to_head.iteritems():
head = term_map[head_id]
if len(children) == 0 or not (is_noun(head.postag) or head.postag in ['D','@','A','R']) :
continue
for child_id in children:
child = term_map[child_id]
if is_noun(child.postag) or child.postag in ['D','@','A','R']:
to_combine.append({child.id, head.id})
return get_combinations(to_combine)
def get_verb_combinations(map_to_head,term_map):
to_combine = []
for head_id, children in map_to_head.iteritems():
head = term_map[head_id]
if len(children) == 0 or not is_verb(head.postag):
continue
for child_id in children:
child = term_map[child_id]
if is_verb(child.postag) and child.id == (head.id +1):
to_combine.append({child.id, head.id})
return get_combinations(to_combine)
def get_mwe_combinations(map_to_head,term_map):
to_combine = []
for head_id, children in map_to_head.iteritems():
head = term_map[head_id]
if len(children) == 0:
continue
for child_id in children:
child = term_map[child_id]
if child.deprel == 'MWE':
to_combine.append({child.id, head.id})
return get_combinations(to_combine)
def get_conj_combinations(map_to_head,term_map):
to_combine = []
for head_id, children in map_to_head.iteritems():
head = term_map[head_id]
if len(children) == 0:
continue
for child_id in children:
child = term_map[child_id]
if child.deprel == 'CONJ':
to_combine.append({child.id, head.id})
return get_combinations(to_combine)
def get_combinations(to_combine):
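    # Repeatedly union any two id-sets that share a member until no
    # overlapping pair remains (a fixed-point merge of token groups).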
combination_found = True
while combination_found:
combination_found = False
combos = itertools.combinations(to_combine,2)
removed = []
for d in combos:
            if any(d[0] == r or d[1] == r for r in removed):
continue
if len(d[0].intersection(d[1])) > 0:
combination_found = True
to_combine.append(set.union(d[0],d[1]))
[to_combine.remove(x) for x in to_combine if x == d[0]]
[to_combine.remove(x) for x in to_combine if x == d[1]]
removed.append(d[0])
removed.append(d[1])
return to_combine
def combine_terms(noun_set,term_map, map_to_head):
new_parse_obj = DependencyParseObject(object_ids=noun_set,term_map=term_map)
# okay, we've created a new parse object
# now we need to update the relations to it
for id in noun_set:
if id == new_parse_obj.id:
term_map[id] = new_parse_obj
if id in map_to_head:
for child_id in noun_set:
if child_id in map_to_head[id]:
map_to_head[id].remove(child_id)
else:
# things dependent on this thing need to become dependent on the new parse object
if id in map_to_head:
for child in map_to_head[id]:
if child not in noun_set:
map_to_head[new_parse_obj.id].append(child)
term_map[child].head = new_parse_obj.id
del map_to_head[id]
del term_map[id]
def print_parse(parse_out, term_map, map_to_head):
while not parse_out.empty():
curr_head,level = parse_out.get()
print " "*level + str(level) +" " + curr_head.__unicode__()
for child in reversed(map_to_head.get(curr_head.id,[])):
parse_out.put([term_map[child],level+1])
def get_entities_from_parse(term_map):
all_proper = []
all_entities = []
all_entities_original_ids = []
all_proper_original_ids = []
for k,v in term_map.iteritems():
if is_noun(v.postag) or v.postag == '@' or v.postag == '#':
text = []
split_text = v.text.split()
ent_ids = []
for x in range(len(split_text)):
t = split_text[x]#.strip(string.punctuation)
#if x == 0 and t in stopwords:
# continue
text.append(t)
ent_ids.append(v.all_original_ids[x])
if len(text) > 0 and v.postag != 'O':
if '^' in v.postag and v.text[0].isupper():
all_proper.append(" ".join(text))
all_proper_original_ids.append(sorted(v.all_original_ids))
all_entities.append(" ".join([t.lower() for t in text]))
all_entities_original_ids.append(sorted(ent_ids))
return all_entities, all_proper, all_entities_original_ids, all_proper_original_ids
| kennyjoseph/identity_extraction_pub | python/utility_code/dependency_parse_handlers.py | Python | mit | 6,421 |
""" Python's random module includes a function choice(data) that returns a
random element from a non-empty sequence. The random module includes
a more basic function randrange, with parametrization similar to
the built-in range function, that return a random choice from the given
range. Using only the randrange function, implement your own version
of the choice function.
>>> data = [2,3,4,5,6,7,8,9,10,11,10,9,8,7,6,5,4,3,2,1]
>>> results = list()
>>> for x in range(len(data)*20):
... val = custom_choice(data)
... results.append(val in data)
>>> print(results)
[True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True]
"""
def custom_choice(data):
import random
return data[random.randrange(0,len(data))] | claudiordgz/GoodrichTamassiaGoldwasser | ch01/r112.py | Python | mit | 3,148 |
num = input("What is the numerator")
dem = input("What is the denominator")

# Minimal completion of the cut-off original ("if remains =" ended
# mid-statement and its loop could never run): reduce the fraction
# num/dem to lowest terms by dividing out common factors.
def math(num, dem):
    count = 2
    while count <= num and count <= dem:
        if num % count == 0 and dem % count == 0:
            num = num / count
            dem = dem / count
        else:
            count += 1
    return num, dem

print math(num, dem)
| cheesyc/2014KStateProgrammingCompetitionAnswers | 2011/1A.py | Python | mit | 386 |
# -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2016-09-13 04:14
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('rii_Api', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='location',
name='state',
field=models.CharField(max_length=2),
),
migrations.AlterField(
model_name='location',
name='venueName',
field=models.CharField(max_length=50),
),
migrations.AlterField(
model_name='year',
name='year',
field=models.IntegerField(choices=[(1919, 1919), (1920, 1920), (1921, 1921), (1922, 1922), (1923, 1923), (1924, 1924), (1925, 1925)], default=1919),
),
migrations.AlterField(
model_name='year',
name='yearSummary',
field=models.TextField(default='', max_length=2000),
),
]
| SimonHerrera/rock-island-independents | api/rii_Api/migrations/0002_auto_20160912_2314.py | Python | mit | 1,025 |
"""Some classes to support import of data files
"""
import os, glob
import numpy
import time
try:
    import ConfigParser as configparser  # gets renamed to lowercase in Python 3
except:
import configparser
class _BaseDataFile(object):
"""
"""
def __init__(self, filepath):
"""
"""
self.filepath = filepath
self.info = self._loadHeader()
self.data = self._loadData()
def _findFile(self, ending='', orSimilar=False):
"""Finds files using the base filename and the optional `ending` param (used to separate data from header)
If orSimilar==True then this function will first search for the exact file and then for any file of the appropriate
        format in that folder. (For a header file that can be useful, just to retrieve the header info.)
"""
#fetch all header/data files matching path
searchPattern = self.filepath+'*'+ending
filenames = glob.glob(searchPattern)
#check if we have exactly one matching file
        filename = None
if len(filenames)==0 and orSimilar:
folder = os.path.split(os.path.abspath(self.filepath))[0]
print('No exact match found for\n\t %s' %(searchPattern))
searchPattern = os.path.join(folder, '*'+ending)
print('...searching instead for\n\t %s' %(searchPattern))
filenames = glob.glob(searchPattern)
if len(filenames)==0:
print('No file found: %s' %(searchPattern))
elif len(filenames)>1:
print('Multiple files found')
else:
filename = filenames[0]
return filename
class DBPA_file(_BaseDataFile):
"""
DBPA amplifiers are made by Sensorium. Excellent signal to noise on the amp, with a very basic file format -
a 5-line ASCII header file (config-style file) and a binary data file.
Example usage:
datFile = io.DBPA_file('jwp_2013_18_02') #don't include file extension
print datFile.info #print the header info (samples, seconds etc)
{'channels': 122, 'duration': 761, 'rate': 1000, 'samples': 761000}
print datFile.data.shape
(122, 761000) #channels, samples
"""
def _loadHeader(self):
"""Load info from a header file ('*.h.txt')
"""
filename = self._findFile(ending='h.txt', orSimilar=True)
if not filename:
print('No header file')
#this header file looks like a config file with a single section
cfg = configparser.ConfigParser()
hdr = {}
f = open(filename)
cfg.readfp(f) #operates in place (doesn't return anything)
f.close()
        for name, val in cfg.items('File Information'): #reads entries in File Info section as a list of tuples
if name.lower()=='number of channels':
hdr['channels']=int(val.replace('"', '')) # convert '"200"' to 200
elif name.lower()=='samples per second':
hdr['rate']=int(val.replace('"', '')) # convert '"200"' to 200
return hdr
def _loadData(self):
"""
:param offset: the sample number to start reading from
"""
        filename = self._findFile(ending='dat')
        if not filename:
            print('No data file')
        fileSize = os.stat(filename).st_size
        self.info['duration'] = int(fileSize/self.info['rate']/self.info['channels']/4) #4 bytes per sample
        self.info['samples'] = self.info['duration']*self.info['rate']
        data = numpy.fromfile(filename, dtype='>f')# data are big-endian float32
data = data.reshape([self.info['samples'],self.info['channels']])
data = data.transpose() # to get (channels, time)
return data
| peircej/PsychoEEG | psychoeeg/io.py | Python | mit | 3,894 |
__author__ = 'emre'
print "hello world" | ekutlu/raspberry-test | test.py | Python | mit | 40 |
import os
from flask import Flask, g, session, redirect, request, url_for, jsonify
from requests_oauthlib import OAuth2Session
OAUTH2_CLIENT_ID = os.environ['OAUTH2_CLIENT_ID']
OAUTH2_CLIENT_SECRET = os.environ['OAUTH2_CLIENT_SECRET']
OAUTH2_REDIRECT_URI = 'http://localhost:5000/callback'
API_BASE_URL = os.environ.get('API_BASE_URL', 'https://discordapp.com/api')
AUTHORIZATION_BASE_URL = API_BASE_URL + '/oauth2/authorize'
TOKEN_URL = API_BASE_URL + '/oauth2/token'
app = Flask(__name__)
app.debug = True
app.config['SECRET_KEY'] = OAUTH2_CLIENT_SECRET
if 'http://' in OAUTH2_REDIRECT_URI:
os.environ['OAUTHLIB_INSECURE_TRANSPORT'] = 'true'
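# requests_oauthlib calls this hook whenever it auto-refreshes the token,
# keeping the copy stored in the session current.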
def token_updater(token):
session['oauth2_token'] = token
def make_session(token=None, state=None, scope=None):
return OAuth2Session(
client_id=OAUTH2_CLIENT_ID,
token=token,
state=state,
scope=scope,
redirect_uri=OAUTH2_REDIRECT_URI,
auto_refresh_kwargs={
'client_id': OAUTH2_CLIENT_ID,
'client_secret': OAUTH2_CLIENT_SECRET,
},
auto_refresh_url=TOKEN_URL,
token_updater=token_updater)
@app.route('/')
def index():
scope = request.args.get(
'scope',
'identify email connections guilds guilds.join')
discord = make_session(scope=scope.split(' '))
authorization_url, state = discord.authorization_url(AUTHORIZATION_BASE_URL)
session['oauth2_state'] = state
return redirect(authorization_url)
@app.route('/callback')
def callback():
if request.values.get('error'):
return request.values['error']
discord = make_session(state=session.get('oauth2_state'))
token = discord.fetch_token(
TOKEN_URL,
client_secret=OAUTH2_CLIENT_SECRET,
authorization_response=request.url)
session['oauth2_token'] = token
return redirect(url_for('.me'))
@app.route('/me')
def me():
discord = make_session(token=session.get('oauth2_token'))
user = discord.get(API_BASE_URL + '/users/@me').json()
guilds = discord.get(API_BASE_URL + '/users/@me/guilds').json()
connections = discord.get(API_BASE_URL + '/users/@me/connections').json()
return jsonify(user=user, guilds=guilds, connections=connections)
if __name__ == '__main__':
app.run()
| vishnevskiy/discord-oauth2-example | app.py | Python | mit | 2,290 |
from MirrorAI.dataset.directional.label_image import label_image
import numpy
def test():
d = numpy.array([1, 1, 0])
answer = label_image(d, target=0)
solution = [0, 0, 1]
assert (answer == solution).all()
| yuhangwang/MirrorAI | test/dataset/directional/label_image/test_3.py | Python | mit | 224 |
#!/usr/bin/python
#
# Copyright (c) 2011 The Bitcoin developers
# Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
import time
import json
import pprint
import hashlib
import struct
import re
import base64
import httplib
import sys
from multiprocessing import Process
ERR_SLEEP = 15
MAX_NONCE = 1000000L
settings = {}
pp = pprint.PrettyPrinter(indent=4)
class BitcoinRPC:
OBJID = 1
def __init__(self, host, port, username, password):
authpair = "%s:%s" % (username, password)
self.authhdr = "Basic %s" % (base64.b64encode(authpair))
self.conn = httplib.HTTPConnection(host, port, False, 30)
def rpc(self, method, params=None):
self.OBJID += 1
obj = { 'version' : '1.1',
'method' : method,
'id' : self.OBJID }
if params is None:
obj['params'] = []
else:
obj['params'] = params
self.conn.request('POST', '/', json.dumps(obj),
{ 'Authorization' : self.authhdr,
'Content-type' : 'application/json' })
resp = self.conn.getresponse()
if resp is None:
print "JSON-RPC: no response"
return None
body = resp.read()
resp_obj = json.loads(body)
if resp_obj is None:
print "JSON-RPC: cannot JSON-decode body"
return None
if 'error' in resp_obj and resp_obj['error'] != None:
return resp_obj['error']
if 'result' not in resp_obj:
print "JSON-RPC: no result in object"
return None
return resp_obj['result']
def getblockcount(self):
return self.rpc('getblockcount')
def getwork(self, data=None):
return self.rpc('getwork', data)
def uint32(x):
return x & 0xffffffffL
def bytereverse(x):
return uint32(( ((x) << 24) | (((x) << 8) & 0x00ff0000) |
(((x) >> 8) & 0x0000ff00) | ((x) >> 24) ))
def bufreverse(in_buf):
out_words = []
for i in range(0, len(in_buf), 4):
word = struct.unpack('@I', in_buf[i:i+4])[0]
out_words.append(struct.pack('@I', bytereverse(word)))
return ''.join(out_words)
def wordreverse(in_buf):
out_words = []
for i in range(0, len(in_buf), 4):
out_words.append(in_buf[i:i+4])
out_words.reverse()
return ''.join(out_words)
class Miner:
def __init__(self, id):
self.id = id
self.max_nonce = MAX_NONCE
def work(self, datastr, targetstr):
# decode work data hex string to binary
static_data = datastr.decode('hex')
static_data = bufreverse(static_data)
# the first 76b of 80b do not change
blk_hdr = static_data[:76]
# decode 256-bit target value
targetbin = targetstr.decode('hex')
targetbin = targetbin[::-1] # byte-swap and dword-swap
targetbin_str = targetbin.encode('hex')
target = long(targetbin_str, 16)
# pre-hash first 76b of block header
static_hash = hashlib.sha256()
static_hash.update(blk_hdr)
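		# re-hashing from this saved midstate means each nonce attempt only
		# hashes the 4 extra nonce bytes instead of the whole 80-byte header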
for nonce in xrange(self.max_nonce):
# encode 32-bit nonce value
nonce_bin = struct.pack("<I", nonce)
# hash final 4b, the nonce value
hash1_o = static_hash.copy()
hash1_o.update(nonce_bin)
hash1 = hash1_o.digest()
# sha256 hash of sha256 hash
hash_o = hashlib.sha256()
hash_o.update(hash1)
hash = hash_o.digest()
# quick test for winning solution: high 32 bits zero?
if hash[-4:] != '\0\0\0\0':
continue
# convert binary hash to 256-bit Python long
hash = bufreverse(hash)
hash = wordreverse(hash)
hash_str = hash.encode('hex')
l = long(hash_str, 16)
# proof-of-work test: hash < target
if l < target:
print time.asctime(), "PROOF-OF-WORK found: %064x" % (l,)
return (nonce + 1, nonce_bin)
else:
print time.asctime(), "PROOF-OF-WORK false positive %064x" % (l,)
# return (nonce + 1, nonce_bin)
return (nonce + 1, None)
def submit_work(self, rpc, original_data, nonce_bin):
nonce_bin = bufreverse(nonce_bin)
nonce = nonce_bin.encode('hex')
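		# 'data' is the 128-byte getwork buffer as 256 hex chars; the 4-byte
		# nonce sits at header bytes 76..80, i.e. hex chars 152..160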
solution = original_data[:152] + nonce + original_data[160:256]
param_arr = [ solution ]
result = rpc.getwork(param_arr)
print time.asctime(), "--> Upstream RPC result:", result
def iterate(self, rpc):
work = rpc.getwork()
if work is None:
time.sleep(ERR_SLEEP)
return
if 'data' not in work or 'target' not in work:
time.sleep(ERR_SLEEP)
return
time_start = time.time()
(hashes_done, nonce_bin) = self.work(work['data'],
work['target'])
time_end = time.time()
time_diff = time_end - time_start
self.max_nonce = long(
(hashes_done * settings['scantime']) / time_diff)
if self.max_nonce > 0xfffffffaL:
self.max_nonce = 0xfffffffaL
if settings['hashmeter']:
print "HashMeter(%d): %d hashes, %.2f Khash/sec" % (
self.id, hashes_done,
(hashes_done / 1000.0) / time_diff)
if nonce_bin is not None:
self.submit_work(rpc, work['data'], nonce_bin)
def loop(self):
rpc = BitcoinRPC(settings['host'], settings['port'],
settings['rpcuser'], settings['rpcpass'])
if rpc is None:
return
while True:
self.iterate(rpc)
def miner_thread(id):
miner = Miner(id)
miner.loop()
if __name__ == '__main__':
if len(sys.argv) != 2:
print "Usage: pyminer.py CONFIG-FILE"
sys.exit(1)
f = open(sys.argv[1])
for line in f:
# skip comment lines
m = re.search('^\s*#', line)
if m:
continue
# parse key=value lines
m = re.search('^(\w+)\s*=\s*(\S.*)$', line)
if m is None:
continue
settings[m.group(1)] = m.group(2)
f.close()
if 'host' not in settings:
settings['host'] = '127.0.0.1'
if 'port' not in settings:
settings['port'] = 45888
if 'threads' not in settings:
settings['threads'] = 1
if 'hashmeter' not in settings:
settings['hashmeter'] = 0
if 'scantime' not in settings:
settings['scantime'] = 30L
if 'rpcuser' not in settings or 'rpcpass' not in settings:
print "Missing username and/or password in cfg file"
sys.exit(1)
settings['port'] = int(settings['port'])
settings['threads'] = int(settings['threads'])
settings['hashmeter'] = int(settings['hashmeter'])
settings['scantime'] = long(settings['scantime'])
thr_list = []
for thr_id in range(settings['threads']):
p = Process(target=miner_thread, args=(thr_id,))
p.start()
thr_list.append(p)
time.sleep(1) # stagger threads
print settings['threads'], "mining threads started"
print time.asctime(), "Miner Starts - %s:%s" % (settings['host'], settings['port'])
try:
for thr_proc in thr_list:
thr_proc.join()
except KeyboardInterrupt:
pass
print time.asctime(), "Miner Stops - %s:%s" % (settings['host'], settings['port'])
| WorldLeadCurrency/WLC | contrib/pyminer/pyminer.py | Python | mit | 6,435 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-12-31 23:57
from __future__ import unicode_literals
import autoslug.fields
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('teams', '0001_initial'),
('accounts', '0001_initial'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Division',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=200)),
('slug', autoslug.fields.AutoSlugField(always_update=True, default='', editable=False, populate_from='name')),
('division_rep', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='division_representative', to=settings.AUTH_USER_MODEL)),
('teams', models.ManyToManyField(blank=True, to='teams.Team')),
],
),
migrations.CreateModel(
name='Session',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=200)),
('slug', autoslug.fields.AutoSlugField(always_update=True, default='', editable=False, populate_from='name')),
('game', models.CharField(max_length=100)),
('start_date', models.DateTimeField(verbose_name='start date')),
('end_date', models.DateTimeField(verbose_name='end date')),
('division', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='substitutes.Division')),
],
),
migrations.CreateModel(
name='SessionEvent',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('start_time', models.TimeField()),
('date', models.DateField()),
('session', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='substitutes.Session')),
],
),
migrations.CreateModel(
name='Sub',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('date', models.DateTimeField(auto_now=True, verbose_name='sub date')),
('session_event', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='substitutes.SessionEvent')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='accounts.CustomUser')),
],
),
]
| eSmelser/SnookR | SnookR/substitutes/migrations/0001_initial.py | Python | mit | 2,900 |
import re
import logging
from google.appengine.ext import db
from google.appengine.api import users, memcache
from handler import Handler, SlashRedirect
from webapp2_extras.routes import RedirectRoute, PathPrefixRoute
import webapp2
import articles
import api
from contents import panels
class Main(Handler):
def get(self):
logging.debug('%s'%self.request)
        if 'main' in self.request.url:
            return self.redirect('/')
self.render('base.html')
def handle_404(request, response, exception):
Handler(request, response).throw_error(404)
def handle_500(request, response, exception):
Handler(request, response).throw_error(500)
# Define which urls to handle and how
PAGE_RE = r'((?:[a-zA-Z0-9_-]+/?)*)'
app = webapp2.WSGIApplication(
[
#Adding /? after everything allows for an option trailing slash
RedirectRoute('(.*)//+', SlashRedirect, 'slash-redirect', strict_slash=True), #Strip multiple trailing slashes
RedirectRoute('/', Main, 'home', strict_slash=False),
RedirectRoute('/main', Main, 'home', strict_slash=True),
# API calls:
PathPrefixRoute('/api', [
RedirectRoute('/articles/get', api.GetArticles, 'get-articles', strict_slash=True),
RedirectRoute('/articles/upvote', api.UpVote, 'upvote-article', strict_slash=True),
RedirectRoute('/articles/devote', api.DeVote, 'downvote-article', strict_slash=True),
RedirectRoute('/coursematerials/get', api.GetCourseMaterials, 'get-course-materials', strict_slash=True),
RedirectRoute('/coursematerials/set', api.MakeNote, 'make-note', strict_slash=True) # Testing only
]),
RedirectRoute('/votes', articles.ListVotes, 'list-votes', strict_slash=True), # Testing only
RedirectRoute('/notes', panels.ListNotes, 'list-notes', strict_slash=True), # Testing only
], debug=True)
app.error_handlers[404] = handle_404
app.error_handlers[500] = handle_500 | nixhope/UdacityX | main.py | Python | mit | 2,047 |
from datetime import datetime
import time
import json
from Commit import Commit
import Constant
import collections
from yattag import Doc
def generateHTML(commits, projectName, commitData, fileExtensionMap):
totalAuthors = len(commitData)
generateBestAuthors(projectName, commitData)
generateFileByExtension(fileExtensionMap, projectName)
totalLines, totalLinesAdded, totalLinesDeleted = generateLinesByDate(commits, projectName)
totalFiles = generateFilesByDate(commits, projectName)
generateIndexHtml(projectName, totalLines, totalLinesAdded, totalLinesDeleted,
totalFiles, len(commits), totalAuthors)
def generateIndexHtml(projectName, totalLines, totalLinesAdded, totalLinesDeleted,
totalFiles, totalCommits, totalAuthors):
with open(Constant.INDEX_HTML_TEMPLATE, "rt") as fin:
with open(Constant.INDEX_HTML, "wt") as fout:
for line in fin:
if '$title' in line:
fout.write(line.replace('$title', projectName))
elif '$time' in line:
fout.write(line.replace('$time', time.strftime('%l:%M%p %Z on %b %d, %Y')))
elif '$files' in line:
fout.write(line.replace('$files', str(totalFiles)))
elif '$commits' in line:
fout.write(line.replace('$commits', str(totalCommits)))
elif '$totallines' in line:
fout.write(line.replace('$totallines', str(totalLines)))
elif '$linesadded' in line:
fout.write(line.replace('$linesadded', str(totalLinesAdded)))
elif '$linesdeleted' in line:
fout.write(line.replace('$linesdeleted', str(totalLinesDeleted)))
elif '$author' in line:
fout.write(line.replace('$author', str(totalAuthors)))
else:
fout.write(line)
def generateBestAuthors(projectName, commitData):
# Generate best author table
fields = ['author', 'commit_number', 'lines_added', 'lines_deleted']
doc, tag, text = Doc().tagtext()
with tag('table', ('class', 'table table-bordered table-condensed table-hover')):
with tag('tr'):
for i in range(len(fields)):
with tag('th'):
text(fields[i])
for commitdata in commitData:
with tag('tr'):
for i in range(len(fields)):
with tag('td', ('align', 'center')):
text(commitdata[i])
with open(Constant.BEST_AUTHORS_TEMPLATE, "rt") as fin:
with open(Constant.BEST_AUTHORS, "wt") as fout:
for line in fin:
if '$title' in line:
fout.write(line.replace('$title', projectName))
elif '$data' in line:
fout.write(line.replace('$data', doc.getvalue()))
else:
fout.write(line)
def generateLinesByDate(commits, projectName):
totalLines = 0
totalLinesAdded = 0
totalLinesDeleted = 0;
mydic = collections.OrderedDict()
for commit in reversed(commits):
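        # timestamps are converted to epoch milliseconds, presumably for the
        # JS charting code in the HTML templates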
dateKey = int(commit.date.strftime("%s")) * 1000
totalLinesAdded = totalLinesAdded + commit.linesAdded
totalLinesDeleted = totalLinesDeleted + commit.linesDeleted
linesDiff = commit.linesAdded - commit.linesDeleted;
totalLines = totalLines + linesDiff
        if dateKey in mydic:
            mydic[dateKey] = mydic[dateKey] + linesDiff
        else:
            mydic[dateKey] = totalLines # totalLines already includes this commit's diff
data = []
for item in mydic.items():
data.append([item[0], item[1]])
with open(Constant.LINES_BY_DATE_TEMPLATE, "rt") as fin:
with open(Constant.LINES_BY_DATE, "wt") as fout:
for line in fin:
if '$data' in line:
fout.write(line.replace('$data', str(data)))
elif '$title' in line:
fout.write(line.replace('$title', projectName))
else:
fout.write(line)
return totalLines, totalLinesAdded, totalLinesDeleted
def generateFilesByDate(commits, projectName):
totalFiles = 0
mydic = collections.OrderedDict()
for commit in reversed(commits):
dateKey = int(commit.date.strftime("%s")) * 1000
filesDiff = commit.filesAdded - commit.filesDeleted;
totalFiles = totalFiles + filesDiff
        if dateKey in mydic:
            mydic[dateKey] = mydic[dateKey] + filesDiff
        else:
            mydic[dateKey] = totalFiles # totalFiles already includes this commit's diff
data = []
for item in mydic.items():
data.append([item[0], item[1]])
with open(Constant.FILES_BY_DATE_TEMPLATE, "rt") as fin:
with open(Constant.FILES_BY_DATE, "wt") as fout:
for line in fin:
if '$data' in line:
fout.write(line.replace('$data', str(data)))
elif '$title' in line:
fout.write(line.replace('$title', projectName))
else:
fout.write(line)
return totalFiles
def generateFileByExtension(fileExtensionMap, projectName):
    data = fileExtensionMap.values()
    totalFiles = sum(data)
threshold = int(totalFiles/200)
    for ext in list(fileExtensionMap.keys()): # copy the keys: entries are deleted while iterating
if fileExtensionMap[ext] <= threshold:
if 'other' not in fileExtensionMap:
fileExtensionMap['other'] = 0
fileExtensionMap['other'] += fileExtensionMap[ext]
del fileExtensionMap[ext]
with open(Constant.FILES_BY_EXTENSION_TEMPLATE, "rt") as fin:
with open(Constant.FILES_BY_EXTENSION, "wt") as fout:
for line in fin:
if '$title' in line:
fout.write(line.replace('$title', projectName))
elif '$data' in line:
fout.write(line.replace('$data', '[' + ','.join(str(e) for e in fileExtensionMap.values()) + ']' ))
elif '$extensions' in line:
                    fout.write(line.replace('$extensions', json.dumps(list(fileExtensionMap.keys()))))
else:
fout.write(line)
| gzc/gitstats | v1/DataDriver.py | Python | mit | 6,238 |
"""
Django settings for gnucash_explorer project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.6/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = None
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'gnucash_explorer.urls'
WSGI_APPLICATION = 'gnucash_explorer.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.6/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.6/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.6/howto/static-files/
STATIC_URL = '/static/'
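# Per-machine overrides live in local_settings.py; it is expected to define at
# least SECRET_KEY (deliberately left as None above) and any DEBUG/DATABASES
# changes for the current environment.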
try:
from gnucash_explorer.local_settings import *
except ImportError as e:
print('You should set up your local_settings.py')
| peap/gnucash_explorer | gnucash_explorer/settings.py | Python | mit | 2,089 |
import sublime
from . import SblmCmmnFnctns
class Spinner:
SYMBOLS_ROW = u'←↑→↓'
SYMBOLS_BOX = u'⠋⠙⠹⠸⠼⠴⠦⠧⠇⠏'
def __init__(self, symbols, view, startStr, endStr):
self.symbols = symbols
self.length = len(symbols)
self.position = 0
self.stopFlag = False
self.view = view
self.startStr = startStr
self.endStr = endStr
def __next__(self):
self.position = self.position + 1
return self.startStr + self.symbols[self.position % self.length] + self.endStr
def start(self):
if not self.stopFlag:
self.view.set_status(SblmCmmnFnctns.SUBLIME_STATUS_SPINNER, self.__next__())
sublime.set_timeout(lambda: self.start(), 300)
def stop(self):
self.view.erase_status(SblmCmmnFnctns.SUBLIME_STATUS_SPINNER)
self.stopFlag = True
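# Minimal usage sketch (assumes a Sublime Text `view` object from a running command):
# spinner = Spinner(Spinner.SYMBOLS_BOX, view, 'BSScript [', ']')
# spinner.start() # repaints the status bar every 300 ms until stop() is called
# spinner.stop()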
| rusiv/BSScript | bsscript/bsscriptSblm/Spinner.py | Python | mit | 784 |
from django.conf.urls import url
from sms import views
app_name = 'sms'
urlpatterns = [
url(r'^$', views.index, name="index"),
]
| RobSpectre/garfield | garfield/sms/urls.py | Python | mit | 135 |
f = open('input.txt')
triangles = [list(map(int, l.split())) for l in f.readlines()]
possible = 0
for t in triangles:
t.sort()
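    # triangle inequality: after sorting, the triangle is valid iff the two
    # shorter sides together exceed the longest one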
if t[0] + t[1] > t[2]:
possible += 1
print(possible)
| pwicks86/adventofcode2016 | day03/p1.py | Python | mit | 190 |
import asyncio
import sys
import config
import sender
import receiver
print(sys.argv)
async def receiveMessageFromSerial():
return "Message"
def help():
print('Luiza 1.0 - ([email protected])')
print('Usage: python3 app.py [Options][Message][source][dest]')
print('')
print('SENDING MESSAGE')
    print(' You will send a message from source 1 to dest 3, containing the text "Sending Message from Luiza"')
print(' python3 app.py --send "Sending Message from Luiza" 1 3')
print('RECEIVING MESSAGE')
print(' You will receive a message using the address 3')
print(' python3 app.py --read 3')
quit()
if len(sys.argv) == 1:
help()
if(sys.argv[1] == '--send'):
    if len(sys.argv) < 5:
        print('ERR: An error occurred. The command was Invalid.')
        help()
    else:
        if(len(sys.argv[2]) < 10):
            print('ERR: Message size must be at least 10.')
            quit()
        sender.message(sys.argv[3], sys.argv[4], sys.argv[2])
if(sys.argv[1] == '--read'):
if len(sys.argv) < 3:
print('ERR: An error occurred. The command was Invalid. Dest to read not informed !!')
help()
loop = asyncio.get_event_loop()
loop.run_until_complete(receiver.start())
loop.close() | gohackfelipe/luiza | luiza.py | Python | mit | 1,285 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Testing module from creating product using
http://www.sendfromchina.com/default/index/webservice
"""
from flask import Flask, render_template, request
from flask_wtf.csrf import CSRFProtect
from werkzeug.datastructures import MultiDict
from forms import SFCCreateOrder, SFCCreateProduct, SFCOrderDetail, SFCASNInfo, SFCgetOrderByCode
from sfc_api import SFCAPI
app = Flask(__name__)
csrf = CSRFProtect(app)
csrf.init_app(app)
app.secret_key = 's3cr3tasdasdasdasd'
header_request = {'customerId': 'R2036',
'appToken': 'MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDK1YNcdunmWXoK1ys6hyi+LWQdPx6Vmr/9kNlKOw4cK5Q8FWA3nfGeeG49Pq2TlYKVLdSw1fr60AAJFQOuXmol6lmyn+/xwx6j21XLx9/4vdDNSTR8Hcp7oqGNNr5DlI0onhJ7sd+rAxhIOwLNnZv6T/XtVqQNuGVXTq/dX0zkaQIDAQAB',
'appKey': 'MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDGmDLbsI4jELqCHgWikWqACICp299WSoiWgqghEXkQfvdEvwS5XWpdsSmdQwryR2rtg0DiS3vf74oVYBDJVHgcUdc2ov7QI5TPBqXJped7OoyrqYzaYFYshzGWgYC0wu5RCb71p2+4Z8NwDoJlvMVU4/fD9pL59PW8yYH1u3x4ewIDAQAB'}
wsdl = 'http://fulfill.sendfromchina.com/default/svc/wsdl'
sfcapi = SFCAPI(p_wsdl=wsdl, p_header_request=header_request)
@app.route("/")
def hello():
return render_template('index.html')
@app.route("/create_product", methods=['GET', 'POST'])
def create_product():
res = None
if request.method == 'POST':
product_info = {k: v for k, v in request.form.items() if k not in ('csrf_token', 'pocId', 'poValue', 'imgUrl')}
product_info['image'] = {'imgUrl': request.form['imgUrl']}
product_info['qcs'] = {'pocId': request.form['pocId'], 'poValue': request.form['poValue']}
res = sfcapi.create_product(product_info)
form = SFCCreateProduct(request.form)
else:
form = SFCCreateProduct()
return render_template('create_product.html', form=form, res=res)
@app.route("/create_order", methods=['GET', 'POST'])
def create_order():
res = None
if request.method == 'POST':
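        # Split the flat POST payload by introspecting each form class's public
        # attribute names (everything not starting with '_').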
order_info = {k: v for k, v in request.form.items() if k in
[i for i in SFCCreateOrder.__dict__.keys() if i[0] != '_']}
order_detail = {k: v for k, v in request.form.items() if
k in [i for i in SFCOrderDetail.__dict__.keys() if i[0] != '_']}
res = sfcapi.create_order(p_order_info=order_info, p_order_detail=order_detail)
form_order = SFCCreateOrder(MultiDict(order_info))
form_order_detail = SFCOrderDetail(MultiDict(order_detail))
else:
form_order = SFCCreateOrder()
form_order_detail = SFCOrderDetail()
return render_template('create_order.html', form_master=form_order, form_detail=form_order_detail, res=res)
@app.route("/create_asn", methods=['GET', 'POST'])
def create_asn():
res = None
if request.method == 'POST':
asn_info = {k: v for k, v in request.form.items() if k in
[i for i in SFCASNInfo.__dict__.keys() if i[0] != '_']}
order_detail = {k: v for k, v in request.form.items() if
k in [i for i in SFCOrderDetail.__dict__.keys() if i[0] != '_']}
form_asn = SFCASNInfo(MultiDict(asn_info))
form_order_detail = SFCOrderDetail(MultiDict(order_detail))
res = sfcapi.create_asn(p_asn_info=asn_info, p_order_detail=order_detail)
else:
form_asn = SFCASNInfo()
form_order_detail = SFCOrderDetail()
return render_template('create_asn.html', form_master=form_asn, form_detail=form_order_detail, res=res)
@app.route("/get_order_by_code", methods=['GET', 'POST'])
def get_order():
res = None
if request.method == 'POST':
res = sfcapi.get_order_by_code(order_code=request.form['ordersCode'], detail_level=request.form['detailLevel'])
form_get_order = SFCgetOrderByCode(
MultiDict({'ordersCode': request.form['ordersCode'], 'detailLevel': request.form['detailLevel']}))
else:
form_get_order = SFCgetOrderByCode()
return render_template('get_order_by_code.html', form=form_get_order, res=res)
if __name__ == "__main__":
app.run(host='0.0.0.0', debug=True)
| dremdem/sfc_sdk | sfc_main.py | Python | mit | 4,144 |
'''
# Runtime:
python 2.7.3 (win)
# Result:
0
1
<...>
998
boom
'''
def f(n):
print n
f(n+1)
try:
f(0)
except:
print 'boom' | SnowOnion/CodeForcesLee | dp/testStackDepth.py | Python | mit | 146 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# name: test_parsable.py
# author: Harold Bradley III
# email: [email protected]
# created on: 01/16/2016
#
# pylint: disable=invalid-name,no-member
"""
A unit test for ext_pylib file module's Parsable mixin class.
"""
import pytest
from . import utils
from ext_pylib.files import File, Parsable
class ParsableFile(Parsable, File):
"""Dummy class extending Parsable and File."""
FILE = """This is a sample file.
This is a sample file.
This is a sample file.
DocumentRoot /var/www/google.com
This is a sample file.
DEBUG = True
SECURE = False
DocumentRoot /var/www/example.com
LIST = first_item
LIST = second_item
"""
EMPTY_FILE = ''
def test_parsable_parse_with_existing_attribute():
"""Test Parsable setup_parsing() method on an existing attribute."""
parsable = ParsableFile()
parsable.existing = 'already exists' # pylint: disable=attribute-defined-outside-init
with pytest.raises(AttributeError):
parsable.setup_parsing({'existing' : '*'})
def test_parsable_setup_parsing():
"""Test Parsable setup_parsing() method."""
the_file = Parsable()
Parsable.read = utils.mock_read_data
the_file.data = FILE
the_file.setup_parsing({
'htdocs' : ('DocumentRoot (.*)',),
'debug' : 'DEBUG = (.*)',
'secure' : ('SECURE[ ]*=[ ]*([^ \n]*)', 'SECURE = {0}'),
'speed' : ('SPEED[ ]*=[ ]*([^ \n]*)', 'SPEED = {0}'),
'list' : ('LIST[ ]*=[ ]*([^ \n]*)', 'LIST = {0}'),
})
assert the_file.htdocs[0] == '/var/www/google.com'
assert the_file.htdocs[1] == '/var/www/example.com'
assert the_file.debug == 'True'
assert the_file.secure == 'False'
the_file.secure = 'True'
assert the_file.secure == 'True'
assert the_file.speed is None
the_file.speed = 'fastest'
assert the_file.speed == 'fastest'
the_file.speed = 'fastest' # setting it more than once with the same value
# shouldn't affect the number of times it is added.
assert isinstance(the_file.speed, str) \
or isinstance(the_file.speed, unicode) # Shouldn't be a list, checking unicode
# for Python 2 support.
assert len(the_file.list) == 2 # Should be a list
def test_parsable_setup_parsing_on_empty_file():
"""Test Parsable setup_paring() using an empty file."""
the_file = Parsable()
Parsable.read = utils.mock_read_data
the_file.data = EMPTY_FILE
the_file.setup_parsing({
'htdocs' : ('DocumentRoot (.*)', 'DocumentRoot {0}'),
'secure' : ('SECURE[ ]*=[ ]*([^ \n]*)', 'SECURE = {0}'),
})
assert the_file.htdocs is None
the_file.htdocs = '/var/www/google.com'
assert the_file.htdocs == '/var/www/google.com'
assert the_file.secure is None
the_file.secure = 'True'
assert the_file.secure == 'True'
| hbradleyiii/ext_pylib | tests/files/test_parsable.py | Python | mit | 2,936 |
#!/usr/bin/python
# coding=utf-8
import sys
base_url = sys.argv[0]
addon_handle = int(sys.argv[1])
import xbmcplugin
xbmcplugin.setContent(addon_handle, 'episodes')
import urlparse
args = urlparse.parse_qs(sys.argv[2][1:])
mode = args.get('mode', None)
from urllib import FancyURLopener, urlencode
class URLOpener(FancyURLopener):
version = 'Mozilla/5.0 (X11; Linux i686; rv:31.0) Gecko/20100101 Firefox/31.0 Iceweasel/31.0'
urlopen = URLOpener().open
urlmake = lambda query: base_url + '?' + urlencode(query)
rooturl = 'http://nick.walla.co.il'
def getpage(url):
if url.startswith('/'): url = rooturl + url
elif not url.startswith('http://'): url = rooturl + '/' + url
resets = 0
for tries in range(5):
try:
page = urlopen(url).read()
break
except IOError:
page = u''
if isinstance(page, str): page = page.decode('windows-1255', 'replace')
page = page.encode('utf-8')
return page
import re
vidregexp = re.compile(
'class="vitem.*?"',
re.DOTALL
)
nextregexp = re.compile(
'<a class="p_r" style="" href="(.+?)"'
)
def vidsfromseason(url):
page = getpage(url)
vids = vidregexp.findall(page)
for nexturl in nextregexp.findall(page):
vids += vidregexp.findall(getpage(nexturl))
return vids
def vidsfromshow(showurl):
return [vidsfromseason(url) for url in re.findall(
'href="([^"]*)"[^>]*>[^<]*פרקים מלאים',
getpage(showurl)
)]
import xbmcgui
if mode is None:
for show in re.findall(
'<a href="([^"]+)" class="item right w3" style=".*?">([^<]+)</a>',
getpage('/')
):
xbmcplugin.addDirectoryItem(
handle=addon_handle,
url=urlmake({'mode': 'show', 'showurl': show[0]}),
listitem=xbmcgui.ListItem(show[1]),
isFolder=True
)
xbmcplugin.endOfDirectory(addon_handle)
elif mode[0] == 'show':
print(vidsfromshow(args['showurl'][0]))
xbmcplugin.addDirectoryItem(
handle=addon_handle,
url='/',
listitem=xbmcgui.ListItem('Video')
)
xbmcplugin.endOfDirectory(addon_handle)
| israellevin/plugin.video.walla | addon.py | Python | mit | 2,152 |
import operator
import ply.lex as lex
from jpp.parser.operation import Operation
from jpp.parser.expression import SimpleExpression
reserved = {
'extends': 'EXTENDS',
'import': 'IMPORT',
'local': 'LOCAL',
'imported': 'IMPORTED',
'user_input': 'USER_INPUT',
}
NAME_TOK = 'NAME'
tokens = [
'INTEGER',
'STRING_LITERAL',
'COLON',
NAME_TOK,
'COMMA',
'LCURL',
'RCURL',
'LBRAC',
'RBRAC',
'LPAREN',
'RPAREN',
'DOT',
'SEMICOLON',
'BOOLEAN',
'MINUS',
'COMPARISON_OP',
'PLUS',
'MUL_OP',
'BIT_SHIFT_OPS',
'BITWISE_OPS',
'INVERT',
'POW',
'FUNC',
]
tokens.extend(reserved.values())
t_DOT = r'\.'
t_LCURL = r'\{'
t_RCURL = r'\}'
t_COLON = r'\:'
t_LBRAC = r'\['
t_RBRAC = r'\]'
t_LPAREN = r'\('
t_RPAREN = r'\)'
t_COMMA = ','
t_SEMICOLON = ';'
def _create_operation_token(t):
t.value = Operation(t.value)
return t
def t_BIT_SHIFT_OPS(t):
"""
<<|>>
"""
return _create_operation_token(t)
def t_COMPARISON_OP(t):
"""
<|<=|==|!=|>=
"""
return _create_operation_token(t)
def t_BITWISE_OPS(t):
r"""
&|\^|\|
"""
return _create_operation_token(t)
def t_PLUS(t):
r"""
\+
"""
return _create_operation_token(t)
def t_MINUS(t):
r"""
-
"""
t.value = Operation(t.value, operator.sub)
return t
def t_POW(t):
r"""
\*\*
"""
return _create_operation_token(t)
def t_MUL_OP(t):
r"""
\*|//|/|%
"""
return _create_operation_token(t)
def t_INVERT(t):
"""
~
"""
return _create_operation_token(t)
def t_FUNC(t):
"""
bool|abs
"""
return _create_operation_token(t)
def t_INTEGER(t):
r"""
\d+
"""
t.value = SimpleExpression(int(t.value))
return t
def t_STRING_LITERAL(t):
"""
"[^"\n]*"
"""
t.value = SimpleExpression(str(t.value).strip('"'))
return t
def t_BOOLEAN(t):
"""
true|false
"""
t.value = SimpleExpression(t.value == 'true')
return t
def t_NAME(t):
"""
[a-zA-Z_][a-zA-Z_0-9]*
"""
t.type = reserved.get(t.value, NAME_TOK) # Check for reserved words
return t
def t_COMMENT(t):
r"""
\#.*
"""
# No return value. Token discarded
pass
def t_newline(t):
r"""
\n+
"""
t.lexer.lineno += len(t.value)
def t_error(_):
return
t_ignore = ' \t'
def create_lexer():
return lex.lex(debug=False)
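# Minimal usage sketch (the sample input and token stream are an assumption
# based on the rules above):
# lexer = create_lexer()
# lexer.input('local foo: 1 + 2;')
# for tok in lexer:
#     print(tok.type, tok.value)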
| asherbar/json-plus-plus | jpp/parser/lex.py | Python | mit | 2,482 |
from django.conf.urls import patterns, include, url
from django.contrib import admin
from django.contrib import auth
admin.autodiscover()
urlpatterns = patterns('stepup.views',
# Examples:
# url(r'^$', 'volunteer.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
#auth
url(r'^admin/', include(admin.site.urls)),
# homepage
url(r'^$', 'index', name = 'homepage'),
# about
url(r'^about/$', 'about', name = 'view_about'),
# person
url(r'^person/(?P<slug>[^\.]+)', 'person', name = 'view_person'),
# all persons
url(r'^person/', 'all_person', name = 'view_all_person'),
# opportunity
url(r'^opportunity/(?P<slug>[^\.]+)', 'opportunity', name = 'view_opportunity'),
# all opportunities
url(r'^opportunity/', 'all_opportunity', name = 'view_all_opportunity'),
# organization
url(r'^organization/(?P<slug>[^\.]+)', 'organization', name = 'view_organization'),
#url(r'^tag/(?P<slug>[^\.]+)', 'tag', name = 'view_tag'),
# all organizations
url(r'^organization/', 'all_organizations', name = 'view_all_organization'),
# comments
url(r'^comments/', include('django.contrib.comments.urls')),
# search
url(r'^search/', 'search', name = 'view_search'),
)
#urlpatterns += patterns('',
# url(r'^login/', 'django.contrib.auth.views.login', {'template_name': 'login_request'}, name='user-login')
#)
urlpatterns += patterns('django.contrib.auth.views',
url(r'^login/$', 'login', {'template_name': 'login.html'},
name='mysite_login'),
url(r'^logout/$', 'logout', {'next_page': '/'}, name='mysite_logout'),
)
| jrouly/stepup | stepup/settings/urls.py | Python | mit | 1,653 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
This program collects Portugal weather forecasts from IPMA and uploads them to the Orion Context Broker.
It uploads the list of stations on the fly from
- http://api.ipma.pt/json/locations.json.
Legal notes:
- http://www.ipma.pt/en/siteinfo/index.html?page=index.xml
Examples:
- get the weather forecast from IPMA:
curl -X GET --header 'Accept: application/json' \
'http://api.ipma.pt/json/alldata/1110600.json'
AsyncIO name convention:
async def name - entry point for asynchronous data processing/http requests and post processing
async def name_bounded - intermediate step to limit amount of parallel workers
async def name_one - worker process
"""
from aiohttp import ClientSession, ClientConnectorError
from argparse import ArgumentTypeError, ArgumentParser
from asyncio import Semaphore, ensure_future, gather, run, TimeoutError as ToE, set_event_loop_policy
from copy import deepcopy
from datetime import datetime, timedelta
from pytz import timezone
from re import sub
from requests import get, exceptions
from sys import stdout
from time import sleep
from uvloop import EventLoopPolicy
from yajl import dumps, loads
from yaml import safe_load as load
import logging
default_latest = False # preserve only latest values
default_limit_entities = 50 # amount of entities per 1 request to Orion
default_limit_source = 10 # amount of parallel request to IPMA
default_limit_target = 50 # amount of parallel request to Orion
default_log_level = 'INFO'
default_orion = 'http://orion:1026' # Orion Contest Broker endpoint
default_timeout = -1 # if value != -1, then work as a service
http_ok = [200, 201, 204]
log_levels = ['ERROR', 'INFO', 'DEBUG']
logger = None
logger_req = None
stations = dict() # preprocessed list of stations
tz = timezone('UTC')
tz_wet = 'Europe/Lisbon'
tz_azot = 'Atlantic/Azores'
tz_azot_codes = ['3490100', '3480200', '3470100', '3460200', '3450200', '3440100', '3420300', '3410100']
url_observation = 'http://api.ipma.pt/json/alldata/{}.json'
url_stations = 'http://api.ipma.pt/json/locations.json'
template = {
'id': 'urn:ngsi-ld:WeatherForecast:Portugal-WeatherForecast-',
'type': 'WeatherForecast',
'address': {
'type': 'PostalAddress',
'value': {
'addressCountry': 'PT',
'addressLocality': None,
'postalCode': None
}
},
'dateIssued': {
'type': 'DateTime',
'value': None
},
'dataProvider': {
'type': 'Text',
'value': 'FIWARE'
},
'dateRetrieved': {
'type': 'DateTime',
'value': None
},
'dayMaximum': {
'type': 'StructuredValue',
'value': {
'temperature': None
}
},
'dayMinimum': {
'type': 'StructuredValue',
'value': {
'temperature': None
}
},
'feelsLikeTemperature': {
'type': 'Number',
'value': None
},
'precipitationProbability': {
'type': 'Number',
'value': None
},
'relativeHumidity': {
'type': 'Number',
'value': None
},
'source': {
'type': 'URL',
'value': 'http://www.ipma.pt'
},
'temperature': {
'type': 'Number',
'value': None
},
'validFrom': {
'type': 'DateTime',
'value': None
},
'validTo': {
'type': 'DateTime',
'value': None
},
'validity': {
'type': 'Text',
'value': None
},
'weatherType': {
'type': 'Text',
'value': None
},
'windDirection': {
'type': 'Number',
'value': None
},
'windSpeed': {
'type': 'Number',
'value': None
}
}
def check_entity(forecast, item):
if item in forecast:
if forecast[item] != '-99.0' and forecast[item] != -99:
return forecast[item]
return None
def decode_weather_type(item):
out = {
0: None,
1: 'clearSky',
2: 'partlyCloudy',
3: 'sunnyIntervals',
4: 'cloudy',
5: 'highClouds',
6: 'showers',
7: 'lightShowers',
8: 'heavyShowers',
9: 'rain',
10: 'lightRain',
11: 'heavyRain',
12: 'intermittentRain',
13: 'intermittentLightRain',
14: 'intermittentHeavyRain',
15: 'drizzle',
16: 'mist',
17: 'fog',
18: 'snow',
19: 'thunderstorms',
20: 'showersAndThunderstorms',
21: 'hail',
22: 'frost',
23: 'rainAndThunderstorms',
24: 'convectiveClouds',
25: 'partyCloudy',
26: 'fog',
27: 'cloudy'
}.get(item, None)
if out is None and item != 0:
logger.error('Unknown value of WeatherType detected, %s', item)
return out if out else None
def decode_wind_direction(item):
"""
North: 180
North-West: 135
West: 90
South-West: 45
South: 0
South-East: -45
East: -90
North-East: -135
"""
out = {
'9': 180,
'8': 135,
'7': 90,
'6': 45,
'5': 0,
'4': -45,
'3': -90,
'2': -135,
'N': 180,
'NW': 135,
'W': 90,
'SW': 45,
'S': 0,
'SE': -45,
'E': -90,
'NE': -135
}.get(item, None)
if out is None:
logger.error('Unknown value of WindDirection detected, %s', item)
    return out  # may legitimately be 0 (South), so don't collapse falsy values to None
async def collect():
logger.debug('Connecting data from IPMA started')
tasks = list()
sem = Semaphore(limit_source)
async with ClientSession() as session:
for station in stations:
task = ensure_future(collect_bounded(station, sem, session))
tasks.append(task)
result = await gather(*tasks)
while False in result:
result.remove(False)
logger.debug('Collecting data from IPMA ended')
return result
async def collect_bounded(station, sem, session):
async with sem:
return await collect_one(station, session)
async def collect_one(station, session):
try:
async with session.get(stations[station]['url']) as response:
result = await response.text()
status = response.status
except ClientConnectorError:
logger.error('Collecting data from IPMA station %s failed due to the connection problem', station)
return False
except ToE:
logger.error('Collecting link from IPMA station %s failed due to the timeout problem', station)
return False
if status not in http_ok:
logger.error('Collecting data from IPMA station %s failed due to the return code %s', station, status)
return False
content = loads(result)
result = dict()
result['id'] = station
result['retrieved'] = datetime.now().replace(microsecond=0)
result['forecasts'] = dict()
today = datetime.now(tz).strftime("%Y-%m-%d") + 'T00:00:00'
tomorrow = (datetime.now(tz) + timedelta(days=1)).strftime("%Y-%m-%d") + 'T00:00:00'
for forecast in content:
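        # idPeriodo == 24 marks the daily (24-hour) forecast blocks in IPMA's
        # alldata feed (an assumption from the API); intraday periods are skipped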
if forecast['idPeriodo'] != 24:
continue
date = forecast['dataPrev']
if date not in [today, tomorrow]:
continue
result['forecasts'][date] = dict()
result['forecasts'][date]['feelsLikeTemperature'] = check_entity(forecast, 'utci')
result['forecasts'][date]['issued'] = datetime.strptime(forecast['dataUpdate'], '%Y-%m-%dT%H:%M:%S')
result['forecasts'][date]['period'] = forecast['idPeriodo']
result['forecasts'][date]['precipitationProbability'] = check_entity(forecast, 'probabilidadePrecipita')
result['forecasts'][date]['relativeHumidity'] = check_entity(forecast, 'hR')
result['forecasts'][date]['temperature'] = check_entity(forecast, 'tMed')
result['forecasts'][date]['tMax'] = check_entity(forecast, 'tMax')
result['forecasts'][date]['tMin'] = check_entity(forecast, 'tMin')
result['forecasts'][date]['weatherType'] = check_entity(forecast, 'idTipoTempo')
result['forecasts'][date]['windDirection'] = check_entity(forecast, 'ddVento')
result['forecasts'][date]['windSpeed'] = check_entity(forecast, 'ffVento')
return result
def log_level_to_int(log_level_string):
if log_level_string not in log_levels:
message = 'invalid choice: {0} (choose from {1})'.format(log_level_string, log_levels)
raise ArgumentTypeError(message)
return getattr(logging, log_level_string, logging.ERROR)
async def post(body):
logger.debug('Posting data to Orion started')
tasks = list()
headers = {
'Content-Type': 'application/json'
}
if service:
headers['FIWARE-SERVICE'] = service
if path:
headers['FIWARE-SERVICEPATH'] = path
sem = Semaphore(limit_target)
# splitting list to list of lists to fit into limits
block = 0
items = 0
body_divided = dict()
body_divided[0] = list()
while True:
if len(body) > 0:
if items < limit_entities:
body_divided[block].append(body.pop())
items += 1
else:
items = 0
block += 1
body_divided[block] = list()
else:
break
async with ClientSession() as session:
for item in body_divided:
task = ensure_future(post_bounded(body_divided[item], headers, sem, session))
tasks.append(task)
response = await gather(*tasks)
response = list(set(response))
if True in response:
response.remove(True)
for item in response:
logger.error('Posting data to Orion failed due to the %s', item)
logger.debug('Posting data to Orion ended')
async def post_bounded(item, headers, sem, session):
async with sem:
return await post_one(item, headers, session)
async def post_one(item, headers, session):
payload = {
'actionType': 'APPEND',
'entities': item
}
payload = dumps(payload)
url = orion + '/v2/op/update'
try:
async with session.post(url, headers=headers, data=payload) as response:
status = response.status
except ClientConnectorError:
return 'connection problem'
except ToE:
return 'timeout problem'
if status not in http_ok:
return 'response code ' + str(status)
return True
async def prepare_schema(source):
logger.debug('Schema preparation started')
tasks = list()
for item in source:
task = ensure_future(prepare_schema_one(item))
tasks.append(task)
result = await gather(*tasks)
logger.debug('Schema preparation ended')
return [j for i in result for j in i]
async def prepare_schema_one(source):
result = list()
id_local = source['id']
today = datetime.now(tz).strftime("%Y-%m-%d") + 'T00:00:00'
tomorrow = (datetime.now(tz) + timedelta(days=1)).strftime("%Y-%m-%d") + 'T00:00:00'
retrieved = source['retrieved'].replace(tzinfo=tz).isoformat().replace('+00:00', 'Z')
for date in source['forecasts']:
item = deepcopy(template)
forecast = source['forecasts'][date]
issued = forecast['issued'].replace(tzinfo=tz).isoformat().replace('+00:00', 'Z')
forecast_date = datetime.strptime(date, '%Y-%m-%dT00:00:00')
valid_from = forecast_date.replace(tzinfo=tz)
valid_to = valid_from + timedelta(hours=24)
valid_from_iso = valid_from.isoformat().replace('+00:00', 'Z')
valid_from_short = valid_from.strftime('%H:%M:%S')
valid_to_iso = valid_to.isoformat().replace('+00:00', 'Z')
valid_to_short = valid_to.strftime('%H:%M:%S')
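        # With --latest, entity ids are stable per station/day slot, so each run
        # overwrites the previous forecast in Orion instead of accumulating history.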
if latest:
if date == today:
item['id'] = item['id'] + id_local + '_today_' + valid_from_short + '_' + valid_to_short
if date == tomorrow:
item['id'] = item['id'] + id_local + '_tomorrow_' + valid_from_short + '_' + valid_to_short
else:
item['id'] = item['id'] + id_local + '_' + valid_from_iso + '_' + valid_to_iso
item['address']['value']['addressLocality'] = stations[id_local]['addressLocality']
item['address']['value']['postalCode'] = stations[id_local]['postalCode']
item['dateIssued']['value'] = issued
item['dateRetrieved']['value'] = retrieved
if 'tMax' in forecast:
item['dayMaximum']['value']['temperature'] = float(forecast['tMax'])
else:
del item['dayMaximum']
if 'tMin' in forecast:
item['dayMinimum']['value']['temperature'] = float(forecast['tMin'])
else:
del item['dayMinimum']
if forecast['feelsLikeTemperature'] is not None:
item['feelsLikeTemperature']['value'] = float(forecast['feelsLikeTemperature'])
else:
del item['feelsLikeTemperature']
if forecast['precipitationProbability'] is not None:
item['precipitationProbability']['value'] = float(forecast['precipitationProbability'] / 100)
else:
del item['precipitationProbability']
if forecast['relativeHumidity'] is not None:
item['relativeHumidity']['value'] = float(forecast['relativeHumidity'])
else:
del item['relativeHumidity']
if forecast['temperature'] is not None:
item['temperature']['value'] = float(forecast['temperature'])
else:
del item['temperature']
item['validFrom']['value'] = valid_from_iso
item['validTo']['value'] = valid_to_iso
item['validity']['value'] = valid_from_iso + '/' + valid_to_iso
if forecast['weatherType'] is not None:
item['weatherType']['value'] = decode_weather_type(forecast['weatherType'])
if item['weatherType']['value'] is None:
del item['weatherType']
if forecast['windDirection'] is not None:
item['windDirection']['value'] = decode_wind_direction(forecast['windDirection'])
if item['windDirection']['value'] is None:
del item['windDirection']
if forecast['windSpeed'] is not None:
item['windSpeed']['value'] = round(float(forecast['windSpeed']) * 0.28, 2)
else:
del item['windSpeed']
result.append(item)
return result
def reply_status():
logger.info('Orion: %s', orion)
logger.info('FIWARE Service: %s', service)
logger.info('FIWARE Service-Path: %s', path)
logger.info('Timeout: %s', str(timeout))
logger.info('Stations: %s', str(len(stations)))
logger.info('Latest: %s', str(latest))
logger.info('Limit_source: %s', str(limit_source))
    logger.info('Limit_target: %s', str(limit_target))
logger.info('Log level: %s', args.log_level)
logger.info('Started')
def sanitize(str_in):
return sub(r"[<(>)\"\'=;-]", "", str_in)
def setup_logger():
local_logger = logging.getLogger('root')
local_logger.setLevel(log_level_to_int(args.log_level))
handler = logging.StreamHandler(stdout)
handler.setLevel(log_level_to_int(args.log_level))
formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s', datefmt='%Y-%m-%dT%H:%M:%SZ')
handler.setFormatter(formatter)
local_logger.addHandler(handler)
local_logger_req = logging.getLogger('requests')
local_logger_req.setLevel(logging.WARNING)
return local_logger, local_logger_req
def setup_stations(stations_limit):
result = dict()
limit_on = False
limit_off = False
resp = None
if 'include' in stations_limit:
limit_on = True
if 'exclude' in stations_limit:
limit_off = True
try:
resp = get(url_stations)
    except exceptions.ConnectionError:
        logger.error('Collecting the list of stations from IPMA failed due to the connection problem')
        exit(1)
if resp.status_code not in http_ok:
logger.error('Collecting the list of stations from IPMA failed due to the return code %s', resp.status_code)
exit(1)
content = loads(resp.text)
for station in content:
station_code = str(station['globalIdLocal'])
if limit_on:
if station_code not in stations_limit['include']:
continue
if limit_off:
if station_code in stations_limit['exclude']:
continue
result[station_code] = dict()
result[station_code]['postalCode'] = station_code
result[station_code]['addressLocality'] = sanitize(station['local'])
result[station_code]['url'] = url_observation.format(station_code)
if station_code in tz_azot_codes:
result[station_code]['timezone'] = tz_azot
else:
result[station_code]['timezone'] = tz_wet
if limit_on:
if len(result) != len(stations_limit['include']):
logger.error('Errors in the list of stations detected')
exit(1)
return result
def setup_stations_config(f):
local_stations = dict()
if f:
try:
with open(f, 'r', encoding='utf8') as f:
content = f.read()
config = sub(r'-.*\n?', setup_config_re, content)
f.close()
source = load(config)
            if 'exclude' in source and 'include' in source:
                logging.error('Config file must not contain both include and exclude lists')
                exit(1)
if 'exclude' in source:
local_stations['exclude'] = list()
for item in source['exclude']:
local_stations['exclude'].append(item)
if 'include' in source:
local_stations['include'] = list()
for item in source['include']:
local_stations['include'].append(item)
except TypeError:
logging.error('Config file is empty or wrong')
exit(1)
except FileNotFoundError:
logging.error('Config file not found')
exit(1)
return local_stations
def setup_config_re(station):
fix = sub('-', '', station.group()).strip()
return "- '{}'\n".format(fix)
if __name__ == '__main__':
parser = ArgumentParser()
parser.add_argument('--config',
dest='config',
help='YAML file with list of stations to be collected or excluded from collecting')
parser.add_argument('--latest',
action='store_true',
default=default_latest,
dest='latest',
help='Collect only latest forecast')
parser.add_argument('--limit-entities',
default=default_limit_entities,
dest='limit_entities',
help='Limit amount of entities per 1 request to orion')
parser.add_argument('--limit-source',
default=default_limit_source,
dest='limit_source',
help='Limit amount of parallel requests to IPMA')
parser.add_argument('--limit-target',
default=default_limit_target,
dest='limit_target',
help='Limit amount of parallel requests to Orion')
parser.add_argument('--log-level',
default=default_log_level,
dest='log_level',
help='Set the logging output level. {0}'.format(log_levels),
nargs='?')
parser.add_argument('--orion',
action='store',
default=default_orion,
dest='orion',
help='Orion Context Broker endpoint')
parser.add_argument('--path',
action='store',
dest='path',
help='FIWARE Service Path')
parser.add_argument('--service',
action='store',
dest="service",
help='FIWARE Service')
parser.add_argument('--timeout',
action='store',
default=default_timeout,
dest='timeout',
help='Run as a service')
args = parser.parse_args()
latest = args.latest
limit_entities = int(args.limit_entities)
limit_source = int(args.limit_source)
limit_target = int(args.limit_target)
orion = args.orion
timeout = int(args.timeout)
if 'path' in args:
path = args.path
if 'service' in args:
service = args.service
logger, logger_req = setup_logger()
set_event_loop_policy(EventLoopPolicy())
res = setup_stations_config(args.config)
stations = setup_stations(res)
reply_status()
while True:
res = run(collect())
if res:
res = run(prepare_schema(res))
run(post(res))
if timeout == -1:
break
else:
logger.debug('Sleeping for the %s seconds', timeout)
sleep(timeout)
logger.info('Ended')
exit(0)
| Fiware/dataModels | specs/Weather/WeatherForecast/harvesters/portugal/portugal_weather_forecast.py | Python | mit | 21,328 |