repo_name (string, 5–100 chars) | path (string, 4–375 chars) | copies (categorical, 991 values) | size (string, 4–7 chars) | content (string, 666–1M chars) | license (categorical, 15 values)
---|---|---|---|---|---
tiborsimko/invenio-demosite | invenio_demosite/testsuite/regression/test_oai_harvest_admin.py | 7 | 2804 | # -*- coding: utf-8 -*-
#
# This file is part of Invenio Demosite.
# Copyright (C) 2009, 2010, 2011, 2012 CERN.
#
# Invenio Demosite is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio Demosite is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""OAIHarvest Admin Regression Test Suite."""
__revision__ = "$Id$"
from invenio.testsuite import InvenioTestCase
from invenio.base.globals import cfg
from invenio.testsuite import make_test_suite, run_test_suite, \
test_web_page_content, merge_error_messages
class OAIHarvestAdminWebPagesAvailabilityTest(InvenioTestCase):
"""Check OAIHarvest Admin web pages whether they are up or not."""
def test_oaiharvestadmin_interface_pages_availability(self):
"""oaiharvestadmin - availability of OAI Harvest Admin interface pages"""
baseurl = cfg['CFG_SITE_URL'] + '/admin/oaiharvest/oaiharvestadmin.py/'
_exports = ['', 'editsource', 'addsource', 'delsource']
error_messages = []
for url in [baseurl + page for page in _exports]:
# first try as guest:
error_messages.extend(test_web_page_content(url,
username='guest',
expected_text=
'Authorization failure'))
# then try as admin:
error_messages.extend(test_web_page_content(url,
username='admin'))
if error_messages:
self.fail(merge_error_messages(error_messages))
return
def test_oai_admin_guide_availability(self):
"""oaiharvestadmin - availability of OAIHarvest Admin Guide"""
url = cfg['CFG_SITE_URL'] + '/help/admin/oaiharvest-admin-guide'
error_messages = test_web_page_content(url,
expected_text="OAIHarvest Admin Guide")
if error_messages:
self.fail(merge_error_messages(error_messages))
return
TEST_SUITE = make_test_suite(OAIHarvestAdminWebPagesAvailabilityTest)
if __name__ == "__main__":
run_test_suite(TEST_SUITE, warn_user=True)
| gpl-2.0 |
ZhaoCJ/django | django/contrib/gis/geoip/base.py | 5 | 11128 | import os
import re
from ctypes import c_char_p
from django.core.validators import ipv4_re
from django.contrib.gis.geoip.libgeoip import GEOIP_SETTINGS
from django.contrib.gis.geoip.prototypes import (
GeoIPRecord, GeoIPTag, GeoIP_open, GeoIP_delete, GeoIP_database_info,
GeoIP_lib_version, GeoIP_record_by_addr, GeoIP_record_by_name,
GeoIP_country_code_by_addr, GeoIP_country_code_by_name,
GeoIP_country_name_by_addr, GeoIP_country_name_by_name)
from django.utils import six
from django.utils.encoding import force_bytes
# Regular expressions for recognizing the GeoIP free database editions.
free_regex = re.compile(r'^GEO-\d{3}FREE')
lite_regex = re.compile(r'^GEO-\d{3}LITE')
#### GeoIP classes ####
class GeoIPException(Exception):
pass
class GeoIP(object):
# The flags for GeoIP memory caching.
# GEOIP_STANDARD - read database from filesystem, uses least memory.
#
# GEOIP_MEMORY_CACHE - load database into memory, faster performance
# but uses more memory
#
# GEOIP_CHECK_CACHE - check for updated database. If database has been
# updated, reload filehandle and/or memory cache. This option
# is not thread safe.
#
# GEOIP_INDEX_CACHE - just cache the most frequently accessed index
# portion of the database, resulting in faster lookups than
# GEOIP_STANDARD, but less memory usage than GEOIP_MEMORY_CACHE -
# useful for larger databases such as GeoIP Organization and
# GeoIP City. Note, for GeoIP Country, Region and Netspeed
# databases, GEOIP_INDEX_CACHE is equivalent to GEOIP_MEMORY_CACHE
#
# GEOIP_MMAP_CACHE - load database into mmap shared memory ( not available
# on Windows).
GEOIP_STANDARD = 0
GEOIP_MEMORY_CACHE = 1
GEOIP_CHECK_CACHE = 2
GEOIP_INDEX_CACHE = 4
GEOIP_MMAP_CACHE = 8
cache_options = dict((opt, None) for opt in (0, 1, 2, 4, 8))
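    # Example usage (a sketch, not taken from this module; the path below is
    # hypothetical):
    #   g = GeoIP(path='/usr/share/GeoIP', cache=GeoIP.GEOIP_MEMORY_CACHE)
    #   g.city('8.8.8.8')                  # dict of city-level data
    #   g.country('djangoproject.com')     # {'country_code': ..., 'country_name': ...}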
# Paths to the city & country binary databases.
_city_file = ''
_country_file = ''
# Initially, pointers to GeoIP file references are NULL.
_city = None
_country = None
def __init__(self, path=None, cache=0, country=None, city=None):
"""
        Initializes the GeoIP object; no parameters are required to use default
settings. Keyword arguments may be passed in to customize the locations
of the GeoIP data sets.
* path: Base directory to where GeoIP data is located or the full path
to where the city or country data files (*.dat) are located.
Assumes that both the city and country data sets are located in
this directory; overrides the GEOIP_PATH settings attribute.
* cache: The cache settings when opening up the GeoIP datasets,
and may be an integer in (0, 1, 2, 4, 8) corresponding to
the GEOIP_STANDARD, GEOIP_MEMORY_CACHE, GEOIP_CHECK_CACHE,
GEOIP_INDEX_CACHE, and GEOIP_MMAP_CACHE, `GeoIPOptions` C API
settings, respectively. Defaults to 0, meaning that the data is read
from the disk.
* country: The name of the GeoIP country data file. Defaults to
'GeoIP.dat'; overrides the GEOIP_COUNTRY settings attribute.
* city: The name of the GeoIP city data file. Defaults to
'GeoLiteCity.dat'; overrides the GEOIP_CITY settings attribute.
"""
# Checking the given cache option.
if cache in self.cache_options:
self._cache = cache
else:
raise GeoIPException('Invalid GeoIP caching option: %s' % cache)
# Getting the GeoIP data path.
if not path:
path = GEOIP_SETTINGS.get('GEOIP_PATH', None)
if not path: raise GeoIPException('GeoIP path must be provided via parameter or the GEOIP_PATH setting.')
if not isinstance(path, six.string_types):
raise TypeError('Invalid path type: %s' % type(path).__name__)
if os.path.isdir(path):
# Constructing the GeoIP database filenames using the settings
# dictionary. If the database files for the GeoLite country
# and/or city datasets exist, then try and open them.
country_db = os.path.join(path, country or GEOIP_SETTINGS.get('GEOIP_COUNTRY', 'GeoIP.dat'))
if os.path.isfile(country_db):
self._country = GeoIP_open(force_bytes(country_db), cache)
self._country_file = country_db
city_db = os.path.join(path, city or GEOIP_SETTINGS.get('GEOIP_CITY', 'GeoLiteCity.dat'))
if os.path.isfile(city_db):
self._city = GeoIP_open(force_bytes(city_db), cache)
self._city_file = city_db
elif os.path.isfile(path):
# Otherwise, some detective work will be needed to figure
# out whether the given database path is for the GeoIP country
# or city databases.
ptr = GeoIP_open(force_bytes(path), cache)
info = GeoIP_database_info(ptr)
if lite_regex.match(info):
# GeoLite City database detected.
self._city = ptr
self._city_file = path
elif free_regex.match(info):
# GeoIP Country database detected.
self._country = ptr
self._country_file = path
else:
raise GeoIPException('Unable to recognize database edition: %s' % info)
else:
raise GeoIPException('GeoIP path must be a valid file or directory.')
def __del__(self):
# Cleaning any GeoIP file handles lying around.
if GeoIP_delete is None:
return
if self._country: GeoIP_delete(self._country)
if self._city: GeoIP_delete(self._city)
def _check_query(self, query, country=False, city=False, city_or_country=False):
"Helper routine for checking the query and database availability."
# Making sure a string was passed in for the query.
if not isinstance(query, six.string_types):
raise TypeError('GeoIP query must be a string, not type %s' % type(query).__name__)
# Extra checks for the existence of country and city databases.
if city_or_country and not (self._country or self._city):
raise GeoIPException('Invalid GeoIP country and city data files.')
elif country and not self._country:
raise GeoIPException('Invalid GeoIP country data file: %s' % self._country_file)
elif city and not self._city:
raise GeoIPException('Invalid GeoIP city data file: %s' % self._city_file)
# Return the query string back to the caller. GeoIP only takes bytestrings.
return force_bytes(query)
def city(self, query):
"""
Returns a dictionary of city information for the given IP address or
Fully Qualified Domain Name (FQDN). Some information in the dictionary
may be undefined (None).
"""
enc_query = self._check_query(query, city=True)
if ipv4_re.match(query):
# If an IP address was passed in
return GeoIP_record_by_addr(self._city, c_char_p(enc_query))
else:
# If a FQDN was passed in.
return GeoIP_record_by_name(self._city, c_char_p(enc_query))
def country_code(self, query):
"Returns the country code for the given IP Address or FQDN."
enc_query = self._check_query(query, city_or_country=True)
if self._country:
if ipv4_re.match(query):
return GeoIP_country_code_by_addr(self._country, enc_query)
else:
return GeoIP_country_code_by_name(self._country, enc_query)
else:
return self.city(query)['country_code']
def country_name(self, query):
"Returns the country name for the given IP Address or FQDN."
enc_query = self._check_query(query, city_or_country=True)
if self._country:
if ipv4_re.match(query):
return GeoIP_country_name_by_addr(self._country, enc_query)
else:
return GeoIP_country_name_by_name(self._country, enc_query)
else:
return self.city(query)['country_name']
def country(self, query):
"""
        Returns a dictionary with the country code and name when given an
IP address or a Fully Qualified Domain Name (FQDN). For example, both
'24.124.1.80' and 'djangoproject.com' are valid parameters.
"""
# Returning the country code and name
return {'country_code' : self.country_code(query),
'country_name' : self.country_name(query),
}
#### Coordinate retrieval routines ####
def coords(self, query, ordering=('longitude', 'latitude')):
cdict = self.city(query)
if cdict is None: return None
else: return tuple(cdict[o] for o in ordering)
def lon_lat(self, query):
"Returns a tuple of the (longitude, latitude) for the given query."
return self.coords(query)
def lat_lon(self, query):
"Returns a tuple of the (latitude, longitude) for the given query."
return self.coords(query, ('latitude', 'longitude'))
def geos(self, query):
"Returns a GEOS Point object for the given query."
ll = self.lon_lat(query)
if ll:
from django.contrib.gis.geos import Point
return Point(ll, srid=4326)
else:
return None
#### GeoIP Database Information Routines ####
@property
def country_info(self):
"Returns information about the GeoIP country database."
if self._country is None:
ci = 'No GeoIP Country data in "%s"' % self._country_file
else:
ci = GeoIP_database_info(self._country)
return ci
@property
def city_info(self):
"Retuns information about the GeoIP city database."
if self._city is None:
ci = 'No GeoIP City data in "%s"' % self._city_file
else:
ci = GeoIP_database_info(self._city)
return ci
@property
def info(self):
"Returns information about the GeoIP library and databases in use."
info = ''
if GeoIP_lib_version:
info += 'GeoIP Library:\n\t%s\n' % GeoIP_lib_version()
return info + 'Country:\n\t%s\nCity:\n\t%s' % (self.country_info, self.city_info)
#### Methods for compatibility w/the GeoIP-Python API. ####
@classmethod
def open(cls, full_path, cache):
return GeoIP(full_path, cache)
def _rec_by_arg(self, arg):
if self._city:
return self.city(arg)
else:
return self.country(arg)
region_by_addr = city
region_by_name = city
record_by_addr = _rec_by_arg
record_by_name = _rec_by_arg
country_code_by_addr = country_code
country_code_by_name = country_code
country_name_by_addr = country_name
country_name_by_name = country_name
| bsd-3-clause |
bcesarg6/ccuem_bot | game.py | 2 | 3993 | #-*- coding: utf-8 -*-
# Game logic
import comandos as c
import bds
from emojis import *
def cancelarJogo(chat_id, u_id):
l = c.getLanguage(chat_id)
if bds.checkAdm(chat_id, u_id):
bds.delGame(chat_id)
keyboard = c.makeKb(c.getKb(chat_id, 'main')[0], resize_keyboard = True)
return [c.toDict(chat_id, l.cancelar_jogo_msg, replyMarkup = keyboard)]
return [c.toDict(chat_id, l.cantdo_msg)]
def chutarLetra(chat_id, u_id, u_name, message_id, letra):
l = c.getLanguage(chat_id)
r = bds.checkLetra(chat_id, u_id, letra)
#print r
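    # Assumed contract of bds.checkLetra (inferred from the branches below):
    #   True  -> the letter is in the word
    #   2     -> the letter was already guessed
    #   str   -> the guess should be handled as a full-word attempt
    #   other -> the letter is not in the word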
rpl = []
    if r == True:  # Correct letter guessed
rpl.append(c.toDict(chat_id, l.acertou_letra_msg, replyTo = message_id, replyMarkup = c.makeKbh(True, selective = True)))
rpl.append(nextRound(chat_id))
    elif r == 2:  # Letter was already guessed
rpl.append(c.toDict(chat_id, l.jachutada_msg))
    elif isinstance(r, str):
rpl = arriscarPalavra2(chat_id, u_id, u_name, message_id, r)
    else:  # Wrong letter
rpl.append(c.toDict(chat_id, l.errou_letra_msg, replyTo = message_id, replyMarkup = c.makeKbh(True, selective = True)))
vida = bds.menosVida(chat_id)
        if vida == True:  # No lives left
keyboard = c.makeKb(c.getKb(chat_id, 'main')[0], resize_keyboard = True)
rpl.append(c.toDict(chat_id, l.gameover_msg, replyMarkup = keyboard))
        elif vida == 2:  # Only one life left
rpl.append(c.toDict(chat_id, l.umavida_msg))
rpl.append(nextRound(chat_id))
else:
rpl.append(nextRound(chat_id))
return rpl
def vidasEmoji(chat_id):
vidas = bds.getVidas(chat_id)
return emoji_heart*vidas
def nextRound(chat_id):
l = c.getLanguage(chat_id)
bds.roundPlus(chat_id)
players = bds.getPlayers(chat_id)
aRound = bds.getRound(chat_id)
vidas = vidasEmoji(chat_id)
categoria = bds.getCategoria(chat_id)
keyboard = c.makeKb(c.getKb(chat_id, 'main')[0], resize_keyboard = True, selective = True)
return c.toDict(chat_id, (categoria+'\n\n'+bds.getMascara(chat_id)+'\n\n'+l.vidas_msg+vidas+'\n'+l.nextPlayer(players[1][aRound])), replyTo = players[2][aRound], replyMarkup = keyboard)
def arriscarPalavra1(chat_id, u_id, message_id):
l = c.getLanguage(chat_id)
bds.setArriscarBlock(chat_id, True)
return [c.toDict(chat_id, l.arriscar_msg, replyTo = message_id, replyMarkup = c.makeFr(True, selective = True))]
def arriscarPalavra2(chat_id, u_id, u_name, message_id, text):
l = c.getLanguage(chat_id)
bds.setArriscarBlock(chat_id, False)
rpl = []
palavra = bds.getPalavra(chat_id)
    if bds.checkPalavra(chat_id, u_id, text):  # Player guessed the word and won; the game ends.
keyboard = c.makeKb(c.getKb(chat_id, 'main')[0], resize_keyboard = True)
rpl.append(c.toDict(chat_id, l.venceu(u_name) + '\n' + l.googleMsg(palavra), replyMarkup = keyboard))
#rpl.append(c.toDict(chat_id, l.googleMsg(palavra)))
return rpl
    else:  # Player guessed wrong
rm = bds.rmPlayer(chat_id, u_id, message_id)
        if rm == True:  # Player was not the admin; nothing else happens.
rpl.append(c.toDict(chat_id, l.perdeu(u_name), replyTo = message_id, replyMarkup = c.makeKbh(True, selective = True)))
rpl.append(nextRound(chat_id))
return rpl
        elif rm == 'setAdm':  # Player was the admin; the admin role passes to another player
adm = bds.getAdm(chat_id)
rpl.append(c.toDict(chat_id, l.perdeu(u_name) + '\n' + l.novoAdmMsg(adm[1]), replyTo = message_id, replyMarkup = c.makeKbh(True, selective = True)))
#rpl.append(c.toDict(chat_id, l.novoAdmMsg(adm[1])))
rpl.append(nextRound(chat_id))
return rpl
        else:  # The game ends
keyboard = c.makeKb(c.getKb(chat_id, 'main')[0], resize_keyboard = True)
return [c.toDict(chat_id, l.perdeu(u_name)+'\n'+ l.gameover_msg, replyMarkup = keyboard)]
| apache-2.0 |
dulems/hue | desktop/core/ext-py/Django-1.6.10/django/conf/locale/de_CH/formats.py | 118 | 1448 | # -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
from __future__ import unicode_literals
DATE_FORMAT = 'j. F Y'
TIME_FORMAT = 'H:i:s'
DATETIME_FORMAT = 'j. F Y H:i:s'
YEAR_MONTH_FORMAT = 'F Y'
MONTH_DAY_FORMAT = 'j. F'
SHORT_DATE_FORMAT = 'd.m.Y'
SHORT_DATETIME_FORMAT = 'd.m.Y H:i:s'
FIRST_DAY_OF_WEEK = 1 # Monday
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
DATE_INPUT_FORMATS = (
'%d.%m.%Y', '%d.%m.%y', # '25.10.2006', '25.10.06'
# '%d. %B %Y', '%d. %b. %Y', # '25. October 2006', '25. Oct. 2006'
)
DATETIME_INPUT_FORMATS = (
'%d.%m.%Y %H:%M:%S', # '25.10.2006 14:30:59'
'%d.%m.%Y %H:%M:%S.%f', # '25.10.2006 14:30:59.000200'
'%d.%m.%Y %H:%M', # '25.10.2006 14:30'
'%d.%m.%Y', # '25.10.2006'
)
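# Example (a sketch, not part of this module): Django's date/datetime form
# fields try each of these strftime-style patterns with datetime.strptime:
#   from datetime import datetime
#   datetime.strptime('25.10.2006 14:30', '%d.%m.%Y %H:%M')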
# these are the separators for non-monetary numbers. For monetary numbers,
# the DECIMAL_SEPARATOR is a . (decimal point) and the THOUSAND_SEPARATOR is a
# ' (single quote).
# For details, please refer to http://www.bk.admin.ch/dokumentation/sprachen/04915/05016/index.html?lang=de
# (in German) and the documentation
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '\xa0' # non-breaking space
NUMBER_GROUPING = 3
| apache-2.0 |
addition-it-solutions/project-all | addons/hr_gamification/models/gamification.py | 388 | 4836 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2013 OpenERP SA (<http://www.openerp.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
#
##############################################################################
from openerp.osv import fields, osv
class hr_gamification_badge_user(osv.Model):
"""User having received a badge"""
_name = 'gamification.badge.user'
_inherit = ['gamification.badge.user']
_columns = {
'employee_id': fields.many2one("hr.employee", string='Employee'),
}
def _check_employee_related_user(self, cr, uid, ids, context=None):
for badge_user in self.browse(cr, uid, ids, context=context):
if badge_user.user_id and badge_user.employee_id:
if badge_user.employee_id not in badge_user.user_id.employee_ids:
return False
return True
_constraints = [
(_check_employee_related_user, "The selected employee does not correspond to the selected user.", ['employee_id']),
]
class gamification_badge(osv.Model):
_name = 'gamification.badge'
_inherit = ['gamification.badge']
def get_granted_employees(self, cr, uid, badge_ids, context=None):
if context is None:
context = {}
employee_ids = []
badge_user_ids = self.pool.get('gamification.badge.user').search(cr, uid, [('badge_id', 'in', badge_ids), ('employee_id', '!=', False)], context=context)
for badge_user in self.pool.get('gamification.badge.user').browse(cr, uid, badge_user_ids, context):
employee_ids.append(badge_user.employee_id.id)
# remove duplicates
employee_ids = list(set(employee_ids))
return {
'type': 'ir.actions.act_window',
'name': 'Granted Employees',
'view_mode': 'kanban,tree,form',
'view_type': 'form',
'res_model': 'hr.employee',
'domain': [('id', 'in', employee_ids)]
}
class hr_employee(osv.osv):
_name = "hr.employee"
_inherit = "hr.employee"
def _get_employee_goals(self, cr, uid, ids, field_name, arg, context=None):
"""Return the list of goals assigned to the employee"""
res = {}
for employee in self.browse(cr, uid, ids, context=context):
            res[employee.id] = self.pool.get('gamification.goal').search(cr, uid, [('user_id', '=', employee.user_id.id), ('challenge_id.category', '=', 'hr')], context=context)
return res
def _get_employee_badges(self, cr, uid, ids, field_name, arg, context=None):
"""Return the list of badge_users assigned to the employee"""
res = {}
for employee in self.browse(cr, uid, ids, context=context):
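            # OpenERP search domains use prefix (Polish) notation, so the
            # domain below reads: employee_id == this employee, OR
            # (employee_id unset AND user_id == the employee's user).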
res[employee.id] = self.pool.get('gamification.badge.user').search(cr, uid, [
'|',
('employee_id', '=', employee.id),
'&',
('employee_id', '=', False),
('user_id', '=', employee.user_id.id)
], context=context)
return res
def _has_badges(self, cr, uid, ids, field_name, arg, context=None):
"""Return the list of badge_users assigned to the employee"""
res = {}
for employee in self.browse(cr, uid, ids, context=context):
employee_badge_ids = self.pool.get('gamification.badge.user').search(cr, uid, [
'|',
('employee_id', '=', employee.id),
'&',
('employee_id', '=', False),
('user_id', '=', employee.user_id.id)
], context=context)
res[employee.id] = len(employee_badge_ids) > 0
return res
_columns = {
'goal_ids': fields.function(_get_employee_goals, type="one2many", obj='gamification.goal', string="Employee HR Goals"),
'badge_ids': fields.function(_get_employee_badges, type="one2many", obj='gamification.badge.user', string="Employee Badges"),
'has_badges': fields.function(_has_badges, type="boolean", string="Has Badges"),
}
| agpl-3.0 |
vmindru/ansible | lib/ansible/plugins/lookup/grafana_dashboard.py | 36 | 6300 | # (c) 2018 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = """
lookup: grafana_dashboard
author: Thierry Salle (@seuf)
version_added: "2.7"
short_description: list or search grafana dashboards
description:
- This lookup returns a list of grafana dashboards with possibility to filter them by query.
options:
grafana_url:
description: url of grafana.
env:
- name: GRAFANA_URL
default: http://127.0.0.1:3000
grafana_api_key:
description:
- api key of grafana.
      - when C(grafana_api_key) is set, the options C(grafana_user), C(grafana_password) and C(grafana_org_id) are ignored.
      - Attention, please remove the two == at the end of the grafana_api_key,
      - because ansible lookup plugin options are split on = (see example).
env:
- name: GRAFANA_API_KEY
grafana_user:
description: grafana authentication user.
env:
- name: GRAFANA_USER
default: admin
grafana_password:
description: grafana authentication password.
env:
- name: GRAFANA_PASSWORD
default: admin
grafana_org_id:
description: grafana organisation id.
env:
- name: GRAFANA_ORG_ID
default: 1
search:
description: optional filter for dashboard search.
env:
- name: GRAFANA_DASHBOARD_SEARCH
"""
EXAMPLES = """
- name: get project foo grafana dashboards
set_fact:
grafana_dashboards: "{{ lookup('grafana_dashboard', 'grafana_url=http://grafana.company.com grafana_user=admin grafana_password=admin search=foo') }}"
- name: get all grafana dashboards
set_fact:
grafana_dashboards: "{{ lookup('grafana_dashboard', 'grafana_url=http://grafana.company.com grafana_api_key=' ~ grafana_api_key|replace('==', '')) }}"
"""
import base64
import json
import os
from ansible.errors import AnsibleError, AnsibleParserError
from ansible.plugins.lookup import LookupBase
from ansible.module_utils.urls import open_url
from ansible.module_utils._text import to_bytes, to_native
from ansible.module_utils.six.moves.urllib.error import HTTPError
from ansible.utils.display import Display
display = Display()
ANSIBLE_GRAFANA_URL = 'http://127.0.0.1:3000'
ANSIBLE_GRAFANA_API_KEY = None
ANSIBLE_GRAFANA_USER = 'admin'
ANSIBLE_GRAFANA_PASSWORD = 'admin'
ANSIBLE_GRAFANA_ORG_ID = 1
ANSIBLE_GRAFANA_DASHBOARD_SEARCH = None
if os.getenv('GRAFANA_URL') is not None:
ANSIBLE_GRAFANA_URL = os.environ['GRAFANA_URL']
if os.getenv('GRAFANA_API_KEY') is not None:
ANSIBLE_GRAFANA_API_KEY = os.environ['GRAFANA_API_KEY']
if os.getenv('GRAFANA_USER') is not None:
ANSIBLE_GRAFANA_USER = os.environ['GRAFANA_USER']
if os.getenv('GRAFANA_PASSWORD') is not None:
ANSIBLE_GRAFANA_PASSWORD = os.environ['GRAFANA_PASSWORD']
if os.getenv('GRAFANA_ORG_ID') is not None:
ANSIBLE_GRAFANA_ORG_ID = os.environ['GRAFANA_ORG_ID']
if os.getenv('GRAFANA_DASHBOARD_SEARCH') is not None:
ANSIBLE_GRAFANA_DASHBOARD_SEARCH = os.environ['GRAFANA_DASHBOARD_SEARCH']
class GrafanaAPIException(Exception):
pass
class GrafanaAPI:
def __init__(self, **kwargs):
self.grafana_url = kwargs.get('grafana_url', ANSIBLE_GRAFANA_URL)
self.grafana_api_key = kwargs.get('grafana_api_key', ANSIBLE_GRAFANA_API_KEY)
self.grafana_user = kwargs.get('grafana_user', ANSIBLE_GRAFANA_USER)
self.grafana_password = kwargs.get('grafana_password', ANSIBLE_GRAFANA_PASSWORD)
self.grafana_org_id = kwargs.get('grafana_org_id', ANSIBLE_GRAFANA_ORG_ID)
self.search = kwargs.get('search', ANSIBLE_GRAFANA_DASHBOARD_SEARCH)
def grafana_switch_organisation(self, headers):
try:
r = open_url('%s/api/user/using/%s' % (self.grafana_url, self.grafana_org_id), headers=headers, method='POST')
except HTTPError as e:
raise GrafanaAPIException('Unable to switch to organization %s : %s' % (self.grafana_org_id, to_native(e)))
if r.getcode() != 200:
raise GrafanaAPIException('Unable to switch to organization %s : %s' % (self.grafana_org_id, str(r.getcode())))
def grafana_headers(self):
headers = {'content-type': 'application/json; charset=utf8'}
if self.grafana_api_key:
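            # Re-append the '==' padding that the documentation asks users to
            # strip from the key, since lookup options are split on '='.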
headers['Authorization'] = "Bearer %s==" % self.grafana_api_key
else:
auth = base64.b64encode(to_bytes('%s:%s' % (self.grafana_user, self.grafana_password)).replace('\n', ''))
headers['Authorization'] = 'Basic %s' % auth
self.grafana_switch_organisation(headers)
return headers
def grafana_list_dashboards(self):
# define http headers
headers = self.grafana_headers()
dashboard_list = []
try:
if self.search:
r = open_url('%s/api/search?query=%s' % (self.grafana_url, self.search), headers=headers, method='GET')
else:
r = open_url('%s/api/search/' % self.grafana_url, headers=headers, method='GET')
except HTTPError as e:
raise GrafanaAPIException('Unable to search dashboards : %s' % to_native(e))
if r.getcode() == 200:
try:
dashboard_list = json.loads(r.read())
except Exception as e:
raise GrafanaAPIException('Unable to parse json list %s' % to_native(e))
else:
raise GrafanaAPIException('Unable to list grafana dashboards : %s' % str(r.getcode()))
return dashboard_list
class LookupModule(LookupBase):
def run(self, terms, variables=None, **kwargs):
grafana_args = terms[0].split(' ')
grafana_dict = {}
ret = []
for param in grafana_args:
try:
key, value = param.split('=')
except ValueError:
raise AnsibleError("grafana_dashboard lookup plugin needs key=value pairs, but received %s" % terms)
grafana_dict[key] = value
grafana = GrafanaAPI(**grafana_dict)
ret = grafana.grafana_list_dashboards()
return ret
| gpl-3.0 |
popazerty/e2 | lib/python/Components/Converter/ClockToText.py | 4 | 3347 | from Converter import Converter
from time import localtime, strftime
from Components.Element import cached
class ClockToText(Converter, object):
DEFAULT = 0
WITH_SECONDS = 1
IN_MINUTES = 2
DATE = 3
FORMAT = 4
AS_LENGTH = 5
TIMESTAMP = 6
FULL = 7
SHORT_DATE = 8
LONG_DATE = 9
VFD = 10
AS_LENGTHHOURS = 11
AS_LENGTHSECONDS = 12
FULL_DATE = 13
# add: date, date as string, weekday, ...
# (whatever you need!)
def __init__(self, type):
Converter.__init__(self, type)
if type == "WithSeconds":
self.type = self.WITH_SECONDS
elif type == "InMinutes":
self.type = self.IN_MINUTES
elif type == "Date":
self.type = self.DATE
elif type == "AsLength":
self.type = self.AS_LENGTH
elif type == "AsLengthHours":
self.type = self.AS_LENGTHHOURS
elif type == "AsLengthSeconds":
self.type = self.AS_LENGTHSECONDS
elif type == "Timestamp":
self.type = self.TIMESTAMP
elif type == "Full":
self.type = self.FULL
elif type == "ShortDate":
self.type = self.SHORT_DATE
elif type == "LongDate":
self.type = self.LONG_DATE
elif type == "FullDate":
self.type = self.FULL_DATE
elif type == "VFD":
self.type = self.VFD
elif "Format" in type:
self.type = self.FORMAT
self.fmt_string = type[7:]
else:
self.type = self.DEFAULT
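	# Usage note (a sketch of the usual enigma2 skin convention, not taken
	# from this file): a skin selects one of the types above via a converter
	# tag, e.g. <convert type="ClockToText">Format:%H:%M</convert>, where
	# everything after "Format:" becomes fmt_string and is passed to strftime().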
@cached
def getText(self):
time = self.source.time
if time is None:
return ""
# handle durations
if self.type == self.IN_MINUTES:
return ngettext("%d Min", "%d Mins", (time / 60)) % (time / 60)
elif self.type == self.AS_LENGTH:
if time < 0:
return ""
return "%d:%02d" % (time / 60, time % 60)
elif self.type == self.AS_LENGTHHOURS:
if time < 0:
return ""
return "%d:%02d" % (time / 3600, time / 60 % 60)
elif self.type == self.AS_LENGTHSECONDS:
if time < 0:
return ""
return "%d:%02d:%02d" % (time / 3600, time / 60 % 60, time % 60)
elif self.type == self.TIMESTAMP:
return str(time)
t = localtime(time)
if self.type == self.WITH_SECONDS:
# TRANSLATORS: full time representation hour:minute:seconds
d = _("%T")
elif self.type == self.DEFAULT:
# TRANSLATORS: short time representation hour:minute
d = _("%R")
elif self.type == self.DATE:
# TRANSLATORS: full date representation dayname daynum monthname year in strftime() format! See 'man strftime'
d = _("%A %e %B %Y")
elif self.type == self.FULL:
# TRANSLATORS: long date representation short dayname daynum short monthname hour:minute in strftime() format! See 'man strftime'
d = _("%a %e %b %R")
elif self.type == self.SHORT_DATE:
# TRANSLATORS: short date representation short dayname daynum short monthname in strftime() format! See 'man strftime'
d = _("%a %e/%m")
elif self.type == self.LONG_DATE:
			# TRANSLATORS: long date representation dayname daynum monthname in strftime() format! See 'man strftime'
d = _("%A %e %B")
elif self.type == self.FULL_DATE:
			# TRANSLATORS: full date representation short dayname daynum monthname long year in strftime() format! See 'man strftime'
d = _("%a %e %B %Y")
elif self.type == self.VFD:
# TRANSLATORS: VFD hour:minute daynum short monthname in strftime() format! See 'man strftime'
d = _("%R %e/%m")
elif self.type == self.FORMAT:
d = self.fmt_string
else:
return "???"
return strftime(d, t)
text = property(getText)
| gpl-2.0 |
oscardagrach/linux | tools/perf/scripts/python/check-perf-trace.py | 1997 | 2539 | # perf script event handlers, generated by perf script -g python
# (c) 2010, Tom Zanussi <[email protected]>
# Licensed under the terms of the GNU GPL License version 2
#
# This script tests basic functionality such as flag and symbol
# strings, common_xxx() calls back into perf, begin, end, unhandled
# events, etc. Basically, if this script runs successfully and
# displays expected results, Python scripting support should be ok.
import os
import sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from Core import *
from perf_trace_context import *
unhandled = autodict()
def trace_begin():
print "trace_begin"
pass
def trace_end():
print_unhandled()
def irq__softirq_entry(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
common_callchain, vec):
print_header(event_name, common_cpu, common_secs, common_nsecs,
common_pid, common_comm)
print_uncommon(context)
print "vec=%s\n" % \
(symbol_str("irq__softirq_entry", "vec", vec)),
def kmem__kmalloc(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
common_callchain, call_site, ptr, bytes_req, bytes_alloc,
gfp_flags):
print_header(event_name, common_cpu, common_secs, common_nsecs,
common_pid, common_comm)
print_uncommon(context)
print "call_site=%u, ptr=%u, bytes_req=%u, " \
"bytes_alloc=%u, gfp_flags=%s\n" % \
(call_site, ptr, bytes_req, bytes_alloc,
flag_str("kmem__kmalloc", "gfp_flags", gfp_flags)),
def trace_unhandled(event_name, context, event_fields_dict):
try:
unhandled[event_name] += 1
except TypeError:
unhandled[event_name] = 1
def print_header(event_name, cpu, secs, nsecs, pid, comm):
print "%-20s %5u %05u.%09u %8u %-20s " % \
(event_name, cpu, secs, nsecs, pid, comm),
# print trace fields not included in handler args
def print_uncommon(context):
print "common_preempt_count=%d, common_flags=%s, common_lock_depth=%d, " \
% (common_pc(context), trace_flag_str(common_flags(context)), \
common_lock_depth(context))
def print_unhandled():
keys = unhandled.keys()
if not keys:
return
print "\nunhandled events:\n\n",
print "%-40s %10s\n" % ("event", "count"),
print "%-40s %10s\n" % ("----------------------------------------", \
"-----------"),
for event_name in keys:
print "%-40s %10d\n" % (event_name, unhandled[event_name])
| gpl-2.0 |
tchellomello/home-assistant | homeassistant/components/geofency/device_tracker.py | 7 | 4237 | """Support for the Geofency device tracker platform."""
import logging
from homeassistant.components.device_tracker import SOURCE_TYPE_GPS
from homeassistant.components.device_tracker.config_entry import TrackerEntity
from homeassistant.const import ATTR_LATITUDE, ATTR_LONGITUDE
from homeassistant.core import callback
from homeassistant.helpers import device_registry
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.restore_state import RestoreEntity
from . import DOMAIN as GF_DOMAIN, TRACKER_UPDATE
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up Geofency config entry."""
@callback
def _receive_data(device, gps, location_name, attributes):
"""Fire HA event to set location."""
if device in hass.data[GF_DOMAIN]["devices"]:
return
hass.data[GF_DOMAIN]["devices"].add(device)
async_add_entities([GeofencyEntity(device, gps, location_name, attributes)])
hass.data[GF_DOMAIN]["unsub_device_tracker"][
config_entry.entry_id
] = async_dispatcher_connect(hass, TRACKER_UPDATE, _receive_data)
# Restore previously loaded devices
dev_reg = await device_registry.async_get_registry(hass)
dev_ids = {
identifier[1]
for device in dev_reg.devices.values()
for identifier in device.identifiers
if identifier[0] == GF_DOMAIN
}
if dev_ids:
hass.data[GF_DOMAIN]["devices"].update(dev_ids)
async_add_entities(GeofencyEntity(dev_id) for dev_id in dev_ids)
return True
class GeofencyEntity(TrackerEntity, RestoreEntity):
"""Represent a tracked device."""
def __init__(self, device, gps=None, location_name=None, attributes=None):
"""Set up Geofency entity."""
self._attributes = attributes or {}
self._name = device
self._location_name = location_name
self._gps = gps
self._unsub_dispatcher = None
self._unique_id = device
@property
def device_state_attributes(self):
"""Return device specific attributes."""
return self._attributes
@property
def latitude(self):
"""Return latitude value of the device."""
return self._gps[0]
@property
def longitude(self):
"""Return longitude value of the device."""
return self._gps[1]
@property
def location_name(self):
"""Return a location name for the current location of the device."""
return self._location_name
@property
def name(self):
"""Return the name of the device."""
return self._name
@property
def unique_id(self):
"""Return the unique ID."""
return self._unique_id
@property
def device_info(self):
"""Return the device info."""
return {"name": self._name, "identifiers": {(GF_DOMAIN, self._unique_id)}}
@property
def source_type(self):
"""Return the source type, eg gps or router, of the device."""
return SOURCE_TYPE_GPS
async def async_added_to_hass(self):
"""Register state update callback."""
await super().async_added_to_hass()
self._unsub_dispatcher = async_dispatcher_connect(
self.hass, TRACKER_UPDATE, self._async_receive_data
)
if self._attributes:
return
state = await self.async_get_last_state()
if state is None:
self._gps = (None, None)
return
attr = state.attributes
self._gps = (attr.get(ATTR_LATITUDE), attr.get(ATTR_LONGITUDE))
async def async_will_remove_from_hass(self):
"""Clean up after entity before removal."""
await super().async_will_remove_from_hass()
self._unsub_dispatcher()
self.hass.data[GF_DOMAIN]["devices"].remove(self._unique_id)
@callback
def _async_receive_data(self, device, gps, location_name, attributes):
"""Mark the device as seen."""
if device != self.name:
return
self._attributes.update(attributes)
self._location_name = location_name
self._gps = gps
self.async_write_ha_state()
| apache-2.0 |
drexly/tonginBlobStore | lib/django/contrib/auth/checks.py | 374 | 2098 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.apps import apps
from django.conf import settings
from django.core import checks
def check_user_model(**kwargs):
errors = []
cls = apps.get_model(settings.AUTH_USER_MODEL)
# Check that REQUIRED_FIELDS is a list
if not isinstance(cls.REQUIRED_FIELDS, (list, tuple)):
errors.append(
checks.Error(
"'REQUIRED_FIELDS' must be a list or tuple.",
hint=None,
obj=cls,
id='auth.E001',
)
)
# Check that the USERNAME FIELD isn't included in REQUIRED_FIELDS.
if cls.USERNAME_FIELD in cls.REQUIRED_FIELDS:
errors.append(
checks.Error(
("The field named as the 'USERNAME_FIELD' "
"for a custom user model must not be included in 'REQUIRED_FIELDS'."),
hint=None,
obj=cls,
id='auth.E002',
)
)
# Check that the username field is unique
if not cls._meta.get_field(cls.USERNAME_FIELD).unique:
if (settings.AUTHENTICATION_BACKENDS ==
['django.contrib.auth.backends.ModelBackend']):
errors.append(
checks.Error(
"'%s.%s' must be unique because it is named as the 'USERNAME_FIELD'." % (
cls._meta.object_name, cls.USERNAME_FIELD
),
hint=None,
obj=cls,
id='auth.E003',
)
)
else:
errors.append(
checks.Warning(
"'%s.%s' is named as the 'USERNAME_FIELD', but it is not unique." % (
cls._meta.object_name, cls.USERNAME_FIELD
),
hint=('Ensure that your authentication backend(s) can handle '
'non-unique usernames.'),
obj=cls,
id='auth.W004',
)
)
return errors
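# Registration note (an assumption about the wiring; it is not shown in this
# file): django.contrib.auth registers this function with the system check
# framework, roughly:
#   from django.core import checks
#   checks.register(check_user_model, checks.Tags.models)
# so it runs as part of `manage.py check`.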
| bsd-3-clause |
UKTradeInvestment/export-wins-data | mi/tests/test_sector_team_months.py | 1 | 55916 | import datetime
import json
from django.urls import reverse
from freezegun import freeze_time
from mi.models import FinancialYear
from mi.tests.base_test_case import MiApiViewsBaseTestCase
from mi.tests.test_sector_views import SectorTeamBaseTestCase
from mi.utils import month_iterator
@freeze_time(datetime.datetime(2017, 1, 31))
class SectorTeamMonthlyViewsTestCase(SectorTeamBaseTestCase):
"""
    Tests covering the SectorTeam months API endpoint
"""
url = reverse('mi:sector_team_months', kwargs={'team_id': 1}) + "?year=2016"
expected_response = {}
def setUp(self):
super().setUp()
self.expected_response = {
"avg_time_to_confirm": 0.0,
"hvcs": {
"campaigns": [
'HVC: E006',
'HVC: E019',
'HVC: E031',
'HVC: E072',
'HVC: E095',
'HVC: E115',
'HVC: E128',
'HVC: E160',
'HVC: E167',
'HVC: E191'
],
"target": self.CAMPAIGN_TARGET * len(self.TEAM_1_HVCS)
},
"months": [
{
"date": "2016-04",
"totals": {
"export": {
"hvc": {
"number": {
"confirmed": 0,
"total": 2,
"unconfirmed": 2
},
"value": {
"confirmed": 0,
"total": 200000,
"unconfirmed": 200000
}
},
"non_hvc": {
"number": {
"confirmed": 0,
"total": 0,
"unconfirmed": 0
},
"value": {
"confirmed": 0,
"total": 0,
"unconfirmed": 0
}
},
"totals": {
"number": {
"confirmed": 0,
"grand_total": 2,
"unconfirmed": 2
},
"value": {
"confirmed": 0,
"grand_total": 200000,
"unconfirmed": 200000
}
}
},
"non_export": {
"number": {
"confirmed": 0,
"total": 2,
"unconfirmed": 2
},
"value": {
"confirmed": 0,
"total": 4600,
"unconfirmed": 4600
}
}
}
},
{
"date": "2016-05",
"totals": {
"export": {
"hvc": {
"number": {
"confirmed": 0,
"total": 4,
"unconfirmed": 4
},
"value": {
"confirmed": 0,
"total": 400000,
"unconfirmed": 400000
}
},
"non_hvc": {
"number": {
"confirmed": 0,
"total": 0,
"unconfirmed": 0
},
"value": {
"confirmed": 0,
"total": 0,
"unconfirmed": 0
}
},
"totals": {
"number": {
"confirmed": 0,
"grand_total": 4,
"unconfirmed": 4
},
"value": {
"confirmed": 0,
"grand_total": 400000,
"unconfirmed": 400000
}
}
},
"non_export": {
"number": {
"confirmed": 0,
"total": 4,
"unconfirmed": 4
},
"value": {
"confirmed": 0,
"total": 9200,
"unconfirmed": 9200
}
}
}
},
{
"date": "2016-06",
"totals": {
"export": {
"hvc": {
"number": {
"confirmed": 0,
"total": 5,
"unconfirmed": 5
},
"value": {
"confirmed": 0,
"total": 500000,
"unconfirmed": 500000
}
},
"non_hvc": {
"number": {
"confirmed": 0,
"total": 0,
"unconfirmed": 0
},
"value": {
"confirmed": 0,
"total": 0,
"unconfirmed": 0
}
},
"totals": {
"number": {
"confirmed": 0,
"grand_total": 5,
"unconfirmed": 5
},
"value": {
"confirmed": 0,
"grand_total": 500000,
"unconfirmed": 500000
}
}
},
"non_export": {
"number": {
"confirmed": 0,
"total": 5,
"unconfirmed": 5
},
"value": {
"confirmed": 0,
"total": 11500,
"unconfirmed": 11500
}
}
}
},
{
"date": "2016-07",
"totals": {
"export": {
"hvc": {
"number": {
"confirmed": 0,
"total": 6,
"unconfirmed": 6
},
"value": {
"confirmed": 0,
"total": 600000,
"unconfirmed": 600000
}
},
"non_hvc": {
"number": {
"confirmed": 0,
"total": 0,
"unconfirmed": 0
},
"value": {
"confirmed": 0,
"total": 0,
"unconfirmed": 0
}
},
"totals": {
"number": {
"confirmed": 0,
"grand_total": 6,
"unconfirmed": 6
},
"value": {
"confirmed": 0,
"grand_total": 600000,
"unconfirmed": 600000
}
}
},
"non_export": {
"number": {
"confirmed": 0,
"total": 6,
"unconfirmed": 6
},
"value": {
"confirmed": 0,
"total": 13800,
"unconfirmed": 13800
}
}
}
},
{
"date": "2016-08",
"totals": {
"export": {
"hvc": {
"number": {
"confirmed": 0,
"total": 7,
"unconfirmed": 7
},
"value": {
"confirmed": 0,
"total": 700000,
"unconfirmed": 700000
}
},
"non_hvc": {
"number": {
"confirmed": 0,
"total": 0,
"unconfirmed": 0
},
"value": {
"confirmed": 0,
"total": 0,
"unconfirmed": 0
}
},
"totals": {
"number": {
"confirmed": 0,
"grand_total": 7,
"unconfirmed": 7
},
"value": {
"confirmed": 0,
"grand_total": 700000,
"unconfirmed": 700000
}
}
},
"non_export": {
"number": {
"confirmed": 0,
"total": 7,
"unconfirmed": 7
},
"value": {
"confirmed": 0,
"total": 16100,
"unconfirmed": 16100
}
}
}
},
{
"date": "2016-09",
"totals": {
"export": {
"hvc": {
"number": {
"confirmed": 0,
"total": 8,
"unconfirmed": 8
},
"value": {
"confirmed": 0,
"total": 800000,
"unconfirmed": 800000
}
},
"non_hvc": {
"number": {
"confirmed": 0,
"total": 0,
"unconfirmed": 0
},
"value": {
"confirmed": 0,
"total": 0,
"unconfirmed": 0
}
},
"totals": {
"number": {
"confirmed": 0,
"grand_total": 8,
"unconfirmed": 8
},
"value": {
"confirmed": 0,
"grand_total": 800000,
"unconfirmed": 800000
}
}
},
"non_export": {
"number": {
"confirmed": 0,
"total": 8,
"unconfirmed": 8
},
"value": {
"confirmed": 0,
"total": 18400,
"unconfirmed": 18400
}
}
}
},
{
"date": "2016-10",
"totals": {
"export": {
"hvc": {
"number": {
"confirmed": 0,
"total": 9,
"unconfirmed": 9
},
"value": {
"confirmed": 0,
"total": 900000,
"unconfirmed": 900000
}
},
"non_hvc": {
"number": {
"confirmed": 0,
"total": 0,
"unconfirmed": 0
},
"value": {
"confirmed": 0,
"total": 0,
"unconfirmed": 0
}
},
"totals": {
"number": {
"confirmed": 0,
"grand_total": 9,
"unconfirmed": 9
},
"value": {
"confirmed": 0,
"grand_total": 900000,
"unconfirmed": 900000
}
}
},
"non_export": {
"number": {
"confirmed": 0,
"total": 9,
"unconfirmed": 9
},
"value": {
"confirmed": 0,
"total": 20700,
"unconfirmed": 20700
}
}
}
},
{
"date": "2016-11",
"totals": {
"export": {
"hvc": {
"number": {
"confirmed": 0,
"total": 10,
"unconfirmed": 10
},
"value": {
"confirmed": 0,
"total": 1000000,
"unconfirmed": 1000000
}
},
"non_hvc": {
"number": {
"confirmed": 0,
"total": 0,
"unconfirmed": 0
},
"value": {
"confirmed": 0,
"total": 0,
"unconfirmed": 0
}
},
"totals": {
"number": {
"confirmed": 0,
"grand_total": 10,
"unconfirmed": 10
},
"value": {
"confirmed": 0,
"grand_total": 1000000,
"unconfirmed": 1000000
}
}
},
"non_export": {
"number": {
"confirmed": 0,
"total": 10,
"unconfirmed": 10
},
"value": {
"confirmed": 0,
"total": 23000,
"unconfirmed": 23000
}
}
}
},
{
"date": "2016-12",
"totals": {
"export": {
"hvc": {
"number": {
"confirmed": 0,
"total": 11,
"unconfirmed": 11
},
"value": {
"confirmed": 0,
"total": 1100000,
"unconfirmed": 1100000
}
},
"non_hvc": {
"number": {
"confirmed": 0,
"total": 0,
"unconfirmed": 0
},
"value": {
"confirmed": 0,
"total": 0,
"unconfirmed": 0
}
},
"totals": {
"number": {
"confirmed": 0,
"grand_total": 11,
"unconfirmed": 11
},
"value": {
"confirmed": 0,
"grand_total": 1100000,
"unconfirmed": 1100000
}
}
},
"non_export": {
"number": {
"confirmed": 0,
"total": 11,
"unconfirmed": 11
},
"value": {
"confirmed": 0,
"total": 25300,
"unconfirmed": 25300
}
}
}
},
{
"date": "2017-01",
"totals": {
"export": {
"hvc": {
"number": {
"confirmed": 0,
"total": 12,
"unconfirmed": 12
},
"value": {
"confirmed": 0,
"total": 1200000,
"unconfirmed": 1200000
}
},
"non_hvc": {
"number": {
"confirmed": 0,
"total": 0,
"unconfirmed": 0
},
"value": {
"confirmed": 0,
"total": 0,
"unconfirmed": 0
}
},
"totals": {
"number": {
"confirmed": 0,
"grand_total": 12,
"unconfirmed": 12
},
"value": {
"confirmed": 0,
"grand_total": 1200000,
"unconfirmed": 1200000
}
}
},
"non_export": {
"number": {
"confirmed": 0,
"total": 12,
"unconfirmed": 12
},
"value": {
"confirmed": 0,
"total": 27600,
"unconfirmed": 27600
}
}
}
}
],
"name": "Financial & Professional Services"
}
def test_sector_team_month_1(self):
""" Tests covering SectorTeam Campaigns API endpoint """
for i in range(4, 13):
self._create_hvc_win(win_date=datetime.datetime(2016, i, 1))
        # Add a few random ones
self._create_hvc_win(win_date=datetime.datetime(2017, 1, 1))
self._create_hvc_win(win_date=datetime.datetime(2016, 4, 1))
self._create_hvc_win(win_date=datetime.datetime(2016, 5, 1))
self.assertResponse()
def test_sector_team_month_1_confirmed(self):
""" Tests covering SectorTeam Campaigns API endpoint """
for i in range(4, 13):
self._create_hvc_win(win_date=datetime.datetime(2016, i, 1), confirm=True,
notify_date=datetime.datetime(2016, i, 1),
response_date=datetime.datetime(2016, i, 2))
        # Add a few random ones
self._create_hvc_win(win_date=datetime.datetime(2017, 1, 1), confirm=True,
notify_date=datetime.datetime(2017, 1, 1),
response_date=datetime.datetime(2017, 1, 2))
self._create_hvc_win(win_date=datetime.datetime(2016, 4, 1), confirm=True,
notify_date=datetime.datetime(2016, 4, 1),
response_date=datetime.datetime(2016, 4, 2))
self._create_hvc_win(win_date=datetime.datetime(2016, 5, 1), confirm=True,
notify_date=datetime.datetime(2016, 5, 1),
response_date=datetime.datetime(2016, 5, 2))
self.expected_response["avg_time_to_confirm"] = 1.0
self.expected_response["months"] = [
{
"date": "2016-04",
"totals": {
"export": {
"hvc": {
"number": {
"confirmed": 2,
"total": 2,
"unconfirmed": 0
},
"value": {
"confirmed": 200000,
"total": 200000,
"unconfirmed": 0
}
},
"non_hvc": {
"number": {
"confirmed": 0,
"total": 0,
"unconfirmed": 0
},
"value": {
"confirmed": 0,
"total": 0,
"unconfirmed": 0
}
},
"totals": {
"number": {
"confirmed": 2,
"grand_total": 2,
"unconfirmed": 0
},
"value": {
"confirmed": 200000,
"grand_total": 200000,
"unconfirmed": 0
}
}
},
"non_export": {
"number": {
"confirmed": 2,
"total": 2,
"unconfirmed": 0
},
"value": {
"confirmed": 4600,
"total": 4600,
"unconfirmed": 0
}
}
}
},
{
"date": "2016-05",
"totals": {
"export": {
"hvc": {
"number": {
"confirmed": 4,
"total": 4,
"unconfirmed": 0
},
"value": {
"confirmed": 400000,
"total": 400000,
"unconfirmed": 0
}
},
"non_hvc": {
"number": {
"confirmed": 0,
"total": 0,
"unconfirmed": 0
},
"value": {
"confirmed": 0,
"total": 0,
"unconfirmed": 0
}
},
"totals": {
"number": {
"confirmed": 4,
"grand_total": 4,
"unconfirmed": 0
},
"value": {
"confirmed": 400000,
"grand_total": 400000,
"unconfirmed": 0
}
}
},
"non_export": {
"number": {
"confirmed": 4,
"total": 4,
"unconfirmed": 0
},
"value": {
"confirmed": 9200,
"total": 9200,
"unconfirmed": 0
}
}
}
},
{
"date": "2016-06",
"totals": {
"export": {
"hvc": {
"number": {
"confirmed": 5,
"total": 5,
"unconfirmed": 0
},
"value": {
"confirmed": 500000,
"total": 500000,
"unconfirmed": 0
}
},
"non_hvc": {
"number": {
"confirmed": 0,
"total": 0,
"unconfirmed": 0
},
"value": {
"confirmed": 0,
"total": 0,
"unconfirmed": 0
}
},
"totals": {
"number": {
"confirmed": 5,
"grand_total": 5,
"unconfirmed": 0
},
"value": {
"confirmed": 500000,
"grand_total": 500000,
"unconfirmed": 0
}
}
},
"non_export": {
"number": {
"confirmed": 5,
"total": 5,
"unconfirmed": 0
},
"value": {
"confirmed": 11500,
"total": 11500,
"unconfirmed": 0
}
}
}
},
{
"date": "2016-07",
"totals": {
"export": {
"hvc": {
"number": {
"confirmed": 6,
"total": 6,
"unconfirmed": 0
},
"value": {
"confirmed": 600000,
"total": 600000,
"unconfirmed": 0
}
},
"non_hvc": {
"number": {
"confirmed": 0,
"total": 0,
"unconfirmed": 0
},
"value": {
"confirmed": 0,
"total": 0,
"unconfirmed": 0
}
},
"totals": {
"number": {
"confirmed": 6,
"grand_total": 6,
"unconfirmed": 0
},
"value": {
"confirmed": 600000,
"grand_total": 600000,
"unconfirmed": 0
}
}
},
"non_export": {
"number": {
"confirmed": 6,
"total": 6,
"unconfirmed": 0
},
"value": {
"confirmed": 13800,
"total": 13800,
"unconfirmed": 0
}
}
}
},
{
"date": "2016-08",
"totals": {
"export": {
"hvc": {
"number": {
"confirmed": 7,
"total": 7,
"unconfirmed": 0
},
"value": {
"confirmed": 700000,
"total": 700000,
"unconfirmed": 0
}
},
"non_hvc": {
"number": {
"confirmed": 0,
"total": 0,
"unconfirmed": 0
},
"value": {
"confirmed": 0,
"total": 0,
"unconfirmed": 0
}
},
"totals": {
"number": {
"confirmed": 7,
"grand_total": 7,
"unconfirmed": 0
},
"value": {
"confirmed": 700000,
"grand_total": 700000,
"unconfirmed": 0
}
}
},
"non_export": {
"number": {
"confirmed": 7,
"total": 7,
"unconfirmed": 0
},
"value": {
"confirmed": 16100,
"total": 16100,
"unconfirmed": 0
}
}
}
},
{
"date": "2016-09",
"totals": {
"export": {
"hvc": {
"number": {
"confirmed": 8,
"total": 8,
"unconfirmed": 0
},
"value": {
"confirmed": 800000,
"total": 800000,
"unconfirmed": 0
}
},
"non_hvc": {
"number": {
"confirmed": 0,
"total": 0,
"unconfirmed": 0
},
"value": {
"confirmed": 0,
"total": 0,
"unconfirmed": 0
}
},
"totals": {
"number": {
"confirmed": 8,
"grand_total": 8,
"unconfirmed": 0
},
"value": {
"confirmed": 800000,
"grand_total": 800000,
"unconfirmed": 0
}
}
},
"non_export": {
"number": {
"confirmed": 8,
"total": 8,
"unconfirmed": 0
},
"value": {
"confirmed": 18400,
"total": 18400,
"unconfirmed": 0
}
}
}
},
{
"date": "2016-10",
"totals": {
"export": {
"hvc": {
"number": {
"confirmed": 9,
"total": 9,
"unconfirmed": 0
},
"value": {
"confirmed": 900000,
"total": 900000,
"unconfirmed": 0
}
},
"non_hvc": {
"number": {
"confirmed": 0,
"total": 0,
"unconfirmed": 0
},
"value": {
"confirmed": 0,
"total": 0,
"unconfirmed": 0
}
},
"totals": {
"number": {
"confirmed": 9,
"grand_total": 9,
"unconfirmed": 0
},
"value": {
"confirmed": 900000,
"grand_total": 900000,
"unconfirmed": 0
}
}
},
"non_export": {
"number": {
"confirmed": 9,
"total": 9,
"unconfirmed": 0
},
"value": {
"confirmed": 20700,
"total": 20700,
"unconfirmed": 0
}
}
}
},
{
"date": "2016-11",
"totals": {
"export": {
"hvc": {
"number": {
"confirmed": 10,
"total": 10,
"unconfirmed": 0
},
"value": {
"confirmed": 1000000,
"total": 1000000,
"unconfirmed": 0
}
},
"non_hvc": {
"number": {
"confirmed": 0,
"total": 0,
"unconfirmed": 0
},
"value": {
"confirmed": 0,
"total": 0,
"unconfirmed": 0
}
},
"totals": {
"number": {
"confirmed": 10,
"grand_total": 10,
"unconfirmed": 0
},
"value": {
"confirmed": 1000000,
"grand_total": 1000000,
"unconfirmed": 0
}
}
},
"non_export": {
"number": {
"confirmed": 10,
"total": 10,
"unconfirmed": 0
},
"value": {
"confirmed": 23000,
"total": 23000,
"unconfirmed": 0
}
}
}
},
{
"date": "2016-12",
"totals": {
"export": {
"hvc": {
"number": {
"confirmed": 11,
"total": 11,
"unconfirmed": 0
},
"value": {
"confirmed": 1100000,
"total": 1100000,
"unconfirmed": 0
}
},
"non_hvc": {
"number": {
"confirmed": 0,
"total": 0,
"unconfirmed": 0
},
"value": {
"confirmed": 0,
"total": 0,
"unconfirmed": 0
}
},
"totals": {
"number": {
"confirmed": 11,
"grand_total": 11,
"unconfirmed": 0
},
"value": {
"confirmed": 1100000,
"grand_total": 1100000,
"unconfirmed": 0
}
}
},
"non_export": {
"number": {
"confirmed": 11,
"total": 11,
"unconfirmed": 0
},
"value": {
"confirmed": 25300,
"total": 25300,
"unconfirmed": 0
}
}
}
},
{
"date": "2017-01",
"totals": {
"export": {
"hvc": {
"number": {
"confirmed": 12,
"total": 12,
"unconfirmed": 0
},
"value": {
"confirmed": 1200000,
"total": 1200000,
"unconfirmed": 0
}
},
"non_hvc": {
"number": {
"confirmed": 0,
"total": 0,
"unconfirmed": 0
},
"value": {
"confirmed": 0,
"total": 0,
"unconfirmed": 0
}
},
"totals": {
"number": {
"confirmed": 12,
"grand_total": 12,
"unconfirmed": 0
},
"value": {
"confirmed": 1200000,
"grand_total": 1200000,
"unconfirmed": 0
}
}
},
"non_export": {
"number": {
"confirmed": 12,
"total": 12,
"unconfirmed": 0
},
"value": {
"confirmed": 27600,
"total": 27600,
"unconfirmed": 0
}
}
}
}
]
self.assertResponse()
def _test_sector_team_month_1_some_wins_out_of_date(self):
""" Check that out of date, wins that were added with date that is not within current financial year
are not accounted for """
for i in list(range(3, 13)) + [4, 5]:
self._create_hvc_win(win_date=datetime.datetime(2016, i, 1))
        # add a few more random financial year wins, both in and out
for i in [6, 12]:
self._create_hvc_win(win_date=datetime.datetime(2015, i, 1))
self._create_hvc_win(win_date=datetime.datetime(2017, 1, 1))
for i in [4, 8]:
self._create_hvc_win(win_date=datetime.datetime(2017, i, 1))
self.assertResponse()
def test_months_no_wins(self):
"""
        Test that, when there are no wins, the response is still spread across all the months from the
        financial year start until today (the frozen date), albeit with all-zero numbers
"""
def _setup_empty_months_response():
""" Helper to build response """
self.expected_response["months"] = []
fin_year = FinancialYear.objects.get(id=2016)
for item in month_iterator(fin_year.start, datetime.datetime(2017, 1, 31)):
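                # month_iterator is assumed to yield (year, month) tuples;
                # the format below renders them as e.g. '2016-04'.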
month_str = '{:d}-{:02d}'.format(*item)
month_dict = {
"date": month_str,
"totals": {
"export": {
"hvc": {
"number": {
"confirmed": 0,
"total": 0,
"unconfirmed": 0
},
"value": {
"confirmed": 0,
"total": 0,
"unconfirmed": 0
}
},
"non_hvc": {
"number": {
"confirmed": 0,
"total": 0,
"unconfirmed": 0
},
"value": {
"confirmed": 0,
"total": 0,
"unconfirmed": 0
}
},
"totals": {
"number": {
"confirmed": 0,
"grand_total": 0,
"unconfirmed": 0
},
"value": {
"confirmed": 0,
"grand_total": 0,
"unconfirmed": 0
}
}
},
"non_export": {
"number": {
"confirmed": 0,
"total": 0,
"unconfirmed": 0
},
"value": {
"confirmed": 0,
"total": 0,
"unconfirmed": 0
}
}
}
}
self.expected_response["months"].append(month_dict)
_setup_empty_months_response()
self.assertResponse()
def test_number_of_months_in_april(self):
"""
        Check that there will be only one month of aggregated data when we are in April, the financial year start -
with one win
"""
with freeze_time(self.fin_start_date):
self._create_hvc_win(win_date=datetime.datetime(2016, 4, 1))
api_response = self._get_api_response(self.url)
response_decoded = json.loads(api_response.content.decode("utf-8"))["results"]
self.assertEqual(len(response_decoded["months"]), 1)
def test_number_of_months_in_april_confirmed(self):
"""
        Check that there will be only one month of aggregated data when we are in April, the financial year start -
with one confirmed win
"""
with freeze_time(self.fin_start_date):
self._create_hvc_win(win_date=datetime.datetime(2016, 4, 1), confirm=True)
api_response = self._get_api_response(self.url)
response_decoded = json.loads(api_response.content.decode("utf-8"))["results"]
self.assertEqual(len(response_decoded["months"]), 1)
def test_number_of_months_in_april_no_wins(self):
"""
        Check that there will be only one month of aggregated data when we are in April, the financial year start -
with no wins
"""
with freeze_time(self.fin_start_date):
api_response = self._get_api_response(self.url)
response_decoded = json.loads(api_response.content.decode("utf-8"))["results"]
self.assertEqual(len(response_decoded["months"]), 1)
def test_number_of_months_in_march_with_wins(self):
"""
        Check that there will be 12 months of aggregated data when we are in March, the financial year end -
with wins all the way
"""
with freeze_time(self.fin_end_date):
for i in range(4, 13):
self._create_hvc_win(win_date=datetime.datetime(2016, i, 1))
for i in range(1, 3):
self._create_hvc_win(win_date=datetime.datetime(2017, i, 1))
api_response = self._get_api_response(self.url)
response_decoded = json.loads(api_response.content.decode("utf-8"))["results"]
self.assertEqual(len(response_decoded["months"]), 12)
def test_number_of_months_in_march_with_confirmed_wins(self):
"""
        Check that there will be 12 months of aggregated data when we are in March, the financial year end -
with confirmed wins all the way
"""
with freeze_time(self.fin_end_date):
for i in range(4, 13):
self._create_hvc_win(win_date=datetime.datetime(2016, i, 1), confirm=True)
for i in range(1, 3):
self._create_hvc_win(win_date=datetime.datetime(2017, i, 1), confirm=True)
api_response = self._get_api_response(self.url)
response_decoded = json.loads(api_response.content.decode("utf-8"))["results"]
self.assertEqual(len(response_decoded["months"]), 12)
def test_number_of_months_in_march_with_no_wins(self):
"""
        Check that there will be 12 months of aggregated data when we are in March, the financial year end -
with no wins
"""
with freeze_time(self.fin_end_date):
api_response = self._get_api_response(self.url)
response_decoded = json.loads(api_response.content.decode("utf-8"))["results"]
self.assertEqual(len(response_decoded["months"]), 12)
def test_number_of_months_in_april_non_hvc(self):
"""
        Check that there will be only one month of aggregated data when we are in April, the financial year start -
with one non hvc win
"""
with freeze_time(self.fin_start_date):
self._create_non_hvc_win(win_date=datetime.datetime(2016, 4, 1))
api_response = self._get_api_response(self.url)
response_decoded = json.loads(api_response.content.decode("utf-8"))["results"]
self.assertEqual(len(response_decoded["months"]), 1)
def test_number_of_months_in_march_with_wins_non_hvc(self):
"""
        Check that there will be 12 months of aggregated data when we are in March, the financial year end -
with non hvc wins all the way
"""
with freeze_time(self.fin_end_date):
for i in range(4, 13):
self._create_non_hvc_win(win_date=datetime.datetime(2016, i, 1))
            for i in range(1, 4):
self._create_non_hvc_win(win_date=datetime.datetime(2017, i, 1))
api_response = self._get_api_response(self.url)
response_decoded = json.loads(api_response.content.decode("utf-8"))["results"]
self.assertEqual(len(response_decoded["months"]), 12)
| gpl-3.0 |
hfvaldesg/neltume | audio.py | 1 | 5384 | import subprocess
import time
# sound of start up
def startup():
opt = 'audio/startup.mp3'
audio_menu = subprocess.Popen(['mpg123', '-q', opt], stdout=subprocess.PIPE, stderr=subprocess.STDOUT).communicate()
# time.sleep(0.5)
# sound of the intro
def intro_audio(x):
if x == 0:
for i in range(3):
intro_item = 'audio/intro/' + str(i) + '.mp3'
audio_intro = subprocess.Popen(['mpg123', intro_item, "-ss", "10"], stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.PIPE).wait()
time.sleep(1)
elif x == 1:
intro_item = 'audio/intro/' + str(4) + '.mp3'
audio_intro = subprocess.Popen(['mpg123', intro_item, "-ss", "10"], stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.PIPE).wait()
def menu_audio():
menu = 'audio/menu/menu.mp3'
audio_menu = subprocess.Popen(['mpg123', '-q', menu], stdout=subprocess.PIPE, stderr=subprocess.STDOUT).wait()
time.sleep(1)
select_button = 'audio/menu/instruction.mp3'
audio_instruction = subprocess.Popen(['mpg123', '-q', select_button], stdout=subprocess.PIPE,
stderr=subprocess.STDOUT).wait()
# Effects
def effect(x):
effect = 'audio/effects/' + str(x) + '.mp3'
audio_effect = subprocess.Popen(['mpg123', '-q', effect], stdout=subprocess.PIPE, stderr=subprocess.STDOUT).wait()
# time.sleep(0.5)
# Audio description for every button on Neltume
def button(x):
button = 'audio/buttons/b' + str(x) + '.mp3'
audio_button = subprocess.Popen(['mpg123', '-q', button]).wait()
# time.sleep(0.5)
def exercise_button(x):
e_button = 'audio/buttons/exercise/e' + str(x) + '.mp3'
audio_e_button = subprocess.Popen(['mpg123', '-q', e_button]).wait()
# time.sleep(0.5)
def good_answer():
effect('correct')
time.sleep(0.5)
right_ans = 'audio/effects/answers/right.mp3'
audio_right_ans = subprocess.Popen(['mpg123', '-q', right_ans], stdout=subprocess.PIPE,
stderr=subprocess.STDOUT).wait()
# time.sleep(0.5)
def wrong_answer():
effect('wrong')
time.sleep(0.5)
wrong_ans = 'audio/effects/answers/wrong.mp3'
audio_wrong_ans = subprocess.Popen(['mpg123', '-q', wrong_ans]).wait()
# time.sleep(0.5)
def menu_opt(x):
menu = 'audio/menu/opt' + x + '.mp3'
audio_menu = subprocess.Popen(['mpg123', '-q', menu], stdout=subprocess.PIPE, stderr=subprocess.STDOUT).wait()
# time.sleep(0.5)
def menu_alphabet(x):
menu = 'audio/menu/alphabet/opt' + x + '.mp3'
audio_menu = subprocess.Popen(['mpg123', '-q', menu]).wait()
# time.sleep(0.5)
def menu_vocabulary(x):
menu = 'audio/menu/vocabulary/opt' + x + '.mp3'
audio_menu = subprocess.Popen(['mpg123', '-q', menu], stdout=subprocess.PIPE, stderr=subprocess.STDOUT).wait()
# time.sleep(0.5)
def back():
menu = 'audio/menu/back.mp3'
audio_menu = subprocess.Popen(['mpg123', '-q', menu], stdout=subprocess.PIPE, stderr=subprocess.STDOUT).wait()
# time.sleep(0.5)
# Spelling words
def spell(x):
len_x = len(x)
for i in range(len_x):
y = 'audio/alphabet/eng/' + x[i] + '.mp3'
audio_word = subprocess.Popen(['mpg123', '-q', y]).wait()
def play_letter(x, y):
if y == "eng":
letter_eng = 'audio/alphabet/eng/' + x + '.mp3'
audio_letter_eng = subprocess.Popen(['mpg123', '-q', letter_eng]).wait()
# time.sleep(0.5)
elif y == "spa":
letter_spa = 'audio/alphabet/spa/' + x + '.mp3'
audio_letter_spa = subprocess.Popen(['mpg123', '-q', letter_spa]).wait()
# time.sleep(0.5)
# e.g.: play_word(index, "eng", category)
def play_word(x, y, z):
if y == "eng":
word_eng = 'audio/vocabulary/categories/' + z + '/' + x + 'b.mp3'
audio_word_eng = subprocess.Popen(['mpg123', '-q', word_eng]).wait()
# time.sleep(0.5)
elif y == "spa":
word_spa = 'audio/vocabulary/categories/' + z + '/' + x + 'a.mp3'
audio_word_spa = subprocess.Popen(['mpg123', '-q', word_spa]).wait()
# time.sleep(0.5)
# Word presentation (twice)
def word_presentation(x, y):
for i in range(2):
play_word(str(x), "spa", y)
time.sleep(1)
play_word(str(x), "eng", y)
# time.sleep(1)
# Name of the categories of the vocabulary
def play_category_name(x):
    category_name = 'audio/vocabulary/categories/names/' + x + 'a.mp3'
    audio_category = subprocess.Popen(['mpg123', '-q', category_name], stdout=subprocess.PIPE,
                                      stderr=subprocess.STDOUT).wait()
# time.sleep(0.5)
# Instruction to learn a new word
def play_word_instruction():
inst = 'audio/vocabulary/instructions/learn/instruction1.mp3'
audio_inst = subprocess.Popen(['mpg123', '-q', inst]).wait()
# time.sleep(0.5)
# Review levels
def play_review_level(x):
review_level = 'audio/vocabulary/instructions/review/level/' + x + '.mp3'
level_instruction = subprocess.Popen(['mpg123', '-q', review_level]).wait()
# time.sleep(0.5)
def play_menu_name(x):
menu = 'audio/menu/menu_name_' + x + '.mp3'
audio_menu = subprocess.Popen(['mpg123', '-q', menu], stdout=subprocess.PIPE, stderr=subprocess.STDOUT).wait()
# time.sleep(0.5)
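# Minimal manual smoke test -- a sketch, assuming mpg123 is installed and the
# audio/ directory layout used above exists. 'colors' is a hypothetical
# category name used only for illustration.
if __name__ == '__main__':
    startup()
    menu_audio()
    word_presentation(1, 'colors')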
| mit |
adykstra/mne-python | examples/preprocessing/plot_shift_evoked.py | 29 | 1245 | """
==================================
Shifting time-scale in evoked data
==================================
"""
# Author: Mainak Jas <[email protected]>
#
# License: BSD (3-clause)
import matplotlib.pyplot as plt
import mne
from mne.viz import tight_layout
from mne.datasets import sample
print(__doc__)
data_path = sample.data_path()
fname = data_path + '/MEG/sample/sample_audvis-ave.fif'
# Reading evoked data
condition = 'Left Auditory'
evoked = mne.read_evokeds(fname, condition=condition, baseline=(None, 0),
proj=True)
ch_names = evoked.info['ch_names']
picks = mne.pick_channels(ch_names=ch_names, include=["MEG 2332"])
# Create subplots
f, (ax1, ax2, ax3) = plt.subplots(3)
evoked.plot(exclude=[], picks=picks, axes=ax1,
titles=dict(grad='Before time shifting'), time_unit='s')
# Apply relative time-shift of 500 ms
evoked.shift_time(0.5, relative=True)
evoked.plot(exclude=[], picks=picks, axes=ax2,
titles=dict(grad='Relative shift: 500 ms'), time_unit='s')
# Apply absolute time-shift of 500 ms
evoked.shift_time(0.5, relative=False)
evoked.plot(exclude=[], picks=picks, axes=ax3,
titles=dict(grad='Absolute shift: 500 ms'), time_unit='s')
tight_layout()
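# When running this example as a standalone script (outside the documentation
# build), the figure may need to be shown explicitly.
plt.show()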
| bsd-3-clause |
shanot/imp | modules/cnmultifit/test/test_param.py | 1 | 1887 | import IMP
import IMP.test
import IMP.cnmultifit
import os
from IMP.cnmultifit import param
class Tests(IMP.test.TestCase):
def test_param_help(self):
"""Test param module help"""
self.check_runnable_python_module("IMP.cnmultifit.param")
def test_param_usage(self):
"""Test param module incorrect usage"""
r = self.run_python_module("IMP.cnmultifit.param", [])
out, err = r.communicate()
self.assertEqual(out, "")
self.assertIn("incorrect number of arguments", err)
self.assertNotEqual(r.returncode, 0)
def test_param_run(self):
"""Test run of param module"""
self.run_python_module(param,
['-o', 'test.output', '-i', 'test.int',
'--params', 'test.params',
'--model', 'test.model',
'--numsols', '42', '--',
'7', 'testmonomer.pdb', 'test.mrc', '8.0',
'4.0', '5.0', '-10.0', '-20.0', '-30.0'])
contents = open('test.params').read()
self.assertIn('output = test.output', contents)
self.assertIn('intermediate = test.int', contents)
self.assertIn('model = test.model', contents)
self.assertIn('solutions = 42', contents)
self.assertIn('cn = 7', contents)
self.assertIn('monomer = testmonomer.pdb', contents)
self.assertIn('map = test.mrc', contents)
self.assertIn('resolution = 8.0', contents)
self.assertIn('spacing = 4.0', contents)
self.assertIn('threshold = 5.0', contents)
self.assertIn('origin_x = -10.0', contents)
self.assertIn('origin_y = -20.0', contents)
self.assertIn('origin_z = -30.0', contents)
os.unlink('test.params')
if __name__ == '__main__':
IMP.test.main()
| gpl-3.0 |
vibhorag/scikit-learn | sklearn/cluster/bicluster.py | 211 | 19443 | """Spectral biclustering algorithms.
Authors : Kemal Eren
License: BSD 3 clause
"""
from abc import ABCMeta, abstractmethod
import numpy as np
from scipy.sparse import dia_matrix
from scipy.sparse import issparse
from . import KMeans, MiniBatchKMeans
from ..base import BaseEstimator, BiclusterMixin
from ..externals import six
from ..utils.arpack import eigsh, svds
from ..utils.extmath import (make_nonnegative, norm, randomized_svd,
safe_sparse_dot)
from ..utils.validation import assert_all_finite, check_array
__all__ = ['SpectralCoclustering',
'SpectralBiclustering']
def _scale_normalize(X):
"""Normalize ``X`` by scaling rows and columns independently.
Returns the normalized matrix and the row and column scaling
factors.
"""
X = make_nonnegative(X)
row_diag = np.asarray(1.0 / np.sqrt(X.sum(axis=1))).squeeze()
col_diag = np.asarray(1.0 / np.sqrt(X.sum(axis=0))).squeeze()
row_diag = np.where(np.isnan(row_diag), 0, row_diag)
col_diag = np.where(np.isnan(col_diag), 0, col_diag)
if issparse(X):
n_rows, n_cols = X.shape
r = dia_matrix((row_diag, [0]), shape=(n_rows, n_rows))
c = dia_matrix((col_diag, [0]), shape=(n_cols, n_cols))
an = r * X * c
else:
an = row_diag[:, np.newaxis] * X * col_diag
return an, row_diag, col_diag
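# In matrix form, _scale_normalize computes A_n = D_r^{-1/2} X D_c^{-1/2},
# where D_r and D_c are diagonal matrices holding the row and column sums of
# the nonnegative X. A quick sanity check of the dense path (a sketch, not
# part of the module):
#
#     >>> X = np.array([[1., 3.], [2., 2.]])
#     >>> an, r, c = _scale_normalize(X)
#     >>> np.allclose(an, X / np.sqrt(np.outer(X.sum(axis=1), X.sum(axis=0))))
#     True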
def _bistochastic_normalize(X, max_iter=1000, tol=1e-5):
"""Normalize rows and columns of ``X`` simultaneously so that all
rows sum to one constant and all columns sum to a different
constant.
"""
# According to paper, this can also be done more efficiently with
# deviation reduction and balancing algorithms.
X = make_nonnegative(X)
X_scaled = X
dist = None
for _ in range(max_iter):
X_new, _, _ = _scale_normalize(X_scaled)
if issparse(X):
            dist = norm(X_scaled.data - X_new.data)
else:
dist = norm(X_scaled - X_new)
X_scaled = X_new
if dist is not None and dist < tol:
break
return X_scaled
def _log_normalize(X):
"""Normalize ``X`` according to Kluger's log-interactions scheme."""
X = make_nonnegative(X, min_value=1)
if issparse(X):
raise ValueError("Cannot compute log of a sparse matrix,"
" because log(x) diverges to -infinity as x"
" goes to 0.")
L = np.log(X)
row_avg = L.mean(axis=1)[:, np.newaxis]
col_avg = L.mean(axis=0)
avg = L.mean()
return L - row_avg - col_avg + avg
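# In equation form, the log-interactions transform above computes
#
#     K[i, j] = L[i, j] - row_mean(L)[i] - col_mean(L)[j] + mean(L),
#
# where L = log(X); this removes row and column effects, keeping only the
# interaction term.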
class BaseSpectral(six.with_metaclass(ABCMeta, BaseEstimator,
BiclusterMixin)):
"""Base class for spectral biclustering."""
@abstractmethod
def __init__(self, n_clusters=3, svd_method="randomized",
n_svd_vecs=None, mini_batch=False, init="k-means++",
n_init=10, n_jobs=1, random_state=None):
self.n_clusters = n_clusters
self.svd_method = svd_method
self.n_svd_vecs = n_svd_vecs
self.mini_batch = mini_batch
self.init = init
self.n_init = n_init
self.n_jobs = n_jobs
self.random_state = random_state
def _check_parameters(self):
legal_svd_methods = ('randomized', 'arpack')
if self.svd_method not in legal_svd_methods:
raise ValueError("Unknown SVD method: '{0}'. svd_method must be"
" one of {1}.".format(self.svd_method,
legal_svd_methods))
def fit(self, X):
"""Creates a biclustering for X.
Parameters
----------
X : array-like, shape (n_samples, n_features)
"""
X = check_array(X, accept_sparse='csr', dtype=np.float64)
self._check_parameters()
self._fit(X)
def _svd(self, array, n_components, n_discard):
"""Returns first `n_components` left and right singular
vectors u and v, discarding the first `n_discard`.
"""
if self.svd_method == 'randomized':
kwargs = {}
if self.n_svd_vecs is not None:
kwargs['n_oversamples'] = self.n_svd_vecs
u, _, vt = randomized_svd(array, n_components,
random_state=self.random_state,
**kwargs)
elif self.svd_method == 'arpack':
u, _, vt = svds(array, k=n_components, ncv=self.n_svd_vecs)
if np.any(np.isnan(vt)):
# some eigenvalues of A * A.T are negative, causing
# sqrt() to be np.nan. This causes some vectors in vt
# to be np.nan.
_, v = eigsh(safe_sparse_dot(array.T, array),
ncv=self.n_svd_vecs)
vt = v.T
if np.any(np.isnan(u)):
_, u = eigsh(safe_sparse_dot(array, array.T),
ncv=self.n_svd_vecs)
assert_all_finite(u)
assert_all_finite(vt)
u = u[:, n_discard:]
vt = vt[n_discard:]
return u, vt.T
def _k_means(self, data, n_clusters):
if self.mini_batch:
model = MiniBatchKMeans(n_clusters,
init=self.init,
n_init=self.n_init,
random_state=self.random_state)
else:
model = KMeans(n_clusters, init=self.init,
n_init=self.n_init, n_jobs=self.n_jobs,
random_state=self.random_state)
model.fit(data)
centroid = model.cluster_centers_
labels = model.labels_
return centroid, labels
class SpectralCoclustering(BaseSpectral):
"""Spectral Co-Clustering algorithm (Dhillon, 2001).
Clusters rows and columns of an array `X` to solve the relaxed
normalized cut of the bipartite graph created from `X` as follows:
the edge between row vertex `i` and column vertex `j` has weight
`X[i, j]`.
The resulting bicluster structure is block-diagonal, since each
row and each column belongs to exactly one bicluster.
Supports sparse matrices, as long as they are nonnegative.
Read more in the :ref:`User Guide <spectral_coclustering>`.
Parameters
----------
n_clusters : integer, optional, default: 3
The number of biclusters to find.
svd_method : string, optional, default: 'randomized'
Selects the algorithm for finding singular vectors. May be
'randomized' or 'arpack'. If 'randomized', use
:func:`sklearn.utils.extmath.randomized_svd`, which may be faster
for large matrices. If 'arpack', use
:func:`sklearn.utils.arpack.svds`, which is more accurate, but
possibly slower in some cases.
n_svd_vecs : int, optional, default: None
Number of vectors to use in calculating the SVD. Corresponds
to `ncv` when `svd_method=arpack` and `n_oversamples` when
        `svd_method` is 'randomized'.
mini_batch : bool, optional, default: False
Whether to use mini-batch k-means, which is faster but may get
different results.
init : {'k-means++', 'random' or an ndarray}
Method for initialization of k-means algorithm; defaults to
'k-means++'.
n_init : int, optional, default: 10
Number of random initializations that are tried with the
k-means algorithm.
If mini-batch k-means is used, the best initialization is
chosen and the algorithm runs once. Otherwise, the algorithm
is run for each initialization and the best solution chosen.
n_jobs : int, optional, default: 1
The number of jobs to use for the computation. This works by breaking
down the pairwise matrix into n_jobs even slices and computing them in
parallel.
If -1 all CPUs are used. If 1 is given, no parallel computing code is
used at all, which is useful for debugging. For n_jobs below -1,
(n_cpus + 1 + n_jobs) are used. Thus for n_jobs = -2, all CPUs but one
are used.
random_state : int seed, RandomState instance, or None (default)
A pseudo random number generator used by the K-Means
initialization.
Attributes
----------
rows_ : array-like, shape (n_row_clusters, n_rows)
Results of the clustering. `rows[i, r]` is True if
cluster `i` contains row `r`. Available only after calling ``fit``.
columns_ : array-like, shape (n_column_clusters, n_columns)
Results of the clustering, like `rows`.
row_labels_ : array-like, shape (n_rows,)
The bicluster label of each row.
column_labels_ : array-like, shape (n_cols,)
The bicluster label of each column.
References
----------
* Dhillon, Inderjit S, 2001. `Co-clustering documents and words using
bipartite spectral graph partitioning
<http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.140.3011>`__.
"""
def __init__(self, n_clusters=3, svd_method='randomized',
n_svd_vecs=None, mini_batch=False, init='k-means++',
n_init=10, n_jobs=1, random_state=None):
super(SpectralCoclustering, self).__init__(n_clusters,
svd_method,
n_svd_vecs,
mini_batch,
init,
n_init,
n_jobs,
random_state)
def _fit(self, X):
normalized_data, row_diag, col_diag = _scale_normalize(X)
n_sv = 1 + int(np.ceil(np.log2(self.n_clusters)))
u, v = self._svd(normalized_data, n_sv, n_discard=1)
z = np.vstack((row_diag[:, np.newaxis] * u,
col_diag[:, np.newaxis] * v))
_, labels = self._k_means(z, self.n_clusters)
n_rows = X.shape[0]
self.row_labels_ = labels[:n_rows]
self.column_labels_ = labels[n_rows:]
self.rows_ = np.vstack(self.row_labels_ == c
for c in range(self.n_clusters))
self.columns_ = np.vstack(self.column_labels_ == c
for c in range(self.n_clusters))
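# A minimal usage sketch for SpectralCoclustering (not part of the module),
# using sklearn.datasets.make_biclusters to build a noisy block-diagonal
# matrix:
#
#     >>> from sklearn.datasets import make_biclusters
#     >>> data, rows, cols = make_biclusters((30, 30), n_clusters=3,
#     ...                                    random_state=0)
#     >>> model = SpectralCoclustering(n_clusters=3, random_state=0)
#     >>> model.fit(data)
#     >>> model.row_labels_.shape
#     (30,)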
class SpectralBiclustering(BaseSpectral):
"""Spectral biclustering (Kluger, 2003).
Partitions rows and columns under the assumption that the data has
an underlying checkerboard structure. For instance, if there are
two row partitions and three column partitions, each row will
belong to three biclusters, and each column will belong to two
biclusters. The outer product of the corresponding row and column
label vectors gives this checkerboard structure.
Read more in the :ref:`User Guide <spectral_biclustering>`.
Parameters
----------
n_clusters : integer or tuple (n_row_clusters, n_column_clusters)
The number of row and column clusters in the checkerboard
structure.
method : string, optional, default: 'bistochastic'
Method of normalizing and converting singular vectors into
biclusters. May be one of 'scale', 'bistochastic', or 'log'.
The authors recommend using 'log'. If the data is sparse,
however, log normalization will not work, which is why the
default is 'bistochastic'. CAUTION: if `method='log'`, the
data must not be sparse.
n_components : integer, optional, default: 6
Number of singular vectors to check.
n_best : integer, optional, default: 3
Number of best singular vectors to which to project the data
for clustering.
svd_method : string, optional, default: 'randomized'
Selects the algorithm for finding singular vectors. May be
'randomized' or 'arpack'. If 'randomized', uses
`sklearn.utils.extmath.randomized_svd`, which may be faster
for large matrices. If 'arpack', uses
`sklearn.utils.arpack.svds`, which is more accurate, but
possibly slower in some cases.
n_svd_vecs : int, optional, default: None
Number of vectors to use in calculating the SVD. Corresponds
to `ncv` when `svd_method=arpack` and `n_oversamples` when
        `svd_method` is 'randomized'.
mini_batch : bool, optional, default: False
Whether to use mini-batch k-means, which is faster but may get
different results.
init : {'k-means++', 'random' or an ndarray}
Method for initialization of k-means algorithm; defaults to
'k-means++'.
n_init : int, optional, default: 10
Number of random initializations that are tried with the
k-means algorithm.
If mini-batch k-means is used, the best initialization is
chosen and the algorithm runs once. Otherwise, the algorithm
is run for each initialization and the best solution chosen.
n_jobs : int, optional, default: 1
The number of jobs to use for the computation. This works by breaking
down the pairwise matrix into n_jobs even slices and computing them in
parallel.
If -1 all CPUs are used. If 1 is given, no parallel computing code is
used at all, which is useful for debugging. For n_jobs below -1,
(n_cpus + 1 + n_jobs) are used. Thus for n_jobs = -2, all CPUs but one
are used.
random_state : int seed, RandomState instance, or None (default)
A pseudo random number generator used by the K-Means
initialization.
Attributes
----------
rows_ : array-like, shape (n_row_clusters, n_rows)
Results of the clustering. `rows[i, r]` is True if
cluster `i` contains row `r`. Available only after calling ``fit``.
columns_ : array-like, shape (n_column_clusters, n_columns)
Results of the clustering, like `rows`.
row_labels_ : array-like, shape (n_rows,)
Row partition labels.
column_labels_ : array-like, shape (n_cols,)
Column partition labels.
References
----------
* Kluger, Yuval, et. al., 2003. `Spectral biclustering of microarray
data: coclustering genes and conditions
<http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.135.1608>`__.
"""
def __init__(self, n_clusters=3, method='bistochastic',
n_components=6, n_best=3, svd_method='randomized',
n_svd_vecs=None, mini_batch=False, init='k-means++',
n_init=10, n_jobs=1, random_state=None):
super(SpectralBiclustering, self).__init__(n_clusters,
svd_method,
n_svd_vecs,
mini_batch,
init,
n_init,
n_jobs,
random_state)
self.method = method
self.n_components = n_components
self.n_best = n_best
def _check_parameters(self):
super(SpectralBiclustering, self)._check_parameters()
legal_methods = ('bistochastic', 'scale', 'log')
if self.method not in legal_methods:
raise ValueError("Unknown method: '{0}'. method must be"
" one of {1}.".format(self.method, legal_methods))
try:
int(self.n_clusters)
except TypeError:
try:
r, c = self.n_clusters
int(r)
int(c)
except (ValueError, TypeError):
raise ValueError("Incorrect parameter n_clusters has value:"
" {}. It should either be a single integer"
" or an iterable with two integers:"
" (n_row_clusters, n_column_clusters)")
if self.n_components < 1:
raise ValueError("Parameter n_components must be greater than 0,"
" but its value is {}".format(self.n_components))
if self.n_best < 1:
raise ValueError("Parameter n_best must be greater than 0,"
" but its value is {}".format(self.n_best))
if self.n_best > self.n_components:
raise ValueError("n_best cannot be larger than"
" n_components, but {} > {}"
"".format(self.n_best, self.n_components))
def _fit(self, X):
n_sv = self.n_components
if self.method == 'bistochastic':
normalized_data = _bistochastic_normalize(X)
n_sv += 1
elif self.method == 'scale':
normalized_data, _, _ = _scale_normalize(X)
n_sv += 1
elif self.method == 'log':
normalized_data = _log_normalize(X)
n_discard = 0 if self.method == 'log' else 1
u, v = self._svd(normalized_data, n_sv, n_discard)
ut = u.T
vt = v.T
try:
n_row_clusters, n_col_clusters = self.n_clusters
except TypeError:
n_row_clusters = n_col_clusters = self.n_clusters
best_ut = self._fit_best_piecewise(ut, self.n_best,
n_row_clusters)
best_vt = self._fit_best_piecewise(vt, self.n_best,
n_col_clusters)
self.row_labels_ = self._project_and_cluster(X, best_vt.T,
n_row_clusters)
self.column_labels_ = self._project_and_cluster(X.T, best_ut.T,
n_col_clusters)
self.rows_ = np.vstack(self.row_labels_ == label
for label in range(n_row_clusters)
for _ in range(n_col_clusters))
self.columns_ = np.vstack(self.column_labels_ == label
for _ in range(n_row_clusters)
for label in range(n_col_clusters))
def _fit_best_piecewise(self, vectors, n_best, n_clusters):
"""Find the ``n_best`` vectors that are best approximated by piecewise
constant vectors.
The piecewise vectors are found by k-means; the best is chosen
according to Euclidean distance.
"""
def make_piecewise(v):
centroid, labels = self._k_means(v.reshape(-1, 1), n_clusters)
return centroid[labels].ravel()
piecewise_vectors = np.apply_along_axis(make_piecewise,
axis=1, arr=vectors)
dists = np.apply_along_axis(norm, axis=1,
arr=(vectors - piecewise_vectors))
result = vectors[np.argsort(dists)[:n_best]]
return result
def _project_and_cluster(self, data, vectors, n_clusters):
"""Project ``data`` to ``vectors`` and cluster the result."""
projected = safe_sparse_dot(data, vectors)
_, labels = self._k_means(projected, n_clusters)
return labels
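# Likewise, a minimal checkerboard sketch for SpectralBiclustering (not part
# of the module), using sklearn.datasets.make_checkerboard:
#
#     >>> from sklearn.datasets import make_checkerboard
#     >>> data, rows, cols = make_checkerboard((30, 30), n_clusters=(2, 3),
#     ...                                      random_state=0)
#     >>> model = SpectralBiclustering(n_clusters=(2, 3), method='log',
#     ...                              random_state=0)
#     >>> model.fit(data)
#     >>> model.row_labels_.shape
#     (30,)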
| bsd-3-clause |
garwynn/L900_3.9_Experiment | tools/perf/scripts/python/failed-syscalls-by-pid.py | 11180 | 2058 | # failed system call counts, by pid
# (c) 2010, Tom Zanussi <[email protected]>
# Licensed under the terms of the GNU GPL License version 2
#
# Displays system-wide failed system call totals, broken down by pid.
# If a [comm] arg is specified, only syscalls called by [comm] are displayed.
import os
import sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import *
usage = "perf script -s syscall-counts-by-pid.py [comm|pid]\n";
for_comm = None
for_pid = None
if len(sys.argv) > 2:
sys.exit(usage)
if len(sys.argv) > 1:
try:
for_pid = int(sys.argv[1])
except:
for_comm = sys.argv[1]
syscalls = autodict()
def trace_begin():
print "Press control+C to stop and show the summary"
def trace_end():
print_error_totals()
def raw_syscalls__sys_exit(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
id, ret):
if (for_comm and common_comm != for_comm) or \
(for_pid and common_pid != for_pid ):
return
if ret < 0:
try:
syscalls[common_comm][common_pid][id][ret] += 1
except TypeError:
syscalls[common_comm][common_pid][id][ret] = 1
def print_error_totals():
if for_comm is not None:
print "\nsyscall errors for %s:\n\n" % (for_comm),
else:
print "\nsyscall errors:\n\n",
print "%-30s %10s\n" % ("comm [pid]", "count"),
print "%-30s %10s\n" % ("------------------------------", \
"----------"),
comm_keys = syscalls.keys()
for comm in comm_keys:
pid_keys = syscalls[comm].keys()
for pid in pid_keys:
print "\n%s [%d]\n" % (comm, pid),
id_keys = syscalls[comm][pid].keys()
for id in id_keys:
print " syscall: %-16s\n" % syscall_name(id),
ret_keys = syscalls[comm][pid][id].keys()
for ret, val in sorted(syscalls[comm][pid][id].iteritems(), key = lambda(k, v): (v, k), reverse = True):
print " err = %-20s %10d\n" % (strerror(ret), val),
| gpl-2.0 |
ahmadio/edx-platform | common/test/acceptance/tests/lms/test_learner_profile.py | 17 | 31839 | # -*- coding: utf-8 -*-
"""
End-to-end tests for Student's Profile Page.
"""
from flaky import flaky
from contextlib import contextmanager
from datetime import datetime
from bok_choy.web_app_test import WebAppTest
from nose.plugins.attrib import attr
from ...pages.common.logout import LogoutPage
from ...pages.lms.account_settings import AccountSettingsPage
from ...pages.lms.auto_auth import AutoAuthPage
from ...pages.lms.learner_profile import LearnerProfilePage
from ...pages.lms.dashboard import DashboardPage
from ..helpers import EventsTestMixin
class LearnerProfileTestMixin(EventsTestMixin):
"""
Mixin with helper methods for testing learner profile pages.
"""
PRIVACY_PUBLIC = u'all_users'
PRIVACY_PRIVATE = u'private'
PUBLIC_PROFILE_FIELDS = ['username', 'country', 'language_proficiencies', 'bio']
PRIVATE_PROFILE_FIELDS = ['username']
PUBLIC_PROFILE_EDITABLE_FIELDS = ['country', 'language_proficiencies', 'bio']
USER_SETTINGS_CHANGED_EVENT_NAME = u"edx.user.settings.changed"
def log_in_as_unique_user(self):
"""
Create a unique user and return the account's username and id.
"""
username = "test_{uuid}".format(uuid=self.unique_id[0:6])
auto_auth_page = AutoAuthPage(self.browser, username=username).visit()
user_id = auto_auth_page.get_user_id()
return username, user_id
def set_public_profile_fields_data(self, profile_page):
"""
Fill in the public profile fields of a user.
"""
profile_page.value_for_dropdown_field('language_proficiencies', 'English')
profile_page.value_for_dropdown_field('country', 'United Arab Emirates')
profile_page.value_for_textarea_field('bio', 'Nothing Special')
def visit_profile_page(self, username, privacy=None):
"""
Visits a user's profile page.
"""
profile_page = LearnerProfilePage(self.browser, username)
# Change the privacy if requested by loading the page and
# changing the drop down
if privacy is not None:
profile_page.visit()
profile_page.wait_for_page()
profile_page.privacy = privacy
if privacy == self.PRIVACY_PUBLIC:
self.set_public_profile_fields_data(profile_page)
# Reset event tracking so that the tests only see events from
# loading the profile page.
self.reset_event_tracking()
# Load the page
profile_page.visit()
profile_page.wait_for_page()
return profile_page
def set_birth_year(self, birth_year):
"""
Set birth year for the current user to the specified value.
"""
account_settings_page = AccountSettingsPage(self.browser)
account_settings_page.visit()
account_settings_page.wait_for_page()
self.assertEqual(
account_settings_page.value_for_dropdown_field('year_of_birth', str(birth_year)),
str(birth_year)
)
def verify_profile_page_is_public(self, profile_page, is_editable=True):
"""
Verify that the profile page is currently public.
"""
self.assertEqual(profile_page.visible_fields, self.PUBLIC_PROFILE_FIELDS)
if is_editable:
self.assertTrue(profile_page.privacy_field_visible)
self.assertEqual(profile_page.editable_fields, self.PUBLIC_PROFILE_EDITABLE_FIELDS)
else:
self.assertEqual(profile_page.editable_fields, [])
def verify_profile_page_is_private(self, profile_page, is_editable=True):
"""
Verify that the profile page is currently private.
"""
if is_editable:
self.assertTrue(profile_page.privacy_field_visible)
self.assertEqual(profile_page.visible_fields, self.PRIVATE_PROFILE_FIELDS)
def verify_profile_page_view_event(self, requesting_username, profile_user_id, visibility=None):
"""
Verifies that the correct view event was captured for the profile page.
"""
actual_events = self.wait_for_events(
event_filter={'event_type': 'edx.user.settings.viewed'}, number_of_matches=1)
self.assert_events_match(
[
{
'username': requesting_username,
'event': {
'user_id': int(profile_user_id),
'page': 'profile',
'visibility': unicode(visibility)
}
}
],
actual_events
)
@contextmanager
def verify_pref_change_event_during(self, username, user_id, setting, **kwargs):
"""Assert that a single setting changed event is emitted for the user_api_userpreference table."""
expected_event = {
'username': username,
'event': {
'setting': setting,
'user_id': int(user_id),
'table': 'user_api_userpreference',
'truncated': []
}
}
expected_event['event'].update(kwargs)
event_filter = {
'event_type': self.USER_SETTINGS_CHANGED_EVENT_NAME,
}
with self.assert_events_match_during(event_filter=event_filter, expected_events=[expected_event]):
yield
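    # A typical use of the helper above (a sketch mirroring the tests below):
    #
    #     with self.verify_pref_change_event_during(
    #         username, user_id, 'account_privacy',
    #         old=self.PRIVACY_PRIVATE, new=self.PRIVACY_PUBLIC
    #     ):
    #         profile_page.privacy = self.PRIVACY_PUBLIC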
@attr('shard_4')
class OwnLearnerProfilePageTest(LearnerProfileTestMixin, WebAppTest):
"""
Tests that verify a student's own profile page.
"""
def verify_profile_forced_private_message(self, username, birth_year, message=None):
"""
Verify age limit messages for a user.
"""
if birth_year is None:
birth_year = ""
self.set_birth_year(birth_year=birth_year)
profile_page = self.visit_profile_page(username)
self.assertTrue(profile_page.privacy_field_visible)
        if message:
            self.assertTrue(profile_page.age_limit_message_present)
            self.assertIn(message, profile_page.profile_forced_private_message)
        else:
            self.assertFalse(profile_page.age_limit_message_present)
def test_profile_defaults_to_public(self):
"""
Scenario: Verify that a new user's profile defaults to public.
Given that I am a new user.
When I go to my profile page.
Then I see that the profile visibility is set to public.
"""
username, user_id = self.log_in_as_unique_user()
profile_page = self.visit_profile_page(username)
self.verify_profile_page_is_public(profile_page)
def assert_default_image_has_public_access(self, profile_page):
"""
Assert that profile image has public access.
"""
self.assertTrue(profile_page.profile_has_default_image)
self.assertTrue(profile_page.profile_has_image_with_public_access())
def test_make_profile_public(self):
"""
Scenario: Verify that the user can change their privacy.
Given that I am a registered user
And I visit my private profile page
And I set the profile visibility to public
Then a user preference changed event should be recorded
When I reload the page
Then the profile visibility should be shown as public
"""
username, user_id = self.log_in_as_unique_user()
profile_page = self.visit_profile_page(username, privacy=self.PRIVACY_PRIVATE)
with self.verify_pref_change_event_during(
username, user_id, 'account_privacy', old=self.PRIVACY_PRIVATE, new=self.PRIVACY_PUBLIC
):
profile_page.privacy = self.PRIVACY_PUBLIC
# Reload the page and verify that the profile is now public
self.browser.refresh()
profile_page.wait_for_page()
self.verify_profile_page_is_public(profile_page)
def test_make_profile_private(self):
"""
Scenario: Verify that the user can change their privacy.
Given that I am a registered user
And I visit my public profile page
And I set the profile visibility to private
Then a user preference changed event should be recorded
When I reload the page
Then the profile visibility should be shown as private
"""
username, user_id = self.log_in_as_unique_user()
profile_page = self.visit_profile_page(username, privacy=self.PRIVACY_PUBLIC)
with self.verify_pref_change_event_during(
username, user_id, 'account_privacy', old=None, new=self.PRIVACY_PRIVATE
):
profile_page.privacy = self.PRIVACY_PRIVATE
# Reload the page and verify that the profile is now private
self.browser.refresh()
profile_page.wait_for_page()
self.verify_profile_page_is_private(profile_page)
def test_dashboard_learner_profile_link(self):
"""
Scenario: Verify that my profile link is present on dashboard page and we can navigate to correct page.
Given that I am a registered user.
When I go to Dashboard page.
And I click on username dropdown.
Then I see Profile link in the dropdown menu.
When I click on Profile link.
Then I will be navigated to Profile page.
"""
username, user_id = self.log_in_as_unique_user()
dashboard_page = DashboardPage(self.browser)
dashboard_page.visit()
dashboard_page.click_username_dropdown()
self.assertIn('Profile', dashboard_page.username_dropdown_link_text)
dashboard_page.click_my_profile_link()
my_profile_page = LearnerProfilePage(self.browser, username)
my_profile_page.wait_for_page()
def test_fields_on_my_private_profile(self):
"""
Scenario: Verify that desired fields are shown when looking at her own private profile.
Given that I am a registered user.
And I visit my Profile page.
And I set the profile visibility to private.
And I reload the page.
Then I should see the profile visibility selector dropdown.
Then I see some of the profile fields are shown.
"""
username, user_id = self.log_in_as_unique_user()
profile_page = self.visit_profile_page(username, privacy=self.PRIVACY_PRIVATE)
self.verify_profile_page_is_private(profile_page)
self.verify_profile_page_view_event(username, user_id, visibility=self.PRIVACY_PRIVATE)
def test_fields_on_my_public_profile(self):
"""
Scenario: Verify that desired fields are shown when looking at her own public profile.
Given that I am a registered user.
And I visit my Profile page.
And I set the profile visibility to public.
And I reload the page.
Then I should see the profile visibility selector dropdown.
Then I see all the profile fields are shown.
And `location`, `language` and `about me` fields are editable.
"""
username, user_id = self.log_in_as_unique_user()
profile_page = self.visit_profile_page(username, privacy=self.PRIVACY_PUBLIC)
self.verify_profile_page_is_public(profile_page)
self.verify_profile_page_view_event(username, user_id, visibility=self.PRIVACY_PUBLIC)
def _test_dropdown_field(self, profile_page, field_id, new_value, displayed_value, mode):
"""
Test behaviour of a dropdown field.
"""
profile_page.value_for_dropdown_field(field_id, new_value)
self.assertEqual(profile_page.get_non_editable_mode_value(field_id), displayed_value)
self.assertTrue(profile_page.mode_for_field(field_id), mode)
self.browser.refresh()
profile_page.wait_for_page()
self.assertEqual(profile_page.get_non_editable_mode_value(field_id), displayed_value)
self.assertTrue(profile_page.mode_for_field(field_id), mode)
def _test_textarea_field(self, profile_page, field_id, new_value, displayed_value, mode):
"""
Test behaviour of a textarea field.
"""
profile_page.value_for_textarea_field(field_id, new_value)
self.assertEqual(profile_page.get_non_editable_mode_value(field_id), displayed_value)
self.assertTrue(profile_page.mode_for_field(field_id), mode)
self.browser.refresh()
profile_page.wait_for_page()
self.assertEqual(profile_page.get_non_editable_mode_value(field_id), displayed_value)
self.assertTrue(profile_page.mode_for_field(field_id), mode)
def test_country_field(self):
"""
Test behaviour of `Country` field.
Given that I am a registered user.
And I visit my Profile page.
And I set the profile visibility to public and set default values for public fields.
Then I set country value to `Pakistan`.
Then displayed country should be `Pakistan` and country field mode should be `display`
And I reload the page.
Then displayed country should be `Pakistan` and country field mode should be `display`
And I make `country` field editable
Then `country` field mode should be `edit`
And `country` field icon should be visible.
"""
username, user_id = self.log_in_as_unique_user()
profile_page = self.visit_profile_page(username, privacy=self.PRIVACY_PUBLIC)
self._test_dropdown_field(profile_page, 'country', 'Pakistan', 'Pakistan', 'display')
profile_page.make_field_editable('country')
self.assertEqual(profile_page.mode_for_field('country'), 'edit')
self.assertTrue(profile_page.field_icon_present('country'))
def test_language_field(self):
"""
Test behaviour of `Language` field.
Given that I am a registered user.
And I visit my Profile page.
And I set the profile visibility to public and set default values for public fields.
Then I set language value to `Urdu`.
Then displayed language should be `Urdu` and language field mode should be `display`
And I reload the page.
Then displayed language should be `Urdu` and language field mode should be `display`
Then I set empty value for language.
Then displayed language should be `Add language` and language field mode should be `placeholder`
And I reload the page.
Then displayed language should be `Add language` and language field mode should be `placeholder`
And I make `language` field editable
Then `language` field mode should be `edit`
And `language` field icon should be visible.
"""
username, user_id = self.log_in_as_unique_user()
profile_page = self.visit_profile_page(username, privacy=self.PRIVACY_PUBLIC)
self._test_dropdown_field(profile_page, 'language_proficiencies', 'Urdu', 'Urdu', 'display')
self._test_dropdown_field(profile_page, 'language_proficiencies', '', 'Add language', 'placeholder')
profile_page.make_field_editable('language_proficiencies')
self.assertTrue(profile_page.mode_for_field('language_proficiencies'), 'edit')
self.assertTrue(profile_page.field_icon_present('language_proficiencies'))
def test_about_me_field(self):
"""
Test behaviour of `About Me` field.
Given that I am a registered user.
And I visit my Profile page.
And I set the profile visibility to public and set default values for public fields.
Then I set about me value to `Eat Sleep Code`.
Then displayed about me should be `Eat Sleep Code` and about me field mode should be `display`
And I reload the page.
Then displayed about me should be `Eat Sleep Code` and about me field mode should be `display`
Then I set empty value for about me.
Then displayed about me should be `Tell other edX learners a little about yourself: where you live,
what your interests are, why you're taking courses on edX, or what you hope to learn.` and about me
field mode should be `placeholder`
And I reload the page.
Then displayed about me should be `Tell other edX learners a little about yourself: where you live,
what your interests are, why you're taking courses on edX, or what you hope to learn.` and about me
field mode should be `placeholder`
And I make `about me` field editable
Then `about me` field mode should be `edit`
"""
placeholder_value = (
"Tell other learners a little about yourself: where you live, what your interests are, "
"why you're taking courses, or what you hope to learn."
)
username, user_id = self.log_in_as_unique_user()
profile_page = self.visit_profile_page(username, privacy=self.PRIVACY_PUBLIC)
self._test_textarea_field(profile_page, 'bio', 'Eat Sleep Code', 'Eat Sleep Code', 'display')
self._test_textarea_field(profile_page, 'bio', '', placeholder_value, 'placeholder')
profile_page.make_field_editable('bio')
self.assertTrue(profile_page.mode_for_field('bio'), 'edit')
def test_birth_year_not_set(self):
"""
Verify message if birth year is not set.
Given that I am a registered user.
And birth year is not set for the user.
And I visit my profile page.
Then I should see a message that the profile is private until the year of birth is set.
"""
username, user_id = self.log_in_as_unique_user()
message = "You must specify your birth year before you can share your full profile."
self.verify_profile_forced_private_message(username, birth_year=None, message=message)
self.verify_profile_page_view_event(username, user_id, visibility=self.PRIVACY_PRIVATE)
def test_user_is_under_age(self):
"""
Verify message if user is under age.
Given that I am a registered user.
And birth year is set so that age is less than 13.
And I visit my profile page.
Then I should see a message that the profile is private as I am under thirteen.
"""
username, user_id = self.log_in_as_unique_user()
under_age_birth_year = datetime.now().year - 10
self.verify_profile_forced_private_message(
username,
birth_year=under_age_birth_year,
message='You must be over 13 to share a full profile.'
)
self.verify_profile_page_view_event(username, user_id, visibility=self.PRIVACY_PRIVATE)
def test_user_can_only_see_default_image_for_private_profile(self):
"""
Scenario: Default profile image behaves correctly for under age user.
Given that I am on my profile page with private access
And I can see default image
When I move my cursor to the image
        Then I cannot see the upload/remove image text
        And I cannot upload/remove the image.
"""
year_of_birth = datetime.now().year - 5
username, user_id = self.log_in_as_unique_user()
profile_page = self.visit_profile_page(username, privacy=self.PRIVACY_PRIVATE)
self.verify_profile_forced_private_message(
username,
year_of_birth,
message='You must be over 13 to share a full profile.'
)
self.assertTrue(profile_page.profile_has_default_image)
self.assertFalse(profile_page.profile_has_image_with_private_access())
def test_user_can_see_default_image_for_public_profile(self):
"""
Scenario: Default profile image behaves correctly for public profile.
Given that I am on my profile page with public access
And I can see default image
When I move my cursor to the image
        Then I can see the upload/remove image text
        And I am able to upload a new image
"""
username, user_id = self.log_in_as_unique_user()
profile_page = self.visit_profile_page(username, privacy=self.PRIVACY_PUBLIC)
self.assert_default_image_has_public_access(profile_page)
@flaky # TODO fix this, see TNL-2704
def test_user_can_upload_the_profile_image_with_success(self):
"""
Scenario: Upload profile image works correctly.
Given that I am on my profile page with public access
And I can see default image
When I move my cursor to the image
        Then I can see the upload/remove image text
        When I upload a new image via the file uploader
        Then I can see the changed image
        And I can also see the latest image after reload.
"""
username, user_id = self.log_in_as_unique_user()
profile_page = self.visit_profile_page(username, privacy=self.PRIVACY_PUBLIC)
self.assert_default_image_has_public_access(profile_page)
with self.verify_pref_change_event_during(
username, user_id, 'profile_image_uploaded_at', table='auth_userprofile'
):
profile_page.upload_file(filename='image.jpg')
self.assertTrue(profile_page.image_upload_success)
profile_page.visit()
self.assertTrue(profile_page.image_upload_success)
def test_user_can_see_error_for_exceeding_max_file_size_limit(self):
"""
Scenario: Upload profile image does not work for > 1MB image file.
Given that I am on my profile page with public access
And I can see default image
When I move my cursor to the image
        Then I can see the upload/remove image text
        When I upload a new > 1MB image via the file uploader
        Then I can see the error message for the file size limit
        And I can still see the default image after page reload.
"""
username, user_id = self.log_in_as_unique_user()
profile_page = self.visit_profile_page(username, privacy=self.PRIVACY_PUBLIC)
self.assert_default_image_has_public_access(profile_page)
profile_page.upload_file(filename='larger_image.jpg')
self.assertEqual(profile_page.profile_image_message, "The file must be smaller than 1 MB in size.")
profile_page.visit()
self.assertTrue(profile_page.profile_has_default_image)
self.assert_no_matching_events_were_emitted({
'event_type': self.USER_SETTINGS_CHANGED_EVENT_NAME,
'event': {
'setting': 'profile_image_uploaded_at',
'user_id': int(user_id),
}
})
def test_user_can_see_error_for_file_size_below_the_min_limit(self):
"""
Scenario: Upload profile image does not work for < 100 Bytes image file.
Given that I am on my profile page with public access
And I can see default image
When I move my cursor to the image
        Then I can see the upload/remove image text
        When I upload a new < 100 Bytes image via the file uploader
        Then I can see the error message for the minimum file size limit
        And I can still see the default image after page reload.
"""
username, user_id = self.log_in_as_unique_user()
profile_page = self.visit_profile_page(username, privacy=self.PRIVACY_PUBLIC)
self.assert_default_image_has_public_access(profile_page)
profile_page.upload_file(filename='list-icon-visited.png')
self.assertEqual(profile_page.profile_image_message, "The file must be at least 100 bytes in size.")
profile_page.visit()
self.assertTrue(profile_page.profile_has_default_image)
self.assert_no_matching_events_were_emitted({
'event_type': self.USER_SETTINGS_CHANGED_EVENT_NAME,
'event': {
'setting': 'profile_image_uploaded_at',
'user_id': int(user_id),
}
})
def test_user_can_see_error_for_wrong_file_type(self):
"""
Scenario: Upload profile image does not work for wrong file types.
Given that I am on my profile page with public access
And I can see default image
When I move my cursor to the image
        Then I can see the upload/remove image text
        When I upload a new csv file via the file uploader
        Then I can see the error message for the wrong/unsupported file type
        And I can still see the default image after page reload.
"""
username, user_id = self.log_in_as_unique_user()
profile_page = self.visit_profile_page(username, privacy=self.PRIVACY_PUBLIC)
self.assert_default_image_has_public_access(profile_page)
profile_page.upload_file(filename='cohort_users_only_username.csv')
self.assertEqual(
profile_page.profile_image_message,
"The file must be one of the following types: .gif, .png, .jpeg, .jpg."
)
profile_page.visit()
self.assertTrue(profile_page.profile_has_default_image)
self.assert_no_matching_events_were_emitted({
'event_type': self.USER_SETTINGS_CHANGED_EVENT_NAME,
'event': {
'setting': 'profile_image_uploaded_at',
'user_id': int(user_id),
}
})
def test_user_can_remove_profile_image(self):
"""
Scenario: Remove profile image works correctly.
Given that I am on my profile page with public access
And I can see default image
When I move my cursor to the image
        Then I can see the upload/remove image text
        When I click on the remove image link
        Then I can see the default image
        And I can still see the default image after page reload.
"""
username, user_id = self.log_in_as_unique_user()
profile_page = self.visit_profile_page(username, privacy=self.PRIVACY_PUBLIC)
self.assert_default_image_has_public_access(profile_page)
with self.verify_pref_change_event_during(
username, user_id, 'profile_image_uploaded_at', table='auth_userprofile'
):
profile_page.upload_file(filename='image.jpg')
self.assertTrue(profile_page.image_upload_success)
with self.verify_pref_change_event_during(
username, user_id, 'profile_image_uploaded_at', table='auth_userprofile'
):
self.assertTrue(profile_page.remove_profile_image())
self.assertTrue(profile_page.profile_has_default_image)
profile_page.visit()
self.assertTrue(profile_page.profile_has_default_image)
def test_user_cannot_remove_default_image(self):
"""
        Scenario: Remove profile image does not work for default images.
Given that I am on my profile page with public access
And I can see default image
When I move my cursor to the image
        Then I can see only the upload image text
        And I cannot see the remove image text
"""
username, user_id = self.log_in_as_unique_user()
profile_page = self.visit_profile_page(username, privacy=self.PRIVACY_PUBLIC)
self.assert_default_image_has_public_access(profile_page)
self.assertFalse(profile_page.remove_link_present)
def test_eventing_after_multiple_uploads(self):
"""
Scenario: An event is fired when a user with a profile image uploads another image
Given that I am on my profile page with public access
And I upload a new image via file uploader
When I upload another image via the file uploader
Then two upload events have been emitted
"""
username, user_id = self.log_in_as_unique_user()
profile_page = self.visit_profile_page(username, privacy=self.PRIVACY_PUBLIC)
self.assert_default_image_has_public_access(profile_page)
with self.verify_pref_change_event_during(
username, user_id, 'profile_image_uploaded_at', table='auth_userprofile'
):
profile_page.upload_file(filename='image.jpg')
self.assertTrue(profile_page.image_upload_success)
with self.verify_pref_change_event_during(
username, user_id, 'profile_image_uploaded_at', table='auth_userprofile'
):
profile_page.upload_file(filename='image.jpg', wait_for_upload_button=False)
@attr('shard_4')
class DifferentUserLearnerProfilePageTest(LearnerProfileTestMixin, WebAppTest):
"""
Tests that verify viewing the profile page of a different user.
"""
def test_different_user_private_profile(self):
"""
Scenario: Verify that desired fields are shown when looking at a different user's private profile.
Given that I am a registered user.
And I visit a different user's private profile page.
Then I shouldn't see the profile visibility selector dropdown.
Then I see some of the profile fields are shown.
"""
different_username, different_user_id = self._initialize_different_user(privacy=self.PRIVACY_PRIVATE)
username, __ = self.log_in_as_unique_user()
profile_page = self.visit_profile_page(different_username)
self.verify_profile_page_is_private(profile_page, is_editable=False)
self.verify_profile_page_view_event(username, different_user_id, visibility=self.PRIVACY_PRIVATE)
def test_different_user_under_age(self):
"""
Scenario: Verify that an under age user's profile is private to others.
Given that I am a registered user.
And I visit an under age user's profile page.
Then I shouldn't see the profile visibility selector dropdown.
Then I see that only the private fields are shown.
"""
under_age_birth_year = datetime.now().year - 10
different_username, different_user_id = self._initialize_different_user(
privacy=self.PRIVACY_PUBLIC,
birth_year=under_age_birth_year
)
username, __ = self.log_in_as_unique_user()
profile_page = self.visit_profile_page(different_username)
self.verify_profile_page_is_private(profile_page, is_editable=False)
self.verify_profile_page_view_event(username, different_user_id, visibility=self.PRIVACY_PRIVATE)
@flaky # TODO fix this, see TNL-2199
def test_different_user_public_profile(self):
"""
Scenario: Verify that desired fields are shown when looking at a different user's public profile.
Given that I am a registered user.
And I visit a different user's public profile page.
Then I shouldn't see the profile visibility selector dropdown.
Then all the profile fields are shown.
Then I shouldn't see the profile visibility selector dropdown.
Also `location`, `language` and `about me` fields are not editable.
"""
different_username, different_user_id = self._initialize_different_user(privacy=self.PRIVACY_PUBLIC)
username, __ = self.log_in_as_unique_user()
profile_page = self.visit_profile_page(different_username)
profile_page.wait_for_public_fields()
self.verify_profile_page_is_public(profile_page, is_editable=False)
self.verify_profile_page_view_event(username, different_user_id, visibility=self.PRIVACY_PUBLIC)
def _initialize_different_user(self, privacy=None, birth_year=None):
"""
Initialize the profile page for a different test user
"""
username, user_id = self.log_in_as_unique_user()
# Set the privacy for the new user
if privacy is None:
privacy = self.PRIVACY_PUBLIC
self.visit_profile_page(username, privacy=privacy)
# Set the user's year of birth
if birth_year:
self.set_birth_year(birth_year)
# Log the user out
LogoutPage(self.browser).visit()
return username, user_id
| agpl-3.0 |
LeeKamentsky/CellProfiler | cellprofiler/modules/tests/test_calculateimageoverlap.py | 2 | 42813 | '''test_calculateimageoverlap - test the CalculateImageOverlap module
CellProfiler is distributed under the GNU General Public License.
See the accompanying file LICENSE for details.
Copyright (c) 2003-2009 Massachusetts Institute of Technology
Copyright (c) 2009-2015 Broad Institute
Please see the AUTHORS file for credits.
Website: http://www.cellprofiler.org
'''
import base64
import numpy as np
import scipy.ndimage as ndimage
import os
import tempfile
from StringIO import StringIO
import unittest
import zlib
from cellprofiler.preferences import set_headless
set_headless()
import cellprofiler.pipeline as cpp
import cellprofiler.cpmodule as cpm
import cellprofiler.cpimage as cpi
import cellprofiler.measurements as cpmeas
import cellprofiler.objects as cpo
import cellprofiler.workspace as cpw
import cellprofiler.modules.calculateimageoverlap as C
GROUND_TRUTH_IMAGE_NAME = 'groundtruth'
TEST_IMAGE_NAME = 'test'
O_IMG = 'Foreground/background segmentation'
O_OBJ = 'Segmented objects'
GROUND_TRUTH_OBJ_IMAGE_NAME = 'DNA'
ID_OBJ_IMAGE_NAME = 'Protein'
GROUND_TRUTH_OBJ = 'Nuclei'
ID_OBJ = 'Protein'
class TestCalculateImageOverlap(unittest.TestCase):
def test_01_01_load_matlab(self):
data = ('eJzzdQzxcXRSMNUzUPB1DNFNy8xJ1VEIyEksScsvyrVSCHAO9/TTUXAuSk0s'
'SU1RyM+zUggpTVXwSsxTMDJTMDSzMrWwMrRQMDIwNFAgGTAwevryMzAwnGJk'
'YKiY83TqWa8jBgINy++G/WEMdBSwubFY/L/AkcULmpredek1Hd9x9prDrSlm'
'sWW1TPLH3T/01rAK5+7OjHl7fpX4cp7TS4zj1Sa/fKPE8EaP4UzH/ciFajUd'
'c2+1Lbq07+CcyQe6czck6MbWC1Z1KzW909znanLQ8MmktfPZa6/Jtv5c7f1M'
'TWT7xPVW3Jnq/2z7HvnZc/vEx09UXGb+RNA50Gh7to2CYaKj1PxPX6/WxB/b'
'Wiveanf7sZVElGxKbNiU0pf1LMn12U53p4n8Xxqyu+LEdr/2Fcdl5T/ecxY9'
'13Rs4jbxjSoFlVM+tNvMvyvUF/ogYoMsW9xkC7GJKx9JPT/tGd/7f+Wu0Kdn'
's20y5xuoJ9zxTOL5Py+8bL5bY/qBf/rPi8MORV+ruKTW6M12xFegtKN/T43X'
'FcvlkQ9mXUi7fDVzd9rcZ1uKPW93X9B4p9gl6ne1Joo1jvudS+i/TXK//h3k'
'0zoiP+v++dytcpe/3a4OPqEo9E9ufY/xy/rUM4Uu3I8Dzq7Om/e/9z9T/Pf+'
'e/t93/9sUvweI6kR+uzyqmMTvxku2tui1Hc/cS5jnwbXw8/lJsFFfME22mmb'
'K7dcL7Df7ThHdV5z7/VSye8sX99lfpb+7zPr135GNddNRQBlKwLx')
pipeline = cpp.Pipeline()
        def callback(caller, event):
self.assertFalse(isinstance(event, cpp.LoadExceptionEvent))
pipeline.add_listener(callback)
pipeline.load(StringIO(zlib.decompress(base64.b64decode(data))))
self.assertEqual(len(pipeline.modules()), 2)
module = pipeline.modules()[-1]
self.assertTrue(isinstance(module, C.CalculateImageOverlap))
self.assertEqual(module.ground_truth, "groundtruth")
self.assertEqual(module.test_img, "orig")
def test_01_02_load_v1(self):
data = r"""CellProfiler Pipeline: http://www.cellprofiler.org
Version:1
SVNRevision:9169
CalculateImageOverlap:[module_num:1|svn_version:\'9000\'|variable_revision_number:1|show_window:True|notes:\x5B\x5D]
Which image do you want to use as the basis for calculating the amount of overlap? :GroundTruth
Which image do you want to compare for overlap?:Segmentation
"""
pipeline = cpp.Pipeline()
        def callback(caller, event):
self.assertFalse(isinstance(event, cpp.LoadExceptionEvent))
pipeline.add_listener(callback)
pipeline.load(StringIO(data))
self.assertEqual(len(pipeline.modules()), 1)
module = pipeline.modules()[0]
self.assertTrue(isinstance(module, C.CalculateImageOverlap))
self.assertEqual(module.ground_truth, "GroundTruth")
self.assertEqual(module.test_img, "Segmentation")
def test_01_03_load_v3(self):
data = r"""CellProfiler Pipeline: http://www.cellprofiler.org
Version:3
DateRevision:20131210175632
GitHash:63ec479
ModuleCount:2
HasImagePlaneDetails:False
CalculateImageOverlap:[module_num:1|svn_version:\'Unknown\'|variable_revision_number:3|show_window:False|notes:\x5B\x5D|batch_state:array(\x5B\x5D, dtype=uint8)|enabled:True]
Compare segmented objects, or foreground/background?:Segmented objects
Select the image to be used as the ground truth basis for calculating the amount of overlap:Bar
Select the image to be used to test for overlap:Foo
Select the objects to be used as the ground truth basis for calculating the amount of overlap:Nuclei2_0
Select the objects to be tested for overlap against the ground truth:Nuclei2_1
CalculateImageOverlap:[module_num:2|svn_version:\'Unknown\'|variable_revision_number:3|show_window:False|notes:\x5B\x5D|batch_state:array(\x5B\x5D, dtype=uint8)|enabled:True]
Compare segmented objects, or foreground/background?:Foreground/background segmentation
Select the image to be used as the ground truth basis for calculating the amount of overlap:Foo
Select the image to be used to test for overlap:Bar
Select the objects to be used as the ground truth basis for calculating the amount of overlap:Cell2_0
Select the objects to be tested for overlap against the ground truth:Cell2_1
"""
pipeline = cpp.Pipeline()
def callback(caller,event):
self.assertFalse(isinstance(event, cpp.LoadExceptionEvent))
pipeline.add_listener(callback)
pipeline.load(StringIO(data))
self.assertEqual(len(pipeline.modules()), 2)
module = pipeline.modules()[0]
self.assertTrue(isinstance(module, C.CalculateImageOverlap))
self.assertEqual(module.obj_or_img, C.O_OBJ)
self.assertEqual(module.ground_truth, "Bar")
self.assertEqual(module.test_img, "Foo")
self.assertEqual(module.object_name_GT, "Nuclei2_0")
self.assertEqual(module.object_name_ID, "Nuclei2_1")
self.assertFalse(module.wants_emd)
self.assertEqual(module.decimation_method, C.DM_KMEANS)
self.assertEqual(module.max_distance, 250)
self.assertEqual(module.max_points, 250)
self.assertFalse(module.penalize_missing)
module = pipeline.modules()[1]
self.assertTrue(isinstance(module, C.CalculateImageOverlap))
self.assertEqual(module.obj_or_img, C.O_IMG)
self.assertEqual(module.ground_truth, "Foo")
self.assertEqual(module.test_img, "Bar")
self.assertEqual(module.object_name_GT, "Cell2_0")
self.assertEqual(module.object_name_ID, "Cell2_1")
self.assertFalse(module.wants_emd)
def test_01_04_load_v4(self):
data = """CellProfiler Pipeline: http://www.cellprofiler.org
Version:3
DateRevision:20141015195823
GitHash:051040e
ModuleCount:2
HasImagePlaneDetails:False
CalculateImageOverlap:[module_num:1|svn_version:\'Unknown\'|variable_revision_number:4|show_window:False|notes:\x5B\x5D|batch_state:array(\x5B\x5D, dtype=uint8)|enabled:True|wants_pause:False]
Compare segmented objects, or foreground/background?:Segmented objects
Select the image to be used as the ground truth basis for calculating the amount of overlap:Bar
Select the image to be used to test for overlap:Foo
Select the objects to be used as the ground truth basis for calculating the amount of overlap:Nuclei2_0
Select the objects to be tested for overlap against the ground truth:Nuclei2_1
Calculate earth mover\'s distance?:No
Maximum # of points:201
Point selection method:K Means
Maximum distance:202
Penalize missing pixels:No
CalculateImageOverlap:[module_num:2|svn_version:\'Unknown\'|variable_revision_number:4|show_window:False|notes:\x5B\x5D|batch_state:array(\x5B\x5D, dtype=uint8)|enabled:True|wants_pause:False]
Compare segmented objects, or foreground/background?:Foreground/background segmentation
Select the image to be used as the ground truth basis for calculating the amount of overlap:Foo
Select the image to be used to test for overlap:Bar
Select the objects to be used as the ground truth basis for calculating the amount of overlap:Cell2_0
Select the objects to be tested for overlap against the ground truth:Cell2_1
Calculate earth mover\'s distance?:Yes
Maximum # of points:101
Point selection method:Skeleton
Maximum distance:102
Penalize missing pixels:Yes
"""
pipeline = cpp.Pipeline()
def callback(caller,event):
self.assertFalse(isinstance(event, cpp.LoadExceptionEvent))
pipeline.add_listener(callback)
pipeline.load(StringIO(data))
self.assertEqual(len(pipeline.modules()), 2)
module = pipeline.modules()[0]
self.assertTrue(isinstance(module, C.CalculateImageOverlap))
self.assertEqual(module.obj_or_img, C.O_OBJ)
self.assertEqual(module.ground_truth, "Bar")
self.assertEqual(module.test_img, "Foo")
self.assertEqual(module.object_name_GT, "Nuclei2_0")
self.assertEqual(module.object_name_ID, "Nuclei2_1")
self.assertFalse(module.wants_emd)
self.assertEqual(module.decimation_method, C.DM_KMEANS)
self.assertEqual(module.max_distance, 202)
self.assertEqual(module.max_points, 201)
self.assertFalse(module.penalize_missing)
module = pipeline.modules()[1]
self.assertTrue(isinstance(module, C.CalculateImageOverlap))
self.assertEqual(module.obj_or_img, C.O_IMG)
self.assertEqual(module.ground_truth, "Foo")
self.assertEqual(module.test_img, "Bar")
self.assertEqual(module.object_name_GT, "Cell2_0")
self.assertEqual(module.object_name_ID, "Cell2_1")
self.assertTrue(module.wants_emd)
self.assertEqual(module.decimation_method, C.DM_SKEL)
self.assertEqual(module.max_distance, 102)
self.assertEqual(module.max_points, 101)
self.assertTrue(module.penalize_missing)
def make_workspace(self, ground_truth, test):
'''Make a workspace with a ground-truth image and a test image
ground_truth and test are dictionaries with the following keys:
image - the pixel data
mask - (optional) the mask data
crop_mask - (optional) a cropping mask
returns a workspace and module
'''
module = C.CalculateImageOverlap()
module.module_num = 1
module.obj_or_img.value = O_IMG
module.ground_truth.value = GROUND_TRUTH_IMAGE_NAME
module.test_img.value = TEST_IMAGE_NAME
module.wants_emd.value = True
pipeline = cpp.Pipeline()
def callback(caller, event):
self.assertFalse(isinstance(event, cpp.RunExceptionEvent))
pipeline.add_listener(callback)
pipeline.add_module(module)
image_set_list = cpi.ImageSetList()
image_set = image_set_list.get_image_set(0)
for name, d in ((GROUND_TRUTH_IMAGE_NAME, ground_truth),
(TEST_IMAGE_NAME, test)):
image = cpi.Image(d["image"],
mask = d.get("mask"),
crop_mask = d.get("crop_mask"))
image_set.add(name, image)
workspace = cpw.Workspace(pipeline, module, image_set,
cpo.ObjectSet(), cpmeas.Measurements(),
image_set_list)
return workspace, module
def make_obj_workspace(self, ground_truth_obj, id_obj, ground_truth, id):
        '''Make a workspace to test comparing objects
        ground_truth_obj and id_obj are either label matrices or Nx3 ijv
        arrays whose columns are:
        i - i component of pixel coordinates
        j - j component of pixel coordinates
        l - label
        ground_truth and id are image dictionaries as for make_workspace'''
module = C.CalculateImageOverlap()
module.module_num = 1
module.obj_or_img.value = O_OBJ
module.object_name_GT.value = GROUND_TRUTH_OBJ
module.object_name_ID.value = ID_OBJ
module.wants_emd.value = True
pipeline = cpp.Pipeline()
def callback(caller, event):
self.assertFalse(isinstance(event, cpp.RunExceptionEvent))
pipeline.add_listener(callback)
pipeline.add_module(module)
image_set_list = cpi.ImageSetList()
image_set = image_set_list.get_image_set(0)
for name, d in ((GROUND_TRUTH_OBJ_IMAGE_NAME, ground_truth),
(ID_OBJ_IMAGE_NAME, id)):
image = cpi.Image(d["image"],
mask = d.get("mask"),
crop_mask = d.get("crop_mask"))
image_set.add(name, image)
object_set = cpo.ObjectSet()
for name, d in ((GROUND_TRUTH_OBJ, ground_truth_obj),
(ID_OBJ, id_obj)):
object = cpo.Objects()
if d.shape[1] == 3:
object.ijv = d
else:
object.segmented = d
object_set.add_objects(object, name)
workspace = cpw.Workspace(pipeline, module, image_set,
object_set, cpmeas.Measurements(),
image_set_list)
return workspace, module
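    # Illustrative only, not part of the original tests: the ijv convention
    # accepted by make_obj_workspace, where each row of an Nx3 array is
    # (i, j, label) and overlapping objects may repeat pixel coordinates.
    def example_ijv_fixture(self):
        '''Build a workspace from two one-pixel objects in ijv form'''
        ijv = np.array([[2, 3, 1],   # pixel (2, 3) carries label 1
                        [5, 7, 2]])  # pixel (5, 7) carries label 2
        return self.make_obj_workspace(
            ijv, ijv,
            dict(image=np.zeros((20, 10), bool)),
            dict(image=np.zeros((20, 10), bool)))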
def test_03_01_zeros(self):
        '''Test a ground-truth image of all ones against a test image of all ones'''
workspace, module = self.make_workspace(
dict(image = np.ones((20,10), bool)),
dict(image = np.ones((20,10), bool)))
self.assertTrue(isinstance(module, C.CalculateImageOverlap))
module.run(workspace)
measurements = workspace.measurements
self.assertTrue(isinstance(measurements, cpmeas.Measurements))
self.assertEqual(
measurements.get_current_image_measurement("Overlap_FalseNegRate_test"),
0)
features = measurements.get_feature_names(cpmeas.IMAGE)
for feature in C.FTR_ALL + [C.FTR_EARTH_MOVERS_DISTANCE]:
field = '_'.join((C.C_IMAGE_OVERLAP, feature, TEST_IMAGE_NAME))
self.assertTrue(field in features,
"Missing feature: %s" % feature)
ftr_emd = module.measurement_name(C.FTR_EARTH_MOVERS_DISTANCE)
self.assertEqual(measurements[cpmeas.IMAGE, ftr_emd], 0)
def test_03_02_ones(self):
        '''Test a ground-truth image of all zeros against a test image of all zeros'''
workspace, module = self.make_workspace(
dict(image = np.zeros((20,10), bool)),
dict(image = np.zeros((20,10), bool)))
self.assertTrue(isinstance(module, C.CalculateImageOverlap))
module.run(workspace)
measurements = workspace.measurements
self.assertTrue(isinstance(measurements, cpmeas.Measurements))
for feature, expected in ((C.FTR_FALSE_POS_RATE, 0),
(C.FTR_FALSE_NEG_RATE, 0),
(C.FTR_TRUE_POS_RATE, 1),
(C.FTR_TRUE_NEG_RATE, 1),
(C.FTR_RECALL, 1),
(C.FTR_PRECISION, 1),
(C.FTR_F_FACTOR, 1),
(C.FTR_RAND_INDEX, 1),
(C.FTR_EARTH_MOVERS_DISTANCE, 0)):
mname = '_'.join((C.C_IMAGE_OVERLAP, feature, TEST_IMAGE_NAME))
value = measurements.get_current_image_measurement(mname)
self.assertEqual(expected, value)
mname = '_'.join((C.C_IMAGE_OVERLAP, C.FTR_ADJUSTED_RAND_INDEX,
TEST_IMAGE_NAME))
self.assertTrue(np.isnan(measurements.get_current_image_measurement(
mname)))
def test_03_03_masked(self):
'''Test ground-truth of a masked image'''
workspace, module = self.make_workspace(
dict(image = np.zeros((20,10), bool)),
dict(image = np.zeros((20,10), bool),
mask = np.zeros((20,10), bool)))
self.assertTrue(isinstance(module, C.CalculateImageOverlap))
module.run(workspace)
measurements = workspace.measurements
self.assertTrue(isinstance(measurements, cpmeas.Measurements))
for feature, expected in ((C.FTR_FALSE_POS_RATE, 0),
(C.FTR_FALSE_NEG_RATE, 0),
(C.FTR_TRUE_POS_RATE, 1),
(C.FTR_TRUE_NEG_RATE, 1),
(C.FTR_RECALL, 1),
(C.FTR_PRECISION, 1),
(C.FTR_F_FACTOR, 1),
(C.FTR_EARTH_MOVERS_DISTANCE, 0)):
mname = '_'.join((C.C_IMAGE_OVERLAP, feature, TEST_IMAGE_NAME))
value = measurements.get_current_image_measurement(mname)
self.assertEqual(expected, value)
for feature in (C.FTR_RAND_INDEX, C.FTR_ADJUSTED_RAND_INDEX):
mname = '_'.join((C.C_IMAGE_OVERLAP, feature, TEST_IMAGE_NAME))
value = measurements.get_current_image_measurement(mname)
self.assertTrue(np.isnan(value))
def test_03_04_all_right(self):
np.random.seed(34)
image = np.random.uniform(size=(10,20)) > .5
workspace, module = self.make_workspace(
dict(image=image), dict(image=image))
module.run(workspace)
measurements = workspace.measurements
self.assertTrue(isinstance(measurements, cpmeas.Measurements))
for feature, expected in ((C.FTR_FALSE_POS_RATE, 0),
(C.FTR_FALSE_NEG_RATE, 0),
(C.FTR_TRUE_POS_RATE, 1),
(C.FTR_TRUE_NEG_RATE, 1),
(C.FTR_RECALL, 1),
(C.FTR_PRECISION, 1),
(C.FTR_F_FACTOR, 1),
(C.FTR_RAND_INDEX, 1),
(C.FTR_ADJUSTED_RAND_INDEX, 1),
(C.FTR_EARTH_MOVERS_DISTANCE, 0)):
mname = '_'.join((C.C_IMAGE_OVERLAP, feature, TEST_IMAGE_NAME))
value = measurements.get_current_image_measurement(mname)
self.assertEqual(expected, value)
def test_03_05_one_false_positive(self):
i,j = np.mgrid[0:10,0:20]
ground_truth = ((i+j) % 2) == 0
test = ground_truth.copy()
test[0,1] = True
workspace, module = self.make_workspace(
dict(image=ground_truth), dict(image=test))
module.run(workspace)
measurements = workspace.measurements
precision = 100.0 / 101.0
f_factor = 2 * precision / (1 + precision)
for feature, expected in ((C.FTR_FALSE_POS_RATE, 0.01),
(C.FTR_FALSE_NEG_RATE, 0),
(C.FTR_TRUE_POS_RATE, 1),
(C.FTR_TRUE_NEG_RATE, 0.99),
(C.FTR_RECALL, 1),
(C.FTR_PRECISION, precision),
(C.FTR_F_FACTOR, f_factor),
(C.FTR_EARTH_MOVERS_DISTANCE, 0)):
mname = '_'.join((C.C_IMAGE_OVERLAP, feature, TEST_IMAGE_NAME))
value = measurements.get_current_image_measurement(mname)
self.assertAlmostEqual(expected, value,
msg = "%s is wrong" % feature)
def test_03_05_one_false_negative(self):
i,j = np.mgrid[0:10,0:20]
ground_truth = ((i+j) % 2) == 0
test = ground_truth.copy()
test[0,0] = False
workspace, module = self.make_workspace(
dict(image=ground_truth), dict(image=test))
module.run(workspace)
measurements = workspace.measurements
recall = 0.99
f_factor = 2 * recall / (1 + recall)
for feature, expected in ((C.FTR_FALSE_POS_RATE, 0),
(C.FTR_FALSE_NEG_RATE, 0.01),
(C.FTR_TRUE_POS_RATE, 0.99),
(C.FTR_TRUE_NEG_RATE, 1),
(C.FTR_RECALL, recall),
(C.FTR_PRECISION, 1),
(C.FTR_F_FACTOR, f_factor),
(C.FTR_EARTH_MOVERS_DISTANCE, 0)):
mname = '_'.join((C.C_IMAGE_OVERLAP, feature, TEST_IMAGE_NAME))
value = measurements.get_current_image_measurement(mname)
self.assertAlmostEqual(expected, value,
msg = "%s is wrong" % feature)
def test_03_06_one_false_positive_and_mask(self):
i,j = np.mgrid[0:10,0:20]
ground_truth = ((i+j) % 2) == 0
test = ground_truth.copy()
test[0,1] = True
mask = j < 10
workspace, module = self.make_workspace(
dict(image=ground_truth), dict(image=test, mask=mask))
module.run(workspace)
measurements = workspace.measurements
precision = 50.0 / 51.0
f_factor = 2 * precision / (1 + precision)
for feature, expected in ((C.FTR_FALSE_POS_RATE, 0.02),
(C.FTR_FALSE_NEG_RATE, 0),
(C.FTR_TRUE_POS_RATE, 1),
(C.FTR_TRUE_NEG_RATE, 0.98),
(C.FTR_RECALL, 1),
(C.FTR_PRECISION, precision),
(C.FTR_F_FACTOR, f_factor)):
mname = '_'.join((C.C_IMAGE_OVERLAP, feature, TEST_IMAGE_NAME))
value = measurements.get_current_image_measurement(mname)
self.assertAlmostEqual(expected, value,
msg = "%s is wrong" % feature)
def test_03_07_one_false_negative_and_mask(self):
i,j = np.mgrid[0:10,0:20]
ground_truth = ((i+j) % 2) == 0
test = ground_truth.copy()
test[0,0] = False
mask = j < 10
workspace, module = self.make_workspace(
dict(image=ground_truth), dict(image=test, mask=mask))
module.run(workspace)
measurements = workspace.measurements
recall = 0.98
f_factor = 2 * recall / (1 + recall)
for feature, expected in ((C.FTR_FALSE_POS_RATE, 0),
(C.FTR_FALSE_NEG_RATE, 0.02),
(C.FTR_TRUE_POS_RATE, 0.98),
(C.FTR_TRUE_NEG_RATE, 1),
(C.FTR_RECALL, recall),
(C.FTR_PRECISION, 1),
(C.FTR_F_FACTOR, f_factor)):
mname = '_'.join((C.C_IMAGE_OVERLAP, feature, TEST_IMAGE_NAME))
value = measurements.get_current_image_measurement(mname)
self.assertAlmostEqual(expected, value,
msg = "%s is wrong" % feature)
def test_03_08_masked_errors(self):
np.random.seed(38)
ground_truth = np.random.uniform(size=(20,10)) > .5
test = ground_truth.copy()
mask = np.random.uniform(size=(20,10)) > .5
test[~ mask] = np.random.uniform(size=np.sum(~ mask)) > .5
workspace, module = self.make_workspace(
dict(image=ground_truth), dict(image=test, mask=mask))
module.run(workspace)
measurements = workspace.measurements
for feature, expected in ((C.FTR_FALSE_POS_RATE, 0),
(C.FTR_FALSE_NEG_RATE, 0),
(C.FTR_TRUE_POS_RATE, 1),
(C.FTR_TRUE_NEG_RATE, 1),
(C.FTR_RECALL, 1),
(C.FTR_PRECISION, 1),
(C.FTR_F_FACTOR, 1)):
mname = '_'.join((C.C_IMAGE_OVERLAP, feature, TEST_IMAGE_NAME))
value = measurements.get_current_image_measurement(mname)
self.assertAlmostEqual(expected, value,
msg = "%s is wrong" % feature)
def test_03_09_cropped(self):
np.random.seed(39)
i,j = np.mgrid[0:10,0:20]
ground_truth = ((i+j) % 2) == 0
test = ground_truth.copy()
test[0,1] = True
cropping = np.zeros((20,40),bool)
cropping[10:20, 10:30] = True
big_ground_truth = np.random.uniform(size=(20,40)) > .5
big_ground_truth[10:20, 10:30] = ground_truth
workspace, module = self.make_workspace(
dict(image=big_ground_truth),
dict(image=test, crop_mask = cropping))
module.run(workspace)
measurements = workspace.measurements
precision = 100.0 / 101.0
f_factor = 2 * precision / (1 + precision)
for feature, expected in ((C.FTR_FALSE_POS_RATE, 0.01),
(C.FTR_FALSE_NEG_RATE, 0),
(C.FTR_TRUE_POS_RATE, 1),
(C.FTR_TRUE_NEG_RATE, 0.99),
(C.FTR_RECALL, 1),
(C.FTR_PRECISION, precision),
(C.FTR_F_FACTOR, f_factor)):
mname = '_'.join((C.C_IMAGE_OVERLAP, feature, TEST_IMAGE_NAME))
value = measurements.get_current_image_measurement(mname)
self.assertAlmostEqual(expected, value,
msg = "%s is wrong. Expected %f, got %f" %
(feature, expected, value))
def test_03_10_rand_index(self):
np.random.seed(310)
i, j = np.mgrid[0:10, 0:20]
#
# Create a labeling with two objects 0:10, 0:5 and 0:10, 15:20.
# The background class is 0:10, 5:15
#
ground_truth = (j < 5) | (j >= 15)
#
# Add a 3x4 square in the middle
#
test = ground_truth.copy()
test[4:7,8:12] = True
#
# I used R to generate the rand index and adjusted rand index
# of the two segmentations: a 10 x 5 rectangle, a 10x10 background
# and a 10x5 rectangle with 12 pixels that disagree in the middle
#
# The rand index is from rand.index in the fossil package and
# the adjusted rand index is from cluster.stats in the fpc package.
# There's an adjusted rand index in the fossil package but it gives
# the wrong numbers (!!!!)
#
expected_rand_index = 0.9469347
expected_adj_rand_index = 0.8830027
workspace, module = self.make_workspace(
dict(image=ground_truth),
dict(image=test))
module.run(workspace)
measurements = workspace.measurements
mname = '_'.join((C.C_IMAGE_OVERLAP, C.FTR_RAND_INDEX, TEST_IMAGE_NAME))
self.assertAlmostEqual(
measurements.get_current_image_measurement(mname),
expected_rand_index, 6)
mname = '_'.join((C.C_IMAGE_OVERLAP, C.FTR_ADJUSTED_RAND_INDEX,
TEST_IMAGE_NAME))
self.assertAlmostEqual(
measurements.get_current_image_measurement(mname),
expected_adj_rand_index, 6)
def test_03_11_masked_rand_index(self):
np.random.seed(310)
i, j = np.mgrid[0:10, 0:20]
#
# Create a labeling with two objects 0:10, 0:5 and 0:10, 15:20.
# The background class is 0:10, 5:15
#
ground_truth = (j < 5) | (j >= 15)
#
# Add a 3x4 square in the middle
#
test = ground_truth.copy()
test[4:7,8:12] = True
#
        # Remove both one correct and one incorrect pixel
#
mask = np.ones(ground_truth.shape, bool)
mask[4,4] = False
mask[5,9] = False
#
# See notes from 03_10
#
expected_rand_index = 0.9503666
expected_adj_rand_index = 0.8907784
workspace, module = self.make_workspace(
dict(image=ground_truth, mask=mask),
dict(image=test, mask = mask))
module.run(workspace)
measurements = workspace.measurements
mname = '_'.join((C.C_IMAGE_OVERLAP, C.FTR_RAND_INDEX, TEST_IMAGE_NAME))
self.assertAlmostEqual(
measurements.get_current_image_measurement(mname),
expected_rand_index, 6)
mname = '_'.join((C.C_IMAGE_OVERLAP, C.FTR_ADJUSTED_RAND_INDEX,
TEST_IMAGE_NAME))
self.assertAlmostEqual(
measurements.get_current_image_measurement(mname),
expected_adj_rand_index, 6)
def test_04_01_get_measurement_columns(self):
workspace, module = self.make_workspace(
dict(image = np.zeros((20,10), bool)),
dict(image = np.zeros((20,10), bool)))
assert isinstance(module, C.CalculateImageOverlap)
module.object_name_GT.value = GROUND_TRUTH_OBJ
module.object_name_ID.value = ID_OBJ
for obj_or_img, name in ((O_IMG, TEST_IMAGE_NAME),
(O_OBJ, "_".join((GROUND_TRUTH_OBJ, ID_OBJ)))):
module.obj_or_img.value = obj_or_img
columns = module.get_measurement_columns(workspace.pipeline)
# All columns should be unique
self.assertEqual(len(columns), len(set([x[1] for x in columns])))
# All columns should be floats and done on images
            self.assertTrue(all([x[0] == cpmeas.IMAGE for x in columns]))
            self.assertTrue(all([x[2] == cpmeas.COLTYPE_FLOAT for x in columns]))
for feature in C.FTR_ALL:
field = '_'.join((C.C_IMAGE_OVERLAP, feature, name))
self.assertTrue(field in [x[1] for x in columns])
def test_04_02_get_categories(self):
workspace, module = self.make_workspace(
dict(image = np.zeros((20,10), bool)),
dict(image = np.zeros((20,10), bool)))
categories = module.get_categories(workspace.pipeline, "Foo")
self.assertEqual(len(categories), 0)
categories = module.get_categories(workspace.pipeline, cpmeas.IMAGE)
self.assertEqual(len(categories), 1)
self.assertEqual(categories[0], C.C_IMAGE_OVERLAP)
def test_04_03_get_measurements(self):
workspace, module = self.make_workspace(
dict(image = np.zeros((20,10), bool)),
dict(image = np.zeros((20,10), bool)))
for wants_emd, features in (
(True, list(C.FTR_ALL) + [C.FTR_EARTH_MOVERS_DISTANCE]),
(False, C.FTR_ALL)):
module.wants_emd.value = wants_emd
mnames = module.get_measurements(workspace.pipeline,
cpmeas.IMAGE, C.C_IMAGE_OVERLAP)
self.assertEqual(len(mnames), len(features))
self.assertTrue(all(n in features for n in mnames))
self.assertTrue(all(f in mnames for f in features))
mnames = module.get_measurements(workspace.pipeline, "Foo",
C.C_IMAGE_OVERLAP)
self.assertEqual(len(mnames), 0)
mnames = module.get_measurements(workspace.pipeline, cpmeas.IMAGE,
"Foo")
self.assertEqual(len(mnames), 0)
def test_04_04_get_measurement_images(self):
workspace, module = self.make_workspace(
dict(image = np.zeros((20,10), bool)),
dict(image = np.zeros((20,10), bool)))
for feature in C.FTR_ALL:
imnames = module.get_measurement_images(workspace.pipeline,
cpmeas.IMAGE,
C.C_IMAGE_OVERLAP,
feature)
self.assertEqual(len(imnames), 1)
self.assertEqual(imnames[0], TEST_IMAGE_NAME)
imnames = module.get_measurement_images(workspace.pipeline,
cpmeas.IMAGE,
C.C_IMAGE_OVERLAP,
"Foo")
self.assertEqual(len(imnames), 0)
imnames = module.get_measurement_images(workspace.pipeline,
cpmeas.IMAGE,
"Foo",
C.FTR_FALSE_NEG_RATE)
self.assertEqual(len(imnames), 0)
imnames = module.get_measurement_images(workspace.pipeline,
"Foo",
C.C_IMAGE_OVERLAP,
C.FTR_FALSE_NEG_RATE)
self.assertEqual(len(imnames), 0)
def test_04_05_get_measurement_scales(self):
workspace, module = self.make_workspace(
dict(image = np.zeros((20,10), bool)),
dict(image = np.zeros((20,10), bool)))
module.obj_or_img.value = C.O_OBJ
module.object_name_GT.value = GROUND_TRUTH_OBJ
module.object_name_ID.value = ID_OBJ
scales = module.get_measurement_scales(
workspace.pipeline, cpmeas.IMAGE, C.C_IMAGE_OVERLAP,
C.FTR_RAND_INDEX, None)
self.assertEqual(len(scales), 1)
self.assertEqual(scales[0], "_".join((GROUND_TRUTH_OBJ, ID_OBJ)))
module.obj_or_img.value = C.O_IMG
scales = module.get_measurement_scales(
workspace.pipeline, cpmeas.IMAGE, C.C_IMAGE_OVERLAP,
C.FTR_RAND_INDEX, None)
self.assertEqual(len(scales), 0)
def test_05_00_test_measure_overlap_no_objects(self):
# Regression test of issue #934 - no objects
workspace, module = self.make_obj_workspace(
np.zeros((0, 3), int),
np.zeros((0, 3), int),
dict(image = np.zeros((20, 10), bool)),
dict(image = np.zeros((20, 10), bool)))
module.run(workspace)
m = workspace.measurements
for feature in C.FTR_ALL:
mname = module.measurement_name(feature)
value = m[cpmeas.IMAGE, mname, 1]
if feature == C.FTR_TRUE_NEG_RATE:
self.assertEqual(value, 1)
elif feature == C.FTR_FALSE_POS_RATE:
self.assertEqual(value, 0)
else:
                self.assertTrue(
                    np.isnan(value), msg = "%s was %f, not nan" % (mname, value))
#
# Make sure they don't crash
#
workspace, module = self.make_obj_workspace(
np.zeros((0, 3), int),
np.ones((1, 3), int),
dict(image = np.zeros((20, 10), bool)),
dict(image = np.zeros((20, 10), bool)))
module.run(workspace)
workspace, module = self.make_obj_workspace(
np.ones((1, 3), int),
np.zeros((0, 3), int),
dict(image = np.zeros((20, 10), bool)),
dict(image = np.zeros((20, 10), bool)))
module.run(workspace)
def test_05_01_test_measure_overlap_objects(self):
r = np.random.RandomState()
r.seed(51)
workspace, module = self.make_obj_workspace(
np.column_stack([r.randint(0, 20, 150),
r.randint(0, 10, 150),
r.randint(1, 5, 150)]),
np.column_stack([r.randint(0, 20, 175),
r.randint(0, 10, 175),
r.randint(1, 5, 175)]),
dict(image = np.zeros((20, 10), bool)),
dict(image = np.zeros((20, 10), bool)))
module.wants_emd.value = False
module.run(workspace)
measurements = workspace.measurements
self.assertTrue(isinstance(measurements, cpmeas.Measurements))
def test_05_02_test_objects_rand_index(self):
r = np.random.RandomState()
r.seed(52)
base = np.zeros((100, 100), bool)
base[r.randint(0, 100, size=10),
r.randint(0, 100, size=10)] = True
gt = base.copy()
gt[r.randint(0, 100, size=5),
r.randint(0, 100, size=5)] = True
test = base.copy()
test[r.randint(0, 100, size=5),
r.randint(0, 100, size=5)] = True
gt = ndimage.binary_dilation(gt, np.ones((5,5), bool))
test = ndimage.binary_dilation(test, np.ones((5,5), bool))
workspace, module = self.make_workspace(
dict(image = gt),
dict(image = test))
module.wants_emd.value = False
module.run(workspace)
measurements = workspace.measurements
mname = '_'.join((C.C_IMAGE_OVERLAP, C.FTR_RAND_INDEX,
TEST_IMAGE_NAME))
expected_rand_index = measurements.get_current_image_measurement(mname)
mname = '_'.join((C.C_IMAGE_OVERLAP, C.FTR_ADJUSTED_RAND_INDEX,
TEST_IMAGE_NAME))
expected_adjusted_rand_index = \
measurements.get_current_image_measurement(mname)
gt_labels, _ = ndimage.label(gt, np.ones((3,3),bool))
test_labels, _ = ndimage.label(test, np.ones((3,3), bool))
workspace, module = self.make_obj_workspace(
gt_labels, test_labels,
dict(image=np.ones(gt_labels.shape)),
dict(image=np.ones(test_labels.shape)))
module.run(workspace)
measurements = workspace.measurements
mname = '_'.join((C.C_IMAGE_OVERLAP, C.FTR_RAND_INDEX,
GROUND_TRUTH_OBJ, ID_OBJ))
rand_index = measurements.get_current_image_measurement(mname)
self.assertAlmostEqual(rand_index, expected_rand_index)
mname = '_'.join((C.C_IMAGE_OVERLAP, C.FTR_ADJUSTED_RAND_INDEX,
GROUND_TRUTH_OBJ, ID_OBJ))
adjusted_rand_index = \
measurements.get_current_image_measurement(mname)
self.assertAlmostEqual(adjusted_rand_index, expected_adjusted_rand_index)
def test_06_00_no_emd(self):
workspace, module = self.make_workspace(
dict(image = np.ones((20,10), bool)),
dict(image = np.ones((20,10), bool)))
module.wants_emd.value = False
module.run(workspace)
self.assertFalse(workspace.measurements.has_feature(
cpmeas.IMAGE,
module.measurement_name(C.FTR_EARTH_MOVERS_DISTANCE)))
def test_06_01_one_pixel(self):
#
# The earth movers distance should be sqrt((8-5)**2 + (7 - 3) ** 2) = 5
#
src = np.zeros((20, 10), bool)
dest = np.zeros((20, 10), bool)
src[5, 3] = True
dest[8, 7] = True
workspace, module = self.make_workspace(
dict(image = src), dict(image = dest))
module.run(workspace)
self.assertEqual(workspace.measurements[
cpmeas.IMAGE,
module.measurement_name(C.FTR_EARTH_MOVERS_DISTANCE)], 5)
def test_06_02_missing_penalty(self):
#
# Test that the missing penalty works
#
src = np.zeros((20, 10), bool)
dest = np.zeros((20, 10), bool)
src[2, 2] = True
dest[2, 2] = True
dest[8, 7] = True
dest[2, 6] = True
workspace, module = self.make_workspace(
dict(image = src), dict(image = dest))
module.penalize_missing.value = True
module.max_distance.value = 8
module.run(workspace)
self.assertEqual(workspace.measurements[
cpmeas.IMAGE,
module.measurement_name(C.FTR_EARTH_MOVERS_DISTANCE)], 16)
def test_06_03_max_distance(self):
src = np.zeros((20, 10), bool)
dest = np.zeros((20, 10), bool)
src[5, 3] = True
dest[8, 7] = True
src[19, 9] = True
dest[11, 9] = True
workspace, module = self.make_workspace(
dict(image = src), dict(image = dest))
module.max_distance.value = 6
module.run(workspace)
self.assertEqual(workspace.measurements[
cpmeas.IMAGE,
module.measurement_name(C.FTR_EARTH_MOVERS_DISTANCE)], 11)
def test_06_04_decimate_k_means(self):
r = np.random.RandomState()
r.seed(64)
img = r.uniform(size=(10, 10)) > .5
workspace, module = self.make_workspace(
dict(image = img), dict(image = img.transpose()))
#
# Pick a single point for decimation - the emd should be zero
#
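        # poke the name-mangled minimum so max_points can drop below the
        # setting's usual lower bound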
module.max_points._Number__minval=1
module.max_points.value = 1
module.run(workspace)
self.assertEqual(workspace.measurements[
cpmeas.IMAGE,
module.measurement_name(C.FTR_EARTH_MOVERS_DISTANCE)], 0)
#
# Pick a large number of points to get the real EMD
#
workspace, module = self.make_workspace(
dict(image = img), dict(image = img.transpose()))
module.max_points._Number__minval=1
module.max_points.value = 100
module.run(workspace)
emd = workspace.measurements[
cpmeas.IMAGE,
module.measurement_name(C.FTR_EARTH_MOVERS_DISTANCE)]
#
# The EMD after decimation is going to be randomly different,
# but not by much.
#
workspace, module = self.make_workspace(
dict(image = img), dict(image = img.transpose()))
module.max_points._Number__minval=1
module.max_points.value = np.sum(img | img.transpose()) / 2
module.run(workspace)
decimated_emd = workspace.measurements[
cpmeas.IMAGE,
module.measurement_name(C.FTR_EARTH_MOVERS_DISTANCE)]
self.assertLess(decimated_emd, emd * 2)
self.assertGreater(decimated_emd, emd / 2)
def test_06_05_decimate_skel(self):
#
# Mostly, this is to check that the skeleton method doesn't crash
#
i, j = np.mgrid[0:10, 0:20]
image1 = ((i-4) **2)*4 + (j-8) ** 2 < 32
image2 = ((i-6) **2)*4 + (j-12) ** 2 < 32
workspace, module = self.make_workspace(
dict(image = image1), dict(image = image2))
module.max_points._Number__minval=1
module.max_points.value = 5
module.decimation_method.value = C.DM_SKEL
module.run(workspace)
emd = workspace.measurements[
cpmeas.IMAGE,
module.measurement_name(C.FTR_EARTH_MOVERS_DISTANCE)]
self.assertGreater(emd, np.sum(image1) * 3)
self.assertLess(emd, np.sum(image1) * 6)
| gpl-2.0 |
HanyuWorm/volatility | volatility/plugins/mac/psaux.py | 45 | 1804 | # Volatility
# Copyright (C) 2007-2013 Volatility Foundation
#
# This file is part of Volatility.
#
# Volatility is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# Volatility is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Volatility. If not, see <http://www.gnu.org/licenses/>.
#
"""
@author: Andrew Case
@license: GNU General Public License 2.0
@contact: [email protected]
@organization:
"""
import volatility.plugins.mac.pstasks as pstasks
class mac_psaux(pstasks.mac_tasks):
""" Prints processes with arguments in user land (**argv) """
def render_text(self, outfd, data):
self.table_header(outfd, [("Pid", "8"),
("Name", "20"),
("Bits", "16"),
("Stack", "#018x"),
("Length", "8"),
("Argc", "8"),
("Arguments", "")])
for proc in data:
self.table_row(outfd,
proc.p_pid,
proc.p_comm,
str(proc.task.map.pmap.pm_task_map or '')[9:],
proc.user_stack,
proc.p_argslen,
proc.p_argc,
proc.get_arguments())
| gpl-2.0 |
devsarr/ONLYOFFICE-OnlineEditors | ActiveX/Common/DocxFormat/Source/XML/libxml2/XML/python/libxml.py | 14 | 24421 | import libxml2mod
import types
import sys
# The root of all libxml2 errors.
class libxmlError(Exception): pass
# Type of the wrapper class for the C objects wrappers
def checkWrapper(obj):
try:
        n = type(obj).__name__
if n != 'PyCObject' and n != 'PyCapsule':
return 1
except:
return 0
return 0
#
# id() is sometimes negative ...
#
def pos_id(o):
i = id(o)
if (i < 0):
return (sys.maxsize - i)
return i
#
# Errors raised by the wrappers when some tree handling failed.
#
class treeError(libxmlError):
def __init__(self, msg):
self.msg = msg
def __str__(self):
return self.msg
class parserError(libxmlError):
def __init__(self, msg):
self.msg = msg
def __str__(self):
return self.msg
class uriError(libxmlError):
def __init__(self, msg):
self.msg = msg
def __str__(self):
return self.msg
class xpathError(libxmlError):
def __init__(self, msg):
self.msg = msg
def __str__(self):
return self.msg
class ioWrapper:
def __init__(self, _obj):
self.__io = _obj
self._o = None
def io_close(self):
if self.__io == None:
return(-1)
self.__io.close()
self.__io = None
return(0)
def io_flush(self):
if self.__io == None:
return(-1)
self.__io.flush()
return(0)
def io_read(self, len = -1):
if self.__io == None:
return(-1)
try:
if len < 0:
ret = self.__io.read()
else:
ret = self.__io.read(len)
except Exception:
import sys
e = sys.exc_info()[1]
print("failed to read from Python:", type(e))
print("on IO:", self.__io)
            self.__io = None
return(-1)
return(ret)
def io_write(self, str, len = -1):
if self.__io == None:
return(-1)
if len < 0:
return(self.__io.write(str))
return(self.__io.write(str, len))
class ioReadWrapper(ioWrapper):
def __init__(self, _obj, enc = ""):
ioWrapper.__init__(self, _obj)
self._o = libxml2mod.xmlCreateInputBuffer(self, enc)
def __del__(self):
print("__del__")
self.io_close()
if self._o != None:
libxml2mod.xmlFreeParserInputBuffer(self._o)
self._o = None
def close(self):
self.io_close()
if self._o != None:
libxml2mod.xmlFreeParserInputBuffer(self._o)
self._o = None
class ioWriteWrapper(ioWrapper):
def __init__(self, _obj, enc = ""):
# print "ioWriteWrapper.__init__", _obj
if type(_obj) == type(''):
print("write io from a string")
            self._o = None
elif type(_obj).__name__ == 'PyCapsule':
file = libxml2mod.outputBufferGetPythonFile(_obj)
if file != None:
ioWrapper.__init__(self, file)
else:
ioWrapper.__init__(self, _obj)
self._o = _obj
# elif type(_obj) == types.InstanceType:
# print(("write io from instance of %s" % (_obj.__class__)))
# ioWrapper.__init__(self, _obj)
# self._o = libxml2mod.xmlCreateOutputBuffer(self, enc)
else:
file = libxml2mod.outputBufferGetPythonFile(_obj)
if file != None:
ioWrapper.__init__(self, file)
else:
ioWrapper.__init__(self, _obj)
self._o = _obj
def __del__(self):
# print "__del__"
self.io_close()
if self._o != None:
libxml2mod.xmlOutputBufferClose(self._o)
self._o = None
def flush(self):
self.io_flush()
if self._o != None:
libxml2mod.xmlOutputBufferClose(self._o)
self._o = None
def close(self):
self.io_flush()
if self._o != None:
libxml2mod.xmlOutputBufferClose(self._o)
self._o = None
#
# Example of a class to handle SAX events
#
class SAXCallback:
"""Base class for SAX handlers"""
def startDocument(self):
"""called at the start of the document"""
pass
def endDocument(self):
"""called at the end of the document"""
pass
def startElement(self, tag, attrs):
"""called at the start of every element, tag is the name of
the element, attrs is a dictionary of the element's attributes"""
pass
def endElement(self, tag):
"""called at the start of every element, tag is the name of
the element"""
pass
def characters(self, data):
"""called when character data have been read, data is the string
containing the data, multiple consecutive characters() callback
are possible."""
pass
def cdataBlock(self, data):
"""called when CDATA section have been read, data is the string
containing the data, multiple consecutive cdataBlock() callback
are possible."""
pass
def reference(self, name):
"""called when an entity reference has been found"""
pass
def ignorableWhitespace(self, data):
"""called when potentially ignorable white spaces have been found"""
pass
def processingInstruction(self, target, data):
"""called when a PI has been found, target contains the PI name and
data is the associated data in the PI"""
pass
def comment(self, content):
"""called when a comment has been found, content contains the comment"""
pass
def externalSubset(self, name, externalID, systemID):
"""called when a DOCTYPE declaration has been found, name is the
DTD name and externalID, systemID are the DTD public and system
identifier for that DTd if available"""
pass
def internalSubset(self, name, externalID, systemID):
"""called when a DOCTYPE declaration has been found, name is the
DTD name and externalID, systemID are the DTD public and system
identifier for that DTD if available"""
pass
def entityDecl(self, name, type, externalID, systemID, content):
"""called when an ENTITY declaration has been found, name is the
entity name and externalID, systemID are the entity public and
system identifier for that entity if available, type indicates
the entity type, and content reports it's string content"""
pass
def notationDecl(self, name, externalID, systemID):
"""called when an NOTATION declaration has been found, name is the
notation name and externalID, systemID are the notation public and
system identifier for that notation if available"""
pass
def attributeDecl(self, elem, name, type, defi, defaultValue, nameList):
"""called when an ATTRIBUTE definition has been found"""
pass
def elementDecl(self, name, type, content):
"""called when an ELEMENT definition has been found"""
pass
def entityDecl(self, name, publicId, systemID, notationName):
"""called when an unparsed ENTITY declaration has been found,
name is the entity name and publicId,, systemID are the entity
public and system identifier for that entity if available,
and notationName indicate the associated NOTATION"""
pass
def warning(self, msg):
#print msg
pass
def error(self, msg):
raise parserError(msg)
def fatalError(self, msg):
raise parserError(msg)
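#
# A minimal sketch, not part of the original module: a SAXCallback
# subclass that counts elements. Driving it requires a SAX-capable parser
# from the generated half of these bindings, which is assumed here.
#
class exampleSAXCounter(SAXCallback):
    """Counts startElement events during a SAX-driven parse"""
    def __init__(self):
        self.count = 0
    def startElement(self, tag, attrs):
        # tag is the element name, attrs its attribute dictionary
        self.count = self.count + 1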
#
# This class is the ancestor of all the Node classes. It provides
# the basic functionality shared by all nodes (and handles the
# exceptions gracefully), like name, navigation in the tree,
# doc reference, content access and serializing to a string or URI
#
class xmlCore:
def __init__(self, _obj=None):
if _obj != None:
self._o = _obj;
return
self._o = None
def __eq__(self, other):
if other == None:
return False
ret = libxml2mod.compareNodesEqual(self._o, other._o)
if ret == None:
return False
return ret == True
def __ne__(self, other):
if other == None:
return True
ret = libxml2mod.compareNodesEqual(self._o, other._o)
return not ret
def __hash__(self):
ret = libxml2mod.nodeHash(self._o)
return ret
def __str__(self):
return self.serialize()
def get_parent(self):
ret = libxml2mod.parent(self._o)
if ret == None:
return None
return nodeWrap(ret)
def get_children(self):
ret = libxml2mod.children(self._o)
if ret == None:
return None
return nodeWrap(ret)
def get_last(self):
ret = libxml2mod.last(self._o)
if ret == None:
return None
return nodeWrap(ret)
def get_next(self):
ret = libxml2mod.next(self._o)
if ret == None:
return None
return nodeWrap(ret)
def get_properties(self):
ret = libxml2mod.properties(self._o)
if ret == None:
return None
return xmlAttr(_obj=ret)
def get_prev(self):
ret = libxml2mod.prev(self._o)
if ret == None:
return None
return nodeWrap(ret)
def get_content(self):
return libxml2mod.xmlNodeGetContent(self._o)
    getContent = get_content # alias kept for backward compatibility
def get_name(self):
return libxml2mod.name(self._o)
def get_type(self):
return libxml2mod.type(self._o)
def get_doc(self):
ret = libxml2mod.doc(self._o)
if ret == None:
if self.type in ["document_xml", "document_html"]:
return xmlDoc(_obj=self._o)
else:
return None
return xmlDoc(_obj=ret)
#
# Those are common attributes to nearly all type of nodes
# defined as python2 properties
#
import sys
if float(sys.version[0:3]) < 2.2:
def __getattr__(self, attr):
if attr == "parent":
ret = libxml2mod.parent(self._o)
if ret == None:
return None
return nodeWrap(ret)
elif attr == "properties":
ret = libxml2mod.properties(self._o)
if ret == None:
return None
return xmlAttr(_obj=ret)
elif attr == "children":
ret = libxml2mod.children(self._o)
if ret == None:
return None
return nodeWrap(ret)
elif attr == "last":
ret = libxml2mod.last(self._o)
if ret == None:
return None
return nodeWrap(ret)
elif attr == "next":
ret = libxml2mod.next(self._o)
if ret == None:
return None
return nodeWrap(ret)
elif attr == "prev":
ret = libxml2mod.prev(self._o)
if ret == None:
return None
return nodeWrap(ret)
elif attr == "content":
return libxml2mod.xmlNodeGetContent(self._o)
elif attr == "name":
return libxml2mod.name(self._o)
elif attr == "type":
return libxml2mod.type(self._o)
elif attr == "doc":
ret = libxml2mod.doc(self._o)
if ret == None:
if self.type == "document_xml" or self.type == "document_html":
return xmlDoc(_obj=self._o)
else:
return None
return xmlDoc(_obj=ret)
raise AttributeError(attr)
else:
parent = property(get_parent, None, None, "Parent node")
children = property(get_children, None, None, "First child node")
last = property(get_last, None, None, "Last sibling node")
next = property(get_next, None, None, "Next sibling node")
prev = property(get_prev, None, None, "Previous sibling node")
        properties = property(get_properties, None, None, "List of properties")
content = property(get_content, None, None, "Content of this node")
name = property(get_name, None, None, "Node name")
type = property(get_type, None, None, "Node type")
doc = property(get_doc, None, None, "The document this node belongs to")
#
# Serialization routines, the optional arguments have the following
# meaning:
# encoding: string to ask saving in a specific encoding
    #     format: if 1 the serializer is asked to indent the output
#
def serialize(self, encoding = None, format = 0):
return libxml2mod.serializeNode(self._o, encoding, format)
def saveTo(self, file, encoding = None, format = 0):
return libxml2mod.saveNodeTo(self._o, file, encoding, format)
#
# Canonicalization routines:
#
# nodes: the node set (tuple or list) to be included in the
# canonized image or None if all document nodes should be
# included.
# exclusive: the exclusive flag (0 - non-exclusive
# canonicalization; otherwise - exclusive canonicalization)
# prefixes: the list of inclusive namespace prefixes (strings),
# or None if there is no inclusive namespaces (only for
# exclusive canonicalization, ignored otherwise)
# with_comments: include comments in the result (!=0) or not
# (==0)
def c14nMemory(self,
nodes=None,
exclusive=0,
prefixes=None,
with_comments=0):
if nodes:
nodes = [n._o for n in nodes]
return libxml2mod.xmlC14NDocDumpMemory(
self.get_doc()._o,
nodes,
exclusive != 0,
prefixes,
with_comments != 0)
def c14nSaveTo(self,
file,
nodes=None,
exclusive=0,
prefixes=None,
with_comments=0):
if nodes:
nodes = [n._o for n in nodes]
return libxml2mod.xmlC14NDocSaveTo(
self.get_doc()._o,
nodes,
exclusive != 0,
prefixes,
with_comments != 0,
file)
#
# Selecting nodes using XPath, a bit slow because the context
# is allocated/freed every time but convenient.
#
def xpathEval(self, expr):
doc = self.doc
if doc == None:
return None
ctxt = doc.xpathNewContext()
ctxt.setContextNode(self)
res = ctxt.xpathEval(expr)
ctxt.xpathFreeContext()
return res
# #
# # Selecting nodes using XPath, faster because the context
# # is allocated just once per xmlDoc.
# #
# # Removed: DV memleaks c.f. #126735
# #
# def xpathEval2(self, expr):
# doc = self.doc
# if doc == None:
# return None
# try:
# doc._ctxt.setContextNode(self)
# except:
# doc._ctxt = doc.xpathNewContext()
# doc._ctxt.setContextNode(self)
# res = doc._ctxt.xpathEval(expr)
# return res
def xpathEval2(self, expr):
return self.xpathEval(expr)
# Remove namespaces
def removeNsDef(self, href):
"""
Remove a namespace definition from a node. If href is None,
remove all of the ns definitions on that node. The removed
namespaces are returned as a linked list.
Note: If any child nodes referred to the removed namespaces,
they will be left with dangling links. You should call
        reconciliateNs() to fix those pointers.
Note: This method does not free memory taken by the ns
definitions. You will need to free it manually with the
        freeNsList() method on the returned xmlNs object.
"""
ret = libxml2mod.xmlNodeRemoveNsDef(self._o, href)
if ret is None:return None
__tmp = xmlNs(_obj=ret)
return __tmp
# support for python2 iterators
def walk_depth_first(self):
return xmlCoreDepthFirstItertor(self)
def walk_breadth_first(self):
return xmlCoreBreadthFirstItertor(self)
__iter__ = walk_depth_first
def free(self):
try:
self.doc._ctxt.xpathFreeContext()
except:
pass
libxml2mod.xmlFreeDoc(self._o)
#
# implements the depth-first iterator for libxml2 DOM tree
#
class xmlCoreDepthFirstItertor:
def __init__(self, node):
self.node = node
self.parents = []
def __iter__(self):
return self
def next(self):
while 1:
if self.node:
ret = self.node
self.parents.append(self.node)
self.node = self.node.children
return ret
try:
parent = self.parents.pop()
except IndexError:
raise StopIteration
self.node = parent.next
#
# implements the breadth-first iterator for libxml2 DOM tree
#
class xmlCoreBreadthFirstItertor:
def __init__(self, node):
self.node = node
self.parents = []
def __iter__(self):
return self
def next(self):
while 1:
if self.node:
ret = self.node
self.parents.append(self.node)
self.node = self.node.next
return ret
try:
parent = self.parents.pop()
except IndexError:
raise StopIteration
self.node = parent.children
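#
# Illustrative sketch, not part of the original module: walking a parsed
# document with the iterators above. The doc argument is assumed to be an
# xmlDoc or xmlNode produced by these bindings.
#
def exampleWalk(doc):
    """Return the names of all nodes reachable from doc, depth first"""
    names = []
    for node in doc.walk_depth_first():
        names.append(node.name)
    return names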
#
# converters to present a nicer view of the XPath returns
#
def nodeWrap(o):
# TODO try to cast to the most appropriate node class
name = libxml2mod.type(o)
if name == "element" or name == "text":
return xmlNode(_obj=o)
if name == "attribute":
return xmlAttr(_obj=o)
if name[0:8] == "document":
return xmlDoc(_obj=o)
if name == "namespace":
return xmlNs(_obj=o)
if name == "elem_decl":
return xmlElement(_obj=o)
if name == "attribute_decl":
return xmlAttribute(_obj=o)
if name == "entity_decl":
return xmlEntity(_obj=o)
if name == "dtd":
return xmlDtd(_obj=o)
return xmlNode(_obj=o)
def xpathObjectRet(o):
otype = type(o)
if otype == type([]):
ret = list(map(xpathObjectRet, o))
return ret
elif otype == type(()):
ret = list(map(xpathObjectRet, o))
return tuple(ret)
elif otype == type('') or otype == type(0) or otype == type(0.0):
return o
else:
return nodeWrap(o)
#
# register an XPath function
#
def registerXPathFunction(ctxt, name, ns_uri, f):
ret = libxml2mod.xmlRegisterXPathFunction(ctxt, name, ns_uri, f)
#
# For the xmlTextReader parser configuration
#
PARSER_LOADDTD=1
PARSER_DEFAULTATTRS=2
PARSER_VALIDATE=3
PARSER_SUBST_ENTITIES=4
#
# For the error callback severities
#
PARSER_SEVERITY_VALIDITY_WARNING=1
PARSER_SEVERITY_VALIDITY_ERROR=2
PARSER_SEVERITY_WARNING=3
PARSER_SEVERITY_ERROR=4
#
# register the libxml2 error handler
#
def registerErrorHandler(f, ctx):
"""Register a Python written function to for error reporting.
The function is called back as f(ctx, error). """
import sys
if 'libxslt' not in sys.modules:
# normal behaviour when libxslt is not imported
ret = libxml2mod.xmlRegisterErrorHandler(f,ctx)
else:
# when libxslt is already imported, one must
# use libxst's error handler instead
import libxslt
ret = libxslt.registerErrorHandler(f,ctx)
return ret
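# A hedged usage sketch, not part of the original module: collect libxml2
# error messages into a list instead of letting them reach stderr.
def exampleErrorCollector():
    errors = []
    def handler(ctx, msg):
        # ctx is whatever was passed as the second registration argument
        errors.append(msg)
    registerErrorHandler(handler, None)
    return errors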
class parserCtxtCore:
def __init__(self, _obj=None):
if _obj != None:
self._o = _obj;
return
self._o = None
def __del__(self):
if self._o != None:
libxml2mod.xmlFreeParserCtxt(self._o)
self._o = None
def setErrorHandler(self,f,arg):
"""Register an error handler that will be called back as
f(arg,msg,severity,reserved).
@reserved is currently always None."""
libxml2mod.xmlParserCtxtSetErrorHandler(self._o,f,arg)
def getErrorHandler(self):
"""Return (f,arg) as previously registered with setErrorHandler
or (None,None)."""
return libxml2mod.xmlParserCtxtGetErrorHandler(self._o)
def addLocalCatalog(self, uri):
"""Register a local catalog with the parser"""
return libxml2mod.addLocalCatalog(self._o, uri)
class ValidCtxtCore:
def __init__(self, *args, **kw):
pass
def setValidityErrorHandler(self, err_func, warn_func, arg=None):
"""
Register error and warning handlers for DTD validation.
These will be called back as f(msg,arg)
"""
libxml2mod.xmlSetValidErrors(self._o, err_func, warn_func, arg)
class SchemaValidCtxtCore:
def __init__(self, *args, **kw):
pass
def setValidityErrorHandler(self, err_func, warn_func, arg=None):
"""
Register error and warning handlers for Schema validation.
These will be called back as f(msg,arg)
"""
libxml2mod.xmlSchemaSetValidErrors(self._o, err_func, warn_func, arg)
class relaxNgValidCtxtCore:
def __init__(self, *args, **kw):
pass
def setValidityErrorHandler(self, err_func, warn_func, arg=None):
"""
Register error and warning handlers for RelaxNG validation.
These will be called back as f(msg,arg)
"""
libxml2mod.xmlRelaxNGSetValidErrors(self._o, err_func, warn_func, arg)
def _xmlTextReaderErrorFunc(f_arg, msg, severity, locator):
    """Intermediate callback to wrap the locator"""
    (f, arg) = f_arg
    return f(arg, msg, severity, xmlTextReaderLocator(locator))
class xmlTextReaderCore:
def __init__(self, _obj=None):
self.input = None
if _obj != None:self._o = _obj;return
self._o = None
def __del__(self):
if self._o != None:
libxml2mod.xmlFreeTextReader(self._o)
self._o = None
def SetErrorHandler(self,f,arg):
"""Register an error handler that will be called back as
f(arg,msg,severity,locator)."""
if f is None:
libxml2mod.xmlTextReaderSetErrorHandler(\
self._o,None,None)
else:
libxml2mod.xmlTextReaderSetErrorHandler(\
self._o,_xmlTextReaderErrorFunc,(f,arg))
def GetErrorHandler(self):
"""Return (f,arg) as previously registered with setErrorHandler
or (None,None)."""
f,arg = libxml2mod.xmlTextReaderGetErrorHandler(self._o)
if f is None:
return None,None
else:
# assert f is _xmlTextReaderErrorFunc
return arg
#
# The cleanup now goes through a wrapper in libxml.c
#
def cleanupParser():
libxml2mod.xmlPythonCleanupParser()
#
# The interface to xmlRegisterInputCallbacks.
# Since this API does not allow to pass a data object along with
# match/open callbacks, it is necessary to maintain a list of all
# Python callbacks.
#
__input_callbacks = []
def registerInputCallback(func):
def findOpenCallback(URI):
for cb in reversed(__input_callbacks):
o = cb(URI)
if o is not None:
return o
libxml2mod.xmlRegisterInputCallback(findOpenCallback)
__input_callbacks.append(func)
def popInputCallbacks():
# First pop python-level callbacks, when no more available - start
# popping built-in ones.
if len(__input_callbacks) > 0:
__input_callbacks.pop()
if len(__input_callbacks) == 0:
libxml2mod.xmlUnregisterInputCallback()
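# Illustrative sketch, not part of the original module: a Python-level
# input callback resolving a made-up "example://" scheme to an in-memory
# document; any URI it does not recognize falls through to the next
# registered callback. StringIO here assumes Python 2.
def exampleInputCallback(URI):
    if URI.startswith("example://"):
        import StringIO
        return StringIO.StringIO("<doc/>")
    return None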
# WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING
#
# Everything before this line comes from libxml.py
# Everything after this line is automatically generated
#
# WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING
| agpl-3.0 |
xkcd1253/SocialNetworkforTwo | flask/lib/python2.7/site-packages/flask/templating.py | 48 | 4595 | # -*- coding: utf-8 -*-
"""
flask.templating
~~~~~~~~~~~~~~~~
Implements the bridge to Jinja2.
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import posixpath
from jinja2 import BaseLoader, Environment as BaseEnvironment, \
TemplateNotFound
from .globals import _request_ctx_stack
from .signals import template_rendered
from .module import blueprint_is_module
def _default_template_ctx_processor():
"""Default template context processor. Injects `request`,
`session` and `g`.
"""
reqctx = _request_ctx_stack.top
return dict(
config=reqctx.app.config,
request=reqctx.request,
session=reqctx.session,
g=reqctx.g
)
class Environment(BaseEnvironment):
"""Works like a regular Jinja2 environment but has some additional
    knowledge of how Flask's blueprints work so that it can prepend the
name of the blueprint to referenced templates if necessary.
"""
def __init__(self, app, **options):
if 'loader' not in options:
options['loader'] = app.create_global_jinja_loader()
BaseEnvironment.__init__(self, **options)
self.app = app
class DispatchingJinjaLoader(BaseLoader):
"""A loader that looks for templates in the application and all
the blueprint folders.
"""
def __init__(self, app):
self.app = app
def get_source(self, environment, template):
for loader, local_name in self._iter_loaders(template):
try:
return loader.get_source(environment, local_name)
except TemplateNotFound:
pass
raise TemplateNotFound(template)
def _iter_loaders(self, template):
loader = self.app.jinja_loader
if loader is not None:
yield loader, template
# old style module based loaders in case we are dealing with a
# blueprint that is an old style module
try:
module, local_name = posixpath.normpath(template).split('/', 1)
blueprint = self.app.blueprints[module]
if blueprint_is_module(blueprint):
loader = blueprint.jinja_loader
if loader is not None:
yield loader, local_name
except (ValueError, KeyError):
pass
for blueprint in self.app.blueprints.itervalues():
if blueprint_is_module(blueprint):
continue
loader = blueprint.jinja_loader
if loader is not None:
yield loader, template
def list_templates(self):
result = set()
loader = self.app.jinja_loader
if loader is not None:
result.update(loader.list_templates())
for name, blueprint in self.app.blueprints.iteritems():
loader = blueprint.jinja_loader
if loader is not None:
for template in loader.list_templates():
prefix = ''
if blueprint_is_module(blueprint):
prefix = name + '/'
result.add(prefix + template)
return list(result)
def _render(template, context, app):
"""Renders the template and fires the signal"""
rv = template.render(context)
template_rendered.send(app, template=template, context=context)
return rv
def render_template(template_name_or_list, **context):
"""Renders a template from the template folder with the given
context.
:param template_name_or_list: the name of the template to be
                                  rendered, or an iterable with template names;
                                  the first one existing will be rendered
:param context: the variables that should be available in the
context of the template.
"""
ctx = _request_ctx_stack.top
ctx.app.update_template_context(context)
return _render(ctx.app.jinja_env.get_or_select_template(template_name_or_list),
context, ctx.app)
def render_template_string(source, **context):
"""Renders a template from the given template source string
with the given context.
    :param source: the source code of the template to be
                   rendered
:param context: the variables that should be available in the
context of the template.
"""
ctx = _request_ctx_stack.top
ctx.app.update_template_context(context)
return _render(ctx.app.jinja_env.from_string(source),
context, ctx.app)
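# Illustrative only, not part of Flask itself: how the helpers above are
# typically driven. test_request_context() assumes a Flask application
# object; the '/' URL is arbitrary.
def _example_render(app):
    """Render a template string inside a pushed request context."""
    with app.test_request_context('/'):
        return render_template_string('Hello {{ name }}!', name='World')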
| gpl-2.0 |
mantidproject/mantid | scripts/directtools/_validate.py | 3 | 1564 | # Mantid Repository : https://github.com/mantidproject/mantid
#
# Copyright © 2019 ISIS Rutherford Appleton Laboratory UKRI,
# NScD Oak Ridge National Laboratory, European Spallation Source,
# Institut Laue - Langevin & CSNS, Institute of High Energy Physics, CAS
# SPDX - License - Identifier: GPL - 3.0 +
def _isDOS(workspace):
"""Return True if workspace looks like a valid DOS workspace, False otherwise."""
singleHistogram = workspace.getNumberHistograms() == 1
unitsInDeltaE = workspace.getAxis(0).getUnit().unitID() == 'DeltaE'
return singleHistogram and unitsInDeltaE
def _isSofQW(workspace):
"""Return True if workspace looks like a valid S(Q,E) workspace, False otherwise."""
validUnits = ['q', 'Energy transfer']
unit1 = workspace.getAxis(0).getUnit().name()
unit2 = workspace.getAxis(1).getUnit().name()
return unit1 != unit2 and unit1 in validUnits and unit2 in validUnits
def _singlehistogramordie(workspace):
"""Raise an exception if workspace has more than one histogram."""
if workspace.getNumberHistograms() > 1:
raise RuntimeError("The workspace '{}' has more than one histogram.".format(workspace))
def _styleordie(style):
"""Raise an exception if style is not a valid style string."""
if not isinstance(style, str):
raise RuntimeError("The 'style' argument '{}' is not a valid string.".format(str(style)))
if 'm' not in style and 'l' not in style:
raise RuntimeError("The 'style' argument '{}' does not contain either 'm' or 'l'.".format(style))
| gpl-3.0 |
cloudbau/cinder | cinder/volume/flows/base.py | 3 | 2230 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# For more information please visit: https://wiki.openstack.org/wiki/TaskFlow
from cinder.taskflow import task
def _make_task_name(cls, addons=None):
"""Makes a pretty name for a task class."""
base_name = ".".join([cls.__module__, cls.__name__])
extra = ''
if addons:
extra = ';%s' % (", ".join([str(a) for a in addons]))
return base_name + extra
class CinderTask(task.Task):
"""The root task class for all cinder tasks.
It automatically names the given task using the module and class that
implement the given task as the task name.
"""
def __init__(self, addons=None):
super(CinderTask, self).__init__(_make_task_name(self.__class__,
addons))
class InjectTask(CinderTask):
"""This injects a dict into the flow.
This injection is done so that the keys (and values) provided can be
    depended on by tasks further down the line. Since taskflow is dependency
    based, this can be considered the bootstrapping task that provides an
    initial set of values for other tasks to get started with. Without it,
    tasks would fail to locate their dependencies and the values those
    dependencies produce.
Reversion strategy: N/A
"""
def __init__(self, inject_what, addons=None):
super(InjectTask, self).__init__(addons=addons)
self.provides.update(inject_what.keys())
self._inject = inject_what
def __call__(self, context):
return dict(self._inject)
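# Illustrative sketch, not part of the original module: an InjectTask
# seeded with bootstrap values; calling it returns the injected dict that
# downstream tasks would consume (the key names here are assumptions).
def _example_inject():
    task = InjectTask({'volume_id': None, 'size': 1}, addons=['example'])
    return task(context=None)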
| apache-2.0 |
Exceltior/dogecoin | qa/rpc-tests/listtransactions.py | 145 | 6081 | #!/usr/bin/env python
# Copyright (c) 2014 The Bitcoin Core developers
# Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
# Exercise the listtransactions API
# Add python-bitcoinrpc to module search path:
import os
import sys
sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)), "python-bitcoinrpc"))
import json
import shutil
import subprocess
import tempfile
import traceback
from bitcoinrpc.authproxy import AuthServiceProxy, JSONRPCException
from util import *
def check_array_result(object_array, to_match, expected):
"""
Pass in array of JSON objects, a dictionary with key/value pairs
to match against, and another dictionary with expected key/value
pairs.
"""
num_matched = 0
for item in object_array:
all_match = True
for key,value in to_match.items():
if item[key] != value:
all_match = False
if not all_match:
continue
for key,value in expected.items():
if item[key] != value:
raise AssertionError("%s : expected %s=%s"%(str(item), str(key), str(value)))
num_matched = num_matched+1
if num_matched == 0:
raise AssertionError("No objects matched %s"%(str(to_match)))
def run_test(nodes):
# Simple send, 0 to 1:
txid = nodes[0].sendtoaddress(nodes[1].getnewaddress(), 0.1)
sync_mempools(nodes)
check_array_result(nodes[0].listtransactions(),
{"txid":txid},
{"category":"send","account":"","amount":Decimal("-0.1"),"confirmations":0})
check_array_result(nodes[1].listtransactions(),
{"txid":txid},
{"category":"receive","account":"","amount":Decimal("0.1"),"confirmations":0})
# mine a block, confirmations should change:
nodes[0].setgenerate(True, 1)
sync_blocks(nodes)
check_array_result(nodes[0].listtransactions(),
{"txid":txid},
{"category":"send","account":"","amount":Decimal("-0.1"),"confirmations":1})
check_array_result(nodes[1].listtransactions(),
{"txid":txid},
{"category":"receive","account":"","amount":Decimal("0.1"),"confirmations":1})
# send-to-self:
txid = nodes[0].sendtoaddress(nodes[0].getnewaddress(), 0.2)
check_array_result(nodes[0].listtransactions(),
{"txid":txid, "category":"send"},
{"amount":Decimal("-0.2")})
check_array_result(nodes[0].listtransactions(),
{"txid":txid, "category":"receive"},
{"amount":Decimal("0.2")})
# sendmany from node1: twice to self, twice to node2:
send_to = { nodes[0].getnewaddress() : 0.11, nodes[1].getnewaddress() : 0.22,
nodes[0].getaccountaddress("from1") : 0.33, nodes[1].getaccountaddress("toself") : 0.44 }
txid = nodes[1].sendmany("", send_to)
sync_mempools(nodes)
check_array_result(nodes[1].listtransactions(),
{"category":"send","amount":Decimal("-0.11")},
{"txid":txid} )
check_array_result(nodes[0].listtransactions(),
{"category":"receive","amount":Decimal("0.11")},
{"txid":txid} )
check_array_result(nodes[1].listtransactions(),
{"category":"send","amount":Decimal("-0.22")},
{"txid":txid} )
check_array_result(nodes[1].listtransactions(),
{"category":"receive","amount":Decimal("0.22")},
{"txid":txid} )
check_array_result(nodes[1].listtransactions(),
{"category":"send","amount":Decimal("-0.33")},
{"txid":txid} )
check_array_result(nodes[0].listtransactions(),
{"category":"receive","amount":Decimal("0.33")},
{"txid":txid, "account" : "from1"} )
check_array_result(nodes[1].listtransactions(),
{"category":"send","amount":Decimal("-0.44")},
{"txid":txid, "account" : ""} )
check_array_result(nodes[1].listtransactions(),
{"category":"receive","amount":Decimal("0.44")},
{"txid":txid, "account" : "toself"} )
def main():
import optparse
parser = optparse.OptionParser(usage="%prog [options]")
parser.add_option("--nocleanup", dest="nocleanup", default=False, action="store_true",
help="Leave bitcoinds and test.* datadir on exit or error")
parser.add_option("--srcdir", dest="srcdir", default="../../src",
help="Source directory containing bitcoind/bitcoin-cli (default: %default%)")
parser.add_option("--tmpdir", dest="tmpdir", default=tempfile.mkdtemp(prefix="test"),
help="Root directory for datadirs")
(options, args) = parser.parse_args()
os.environ['PATH'] = options.srcdir+":"+os.environ['PATH']
check_json_precision()
success = False
nodes = []
try:
print("Initializing test directory "+options.tmpdir)
if not os.path.isdir(options.tmpdir):
os.makedirs(options.tmpdir)
initialize_chain(options.tmpdir)
nodes = start_nodes(2, options.tmpdir)
connect_nodes(nodes[1], 0)
sync_blocks(nodes)
run_test(nodes)
success = True
except AssertionError as e:
print("Assertion failed: "+e.message)
except Exception as e:
print("Unexpected exception caught during testing: "+str(e))
traceback.print_tb(sys.exc_info()[2])
if not options.nocleanup:
print("Cleaning up")
stop_nodes(nodes)
wait_bitcoinds()
shutil.rmtree(options.tmpdir)
if success:
print("Tests successful")
sys.exit(0)
else:
print("Failed")
sys.exit(1)
if __name__ == '__main__':
main()
| mit |
dudochkin-victor/telepathy-yell | tools/glib-interfaces-gen.py | 14 | 6210 | #!/usr/bin/python
from sys import argv, stdout, stderr
import xml.dom.minidom
from libglibcodegen import NS_TP, get_docstring, \
get_descendant_text, get_by_path
class Generator(object):
def __init__(self, prefix, implfile, declfile, dom):
self.prefix = prefix + '_'
assert declfile.endswith('.h')
docfile = declfile[:-2] + '-gtk-doc.h'
self.impls = open(implfile, 'w')
self.decls = open(declfile, 'w')
self.docs = open(docfile, 'w')
self.spec = get_by_path(dom, "spec")[0]
def h(self, code):
self.decls.write(code.encode('utf-8'))
def c(self, code):
self.impls.write(code.encode('utf-8'))
def d(self, code):
self.docs.write(code.encode('utf-8'))
def __call__(self):
for f in self.h, self.c:
self.do_header(f)
self.do_body()
# Header
def do_header(self, f):
f('/* Generated from: ')
f(get_descendant_text(get_by_path(self.spec, 'title')))
version = get_by_path(self.spec, "version")
if version:
f(' version ' + get_descendant_text(version))
f('\n\n')
for copyright in get_by_path(self.spec, 'copyright'):
f(get_descendant_text(copyright))
f('\n')
f('\n')
f(get_descendant_text(get_by_path(self.spec, 'license')))
f(get_descendant_text(get_by_path(self.spec, 'docstring')))
f("""
*/
""")
# Body
def do_body(self):
for iface in self.spec.getElementsByTagName('interface'):
self.do_iface(iface)
def do_iface(self, iface):
parent_name = get_by_path(iface, '../@name')
self.d("""\
/**
* %(IFACE_DEFINE)s:
*
* The interface name "%(name)s"
*/
""" % {'IFACE_DEFINE' : (self.prefix + 'IFACE_' + \
parent_name).upper().replace('/', ''),
'name' : iface.getAttribute('name')})
self.h("""
#define %(IFACE_DEFINE)s \\
"%(name)s"
""" % {'IFACE_DEFINE' : (self.prefix + 'IFACE_' + \
parent_name).upper().replace('/', ''),
'name' : iface.getAttribute('name')})
self.d("""
/**
* %(IFACE_QUARK_DEFINE)s:
*
* Expands to a call to a function that returns a quark for the interface \
name "%(name)s"
*/
""" % {'IFACE_QUARK_DEFINE' : (self.prefix + 'IFACE_QUARK_' + \
parent_name).upper().replace('/', ''),
'iface_quark_func' : (self.prefix + 'iface_quark_' + \
parent_name).lower().replace('/', ''),
'name' : iface.getAttribute('name')})
self.h("""
#define %(IFACE_QUARK_DEFINE)s \\
(%(iface_quark_func)s ())
GQuark %(iface_quark_func)s (void);
""" % {'IFACE_QUARK_DEFINE' : (self.prefix + 'IFACE_QUARK_' + \
parent_name).upper().replace('/', ''),
'iface_quark_func' : (self.prefix + 'iface_quark_' + \
parent_name).lower().replace('/', ''),
'name' : iface.getAttribute('name')})
self.c("""\
GQuark
%(iface_quark_func)s (void)
{
static GQuark quark = 0;
if (G_UNLIKELY (quark == 0))
{
quark = g_quark_from_static_string ("%(name)s");
}
return quark;
}
""" % {'iface_quark_func' : (self.prefix + 'iface_quark_' + \
parent_name).lower().replace('/', ''),
'name' : iface.getAttribute('name')})
for prop in iface.getElementsByTagNameNS(None, 'property'):
self.d("""
/**
* %(IFACE_PREFIX)s_%(PROP_UC)s:
*
* The fully-qualified property name "%(name)s.%(prop)s"
*/
""" % {'IFACE_PREFIX' : (self.prefix + 'PROP_' + \
parent_name).upper().replace('/', ''),
'PROP_UC': prop.getAttributeNS(NS_TP, "name-for-bindings").upper(),
'name' : iface.getAttribute('name'),
'prop' : prop.getAttribute('name'),
})
self.h("""
#define %(IFACE_PREFIX)s_%(PROP_UC)s \\
"%(name)s.%(prop)s"
""" % {'IFACE_PREFIX' : (self.prefix + 'PROP_' + \
parent_name).upper().replace('/', ''),
'PROP_UC': prop.getAttributeNS(NS_TP, "name-for-bindings").upper(),
'name' : iface.getAttribute('name'),
'prop' : prop.getAttribute('name'),
})
for prop in iface.getElementsByTagNameNS(NS_TP, 'contact-attribute'):
self.d("""
/**
* %(TOKEN_PREFIX)s_%(TOKEN_UC)s:
*
* The fully-qualified contact attribute token name "%(name)s/%(prop)s"
*/
""" % {'TOKEN_PREFIX' : (self.prefix + 'TOKEN_' + \
parent_name).upper().replace('/', ''),
'TOKEN_UC': prop.getAttributeNS(None, "name").upper().replace("-", "_").replace(".", "_"),
'name' : iface.getAttribute('name'),
'prop' : prop.getAttribute('name'),
})
self.h("""
#define %(TOKEN_PREFIX)s_%(TOKEN_UC)s \\
"%(name)s/%(prop)s"
""" % {'TOKEN_PREFIX' : (self.prefix + 'TOKEN_' + \
parent_name).upper().replace('/', ''),
'TOKEN_UC': prop.getAttributeNS(None, "name").upper().replace("-", "_").replace(".", "_"),
'name' : iface.getAttribute('name'),
'prop' : prop.getAttribute('name'),
})
for prop in iface.getElementsByTagNameNS(NS_TP, 'hct'):
if (prop.getAttribute('is-family') != "yes"):
self.d("""
/**
* %(TOKEN_PREFIX)s_%(TOKEN_UC)s:
*
* The fully-qualified capability token name "%(name)s/%(prop)s"
*/
""" % {'TOKEN_PREFIX' : (self.prefix + 'TOKEN_' + \
parent_name).upper().replace('/', ''),
'TOKEN_UC': prop.getAttributeNS(None, "name").upper().replace("-", "_").replace(".", "_"),
'name' : iface.getAttribute('name'),
'prop' : prop.getAttribute('name'),
})
self.h("""
#define %(TOKEN_PREFIX)s_%(TOKEN_UC)s \\
"%(name)s/%(prop)s"
""" % {'TOKEN_PREFIX' : (self.prefix + 'TOKEN_' + \
parent_name).upper().replace('/', ''),
'TOKEN_UC': prop.getAttributeNS(None, "name").upper().replace("-", "_").replace(".", "_"),
'name' : iface.getAttribute('name'),
'prop' : prop.getAttribute('name'),
})
if __name__ == '__main__':
argv = argv[1:]
Generator(argv[0], argv[1], argv[2], xml.dom.minidom.parse(argv[3]))()
| lgpl-2.1 |
jadref/buffer_bci | python/probedMovements_psychoPy/Free_Matsuhashi.py | 2 | 9596 | # import necessary libraries from Psychopy and buffer_bci-master
from psychopy import visual, core, event, gui, sound, data, monitors
from random import shuffle
import numpy as np
import struct, sys, time
sys.path.append("../../dataAcq/buffer/python/")
import FieldTrip
# ************** Set up buffer connection **************
# set hostname and port of the computer running the fieldtrip buffer.
hostname='localhost'
port=1972
# function to send events to data buffer
# use as: sendEvent("markername", markernumber, offset)
def sendEvent(event_type, event_value, offset=0):
e = FieldTrip.Event()
e.type = event_type
e.value = event_value
sample, bla = ftc.poll()
e.sample = sample + offset + 1
ftc.putEvents(e)
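# Example (mirroring the calls made below): mark a stimulus onset in the
# buffer, one sample after the latest sample reported by the FieldTrip client.
#
#   sendEvent("stimulus.image", "start")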
#Connecting to Buffer
timeout=5000
ftc = FieldTrip.Client()
# Wait until the buffer connects correctly and returns a valid header
hdr = None;
while hdr is None :
print 'Trying to connect to buffer on %s:%i ...'%(hostname,port)
try:
ftc.connect(hostname, port)
print '\nConnected - trying to read header...'
hdr = ftc.getHeader()
except IOError:
pass
if hdr is None:
print 'Invalid Header... waiting'
core.wait(1)
else:
print hdr
print hdr.labels
fSample = hdr.fSample
# ************** Set up stimulus screen and set experiment parameters **************
#present a dialogue to provide the current participant code
ppcode = {'Participant':01}
dlg = gui.DlgFromDict(ppcode, title='Experiment', fixed=['01'])
if dlg.OK:
# create a text file to save the experiment data
savefile = open("logfiles/pp"+str(ppcode['Participant'])+".txt","w")
savefile.write("Trial \t Participant \t Image \t MaxTime(sec) \t Played \t SoundOnset(sec) \t" +
"AudioStart(sec) \t AudioEnd(sec) \t RT(sec) \t Answer(1=yes, 0=no, -1=empty) \n")
else:
core.quit() #the user hit cancel so exit
# Setup the stimulus window
mywin = visual.Window(size=(1920, 1080), fullscr=True, screen=0, allowGUI=False, allowStencil=False,
monitor='testMonitor', units="pix",color=[0,0,0], colorSpace='rgb',blendMode='avg', useFBO=True)
#create some stimuli
instruction = visual.TextStim(mywin, text='Welcome!\n\n' +
'You will view a sequence of images, each displaying an object.\n'+
'When you recognize the object in the image, press SPACE with your right hand in order to continue to the next image.\n\n' +
'When you hear a beep while you are looking at an image and:\n' +
'(1) you already intended to press SPACE: do NOT press SPACE and wait for the next image to appear.\n' +
'(2) you did not yet intend to press SPACE: ignore the beep and continue what you were doing.\n\n' +
'This experiment will last about 10 minutes.\n' +
'Please try to blink and move as little as possible while there is an image present on the screen.\n\n' +
'Good luck!',color=(1,1,1),wrapWidth = 800) # instructions
fixation = visual.TextStim(mywin, text='+',color=(1,1,1),height=40) # fixation cross
breaktext = visual.TextStim(mywin, text='Break\n\nPress a button to continue...',color=(1,1,1)) # break
thankyou = visual.TextStim(mywin, text='End of the experiment',color=(1,1,1)) # thank you screen
question = visual.TextStim(mywin, text='Did you already intend to press SPACE when you heard the beep? \n [z = YES] [m = NO]',color=(1,1,1)) # intention question
beep = sound.SoundPyo(value='C',secs=0.2,octave=5,stereo=True,volume=1.0,loops=0,sampleRate=44100,bits=16,hamming=True,start=0,stop=-1) # beep sound
#set experiment parameters
nr_images = 15
opacity = np.arange(0.0,1.0,0.025)
sizeMask = 8
nr_trials_per_block = 5
nr_blocks = 3
current_trial = 0
current_block = 1
order = list(xrange(1,nr_images+1))
print "Order", order
shuffle(order)
timer = core.Clock()
# ************** Start experiment **************
# Show instruction
instruction.draw()
mywin.flip()
# wait for key-press
allKeys = event.getKeys()
while len(allKeys)==0:
allKeys = event.getKeys()
if 'escape' in allKeys[0]:
mywin.close() # quit
core.quit()
# Run through trials
print "Total number of trials = " + str(nr_images)
for block in range (1,nr_blocks+1):
sendEvent("experiment.block","Block_"+str(block))
for trial in range (0,nr_trials_per_block):
print "Current trial = ", current_trial
# set current image and image mask
image = visual.ImageStim(mywin, image="stimuli_BOSS_database/IMG" + str(order[current_trial]) + ".png") # set current image
image.setSize([500,500])
myTex = np.random.choice([0.0,1.0],size=(sizeMask,sizeMask),p=[1./10,9./10])
myStim = visual.GratingStim(mywin, tex=None, mask=myTex, size=image.size)
# determine max trial length (between 10 and 15 seconds)
maxTime = (15-10)*np.random.random()+10
print "Max time = ", maxTime
# determine random sound onset, uniform over [3, maxTime) seconds after trial start
soundOnset = ((maxTime-1)-2)*np.random.random()+3
print "Sound onset = ", soundOnset
allKeys = [] # forget Keyboard history
answer = -1
soundStart = -1
rt = -1
soundEnd = -1
endTrial = False # trial still running
done = False # no button press yet
empty = False # image is still masked
played = False # sound is not played yet
timestep = 0.05 # a piece of the image mask disappears every 50 ms
idx = len(opacity)-1
current_time = timestep
x = np.random.randint(sizeMask) # which part of image mask should disappear
y = np.random.randint(sizeMask) # which part of image mask should disappear
while myTex[x][y] == 0:
x = np.random.randint(sizeMask)
y = np.random.randint(sizeMask)
# present fixation cross for 200ms
sendEvent("experiment.trial","Trial_"+str(current_trial))
fixation.draw()
mywin.flip()
sendEvent("stimulus.fixationcross","start")
core.wait(0.2)
sendEvent("stimulus.fixationcross","end")
# present image
image.draw()
myStim.draw()
mywin.flip()
sendEvent("stimulus.image","start")
timer.reset()
while done is False and endTrial is False:
if timer.getTime() >= maxTime:
endTrial = True
question.draw()
mywin.flip()
allKeys = event.getKeys()
while len(allKeys)<1:
allKeys = event.getKeys()
if allKeys[0] == 'z':
sendEvent("response.question","yes")
answer = 1
else:
sendEvent("response.question","no")
answer = 0
else:
allKeys = event.getKeys()
if timer.getTime() >= soundOnset and played is False:
sendEvent("stimulus.beep","start")
soundStart = timer.getTime()
beep.play()
sendEvent("stimulus.beep","end")
soundEnd = timer.getTime()
played = True
if len(allKeys)>0:
if allKeys[0] == 'space':
sendEvent("response.space","pressed")
rt = timer.getTime()
done = 1 # button press
elif 'escape' in allKeys[0]:
mywin.close() # quit
core.quit()
elif timer.getTime() >= current_time and empty is False:
current_time += timestep
myTex[x][y] = opacity[idx]
idx -= 1
myStim = visual.GratingStim(mywin, tex=None, mask=myTex, size=image.size)
image.draw()
myStim.draw()
mywin.flip()
if idx == -1:
if 1 in myTex:
idx = len(opacity)-1
x = np.random.randint(sizeMask) # get new part of mask to disappear
y = np.random.randint(sizeMask) # get new part of mask to disappear
while myTex[x][y] == 0:
x = np.random.randint(sizeMask)
y = np.random.randint(sizeMask)
else:
empty = True
# save data to file
savefile.write(str(current_trial) + "\t" + str(ppcode['Participant']) +"\t" + "IMG" + str(order[current_trial]) + ".png" + "\t" +
str(round(maxTime,3)) + "\t" + str(played) + "\t" + str(round(soundOnset,3)) + "\t" + str(round(soundStart,3)) + "\t" +
str(round(soundEnd,3)) + "\t" + str(round(rt,3)) + "\t" + str(answer) + "\n")
mywin.flip()
core.wait(0.2)
current_trial += 1
if block < nr_blocks:
# break
breaktext.draw()
mywin.flip()
# wait for key-press
allKeys = event.getKeys()
while len(allKeys)==0:
allKeys = event.getKeys()
# ************** End of experiment **************
thankyou.draw()
mywin.flip()
core.wait(2)
#cleanup
mywin.close()
ftc.disconnect()
core.quit()
sys.exit()
| gpl-3.0 |
iwm911/plaso | plaso/parsers/bsm_test.py | 1 | 8160 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2013 The Plaso Project Authors.
# Please see the AUTHORS file for details on individual authors.
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for Basic Security Module (BSM) file parser."""
import unittest
# pylint: disable=unused-import
from plaso.formatters import bsm as bsm_formatter
from plaso.lib import event
from plaso.lib import timelib_test
from plaso.parsers import bsm
from plaso.parsers import test_lib
class BsmParserTest(test_lib.ParserTestCase):
"""Tests for Basic Security Module (BSM) file parser."""
def setUp(self):
"""Sets up the needed objects used throughout the test."""
mac_pre_obj = event.PreprocessObject()
mac_pre_obj.guessed_os = 'MacOSX'
self._parser_macbsm = bsm.BsmParser(mac_pre_obj, None)
openbsm_pre_obj = event.PreprocessObject()
openbsm_pre_obj.guessed_os = 'openbsm'
self._parser_openbsm = bsm.BsmParser(openbsm_pre_obj, None)
def testParse(self):
"""Tests the Parse function on a "generic" BSM file."""
test_file = self._GetTestFilePath(['openbsm.bsm'])
event_generator = self._ParseFile(self._parser_openbsm, test_file)
event_objects = self._GetEventObjects(event_generator)
self.assertEqual(len(event_objects), 50)
expected_extra_tokens = [
u'[BSM_TOKEN_ARGUMENT32: test_arg32_token(3) is 0xABCDEF00]',
u'[BSM_TOKEN_DATA: Format data: String, Data: SomeData]',
u'[BSM_TOKEN_FILE: test, timestamp: 1970-01-01 20:42:45]',
u'[BSM_TOKEN_ADDR: 192.168.100.15]',
u'[IPv4_Header: 0x400000145478000040010000c0a8649bc0a86e30]',
u'[BSM_TOKEN_IPC: object type 1, object id 305419896]',
u'[BSM_TOKEN_PORT: 20480]',
u'[BSM_TOKEN_OPAQUE: aabbccdd]',
u'[BSM_TOKEN_PATH: /test/this/is/a/test]',
(u'[BSM_TOKEN_PROCESS32: aid(305419896), euid(19088743), '
u'egid(591751049), uid(2557891634), gid(159868227), '
u'pid(321140038), session_id(2542171492), '
u'terminal_port(374945606), terminal_ip(127.0.0.1)]'),
(u'[BSM_TOKEN_PROCESS64: aid(305419896), euid(19088743), '
u'egid(591751049), uid(2557891634), gid(159868227), '
u'pid(321140038), session_id(2542171492), '
u'terminal_port(374945606), terminal_ip(127.0.0.1)]'),
(u'[BSM_TOKEN_RETURN32: Invalid argument (22), '
u'System call status: 305419896]'),
u'[BSM_TOKEN_SEQUENCE: 305419896]',
(u'[BSM_TOKEN_AUT_SOCKINET32_EX: '
u'from 127.0.0.1 port 0 to 127.0.0.1 port 0]'),
(u'[BSM_TOKEN_SUBJECT32: aid(305419896), euid(19088743), '
u'egid(591751049), uid(2557891634), gid(159868227), '
u'pid(321140038), session_id(2542171492), '
u'terminal_port(374945606), terminal_ip(127.0.0.1)]'),
(u'[BSM_TOKEN_SUBJECT32_EX: aid(305419896), euid(19088743), '
u'egid(591751049), uid(2557891634), gid(159868227), '
u'pid(321140038), session_id(2542171492), '
u'terminal_port(374945606), terminal_ip(fe80::1)]'),
u'[BSM_TOKEN_TEXT: This is a test.]',
u'[BSM_TOKEN_ZONENAME: testzone]',
(u'[BSM_TOKEN_RETURN32: Argument list too long (7), '
u'System call status: 4294967295]')]
extra_tokens = []
for event_object_index in range(0, 19):
extra_tokens.append(event_objects[event_object_index].extra_tokens)
self.assertEqual(extra_tokens, expected_extra_tokens)
def testParseFileMacOSX(self):
"""Tests the Parse function on a Mac OS X BSM file."""
test_file = self._GetTestFilePath(['apple.bsm'])
event_generator = self._ParseFile(self._parser_macbsm, test_file)
event_objects = self._GetEventObjects(event_generator)
self.assertEqual(len(event_objects), 54)
event_object = event_objects[0]
self.assertEqual(event_object.data_type, 'mac:bsm:event')
expected_msg = (
u'Type: audit crash recovery (45029) '
u'Return: [BSM_TOKEN_RETURN32: Success (0), System call status: 0] '
u'Information: [BSM_TOKEN_TEXT: launchctl::Audit recovery]. '
u'[BSM_TOKEN_PATH: /var/audit/20131104171720.crash_recovery]')
expected_msg_short = (
u'Type: audit crash recovery (45029) '
u'Return: [BSM_TOKEN_RETURN32: Success (0), ...')
self._TestGetMessageStrings(event_object, expected_msg, expected_msg_short)
expected_timestamp = timelib_test.CopyStringToTimestamp(
'2013-11-04 18:36:20.000381')
self.assertEqual(event_object.timestamp, expected_timestamp)
self.assertEqual(event_object.event_type, u'audit crash recovery (45029)')
expected_extra_tokens = (
u'[BSM_TOKEN_TEXT: launchctl::Audit recovery]. '
u'[BSM_TOKEN_PATH: /var/audit/20131104171720.crash_recovery]')
self.assertEqual(event_object.extra_tokens, expected_extra_tokens)
expected_return_value = (
u'[BSM_TOKEN_RETURN32: Success (0), System call status: 0]')
self.assertEqual(event_object.return_value, expected_return_value)
event_object = event_objects[15]
expected_timestamp = timelib_test.CopyStringToTimestamp(
'2013-11-04 18:36:26.000171')
self.assertEqual(event_object.timestamp, expected_timestamp)
self.assertEqual(event_object.event_type, u'user authentication (45023)')
expected_extra_tokens = (
u'[BSM_TOKEN_SUBJECT32: aid(4294967295), euid(92), egid(92), uid(92), '
u'gid(92), pid(143), session_id(100004), terminal_port(143), '
u'terminal_ip(0.0.0.0)]. '
u'[BSM_TOKEN_TEXT: Verify password for record type Users '
u'\'moxilo\' node \'/Local/Default\']')
self.assertEqual(event_object.extra_tokens, expected_extra_tokens)
expected_return_value = (
u'[BSM_TOKEN_RETURN32: Unknown (255), System call status: 5000]')
self.assertEqual(event_object.return_value, expected_return_value)
event_object = event_objects[31]
expected_timestamp = timelib_test.CopyStringToTimestamp(
'2013-11-04 18:36:26.000530')
self.assertEqual(event_object.timestamp, expected_timestamp)
self.assertEqual(event_object.event_type, u'SecSrvr AuthEngine (45025)')
expected_extra_tokens = (
u'[BSM_TOKEN_SUBJECT32: aid(4294967295), euid(0), egid(0), uid(0), '
u'gid(0), pid(67), session_id(100004), terminal_port(67), '
u'terminal_ip(0.0.0.0)]. '
u'[BSM_TOKEN_TEXT: system.login.done]. '
u'[BSM_TOKEN_TEXT: system.login.done]')
self.assertEqual(event_object.extra_tokens, expected_extra_tokens)
expected_return_value = (
u'[BSM_TOKEN_RETURN32: Success (0), System call status: 0]')
self.assertEqual(event_object.return_value, expected_return_value)
event_object = event_objects[50]
expected_timestamp = timelib_test.CopyStringToTimestamp(
'2013-11-04 18:37:36.000399')
self.assertEqual(event_object.timestamp, expected_timestamp)
self.assertEqual(event_object.event_type, u'session end (44903)')
expected_extra_tokens = (
u'[BSM_TOKEN_ARGUMENT64: sflags(1) is 0x0]. '
u'[BSM_TOKEN_ARGUMENT32: am_success(2) is 0x3000]. '
u'[BSM_TOKEN_ARGUMENT32: am_failure(3) is 0x3000]. '
u'[BSM_TOKEN_SUBJECT32: aid(4294967295), euid(0), egid(0), uid(0), '
u'gid(0), pid(0), session_id(100015), terminal_port(0), '
u'terminal_ip(0.0.0.0)]')
self.assertEqual(event_object.extra_tokens, expected_extra_tokens)
expected_return_value = (
u'[BSM_TOKEN_RETURN32: Success (0), System call status: 0]')
self.assertEqual(event_object.return_value, expected_return_value)
if __name__ == '__main__':
unittest.main()
| apache-2.0 |
willthames/ansible | lib/ansible/plugins/action/package.py | 31 | 2900 | # (c) 2015, Ansible Inc,
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.plugins.action import ActionBase
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
class ActionModule(ActionBase):
TRANSFERS_FILES = False
def run(self, tmp=None, task_vars=None):
''' handler for package operations '''
self._supports_check_mode = True
self._supports_async = True
result = super(ActionModule, self).run(tmp, task_vars)
module = self._task.args.get('use', 'auto')
if module == 'auto':
try:
if self._task.delegate_to: # if we delegate, we should use delegated host's facts
module = self._templar.template("{{hostvars['%s']['ansible_facts']['ansible_pkg_mgr']}}" % self._task.delegate_to)
else:
module = self._templar.template('{{ansible_facts["ansible_pkg_mgr"]}}')
except:
pass # could not get it from template!
if module == 'auto':
facts = self._execute_module(module_name='setup', module_args=dict(filter='ansible_pkg_mgr', gather_subset='!all'), task_vars=task_vars)
display.debug("Facts %s" % facts)
module = facts.get('ansible_facts', {}).get('ansible_pkg_mgr', 'auto')
if module != 'auto':
if module not in self._shared_loader_obj.module_loader:
result['failed'] = True
result['msg'] = 'Could not find a module for %s.' % module
else:
# run the 'package' module
new_module_args = self._task.args.copy()
if 'use' in new_module_args:
del new_module_args['use']
display.vvvv("Running %s" % module)
result.update(self._execute_module(module_name=module, module_args=new_module_args, task_vars=task_vars, wrap_async=self._task.async))
else:
result['failed'] = True
result['msg'] = 'Could not detect which package manager to use. Try gathering facts or setting the "use" option.'
return result
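# Illustrative playbook usage (package name is hypothetical): with use=auto
# the action resolves ansible_pkg_mgr from facts and dispatches to the
# matching backend module (apt, yum, ...).
#
#   - package:
#       name: htop
#       state: present
#       use: auto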
| gpl-3.0 |
willprice/arduino-sphere-project | scripts/example_direction_finder/temboo/Library/Bitly/Links/ShortenURL.py | 1 | 3516 | # -*- coding: utf-8 -*-
###############################################################################
#
# ShortenURL
# Returns a shortened URL for a long URL that you provide.
#
# Python versions 2.6, 2.7, 3.x
#
# Copyright 2014, Temboo Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
#
#
###############################################################################
from temboo.core.choreography import Choreography
from temboo.core.choreography import InputSet
from temboo.core.choreography import ResultSet
from temboo.core.choreography import ChoreographyExecution
import json
class ShortenURL(Choreography):
def __init__(self, temboo_session):
"""
Create a new instance of the ShortenURL Choreo. A TembooSession object, containing a valid
set of Temboo credentials, must be supplied.
"""
super(ShortenURL, self).__init__(temboo_session, '/Library/Bitly/Links/ShortenURL')
def new_input_set(self):
return ShortenURLInputSet()
def _make_result_set(self, result, path):
return ShortenURLResultSet(result, path)
def _make_execution(self, session, exec_id, path):
return ShortenURLChoreographyExecution(session, exec_id, path)
class ShortenURLInputSet(InputSet):
"""
An InputSet with methods appropriate for specifying the inputs to the ShortenURL
Choreo. The InputSet object is used to specify input parameters when executing this Choreo.
"""
def set_AccessToken(self, value):
"""
Set the value of the AccessToken input for this Choreo. ((required, string) The oAuth access token provided by Bitly.)
"""
super(ShortenURLInputSet, self)._set_input('AccessToken', value)
def set_LongURL(self, value):
"""
Set the value of the LongURL input for this Choreo. ((required, string) The long url that you want to shorten.)
"""
super(ShortenURLInputSet, self)._set_input('LongURL', value)
def set_ResponseFormat(self, value):
"""
Set the value of the ResponseFormat input for this Choreo. ((optional, string) The format that you want the response to be in. Defaults to simple "txt" format which will just return the shortened URL. "json" and "xml" are also supported.)
"""
super(ShortenURLInputSet, self)._set_input('ResponseFormat', value)
class ShortenURLResultSet(ResultSet):
"""
A ResultSet with methods tailored to the values returned by the ShortenURL Choreo.
The ResultSet object is used to retrieve the results of a Choreo execution.
"""
def getJSONFromString(self, str):
return json.loads(str)
def get_Response(self):
"""
Retrieve the value for the "Response" output from this Choreo execution. (The response from Bitly.)
"""
return self._output.get('Response', None)
class ShortenURLChoreographyExecution(ChoreographyExecution):
def _make_result_set(self, response, path):
return ShortenURLResultSet(response, path)
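# Illustrative usage (the session object and token are hypothetical;
# execute_with_results follows the general Temboo SDK pattern and the method
# name should be treated as an assumption):
#
#   choreo = ShortenURL(session)
#   inputs = choreo.new_input_set()
#   inputs.set_AccessToken('<OAUTH_TOKEN>')
#   inputs.set_LongURL('http://example.com/some/very/long/path')
#   results = choreo.execute_with_results(inputs)
#   print(results.get_Response())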
| gpl-2.0 |
srinathv/bokeh | bokeh/server/zmq/forwarder.py | 13 | 1318 | #-----------------------------------------------------------------------------
# Copyright (c) 2012 - 2015, Continuum Analytics, Inc. All rights reserved.
#
# Powered by the Bokeh Development Team.
#
# The full license is in the file LICENSE.txt, distributed with this software.
#-----------------------------------------------------------------------------
from __future__ import absolute_import
import zmq
from zmq.devices import ThreadDevice as ZMQThreadDevice
class ThreadDevice(ZMQThreadDevice):
def __init__(self, ctx, device_type=zmq.QUEUE, in_type=None, out_type=None):
self.ctx = ctx
# XXX: super(ThreadDevice, self).__init__(device_type, in_type, out_type)
# but ZMQThreadDevice is an old-style class (yay!).
ZMQThreadDevice.__init__(self, device_type, in_type, out_type)
def context_factory(self):
return self.ctx
class Forwarder(object):
def __init__(self, ctx, input_addr, output_addr):
self.device = ThreadDevice(ctx, zmq.FORWARDER, in_type=zmq.SUB, out_type=zmq.PUB)
self.device.bind_in(input_addr)
self.device.bind_out(output_addr)
self.device.setsockopt_in(zmq.SUBSCRIBE, b"")
def start(self):
self.device.start()
def stop(self):
self.device.ctx.term()
self.device.join()
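# Illustrative usage (addresses are hypothetical): relay everything published
# on the input address to subscribers of the output address.
#
#   ctx = zmq.Context()
#   forwarder = Forwarder(ctx, 'tcp://127.0.0.1:5555', 'tcp://127.0.0.1:5556')
#   forwarder.start()
#   ...
#   forwarder.stop()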
| bsd-3-clause |
guilhermetavares/django-debug-toolbar | tests/settings.py | 8 | 1848 | """Django settings for tests."""
import os
import django
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
SECRET_KEY = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890'
INTERNAL_IPS = ['127.0.0.1']
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'debug_toolbar',
'tests',
]
MEDIA_URL = '/media/' # Avoids https://code.djangoproject.com/ticket/21451
MIDDLEWARE_CLASSES = [
'debug_toolbar.middleware.DebugToolbarMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'tests.urls'
STATIC_ROOT = os.path.join(BASE_DIR, 'tests', 'static')
STATIC_URL = '/static/'
STATICFILES_DIRS = [
os.path.join(BASE_DIR, 'tests', 'additional_static'),
("prefix", os.path.join(BASE_DIR, 'tests', 'additional_static')),
]
# Cache and database
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
},
'second': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
},
}
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
}
# Debug Toolbar configuration
DEBUG_TOOLBAR_CONFIG = {
# Django's test client sets wsgi.multiprocess to True inappropriately
'RENDER_PANELS': False,
}
if django.VERSION[:2] < (1, 6):
TEST_RUNNER = 'discover_runner.DiscoverRunner'
| bsd-3-clause |
carolinux/QGIS | python/plugins/processing/modeler/ModelerAlgorithmProvider.py | 12 | 4201 | # -*- coding: utf-8 -*-
"""
***************************************************************************
ModelerAlgorithmProvider.py
---------------------
Date : August 2012
Copyright : (C) 2012 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'August 2012'
__copyright__ = '(C) 2012, Victor Olaya'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import os
from PyQt4.QtGui import QIcon
from processing.core.AlgorithmProvider import AlgorithmProvider
from processing.core.ProcessingConfig import ProcessingConfig, Setting
from processing.core.ProcessingLog import ProcessingLog
from processing.modeler.ModelerUtils import ModelerUtils
from processing.modeler.ModelerAlgorithm import ModelerAlgorithm
from processing.modeler.WrongModelException import WrongModelException
from processing.modeler.EditModelAction import EditModelAction
from processing.modeler.CreateNewModelAction import CreateNewModelAction
from processing.modeler.DeleteModelAction import DeleteModelAction
from processing.modeler.AddModelFromFileAction import AddModelFromFileAction
from processing.gui.GetScriptsAndModels import GetModelsAction
pluginPath = os.path.split(os.path.dirname(__file__))[0]
class ModelerAlgorithmProvider(AlgorithmProvider):
def __init__(self):
AlgorithmProvider.__init__(self)
self.actions = [CreateNewModelAction(), AddModelFromFileAction(), GetModelsAction()]
self.contextMenuActions = [EditModelAction(), DeleteModelAction()]
def initializeSettings(self):
AlgorithmProvider.initializeSettings(self)
ProcessingConfig.addSetting(Setting(self.getDescription(),
ModelerUtils.MODELS_FOLDER, self.tr('Models folder', 'ModelerAlgorithmProvider'),
ModelerUtils.modelsFolder(), valuetype=Setting.FOLDER))
def setAlgsList(self, algs):
ModelerUtils.allAlgs = algs
def modelsFolder(self):
return ModelerUtils.modelsFolder()
def getDescription(self):
return self.tr('Models', 'ModelerAlgorithmProvider')
def getName(self):
return 'model'
def getIcon(self):
return QIcon(os.path.join(pluginPath, 'images', 'model.png'))
def _loadAlgorithms(self):
folder = ModelerUtils.modelsFolder()
self.loadFromFolder(folder)
def loadFromFolder(self, folder):
if not os.path.exists(folder):
return
for path, subdirs, files in os.walk(folder):
for descriptionFile in files:
if descriptionFile.endswith('model'):
try:
fullpath = os.path.join(path, descriptionFile)
alg = ModelerAlgorithm.fromFile(fullpath)
if alg.name:
alg.provider = self
alg.descriptionFile = fullpath
self.algs.append(alg)
else:
ProcessingLog.addToLog(ProcessingLog.LOG_ERROR,
self.tr('Could not load model %s', 'ModelerAlgorithmProvider') % descriptionFile)
except WrongModelException as e:
ProcessingLog.addToLog(ProcessingLog.LOG_ERROR,
self.tr('Could not load model %s\n%s', 'ModelerAlgorithmProvider') % (descriptionFile, e.msg))
| gpl-2.0 |
jdrusso/last_letter | last_letter/scripts/gyroscope.py | 2 | 4760 | #!/usr/bin/env python
import roslib
import sys
import rospy
from geometry_msgs.msg import Vector3, Vector3Stamped
import numpy as np
from uav_utils import ThermalRise
from mathutils import saturation, quat2Reb
from math import floor, exp
from last_letter.msg import SimStates, SimSensor3, Environment
####################### Gyroscope Class #########################################################
class Gyroscope():
def __init__(self,name):
rospy.loginfo('Create ' + name +' Sensor')
self.dt = 1.0/rospy.get_param(name+'/rate', 100.0)
self.rate = rospy.Rate(rospy.get_param(name+'/rate', 100.0))
rospy.loginfo("\ta.rate: %.1f Hz",1.0/self.dt)
self.cp = np.array(rospy.get_param(name+'/CP', [0.0, 0.0, 0.0])).reshape(3,1)
rospy.loginfo("\tb.position: [%.2f,%.2f,%.2f]", self.cp[0],self.cp[1],self.cp[2])
self.resolution = rospy.get_param(name+'/resolution', 0.0001)
rospy.loginfo("\tc.resolution: %.1e rad/s",self.resolution)
self.maxvalue = rospy.get_param(name+'/max', 9.0)
rospy.loginfo("\td.maxvalue: %.2f rad/s",self.maxvalue)
self.std_coeff = rospy.get_param(name+'/std_coeff', [0.0,0.0,0.0])
rospy.loginfo("\te.noise coeff:")
rospy.loginfo("\t\t1.main std: %.3f",self.std_coeff[0])
rospy.loginfo("\t\t2.bias coeff: %.3f",self.std_coeff[1])
rospy.loginfo("\t\t3.random walk coeff: %.3f",self.std_coeff[2])
#Noise Characteristics
self.bs = np.random.normal(0,rospy.get_param(name+'/scaleBias', 0.03)/3,3)
self.ks = np.random.normal(0,rospy.get_param(name+'/scaleThermal', 0.002)/3,3)
self.bb = np.random.normal(0,rospy.get_param(name+'/offsetBias', 0.785)/3,3)
self.kb = np.random.normal(0,rospy.get_param(name+'/offsetThermal', 0.0084)/3,3)
self.p0 = rospy.get_param(name+'/RWPSD', 0.00392)
self.bias=Vector3()
self.randomwalk=Vector3()
self.scale = Vector3()
self.temp=25.0
self.measurement = SimSensor3()
self.pub = rospy.Publisher(name, SimSensor3)
self.states_sub = rospy.Subscriber("states",SimStates,self.StatesCallback)
self.atm_sub = rospy.Subscriber("environment",Environment,self.EnvironmentCallback)
self.real=SimStates()
self.realnew=False
#Thermal Characteristics
self.thermalMass = ThermalRise(name)
rospy.loginfo("\tf.Loading Thermal Chracteristics")
self.airspeed = 0.0
self.temp=25.0
def iterate(self,states):
Reb = quat2Reb(states.pose.orientation)
rw_noise = np.random.normal(0,self.p0,3)
self.randomwalk.x +=self.dt*rw_noise[0]
self.randomwalk.y +=self.dt*rw_noise[1]
self.randomwalk.z +=self.dt*rw_noise[2]
self.bias.x = self.bb[0] + self.kb[0] * (self.temp - 298.15)
self.bias.y = self.bb[1] + self.kb[1] * (self.temp - 298.15)
self.bias.z = self.bb[2] + self.kb[2] * (self.temp - 298.15)
self.scale.x = self.bs[0] + self.ks[0] * (self.temp - 298.15)
self.scale.y = self.bs[1] + self.ks[1] * (self.temp - 298.15)
self.scale.z = self.bs[2] + self.ks[2] * (self.temp - 298.15)
self.measurement.axis.x = (1+self.scale.x)*states.velocity.angular.x + self.bias.x + self.randomwalk.x
self.measurement.axis.y = (1+self.scale.y)*states.velocity.angular.y + self.bias.y + self.randomwalk.y
self.measurement.axis.z = (1+self.scale.z)*states.velocity.angular.z + self.bias.z + self.randomwalk.z
self.measurement.axis.x = saturation(floor(self.measurement.axis.x/self.resolution)*self.resolution,-self.maxvalue, self.maxvalue)
self.measurement.axis.y = saturation(floor(self.measurement.axis.y/self.resolution)*self.resolution,-self.maxvalue, self.maxvalue)
self.measurement.axis.z = saturation(floor(self.measurement.axis.z/self.resolution)*self.resolution,-self.maxvalue, self.maxvalue)
self.tempOffset = self.thermalMass.step(self.airspeed,self.dt)
self.measurement.temperature = self.temp + self.tempOffset
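# Measurement model sketch (read off the code above): each axis reports
#   (1 + scale) * true_rate + bias + random_walk
# where bias and scale drift linearly with temperature around 298.15 K, the
# random walk integrates zero-mean Gaussian noise with PSD p0, and the result
# is quantized to `resolution` and saturated at +/- maxvalue.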
def StatesCallback(self,data):
self.real=data
self.realnew=True
def EnvironmentCallback(self,data):
self.temp=data.temperature
if self.realnew:
self.airspeed = np.sqrt(pow(self.real.velocity.linear.x - data.wind.x,2) + pow(self.real.velocity.linear.y - data.wind.y,2) + pow(self.real.velocity.linear.z - data.wind.z,2))
###################################################################################################
####################### Main Program ############################################################
###################################################################################################
if __name__ == '__main__':
rospy.init_node('gyro_model')
fullname = rospy.get_name().split('/')
gyro = Gyroscope(fullname[-1])
while not rospy.is_shutdown():
if gyro.realnew:
gyro.realnew=False
gyro.iterate(gyro.real)
gyro.measurement.header.stamp=rospy.Time.now()
gyro.pub.publish(gyro.measurement)
gyro.rate.sleep()
| gpl-3.0 |
tgianos/zerotodocker | security_monkey/0.3.4/security_monkey-scheduler/config-deploy.py | 6 | 1385 | # Insert any config items here.
# This will be fed into Flask/SQLAlchemy inside security_monkey/__init__.py
LOG_LEVEL = "DEBUG"
LOG_FILE = "/var/log/security_monkey/security_monkey-deploy.log"
SQLALCHEMY_DATABASE_URI = 'postgresql://postgres:securitymonkeypassword@postgres:5432/secmonkey'
SQLALCHEMY_POOL_SIZE = 50
SQLALCHEMY_MAX_OVERFLOW = 15
ENVIRONMENT = 'ec2'
USE_ROUTE53 = False
FQDN = '<PUBLIC_IP_ADDRESS>'
API_PORT = '5000'
WEB_PORT = '443'
FRONTED_BY_NGINX = True
NGINX_PORT = '443'
WEB_PATH = '/static/ui.html'
BASE_URL = 'https://{}/'.format(FQDN)
SECRET_KEY = '<INSERT_RANDOM_STRING_HERE>'
MAIL_DEFAULT_SENDER = 'securitymonkey@<YOURDOMAIN>.com'
SECURITY_REGISTERABLE = True
SECURITY_CONFIRMABLE = False
SECURITY_RECOVERABLE = False
SECURITY_PASSWORD_HASH = 'bcrypt'
SECURITY_PASSWORD_SALT = '<INSERT_RANDOM_STRING_HERE>'
SECURITY_POST_LOGIN_VIEW = WEB_PATH
SECURITY_POST_REGISTER_VIEW = WEB_PATH
SECURITY_POST_CONFIRM_VIEW = WEB_PATH
SECURITY_POST_RESET_VIEW = WEB_PATH
SECURITY_POST_CHANGE_VIEW = WEB_PATH
# This address gets all change notifications
SECURITY_TEAM_EMAIL = []
SECURITY_SEND_REGISTER_EMAIL = False
# These are only required if using SMTP instead of SES
EMAILS_USE_SMTP = True # Otherwise, Use SES
MAIL_SERVER = 'smtp.<YOUREMAILPROVIDER>.com'
MAIL_PORT = 465
MAIL_USE_SSL = True
MAIL_USERNAME = 'securitymonkey'
MAIL_PASSWORD = '<YOURPASSWORD>'
| apache-2.0 |
jotes/boto | boto/route53/record.py | 136 | 14689 | # Copyright (c) 2010 Chris Moyer http://coredumped.org/
# Copyright (c) 2012 Mitch Garnaat http://garnaat.org/
# Copyright (c) 2012 Amazon.com, Inc. or its affiliates.
# All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
RECORD_TYPES = ['A', 'AAAA', 'TXT', 'CNAME', 'MX', 'PTR', 'SRV', 'SPF']
from boto.resultset import ResultSet
class ResourceRecordSets(ResultSet):
"""
A list of resource records.
:ivar hosted_zone_id: The ID of the hosted zone.
:ivar comment: A comment that will be stored with the change.
:ivar changes: A list of changes.
"""
ChangeResourceRecordSetsBody = """<?xml version="1.0" encoding="UTF-8"?>
<ChangeResourceRecordSetsRequest xmlns="https://route53.amazonaws.com/doc/2013-04-01/">
<ChangeBatch>
<Comment>%(comment)s</Comment>
<Changes>%(changes)s</Changes>
</ChangeBatch>
</ChangeResourceRecordSetsRequest>"""
ChangeXML = """<Change>
<Action>%(action)s</Action>
%(record)s
</Change>"""
def __init__(self, connection=None, hosted_zone_id=None, comment=None):
self.connection = connection
self.hosted_zone_id = hosted_zone_id
self.comment = comment
self.changes = []
self.next_record_name = None
self.next_record_type = None
self.next_record_identifier = None
super(ResourceRecordSets, self).__init__([('ResourceRecordSet', Record)])
def __repr__(self):
if self.changes:
record_list = ','.join([c.__repr__() for c in self.changes])
else:
record_list = ','.join([record.__repr__() for record in self])
return '<ResourceRecordSets:%s [%s]' % (self.hosted_zone_id,
record_list)
def add_change(self, action, name, type, ttl=600,
alias_hosted_zone_id=None, alias_dns_name=None, identifier=None,
weight=None, region=None, alias_evaluate_target_health=None,
health_check=None, failover=None):
"""
Add a change request to the set.
:type action: str
:param action: The action to perform ('CREATE'|'DELETE'|'UPSERT')
:type name: str
:param name: The name of the domain you want to perform the action on.
:type type: str
:param type: The DNS record type. Valid values are:
* A
* AAAA
* CNAME
* MX
* NS
* PTR
* SOA
* SPF
* SRV
* TXT
:type ttl: int
:param ttl: The resource record cache time to live (TTL), in seconds.
:type alias_hosted_zone_id: str
:param alias_dns_name: *Alias resource record sets only* The value
of the hosted zone ID, CanonicalHostedZoneNameId, for
the LoadBalancer.
:type alias_dns_name: str
:param alias_hosted_zone_id: *Alias resource record sets only*
Information about the domain to which you are redirecting traffic.
:type identifier: str
:param identifier: *Weighted and latency-based resource record sets
only* An identifier that differentiates among multiple resource
record sets that have the same combination of DNS name and type.
:type weight: int
:param weight: *Weighted resource record sets only* Among resource
record sets that have the same combination of DNS name and type,
a value that determines what portion of traffic for the current
resource record set is routed to the associated location
:type region: str
:param region: *Latency-based resource record sets only* Among resource
record sets that have the same combination of DNS name and type,
a value that determines which region this should be associated with
for the latency-based routing
:type alias_evaluate_target_health: bool
:param alias_evaluate_target_health: *Required for alias resource record
sets* Indicates whether this Resource Record Set should respect the
health status of any health checks associated with the ALIAS target
record which it is linked to.
:type health_check: str
:param health_check: Health check to associate with this record
:type failover: str
:param failover: *Failover resource record sets only* Whether this is the
primary or secondary resource record set.
"""
change = Record(name, type, ttl,
alias_hosted_zone_id=alias_hosted_zone_id,
alias_dns_name=alias_dns_name, identifier=identifier,
weight=weight, region=region,
alias_evaluate_target_health=alias_evaluate_target_health,
health_check=health_check, failover=failover)
self.changes.append([action, change])
return change
def add_change_record(self, action, change):
"""Add an existing record to a change set with the specified action"""
self.changes.append([action, change])
return
def to_xml(self):
"""Convert this ResourceRecordSet into XML
to be saved via the ChangeResourceRecordSetsRequest"""
changesXML = ""
for change in self.changes:
changeParams = {"action": change[0], "record": change[1].to_xml()}
changesXML += self.ChangeXML % changeParams
params = {"comment": self.comment, "changes": changesXML}
return self.ChangeResourceRecordSetsBody % params
def commit(self):
"""Commit this change"""
if not self.connection:
import boto
self.connection = boto.connect_route53()
return self.connection.change_rrsets(self.hosted_zone_id, self.to_xml())
def endElement(self, name, value, connection):
"""Overwritten to also add the NextRecordName,
NextRecordType and NextRecordIdentifier to the base object"""
if name == 'NextRecordName':
self.next_record_name = value
elif name == 'NextRecordType':
self.next_record_type = value
elif name == 'NextRecordIdentifier':
self.next_record_identifier = value
else:
return super(ResourceRecordSets, self).endElement(name, value, connection)
def __iter__(self):
"""Override the next function to support paging"""
results = super(ResourceRecordSets, self).__iter__()
truncated = self.is_truncated
while results:
for obj in results:
yield obj
if self.is_truncated:
self.is_truncated = False
results = self.connection.get_all_rrsets(self.hosted_zone_id, name=self.next_record_name,
type=self.next_record_type,
identifier=self.next_record_identifier)
else:
results = None
self.is_truncated = truncated
class Record(object):
"""An individual ResourceRecordSet"""
HealthCheckBody = """<HealthCheckId>%s</HealthCheckId>"""
XMLBody = """<ResourceRecordSet>
<Name>%(name)s</Name>
<Type>%(type)s</Type>
%(weight)s
%(body)s
%(health_check)s
</ResourceRecordSet>"""
WRRBody = """
<SetIdentifier>%(identifier)s</SetIdentifier>
<Weight>%(weight)s</Weight>
"""
RRRBody = """
<SetIdentifier>%(identifier)s</SetIdentifier>
<Region>%(region)s</Region>
"""
FailoverBody = """
<SetIdentifier>%(identifier)s</SetIdentifier>
<Failover>%(failover)s</Failover>
"""
ResourceRecordsBody = """
<TTL>%(ttl)s</TTL>
<ResourceRecords>
%(records)s
</ResourceRecords>"""
ResourceRecordBody = """<ResourceRecord>
<Value>%s</Value>
</ResourceRecord>"""
AliasBody = """<AliasTarget>
<HostedZoneId>%(hosted_zone_id)s</HostedZoneId>
<DNSName>%(dns_name)s</DNSName>
%(eval_target_health)s
</AliasTarget>"""
EvaluateTargetHealth = """<EvaluateTargetHealth>%s</EvaluateTargetHealth>"""
def __init__(self, name=None, type=None, ttl=600, resource_records=None,
alias_hosted_zone_id=None, alias_dns_name=None, identifier=None,
weight=None, region=None, alias_evaluate_target_health=None,
health_check=None, failover=None):
self.name = name
self.type = type
self.ttl = ttl
if resource_records is None:
resource_records = []
self.resource_records = resource_records
self.alias_hosted_zone_id = alias_hosted_zone_id
self.alias_dns_name = alias_dns_name
self.identifier = identifier
self.weight = weight
self.region = region
self.alias_evaluate_target_health = alias_evaluate_target_health
self.health_check = health_check
self.failover = failover
def __repr__(self):
return '<Record:%s:%s:%s>' % (self.name, self.type, self.to_print())
def add_value(self, value):
"""Add a resource record value"""
self.resource_records.append(value)
def set_alias(self, alias_hosted_zone_id, alias_dns_name,
alias_evaluate_target_health=False):
"""Make this an alias resource record set"""
self.alias_hosted_zone_id = alias_hosted_zone_id
self.alias_dns_name = alias_dns_name
self.alias_evaluate_target_health = alias_evaluate_target_health
def to_xml(self):
"""Spit this resource record set out as XML"""
if self.alias_hosted_zone_id is not None and self.alias_dns_name is not None:
# Use alias
if self.alias_evaluate_target_health is not None:
eval_target_health = self.EvaluateTargetHealth % ('true' if self.alias_evaluate_target_health else 'false')
else:
eval_target_health = ""
body = self.AliasBody % {"hosted_zone_id": self.alias_hosted_zone_id,
"dns_name": self.alias_dns_name,
"eval_target_health": eval_target_health}
else:
# Use resource record(s)
records = ""
for r in self.resource_records:
records += self.ResourceRecordBody % r
body = self.ResourceRecordsBody % {
"ttl": self.ttl,
"records": records,
}
weight = ""
if self.identifier is not None and self.weight is not None:
weight = self.WRRBody % {"identifier": self.identifier,
"weight": self.weight}
elif self.identifier is not None and self.region is not None:
weight = self.RRRBody % {"identifier": self.identifier,
"region": self.region}
elif self.identifier is not None and self.failover is not None:
weight = self.FailoverBody % {"identifier": self.identifier,
"failover": self.failover}
health_check = ""
if self.health_check is not None:
health_check = self.HealthCheckBody % (self.health_check)
params = {
"name": self.name,
"type": self.type,
"weight": weight,
"body": body,
"health_check": health_check
}
return self.XMLBody % params
def to_print(self):
rr = ""
if self.alias_hosted_zone_id is not None and self.alias_dns_name is not None:
# Show alias
rr = 'ALIAS ' + self.alias_hosted_zone_id + ' ' + self.alias_dns_name
if self.alias_evaluate_target_health is not None:
rr += ' (EvalTarget %s)' % self.alias_evaluate_target_health
else:
# Show resource record(s)
rr = ",".join(self.resource_records)
if self.identifier is not None and self.weight is not None:
rr += ' (WRR id=%s, w=%s)' % (self.identifier, self.weight)
elif self.identifier is not None and self.region is not None:
rr += ' (LBR id=%s, region=%s)' % (self.identifier, self.region)
elif self.identifier is not None and self.failover is not None:
rr += ' (FAILOVER id=%s, failover=%s)' % (self.identifier, self.failover)
return rr
def endElement(self, name, value, connection):
if name == 'Name':
self.name = value
elif name == 'Type':
self.type = value
elif name == 'TTL':
self.ttl = value
elif name == 'Value':
self.resource_records.append(value)
elif name == 'HostedZoneId':
self.alias_hosted_zone_id = value
elif name == 'DNSName':
self.alias_dns_name = value
elif name == 'SetIdentifier':
self.identifier = value
elif name == 'EvaluateTargetHealth':
self.alias_evaluate_target_health = value.lower() == 'true'
elif name == 'Weight':
self.weight = value
elif name == 'Region':
self.region = value
elif name == 'Failover':
self.failover = value
elif name == 'HealthCheckId':
self.health_check = value
def startElement(self, name, attrs, connection):
return None
| mit |
simbuerg/benchbuild | benchbuild/utils/actions.py | 1 | 11516 | """
This defines classes that can be used to implement a series of Actions.
"""
from benchbuild.settings import CFG
from benchbuild.utils.db import persist_experiment
from benchbuild.utils.run import GuardedRunException
from plumbum import local
from benchbuild.utils.cmd import mkdir, rm, rmdir
from plumbum import ProcessExecutionError
from functools import partial, wraps
from datetime import datetime
from logging import error
import os
import logging
import sys
import traceback
import warnings
import textwrap
from abc import ABCMeta
from enum import Enum, unique
@unique
class StepResult(Enum):
OK = 1,
ERROR = 2
def to_step_result(f):
@wraps(f)
def wrapper(*args, **kwargs):
res = f(*args, **kwargs)
if not res:
res = StepResult.OK
return res
return wrapper
def log_before_after(name, desc):
_log = logging.getLogger(name='benchbuild.steps')
def func_decorator(f):
@wraps(f)
def wrapper(*args, **kwargs):
_log.info("\n{} - {}".format(name, desc))
res = f(*args, **kwargs)
if res == StepResult.OK:
_log.info("{} - OK\n".format(name))
else:
_log.error("{} - ERROR\n".format(name))
return res
return wrapper
return func_decorator
class StepClass(ABCMeta):
def __new__(metacls, name, bases, namespace, **kwds):
result = ABCMeta.__new__(metacls, name, bases, dict(namespace))
NAME = result.NAME
DESCRIPTION = result.DESCRIPTION
if NAME and DESCRIPTION:
result.__call__ = log_before_after(
NAME, DESCRIPTION)(to_step_result(result.__call__))
else:
result.__call__ = to_step_result(result.__call__)
return result
class Step(metaclass=StepClass):
NAME = None
DESCRIPTION = None
def __init__(self, project_or_experiment, action_fn=None):
self._obj = project_or_experiment
self._action_fn = action_fn
def __len__(self):
return 1
def __call__(self):
if not self._action_fn:
return
self._action_fn()
def __str__(self, indent=0):
return textwrap.indent(
"* {0}: Execute configured action.".format(self._obj.name),
indent * " ")
def onerror(self):
Clean(self._obj)()
class Clean(Step):
NAME = "CLEAN"
DESCRIPTION = "Cleans the build directory"
def __init__(self, project_or_experiment, action_fn=None, check_empty=False):
super(Clean, self).__init__(project_or_experiment, action_fn)
self.check_empty = check_empty
def __clean_mountpoints__(self, root):
"""
Unmount any remaining mountpoints under :root.
Args:
root: All UnionFS-mountpoints under this directory will be unmounted.
"""
from benchbuild.utils.run import unionfs_tear_down
import psutil
umount_paths = []
for part in psutil.disk_partitions(all=True):
if os.path.commonpath([
part.mountpoint,
root
]) == root:
if not part.fstype == "fuse.unionfs":
logging.error(
"NON-UnionFS mountpoint found under {0}".format(root))
else:
umount_paths.append(part.mountpoint)
for p in umount_paths:
unionfs_tear_down(p)
def __call__(self):
if not CFG['clean'].value():
return
if not self._obj:
return
obj_builddir = os.path.abspath(self._obj.builddir)
if os.path.exists(obj_builddir):
self.__clean_mountpoints__(obj_builddir)
if self.check_empty:
rmdir(obj_builddir, retcode=None)
else:
rm("-rf", obj_builddir)
def __str__(self, indent=0):
return textwrap.indent("* {0}: Clean the directory: {1}".format(
self._obj.name, self._obj.builddir), indent * " ")
class MakeBuildDir(Step):
NAME = "MKDIR"
DESCRIPTION = "Create the build directory"
def __call__(self):
if not self._obj:
return
if not os.path.exists(self._obj.builddir):
mkdir(self._obj.builddir)
def __str__(self, indent=0):
return textwrap.indent(
"* {0}: Create the build directory".format(self._obj.name),
indent * " ")
class Prepare(Step):
NAME = "PREPARE"
DESCRIPTION = "Prepare project build folder"
def __init__(self, project):
super(Prepare, self).__init__(project, project.prepare)
def __str__(self, indent=0):
return textwrap.indent("* {0}: Prepare".format(self._obj.name),
indent * " ")
class Download(Step):
NAME = "DOWNLOAD"
DESCRIPTION = "Download project source files"
def __init__(self, project):
super(Download, self).__init__(project, project.download)
def __str__(self, indent=0):
return textwrap.indent("* {0}: Download".format(self._obj.name),
indent * " ")
class Configure(Step):
NAME = "CONFIGURE"
DESCRIPTION = "Configure project source files"
def __init__(self, project):
super(Configure, self).__init__(project, project.configure)
def __str__(self, indent=0):
return textwrap.indent("* {0}: Configure".format(self._obj.name),
indent * " ")
class Build(Step):
NAME = "BUILD"
DESCRIPTION = "Build the project"
def __init__(self, project):
super(Build, self).__init__(project, project.build)
def __str__(self, indent=0):
return textwrap.indent("* {0}: Compile".format(self._obj.name),
indent * " ")
class Run(Step):
NAME = "RUN"
DESCRIPTION = "Execute the run action"
def __init__(self, project):
action_fn = partial(project.run, project.runtime_extension)
super(Run, self).__init__(project, action_fn)
def __call__(self):
if not self._obj:
return
if not self._action_fn:
return
with local.env(BB_EXPERIMENT_ID=str(CFG["experiment_id"]),
                       BB_USE_DATABASE=1):
self._action_fn()
def __str__(self, indent=0):
return textwrap.indent(
"* {0}: Execute run-time tests.".format(self._obj.name),
indent * " ")
class Echo(Step):
NAME = 'ECHO'
DESCRIPTION = 'Print a message.'
def __init__(self, message):
self._message = message
def __str__(self, indent=0):
return textwrap.indent("* echo: {0}".format(self._message),
indent * " ")
def __call__(self):
print()
print(self._message)
print()
class Any(Step):
NAME = "ANY"
DESCRIPTION = "Just run all actions, no questions asked."
def __init__(self, actions):
self._actions = actions
self._exlog = logging.getLogger('benchbuild')
super(Any, self).__init__(None, None)
def __len__(self):
return sum([len(x) for x in self._actions])
def __call__(self):
length = len(self._actions)
cnt = 0
for a in self._actions:
result = a()
cnt = cnt + 1
if result == StepResult.ERROR:
                self._exlog.warn("{0} actions left in queue".format(
                    length - cnt))
return StepResult.OK
def __str__(self, indent=0):
sub_actns = [a.__str__(indent + 1) for a in self._actions]
sub_actns = "\n".join(sub_actns)
return textwrap.indent("* Execute all of:\n" + sub_actns, indent * " ")
class Experiment(Any):
NAME = "EXPERIMENT"
DESCRIPTION = "Run a experiment, wrapped in a db transaction"
def __init__(self, experiment, actions):
self._experiment = experiment
actions = \
[Echo("Start experiment: {0}".format(experiment.name))] + actions
super(Experiment, self).__init__(actions)
def begin_transaction(self):
experiment, session = persist_experiment(self._experiment)
if experiment.begin is None:
experiment.begin = datetime.now()
else:
experiment.begin = min(experiment.begin, datetime.now())
session.add(experiment)
session.commit()
return experiment, session
def end_transaction(self, experiment, session):
if experiment.end is None:
experiment.end = datetime.now()
else:
experiment.end = max(experiment.end, datetime.now())
session.add(experiment)
session.commit()
def __call__(self):
result = StepResult.OK
experiment, session = self.begin_transaction()
try:
for a in self._actions:
with local.env(BB_EXPERIMENT_ID=str(CFG["experiment_id"])):
result = a()
except KeyboardInterrupt:
error("User requested termination.")
except Exception:
exc_type, exc_value, exc_traceback = sys.exc_info()
formatted = "".join(traceback.format_exception(exc_type, exc_value,
exc_traceback))
warnings.warn(formatted, category=RuntimeWarning)
print("Shutting down...")
finally:
self.end_transaction(experiment, session)
return result
def __str__(self, indent=0):
sub_actns = [a.__str__(indent + 1) for a in self._actions]
sub_actns = "\n".join(sub_actns)
return textwrap.indent(
"\nExperiment: {0}\n".format(self._experiment.name) + sub_actns,
indent * " ")
class RequireAll(Step):
def __init__(self, actions):
self._actions = actions
self._exlog = logging.getLogger('benchbuild')
super(RequireAll, self).__init__(None, None)
def __len__(self):
return sum([len(x) for x in self._actions])
def __call__(self):
for i, action in enumerate(self._actions):
try:
result = action()
except ProcessExecutionError as proc_ex:
self._exlog.error(u'Execution failed for: ' + str(proc_ex))
result = StepResult.ERROR
except (OSError, GuardedRunException) as os_ex:
self._exlog.error(os_ex)
result = StepResult.ERROR
if result != StepResult.OK:
self._exlog.error("Execution of #{0}: '{1}' failed.".format(
i, str(action)))
action.onerror()
return result
def __str__(self, indent=0):
sub_actns = [a.__str__(indent + 1) for a in self._actions]
sub_actns = "\n".join(sub_actns)
return textwrap.indent("* All required:\n" + sub_actns, indent * " ")
class CleanExtra(Step):
NAME = "CLEAN EXTRA"
DESCRIPTION = "Cleans the extra directories."
def __call__(self):
if not CFG['clean'].value():
return
paths = CFG["cleanup_paths"].value()
for p in paths:
if os.path.exists(p):
rm("-r", p)
def __str__(self, indent=0):
paths = CFG["cleanup_paths"].value()
lines = []
for p in paths:
lines.append(textwrap.indent("* Clean the directory: {0}".format(
p), indent * " "))
return "\n".join(lines)
| mit |
rdqw/sscoin | qa/rpc-tests/disablewallet.py | 41 | 1156 | #!/usr/bin/env python2
# Copyright (c) 2014-2015 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Exercise API with -disablewallet.
#
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
class DisableWalletTest (BitcoinTestFramework):
def setup_chain(self):
print("Initializing test directory "+self.options.tmpdir)
initialize_chain_clean(self.options.tmpdir, 1)
def setup_network(self, split=False):
self.nodes = start_nodes(1, self.options.tmpdir, [['-disablewallet']])
self.is_network_split = False
self.sync_all()
def run_test (self):
# Check regression: https://github.com/bitcoin/bitcoin/issues/6963#issuecomment-154548880
x = self.nodes[0].validateaddress('7TSBtVu959hGEGPKyHjJz9k55RpWrPffXz')
assert(x['isvalid'] == False)
x = self.nodes[0].validateaddress('ycwedq2f3sz2Yf9JqZsBCQPxp18WU3Hp4J')
assert(x['isvalid'] == True)
if __name__ == '__main__':
DisableWalletTest ().main ()
| mit |
w1ll1am23/home-assistant | homeassistant/components/tesla/binary_sensor.py | 14 | 1164 | """Support for Tesla binary sensor."""
from homeassistant.components.binary_sensor import DEVICE_CLASSES, BinarySensorEntity
from . import DOMAIN as TESLA_DOMAIN, TeslaDevice
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Tesla binary_sensors by config_entry."""
async_add_entities(
[
TeslaBinarySensor(
device,
hass.data[TESLA_DOMAIN][config_entry.entry_id]["coordinator"],
)
for device in hass.data[TESLA_DOMAIN][config_entry.entry_id]["devices"][
"binary_sensor"
]
],
True,
)
class TeslaBinarySensor(TeslaDevice, BinarySensorEntity):
"""Implement an Tesla binary sensor for parking and charger."""
@property
def device_class(self):
"""Return the class of this binary sensor."""
return (
self.tesla_device.sensor_type
if self.tesla_device.sensor_type in DEVICE_CLASSES
else None
)
@property
def is_on(self):
"""Return the state of the binary sensor."""
return self.tesla_device.get_value()
| apache-2.0 |
samarthmed/emacs-config | .python-environments/default/lib/python2.7/site-packages/pip/_vendor/requests/packages/chardet/mbcssm.py | 1783 | 19590 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .constants import eStart, eError, eItsMe
# BIG5
BIG5_cls = (
1,1,1,1,1,1,1,1, # 00 - 07 #allow 0x00 as legal value
1,1,1,1,1,1,0,0, # 08 - 0f
1,1,1,1,1,1,1,1, # 10 - 17
1,1,1,0,1,1,1,1, # 18 - 1f
1,1,1,1,1,1,1,1, # 20 - 27
1,1,1,1,1,1,1,1, # 28 - 2f
1,1,1,1,1,1,1,1, # 30 - 37
1,1,1,1,1,1,1,1, # 38 - 3f
2,2,2,2,2,2,2,2, # 40 - 47
2,2,2,2,2,2,2,2, # 48 - 4f
2,2,2,2,2,2,2,2, # 50 - 57
2,2,2,2,2,2,2,2, # 58 - 5f
2,2,2,2,2,2,2,2, # 60 - 67
2,2,2,2,2,2,2,2, # 68 - 6f
2,2,2,2,2,2,2,2, # 70 - 77
2,2,2,2,2,2,2,1, # 78 - 7f
4,4,4,4,4,4,4,4, # 80 - 87
4,4,4,4,4,4,4,4, # 88 - 8f
4,4,4,4,4,4,4,4, # 90 - 97
4,4,4,4,4,4,4,4, # 98 - 9f
4,3,3,3,3,3,3,3, # a0 - a7
3,3,3,3,3,3,3,3, # a8 - af
3,3,3,3,3,3,3,3, # b0 - b7
3,3,3,3,3,3,3,3, # b8 - bf
3,3,3,3,3,3,3,3, # c0 - c7
3,3,3,3,3,3,3,3, # c8 - cf
3,3,3,3,3,3,3,3, # d0 - d7
3,3,3,3,3,3,3,3, # d8 - df
3,3,3,3,3,3,3,3, # e0 - e7
3,3,3,3,3,3,3,3, # e8 - ef
3,3,3,3,3,3,3,3, # f0 - f7
3,3,3,3,3,3,3,0 # f8 - ff
)
BIG5_st = (
eError,eStart,eStart, 3,eError,eError,eError,eError,#00-07
eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eError,#08-0f
eError,eStart,eStart,eStart,eStart,eStart,eStart,eStart#10-17
)
Big5CharLenTable = (0, 1, 1, 2, 0)
Big5SMModel = {'classTable': BIG5_cls,
'classFactor': 5,
'stateTable': BIG5_st,
'charLenTable': Big5CharLenTable,
'name': 'Big5'}
# CP949
CP949_cls = (
1,1,1,1,1,1,1,1, 1,1,1,1,1,1,0,0, # 00 - 0f
1,1,1,1,1,1,1,1, 1,1,1,0,1,1,1,1, # 10 - 1f
1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, # 20 - 2f
1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, # 30 - 3f
1,4,4,4,4,4,4,4, 4,4,4,4,4,4,4,4, # 40 - 4f
4,4,5,5,5,5,5,5, 5,5,5,1,1,1,1,1, # 50 - 5f
1,5,5,5,5,5,5,5, 5,5,5,5,5,5,5,5, # 60 - 6f
5,5,5,5,5,5,5,5, 5,5,5,1,1,1,1,1, # 70 - 7f
0,6,6,6,6,6,6,6, 6,6,6,6,6,6,6,6, # 80 - 8f
6,6,6,6,6,6,6,6, 6,6,6,6,6,6,6,6, # 90 - 9f
6,7,7,7,7,7,7,7, 7,7,7,7,7,8,8,8, # a0 - af
7,7,7,7,7,7,7,7, 7,7,7,7,7,7,7,7, # b0 - bf
7,7,7,7,7,7,9,2, 2,3,2,2,2,2,2,2, # c0 - cf
2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,2, # d0 - df
2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,2, # e0 - ef
2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,0, # f0 - ff
)
CP949_st = (
#cls= 0 1 2 3 4 5 6 7 8 9 # previous state =
eError,eStart, 3,eError,eStart,eStart, 4, 5,eError, 6, # eStart
eError,eError,eError,eError,eError,eError,eError,eError,eError,eError, # eError
eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe, # eItsMe
eError,eError,eStart,eStart,eError,eError,eError,eStart,eStart,eStart, # 3
eError,eError,eStart,eStart,eStart,eStart,eStart,eStart,eStart,eStart, # 4
eError,eStart,eStart,eStart,eStart,eStart,eStart,eStart,eStart,eStart, # 5
eError,eStart,eStart,eStart,eStart,eError,eError,eStart,eStart,eStart, # 6
)
CP949CharLenTable = (0, 1, 2, 0, 1, 1, 2, 2, 0, 2)
CP949SMModel = {'classTable': CP949_cls,
'classFactor': 10,
'stateTable': CP949_st,
'charLenTable': CP949CharLenTable,
'name': 'CP949'}
# EUC-JP
EUCJP_cls = (
4,4,4,4,4,4,4,4, # 00 - 07
4,4,4,4,4,4,5,5, # 08 - 0f
4,4,4,4,4,4,4,4, # 10 - 17
4,4,4,5,4,4,4,4, # 18 - 1f
4,4,4,4,4,4,4,4, # 20 - 27
4,4,4,4,4,4,4,4, # 28 - 2f
4,4,4,4,4,4,4,4, # 30 - 37
4,4,4,4,4,4,4,4, # 38 - 3f
4,4,4,4,4,4,4,4, # 40 - 47
4,4,4,4,4,4,4,4, # 48 - 4f
4,4,4,4,4,4,4,4, # 50 - 57
4,4,4,4,4,4,4,4, # 58 - 5f
4,4,4,4,4,4,4,4, # 60 - 67
4,4,4,4,4,4,4,4, # 68 - 6f
4,4,4,4,4,4,4,4, # 70 - 77
4,4,4,4,4,4,4,4, # 78 - 7f
5,5,5,5,5,5,5,5, # 80 - 87
5,5,5,5,5,5,1,3, # 88 - 8f
5,5,5,5,5,5,5,5, # 90 - 97
5,5,5,5,5,5,5,5, # 98 - 9f
5,2,2,2,2,2,2,2, # a0 - a7
2,2,2,2,2,2,2,2, # a8 - af
2,2,2,2,2,2,2,2, # b0 - b7
2,2,2,2,2,2,2,2, # b8 - bf
2,2,2,2,2,2,2,2, # c0 - c7
2,2,2,2,2,2,2,2, # c8 - cf
2,2,2,2,2,2,2,2, # d0 - d7
2,2,2,2,2,2,2,2, # d8 - df
0,0,0,0,0,0,0,0, # e0 - e7
0,0,0,0,0,0,0,0, # e8 - ef
0,0,0,0,0,0,0,0, # f0 - f7
0,0,0,0,0,0,0,5 # f8 - ff
)
EUCJP_st = (
3, 4, 3, 5,eStart,eError,eError,eError,#00-07
eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,#08-0f
eItsMe,eItsMe,eStart,eError,eStart,eError,eError,eError,#10-17
eError,eError,eStart,eError,eError,eError, 3,eError,#18-1f
3,eError,eError,eError,eStart,eStart,eStart,eStart#20-27
)
EUCJPCharLenTable = (2, 2, 2, 3, 1, 0)
EUCJPSMModel = {'classTable': EUCJP_cls,
'classFactor': 6,
'stateTable': EUCJP_st,
'charLenTable': EUCJPCharLenTable,
'name': 'EUC-JP'}
# EUC-KR
EUCKR_cls = (
1,1,1,1,1,1,1,1, # 00 - 07
1,1,1,1,1,1,0,0, # 08 - 0f
1,1,1,1,1,1,1,1, # 10 - 17
1,1,1,0,1,1,1,1, # 18 - 1f
1,1,1,1,1,1,1,1, # 20 - 27
1,1,1,1,1,1,1,1, # 28 - 2f
1,1,1,1,1,1,1,1, # 30 - 37
1,1,1,1,1,1,1,1, # 38 - 3f
1,1,1,1,1,1,1,1, # 40 - 47
1,1,1,1,1,1,1,1, # 48 - 4f
1,1,1,1,1,1,1,1, # 50 - 57
1,1,1,1,1,1,1,1, # 58 - 5f
1,1,1,1,1,1,1,1, # 60 - 67
1,1,1,1,1,1,1,1, # 68 - 6f
1,1,1,1,1,1,1,1, # 70 - 77
1,1,1,1,1,1,1,1, # 78 - 7f
0,0,0,0,0,0,0,0, # 80 - 87
0,0,0,0,0,0,0,0, # 88 - 8f
0,0,0,0,0,0,0,0, # 90 - 97
0,0,0,0,0,0,0,0, # 98 - 9f
0,2,2,2,2,2,2,2, # a0 - a7
2,2,2,2,2,3,3,3, # a8 - af
2,2,2,2,2,2,2,2, # b0 - b7
2,2,2,2,2,2,2,2, # b8 - bf
2,2,2,2,2,2,2,2, # c0 - c7
2,3,2,2,2,2,2,2, # c8 - cf
2,2,2,2,2,2,2,2, # d0 - d7
2,2,2,2,2,2,2,2, # d8 - df
2,2,2,2,2,2,2,2, # e0 - e7
2,2,2,2,2,2,2,2, # e8 - ef
2,2,2,2,2,2,2,2, # f0 - f7
2,2,2,2,2,2,2,0 # f8 - ff
)
EUCKR_st = (
eError,eStart, 3,eError,eError,eError,eError,eError,#00-07
eItsMe,eItsMe,eItsMe,eItsMe,eError,eError,eStart,eStart #08-0f
)
EUCKRCharLenTable = (0, 1, 2, 0)
EUCKRSMModel = {'classTable': EUCKR_cls,
'classFactor': 4,
'stateTable': EUCKR_st,
'charLenTable': EUCKRCharLenTable,
'name': 'EUC-KR'}
# EUC-TW
EUCTW_cls = (
2,2,2,2,2,2,2,2, # 00 - 07
2,2,2,2,2,2,0,0, # 08 - 0f
2,2,2,2,2,2,2,2, # 10 - 17
2,2,2,0,2,2,2,2, # 18 - 1f
2,2,2,2,2,2,2,2, # 20 - 27
2,2,2,2,2,2,2,2, # 28 - 2f
2,2,2,2,2,2,2,2, # 30 - 37
2,2,2,2,2,2,2,2, # 38 - 3f
2,2,2,2,2,2,2,2, # 40 - 47
2,2,2,2,2,2,2,2, # 48 - 4f
2,2,2,2,2,2,2,2, # 50 - 57
2,2,2,2,2,2,2,2, # 58 - 5f
2,2,2,2,2,2,2,2, # 60 - 67
2,2,2,2,2,2,2,2, # 68 - 6f
2,2,2,2,2,2,2,2, # 70 - 77
2,2,2,2,2,2,2,2, # 78 - 7f
0,0,0,0,0,0,0,0, # 80 - 87
0,0,0,0,0,0,6,0, # 88 - 8f
0,0,0,0,0,0,0,0, # 90 - 97
0,0,0,0,0,0,0,0, # 98 - 9f
0,3,4,4,4,4,4,4, # a0 - a7
5,5,1,1,1,1,1,1, # a8 - af
1,1,1,1,1,1,1,1, # b0 - b7
1,1,1,1,1,1,1,1, # b8 - bf
1,1,3,1,3,3,3,3, # c0 - c7
3,3,3,3,3,3,3,3, # c8 - cf
3,3,3,3,3,3,3,3, # d0 - d7
3,3,3,3,3,3,3,3, # d8 - df
3,3,3,3,3,3,3,3, # e0 - e7
3,3,3,3,3,3,3,3, # e8 - ef
3,3,3,3,3,3,3,3, # f0 - f7
3,3,3,3,3,3,3,0 # f8 - ff
)
EUCTW_st = (
eError,eError,eStart, 3, 3, 3, 4,eError,#00-07
eError,eError,eError,eError,eError,eError,eItsMe,eItsMe,#08-0f
eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eError,eStart,eError,#10-17
eStart,eStart,eStart,eError,eError,eError,eError,eError,#18-1f
5,eError,eError,eError,eStart,eError,eStart,eStart,#20-27
eStart,eError,eStart,eStart,eStart,eStart,eStart,eStart #28-2f
)
EUCTWCharLenTable = (0, 0, 1, 2, 2, 2, 3)
EUCTWSMModel = {'classTable': EUCTW_cls,
'classFactor': 7,
'stateTable': EUCTW_st,
'charLenTable': EUCTWCharLenTable,
'name': 'x-euc-tw'}
# GB2312
GB2312_cls = (
1,1,1,1,1,1,1,1, # 00 - 07
1,1,1,1,1,1,0,0, # 08 - 0f
1,1,1,1,1,1,1,1, # 10 - 17
1,1,1,0,1,1,1,1, # 18 - 1f
1,1,1,1,1,1,1,1, # 20 - 27
1,1,1,1,1,1,1,1, # 28 - 2f
3,3,3,3,3,3,3,3, # 30 - 37
3,3,1,1,1,1,1,1, # 38 - 3f
2,2,2,2,2,2,2,2, # 40 - 47
2,2,2,2,2,2,2,2, # 48 - 4f
2,2,2,2,2,2,2,2, # 50 - 57
2,2,2,2,2,2,2,2, # 58 - 5f
2,2,2,2,2,2,2,2, # 60 - 67
2,2,2,2,2,2,2,2, # 68 - 6f
2,2,2,2,2,2,2,2, # 70 - 77
2,2,2,2,2,2,2,4, # 78 - 7f
5,6,6,6,6,6,6,6, # 80 - 87
6,6,6,6,6,6,6,6, # 88 - 8f
6,6,6,6,6,6,6,6, # 90 - 97
6,6,6,6,6,6,6,6, # 98 - 9f
6,6,6,6,6,6,6,6, # a0 - a7
6,6,6,6,6,6,6,6, # a8 - af
6,6,6,6,6,6,6,6, # b0 - b7
6,6,6,6,6,6,6,6, # b8 - bf
6,6,6,6,6,6,6,6, # c0 - c7
6,6,6,6,6,6,6,6, # c8 - cf
6,6,6,6,6,6,6,6, # d0 - d7
6,6,6,6,6,6,6,6, # d8 - df
6,6,6,6,6,6,6,6, # e0 - e7
6,6,6,6,6,6,6,6, # e8 - ef
6,6,6,6,6,6,6,6, # f0 - f7
6,6,6,6,6,6,6,0 # f8 - ff
)
GB2312_st = (
eError,eStart,eStart,eStart,eStart,eStart, 3,eError,#00-07
eError,eError,eError,eError,eError,eError,eItsMe,eItsMe,#08-0f
eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eError,eError,eStart,#10-17
4,eError,eStart,eStart,eError,eError,eError,eError,#18-1f
eError,eError, 5,eError,eError,eError,eItsMe,eError,#20-27
eError,eError,eStart,eStart,eStart,eStart,eStart,eStart #28-2f
)
# To be accurate, the length of class 6 can be either 2 or 4.
# But it is not necessary to discriminate between the two since
# it is used for frequency analysis only, and we are validating
# each code range there as well. So it is safe to set it to be
# 2 here.
GB2312CharLenTable = (0, 1, 1, 1, 1, 1, 2)
GB2312SMModel = {'classTable': GB2312_cls,
'classFactor': 7,
'stateTable': GB2312_st,
'charLenTable': GB2312CharLenTable,
'name': 'GB2312'}
# Shift_JIS
SJIS_cls = (
1,1,1,1,1,1,1,1, # 00 - 07
1,1,1,1,1,1,0,0, # 08 - 0f
1,1,1,1,1,1,1,1, # 10 - 17
1,1,1,0,1,1,1,1, # 18 - 1f
1,1,1,1,1,1,1,1, # 20 - 27
1,1,1,1,1,1,1,1, # 28 - 2f
1,1,1,1,1,1,1,1, # 30 - 37
1,1,1,1,1,1,1,1, # 38 - 3f
2,2,2,2,2,2,2,2, # 40 - 47
2,2,2,2,2,2,2,2, # 48 - 4f
2,2,2,2,2,2,2,2, # 50 - 57
2,2,2,2,2,2,2,2, # 58 - 5f
2,2,2,2,2,2,2,2, # 60 - 67
2,2,2,2,2,2,2,2, # 68 - 6f
2,2,2,2,2,2,2,2, # 70 - 77
2,2,2,2,2,2,2,1, # 78 - 7f
3,3,3,3,3,2,2,3, # 80 - 87
3,3,3,3,3,3,3,3, # 88 - 8f
3,3,3,3,3,3,3,3, # 90 - 97
3,3,3,3,3,3,3,3, # 98 - 9f
    #0xa0 is illegal in sjis encoding, but some pages do
    #contain such a byte. We need to be more forgiving of errors.
2,2,2,2,2,2,2,2, # a0 - a7
2,2,2,2,2,2,2,2, # a8 - af
2,2,2,2,2,2,2,2, # b0 - b7
2,2,2,2,2,2,2,2, # b8 - bf
2,2,2,2,2,2,2,2, # c0 - c7
2,2,2,2,2,2,2,2, # c8 - cf
2,2,2,2,2,2,2,2, # d0 - d7
2,2,2,2,2,2,2,2, # d8 - df
3,3,3,3,3,3,3,3, # e0 - e7
3,3,3,3,3,4,4,4, # e8 - ef
3,3,3,3,3,3,3,3, # f0 - f7
3,3,3,3,3,0,0,0) # f8 - ff
SJIS_st = (
eError,eStart,eStart, 3,eError,eError,eError,eError,#00-07
eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,#08-0f
eItsMe,eItsMe,eError,eError,eStart,eStart,eStart,eStart #10-17
)
SJISCharLenTable = (0, 1, 1, 2, 0, 0)
SJISSMModel = {'classTable': SJIS_cls,
'classFactor': 6,
'stateTable': SJIS_st,
'charLenTable': SJISCharLenTable,
'name': 'Shift_JIS'}
# UCS2-BE
UCS2BE_cls = (
0,0,0,0,0,0,0,0, # 00 - 07
0,0,1,0,0,2,0,0, # 08 - 0f
0,0,0,0,0,0,0,0, # 10 - 17
0,0,0,3,0,0,0,0, # 18 - 1f
0,0,0,0,0,0,0,0, # 20 - 27
0,3,3,3,3,3,0,0, # 28 - 2f
0,0,0,0,0,0,0,0, # 30 - 37
0,0,0,0,0,0,0,0, # 38 - 3f
0,0,0,0,0,0,0,0, # 40 - 47
0,0,0,0,0,0,0,0, # 48 - 4f
0,0,0,0,0,0,0,0, # 50 - 57
0,0,0,0,0,0,0,0, # 58 - 5f
0,0,0,0,0,0,0,0, # 60 - 67
0,0,0,0,0,0,0,0, # 68 - 6f
0,0,0,0,0,0,0,0, # 70 - 77
0,0,0,0,0,0,0,0, # 78 - 7f
0,0,0,0,0,0,0,0, # 80 - 87
0,0,0,0,0,0,0,0, # 88 - 8f
0,0,0,0,0,0,0,0, # 90 - 97
0,0,0,0,0,0,0,0, # 98 - 9f
0,0,0,0,0,0,0,0, # a0 - a7
0,0,0,0,0,0,0,0, # a8 - af
0,0,0,0,0,0,0,0, # b0 - b7
0,0,0,0,0,0,0,0, # b8 - bf
0,0,0,0,0,0,0,0, # c0 - c7
0,0,0,0,0,0,0,0, # c8 - cf
0,0,0,0,0,0,0,0, # d0 - d7
0,0,0,0,0,0,0,0, # d8 - df
0,0,0,0,0,0,0,0, # e0 - e7
0,0,0,0,0,0,0,0, # e8 - ef
0,0,0,0,0,0,0,0, # f0 - f7
0,0,0,0,0,0,4,5 # f8 - ff
)
UCS2BE_st = (
5, 7, 7,eError, 4, 3,eError,eError,#00-07
eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,#08-0f
eItsMe,eItsMe, 6, 6, 6, 6,eError,eError,#10-17
6, 6, 6, 6, 6,eItsMe, 6, 6,#18-1f
6, 6, 6, 6, 5, 7, 7,eError,#20-27
5, 8, 6, 6,eError, 6, 6, 6,#28-2f
6, 6, 6, 6,eError,eError,eStart,eStart #30-37
)
UCS2BECharLenTable = (2, 2, 2, 0, 2, 2)
UCS2BESMModel = {'classTable': UCS2BE_cls,
'classFactor': 6,
'stateTable': UCS2BE_st,
'charLenTable': UCS2BECharLenTable,
'name': 'UTF-16BE'}
# UCS2-LE
UCS2LE_cls = (
0,0,0,0,0,0,0,0, # 00 - 07
0,0,1,0,0,2,0,0, # 08 - 0f
0,0,0,0,0,0,0,0, # 10 - 17
0,0,0,3,0,0,0,0, # 18 - 1f
0,0,0,0,0,0,0,0, # 20 - 27
0,3,3,3,3,3,0,0, # 28 - 2f
0,0,0,0,0,0,0,0, # 30 - 37
0,0,0,0,0,0,0,0, # 38 - 3f
0,0,0,0,0,0,0,0, # 40 - 47
0,0,0,0,0,0,0,0, # 48 - 4f
0,0,0,0,0,0,0,0, # 50 - 57
0,0,0,0,0,0,0,0, # 58 - 5f
0,0,0,0,0,0,0,0, # 60 - 67
0,0,0,0,0,0,0,0, # 68 - 6f
0,0,0,0,0,0,0,0, # 70 - 77
0,0,0,0,0,0,0,0, # 78 - 7f
0,0,0,0,0,0,0,0, # 80 - 87
0,0,0,0,0,0,0,0, # 88 - 8f
0,0,0,0,0,0,0,0, # 90 - 97
0,0,0,0,0,0,0,0, # 98 - 9f
0,0,0,0,0,0,0,0, # a0 - a7
0,0,0,0,0,0,0,0, # a8 - af
0,0,0,0,0,0,0,0, # b0 - b7
0,0,0,0,0,0,0,0, # b8 - bf
0,0,0,0,0,0,0,0, # c0 - c7
0,0,0,0,0,0,0,0, # c8 - cf
0,0,0,0,0,0,0,0, # d0 - d7
0,0,0,0,0,0,0,0, # d8 - df
0,0,0,0,0,0,0,0, # e0 - e7
0,0,0,0,0,0,0,0, # e8 - ef
0,0,0,0,0,0,0,0, # f0 - f7
0,0,0,0,0,0,4,5 # f8 - ff
)
UCS2LE_st = (
6, 6, 7, 6, 4, 3,eError,eError,#00-07
eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,#08-0f
eItsMe,eItsMe, 5, 5, 5,eError,eItsMe,eError,#10-17
5, 5, 5,eError, 5,eError, 6, 6,#18-1f
7, 6, 8, 8, 5, 5, 5,eError,#20-27
5, 5, 5,eError,eError,eError, 5, 5,#28-2f
5, 5, 5,eError, 5,eError,eStart,eStart #30-37
)
UCS2LECharLenTable = (2, 2, 2, 2, 2, 2)
UCS2LESMModel = {'classTable': UCS2LE_cls,
'classFactor': 6,
'stateTable': UCS2LE_st,
'charLenTable': UCS2LECharLenTable,
'name': 'UTF-16LE'}
# UTF-8
UTF8_cls = (
1,1,1,1,1,1,1,1, # 00 - 07 #allow 0x00 as a legal value
1,1,1,1,1,1,0,0, # 08 - 0f
1,1,1,1,1,1,1,1, # 10 - 17
1,1,1,0,1,1,1,1, # 18 - 1f
1,1,1,1,1,1,1,1, # 20 - 27
1,1,1,1,1,1,1,1, # 28 - 2f
1,1,1,1,1,1,1,1, # 30 - 37
1,1,1,1,1,1,1,1, # 38 - 3f
1,1,1,1,1,1,1,1, # 40 - 47
1,1,1,1,1,1,1,1, # 48 - 4f
1,1,1,1,1,1,1,1, # 50 - 57
1,1,1,1,1,1,1,1, # 58 - 5f
1,1,1,1,1,1,1,1, # 60 - 67
1,1,1,1,1,1,1,1, # 68 - 6f
1,1,1,1,1,1,1,1, # 70 - 77
1,1,1,1,1,1,1,1, # 78 - 7f
2,2,2,2,3,3,3,3, # 80 - 87
4,4,4,4,4,4,4,4, # 88 - 8f
4,4,4,4,4,4,4,4, # 90 - 97
4,4,4,4,4,4,4,4, # 98 - 9f
5,5,5,5,5,5,5,5, # a0 - a7
5,5,5,5,5,5,5,5, # a8 - af
5,5,5,5,5,5,5,5, # b0 - b7
5,5,5,5,5,5,5,5, # b8 - bf
0,0,6,6,6,6,6,6, # c0 - c7
6,6,6,6,6,6,6,6, # c8 - cf
6,6,6,6,6,6,6,6, # d0 - d7
6,6,6,6,6,6,6,6, # d8 - df
7,8,8,8,8,8,8,8, # e0 - e7
8,8,8,8,8,9,8,8, # e8 - ef
10,11,11,11,11,11,11,11, # f0 - f7
12,13,13,13,14,15,0,0 # f8 - ff
)
UTF8_st = (
eError,eStart,eError,eError,eError,eError, 12, 10,#00-07
9, 11, 8, 7, 6, 5, 4, 3,#08-0f
eError,eError,eError,eError,eError,eError,eError,eError,#10-17
eError,eError,eError,eError,eError,eError,eError,eError,#18-1f
eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,#20-27
eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,#28-2f
eError,eError, 5, 5, 5, 5,eError,eError,#30-37
eError,eError,eError,eError,eError,eError,eError,eError,#38-3f
eError,eError,eError, 5, 5, 5,eError,eError,#40-47
eError,eError,eError,eError,eError,eError,eError,eError,#48-4f
eError,eError, 7, 7, 7, 7,eError,eError,#50-57
eError,eError,eError,eError,eError,eError,eError,eError,#58-5f
eError,eError,eError,eError, 7, 7,eError,eError,#60-67
eError,eError,eError,eError,eError,eError,eError,eError,#68-6f
eError,eError, 9, 9, 9, 9,eError,eError,#70-77
eError,eError,eError,eError,eError,eError,eError,eError,#78-7f
eError,eError,eError,eError,eError, 9,eError,eError,#80-87
eError,eError,eError,eError,eError,eError,eError,eError,#88-8f
eError,eError, 12, 12, 12, 12,eError,eError,#90-97
eError,eError,eError,eError,eError,eError,eError,eError,#98-9f
eError,eError,eError,eError,eError, 12,eError,eError,#a0-a7
eError,eError,eError,eError,eError,eError,eError,eError,#a8-af
eError,eError, 12, 12, 12,eError,eError,eError,#b0-b7
eError,eError,eError,eError,eError,eError,eError,eError,#b8-bf
eError,eError,eStart,eStart,eStart,eStart,eError,eError,#c0-c7
eError,eError,eError,eError,eError,eError,eError,eError #c8-cf
)
UTF8CharLenTable = (0, 1, 0, 0, 0, 0, 2, 3, 3, 3, 4, 4, 5, 5, 6, 6)
UTF8SMModel = {'classTable': UTF8_cls,
'classFactor': 16,
'stateTable': UTF8_st,
'charLenTable': UTF8CharLenTable,
'name': 'UTF-8'}
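# Illustrative usage sketch (assumption: the sibling codingstatemachine
# module provides a CodingStateMachine class with a next_state(byte) method,
# as in this chardet release). Each *SMModel above drives one byte-level
# detector:
#
#   from .codingstatemachine import CodingStateMachine
#
#   sm = CodingStateMachine(UTF8SMModel)
#   for byte in data:
#       state = sm.next_state(byte)
#       if state == eError:
#           break  # the input cannot be UTF-8
#       elif state == eItsMe:
#           break  # decisive evidence that the input is UTF-8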
| gpl-2.0 |
trdean/grEME | gr-digital/examples/run_length.py | 87 | 2389 | #!/usr/bin/env python
#
# Copyright 2007 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
from optparse import OptionParser
import sys
def main():
parser = OptionParser()
parser.add_option("-f", "--file", default=None,
help="Choose file to read data from.")
(options, args) = parser.parse_args()
    if options.file is None:
print "Must specify file to read from using '-f'."
sys.exit(1)
print "Using", options.file, "for data."
f = open(options.file, 'r')
runs = []
count = 0
current = 0
bytes = 0
bits = 0
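    # Count runs of identical bits across the whole file, scanning each
    # byte MSB-first.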
for ch in f.read():
x = ord(ch)
bytes = bytes + 1
for i in range(7,-1,-1):
bits = bits + 1
t = (x >> i) & 0x1
if t == current:
count = count + 1
else:
if count > 0:
if len(runs) < count:
for j in range(count - len(runs)):
                            runs.append(0)
runs[count-1] = runs[count-1] + 1
                current = 1 - current
count = 1
# Deal with last run at EOF
if len(runs) < count and count > 0:
for j in range(count - len(runs)):
            runs.append(0)
runs[count-1] = runs[count-1] + 1
chk = 0
print "Bytes read: ", bytes
print "Bits read: ", bits
print
for i in range(len(runs)):
chk = chk + runs[i]*(i+1)
print "Runs of length", i+1, ":", runs[i]
print
print "Sum of runs:", chk, "bits"
print
print "Maximum run length is", len(runs), "bits"
if __name__ == "__main__":
main()
| gpl-3.0 |
PearsonIOKI/compose-forum | askbot/migrations/0127_save_category_tree_as_json.py | 13 | 30228 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import DataMigration
from django.utils import simplejson
from django.db import models
from askbot.conf import settings as askbot_settings
def get_subtree(tree, path):
"""#this might be simpler, but not tested
clevel = tree
for step in path:
try:
            clevel = clevel[step]
except IndexError:
return False
return clevel
"""
if len(path) == 1:
assert(path[0] == 0)
return tree
else:
import copy
parent_path = copy.copy(path)
leaf_index = parent_path.pop()
branch_index = parent_path[-1]
parent_tree = get_subtree(tree, parent_path)
return parent_tree[branch_index][1]
def parse_tree(text):
"""parse tree represented as indented text
one item per line, with two spaces per level of indentation
"""
lines = text.split('\n')
import re
in_re = re.compile(r'^([ ]*)')
tree = [['dummy', []]]
subtree_path = [0]
clevel = 0
for line in lines:
if line.strip() == '':
continue
match = in_re.match(line)
level = len(match.group(1))/2 + 1
if level > clevel:
            subtree_path.append(0)
elif level < clevel:
subtree_path = subtree_path[:level+1]
leaf_index = subtree_path.pop()
subtree_path.append(leaf_index + 1)
else:
leaf_index = subtree_path.pop()
subtree_path.append(leaf_index + 1)
clevel = level
try:
subtree = get_subtree(tree, subtree_path)
except:
return tree
subtree.append([line.strip(), []])
return tree
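# Illustrative example (hypothetical input, not part of the migration):
#
#   parse_tree("cars\n  sedans\n  trucks\nhouses")
#   # -> [['dummy', [['cars', [['sedans', []], ['trucks', []]]],
#   #                ['houses', []]]]]
#
# i.e. every top-level item becomes a child of the 'dummy' root node.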
class Migration(DataMigration):
def forwards(self, orm):
"""reads category tree saved as string,
translates it to json and saves back"""
old_data = askbot_settings.CATEGORY_TREE
#this special value is our new default,
#we don't want to create a tag with this name
if old_data.replace(' ', '') == '[["dummy",[]]]':
old_data = ''
json_data = parse_tree(old_data)
json_string = simplejson.dumps(json_data).replace(' ', '')
if json_string.replace(' ', '') != askbot_settings.CATEGORY_TREE:
askbot_settings.update('CATEGORY_TREE', json_string)
def backwards(self, orm):
"Write your backwards methods here."
pass
models = {
'askbot.activity': {
'Meta': {'object_name': 'Activity', 'db_table': "u'activity'"},
'active_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'activity_type': ('django.db.models.fields.SmallIntegerField', [], {}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_auditted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Post']", 'null': 'True'}),
'receiving_users': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'received_activity'", 'symmetrical': 'False', 'to': "orm['auth.User']"}),
'recipients': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'incoming_activity'", 'symmetrical': 'False', 'through': "orm['askbot.ActivityAuditStatus']", 'to': "orm['auth.User']"}),
'summary': ('django.db.models.fields.TextField', [], {'default': "''"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'askbot.activityauditstatus': {
'Meta': {'unique_together': "(('user', 'activity'),)", 'object_name': 'ActivityAuditStatus'},
'activity': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Activity']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'status': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'askbot.anonymousanswer': {
'Meta': {'object_name': 'AnonymousAnswer'},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip_addr': ('django.db.models.fields.IPAddressField', [], {'max_length': '15'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'anonymous_answers'", 'to': "orm['askbot.Post']"}),
'session_key': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '180'}),
'text': ('django.db.models.fields.TextField', [], {}),
'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'askbot.anonymousquestion': {
'Meta': {'object_name': 'AnonymousQuestion'},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip_addr': ('django.db.models.fields.IPAddressField', [], {'max_length': '15'}),
'is_anonymous': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'session_key': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '180'}),
'tagnames': ('django.db.models.fields.CharField', [], {'max_length': '125'}),
'text': ('django.db.models.fields.TextField', [], {}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'askbot.award': {
'Meta': {'object_name': 'Award', 'db_table': "u'award'"},
'awarded_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'badge': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'award_badge'", 'to': "orm['askbot.BadgeData']"}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'notified': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'award_user'", 'to': "orm['auth.User']"})
},
'askbot.badgedata': {
'Meta': {'ordering': "('slug',)", 'object_name': 'BadgeData'},
'awarded_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'awarded_to': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'badges'", 'symmetrical': 'False', 'through': "orm['askbot.Award']", 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'})
},
'askbot.emailfeedsetting': {
'Meta': {'unique_together': "(('subscriber', 'feed_type'),)", 'object_name': 'EmailFeedSetting'},
'added_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'feed_type': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
'frequency': ('django.db.models.fields.CharField', [], {'default': "'n'", 'max_length': '8'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'reported_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'subscriber': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'notification_subscriptions'", 'to': "orm['auth.User']"})
},
'askbot.favoritequestion': {
'Meta': {'object_name': 'FavoriteQuestion', 'db_table': "u'favorite_question'"},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'thread': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Thread']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'user_favorite_questions'", 'to': "orm['auth.User']"})
},
'askbot.groupmembership': {
'Meta': {'unique_together': "(('group', 'user'),)", 'object_name': 'GroupMembership'},
'group': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'user_memberships'", 'to': "orm['askbot.Tag']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'group_memberships'", 'to': "orm['auth.User']"})
},
'askbot.groupprofile': {
'Meta': {'object_name': 'GroupProfile'},
'group_tag': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'group_profile'", 'unique': 'True', 'to': "orm['askbot.Tag']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_open': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'logo_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True'}),
'moderate_email': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'preapproved_email_domains': ('django.db.models.fields.TextField', [], {'default': "''", 'null': 'True', 'blank': 'True'}),
'preapproved_emails': ('django.db.models.fields.TextField', [], {'default': "''", 'null': 'True', 'blank': 'True'})
},
'askbot.markedtag': {
'Meta': {'object_name': 'MarkedTag'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'reason': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
'tag': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'user_selections'", 'to': "orm['askbot.Tag']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'tag_selections'", 'to': "orm['auth.User']"})
},
'askbot.post': {
'Meta': {'object_name': 'Post'},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'approved': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'posts'", 'to': "orm['auth.User']"}),
'comment_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'deleted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'deleted_posts'", 'null': 'True', 'to': "orm['auth.User']"}),
'html': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_anonymous': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_private': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_edited_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'last_edited_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'last_edited_posts'", 'null': 'True', 'to': "orm['auth.User']"}),
'locked': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'locked_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'locked_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'locked_posts'", 'null': 'True', 'to': "orm['auth.User']"}),
'offensive_flag_count': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'old_answer_id': ('django.db.models.fields.PositiveIntegerField', [], {'default': 'None', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'old_comment_id': ('django.db.models.fields.PositiveIntegerField', [], {'default': 'None', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'old_question_id': ('django.db.models.fields.PositiveIntegerField', [], {'default': 'None', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'comments'", 'null': 'True', 'to': "orm['askbot.Post']"}),
'post_type': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '180'}),
'text': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'thread': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'posts'", 'null': 'True', 'blank': 'True', 'to': "orm['askbot.Thread']"}),
'vote_down_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'vote_up_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'wikified_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'})
},
'askbot.postflagreason': {
'Meta': {'object_name': 'PostFlagReason'},
'added_at': ('django.db.models.fields.DateTimeField', [], {}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'details': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'post_reject_reasons'", 'to': "orm['askbot.Post']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
'askbot.postrevision': {
'Meta': {'ordering': "('-revision',)", 'unique_together': "(('post', 'revision'),)", 'object_name': 'PostRevision'},
'approved': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'approved_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'approved_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'postrevisions'", 'to': "orm['auth.User']"}),
'by_email': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'email_address': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_anonymous': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'post': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'revisions'", 'null': 'True', 'to': "orm['askbot.Post']"}),
'revised_at': ('django.db.models.fields.DateTimeField', [], {}),
'revision': ('django.db.models.fields.PositiveIntegerField', [], {}),
'revision_type': ('django.db.models.fields.SmallIntegerField', [], {}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '300', 'blank': 'True'}),
'tagnames': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '125', 'blank': 'True'}),
'text': ('django.db.models.fields.TextField', [], {}),
'title': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '300', 'blank': 'True'})
},
'askbot.questionview': {
'Meta': {'object_name': 'QuestionView'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'viewed'", 'to': "orm['askbot.Post']"}),
'when': ('django.db.models.fields.DateTimeField', [], {}),
'who': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'question_views'", 'to': "orm['auth.User']"})
},
'askbot.replyaddress': {
'Meta': {'object_name': 'ReplyAddress'},
'address': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '25'}),
'allowed_from_email': ('django.db.models.fields.EmailField', [], {'max_length': '150'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'post': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'reply_addresses'", 'null': 'True', 'to': "orm['askbot.Post']"}),
'reply_action': ('django.db.models.fields.CharField', [], {'default': "'auto_answer_or_comment'", 'max_length': '32'}),
'response_post': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'edit_addresses'", 'null': 'True', 'to': "orm['askbot.Post']"}),
'used_at': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'askbot.repute': {
'Meta': {'object_name': 'Repute', 'db_table': "u'repute'"},
'comment': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'negative': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'positive': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Post']", 'null': 'True', 'blank': 'True'}),
'reputation': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'reputation_type': ('django.db.models.fields.SmallIntegerField', [], {}),
'reputed_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'askbot.tag': {
'Meta': {'ordering': "('-used_count', 'name')", 'object_name': 'Tag', 'db_table': "u'tag'"},
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'created_tags'", 'to': "orm['auth.User']"}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'deleted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'deleted_tags'", 'null': 'True', 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'tag_wiki': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'described_tag'", 'unique': 'True', 'null': 'True', 'to': "orm['askbot.Post']"}),
'used_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'askbot.thread': {
'Meta': {'object_name': 'Thread'},
'accepted_answer': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': "orm['askbot.Post']"}),
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'answer_accepted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'answer_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'approved': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
'close_reason': ('django.db.models.fields.SmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'closed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'closed_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'closed_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'favorited_by': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'unused_favorite_threads'", 'symmetrical': 'False', 'through': "orm['askbot.FavoriteQuestion']", 'to': "orm['auth.User']"}),
'favourite_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'followed_by': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'followed_threads'", 'symmetrical': 'False', 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_activity_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_activity_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'unused_last_active_in_threads'", 'to': "orm['auth.User']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'tagnames': ('django.db.models.fields.CharField', [], {'max_length': '125'}),
'tags': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'threads'", 'symmetrical': 'False', 'to': "orm['askbot.Tag']"}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'view_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'askbot.vote': {
'Meta': {'unique_together': "(('user', 'voted_post'),)", 'object_name': 'Vote', 'db_table': "u'vote'"},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'votes'", 'to': "orm['auth.User']"}),
'vote': ('django.db.models.fields.SmallIntegerField', [], {}),
'voted_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'voted_post': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'votes'", 'to': "orm['askbot.Post']"})
},
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'about': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'avatar_type': ('django.db.models.fields.CharField', [], {'default': "'n'", 'max_length': '1'}),
'bronze': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'consecutive_days_visit_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'country': ('django_countries.fields.CountryField', [], {'max_length': '2', 'blank': 'True'}),
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'date_of_birth': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'display_tag_filter_strategy': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'email_isvalid': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'email_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
'email_signature': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'email_tag_filter_strategy': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'gold': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'gravatar': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ignored_tags': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'interesting_tags': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'location': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'new_response_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'questions_per_page': ('django.db.models.fields.SmallIntegerField', [], {'default': '10'}),
'real_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'reputation': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'seen_response_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'show_country': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'silver': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'w'", 'max_length': '2'}),
'subscribed_tags': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}),
'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
complete_apps = ['askbot']
symmetrical = True
| gpl-3.0 |
lino-framework/book | lino_book/projects/avanti1/tests/test_nullchar.py | 2 | 2585 | # -*- coding: utf-8 -*-
# Copyright 2017-2018 Rumma & Ko Ltd
# License: BSD (see file COPYING for details)
"""
Tests some behaviour of the `Client.national_id` field.
You can run only these tests by issuing::
$ go adg
$ python manage.py test tests.test_nullchar
Tests whether `national_id` is set to NULL (not empty string).
"""
from __future__ import unicode_literals
from __future__ import print_function
import os
from six.moves.urllib.parse import urlencode
from lino.utils.djangotest import RemoteAuthTestCase
from lino.api import dd, rt
from lino.utils.instantiator import create_row
class TestCase(RemoteAuthTestCase):
maxDiff = None
# override_djangosite_settings = dict(use_java=True)
def test01(self):
from lino.core import constants
from lino.modlib.users.choicelists import UserTypes
from lino.api.shell import countries, users
Client = rt.models.avanti.Client
u = users.User(username='robin',
user_type=UserTypes.admin,
language="en")
u.save()
self.client.force_login(u)
be = countries.Country(name="Belgium", isocode="BE")
be.save()
kw = dict()
kw.update(national_id="680601 053-29")
kw.update(first_name="Jean")
kw.update(middle_name="Jacques")
kw.update(last_name="Jeffin")
jean = create_row(Client, **kw)
kw.update(first_name="Jo")
kw.update(national_id="680601 054-28")
kw.update(last_name="Jeffin")
jo = create_row(Client, **kw)
def grid_put(username, url, **data):
data[constants.URL_PARAM_ACTION_NAME] = 'grid_put'
kwargs = dict(data=urlencode(data))
kwargs['REMOTE_USER'] = username
response = self.client.put(url, **kwargs)
# print(response)
return self.check_json_result(
response, 'rows success message')
url = '/api/avanti/Clients/' + str(jean.pk)
result = grid_put('robin', url, national_id="")
self.assertEqual(result['success'], True)
self.assertEqual(result['message'], 'Client "JEFFIN Jean (100)" has been updated.')
jean = Client.objects.get(pk=jean.pk)
self.assertEqual(jean.national_id, None)
url = '/api/avanti/Clients/' + str(jo.pk)
result = grid_put('robin', url, national_id="")
self.assertEqual(result['success'], True)
self.assertEqual(result['message'], 'Client "JEFFIN Jo (101)" has been updated.')
| bsd-2-clause |
Zac-HD/home-assistant | homeassistant/loader.py | 4 | 9195 | """
Provides methods for loading Home Assistant components.
Parts of this module are complex; documentation has been added throughout
to keep it understandable.
Components are loaded by calling get_component('switch') from your code.
To retrieve a platform that is part of a component, call
get_component('switch.your_platform'). In both cases the config directory
is checked first for a user-provided version; if none is available, the
built-in components and platforms are used.
"""
import importlib
import logging
import os
import pkgutil
import sys
from types import ModuleType
# pylint: disable=unused-import
from typing import Optional, List, Sequence, Set, Dict # NOQA
from homeassistant.const import PLATFORM_FORMAT
from homeassistant.util import OrderedSet
# Typing imports
# pylint: disable=using-constant-test,unused-import
if False:
from homeassistant.core import HomeAssistant # NOQA
PREPARED = False
# List of available components
AVAILABLE_COMPONENTS = [] # type: List[str]
# Dict of loaded components mapped name => module
_COMPONENT_CACHE = {} # type: Dict[str, ModuleType]
_LOGGER = logging.getLogger(__name__)
def prepare(hass: 'HomeAssistant'):
"""Prepare the loading of components.
This method needs to run in an executor.
"""
global PREPARED # pylint: disable=global-statement
# Load the built-in components
import homeassistant.components as components
AVAILABLE_COMPONENTS.clear()
AVAILABLE_COMPONENTS.extend(
item[1] for item in
pkgutil.iter_modules(components.__path__, 'homeassistant.components.'))
# Look for available custom components
custom_path = hass.config.path("custom_components")
if os.path.isdir(custom_path):
# Ensure we can load custom components using Pythons import
sys.path.insert(0, hass.config.config_dir)
# We cannot use the same approach as for built-in components because
# custom components might only contain a platform for a component.
# ie custom_components/switch/some_platform.py. Using pkgutil would
# not give us the switch component (and neither should it).
# Assumption: the custom_components dir only contains directories or
# python components. If this assumption is not true, HA won't break,
# just might output more errors.
for fil in os.listdir(custom_path):
if fil == '__pycache__':
continue
elif os.path.isdir(os.path.join(custom_path, fil)):
AVAILABLE_COMPONENTS.append('custom_components.{}'.format(fil))
else:
# For files we will strip out .py extension
AVAILABLE_COMPONENTS.append(
'custom_components.{}'.format(fil[0:-3]))
PREPARED = True
def set_component(comp_name: str, component: ModuleType) -> None:
"""Set a component in the cache.
Async friendly.
"""
_check_prepared()
_COMPONENT_CACHE[comp_name] = component
def get_platform(domain: str, platform: str) -> Optional[ModuleType]:
"""Try to load specified platform.
Async friendly.
"""
return get_component(PLATFORM_FORMAT.format(domain, platform))
def get_component(comp_name) -> Optional[ModuleType]:
"""Try to load specified component.
Looks in config dir first, then built-in components.
Only returns it if also found to be valid.
Async friendly.
"""
if comp_name in _COMPONENT_CACHE:
return _COMPONENT_CACHE[comp_name]
_check_prepared()
# If we ie. try to load custom_components.switch.wemo but the parent
# custom_components.switch does not exist, importing it will trigger
# an exception because it will try to import the parent.
# Because of this behavior, we will approach loading sub components
# with caution: only load it if we can verify that the parent exists.
# We do not want to silent the ImportErrors as they provide valuable
# information to track down when debugging Home Assistant.
# First check custom, then built-in
potential_paths = ['custom_components.{}'.format(comp_name),
'homeassistant.components.{}'.format(comp_name)]
for path in potential_paths:
# Validate here that root component exists
# If path contains a '.' we are specifying a sub-component
# Using rsplit we get the parent component from sub-component
root_comp = path.rsplit(".", 1)[0] if '.' in comp_name else path
if root_comp not in AVAILABLE_COMPONENTS:
continue
try:
module = importlib.import_module(path)
# In Python 3 you can import files from directories that do not
# contain the file __init__.py. A directory is a valid module if
# it contains a file with the .py extension. In this case Python
# will succeed in importing the directory as a module and call it
# a namespace. We do not care about namespaces.
# This prevents that when only
# custom_components/switch/some_platform.py exists,
            # the import custom_components.switch would succeed.
if module.__spec__.origin == 'namespace':
continue
_LOGGER.info("Loaded %s from %s", comp_name, path)
_COMPONENT_CACHE[comp_name] = module
return module
except ImportError as err:
# This error happens if for example custom_components/switch
# exists and we try to load switch.demo.
if str(err) != "No module named '{}'".format(path):
_LOGGER.exception(
("Error loading %s. Make sure all "
"dependencies are installed"), path)
_LOGGER.error("Unable to find component %s", comp_name)
return None
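# Resolution sketch (component names illustrative): get_component('switch')
# tries custom_components.switch first and then homeassistant.components.switch;
# get_component('switch.demo') only attempts the import after verifying that
# the parent 'switch' component is in AVAILABLE_COMPONENTS.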
def load_order_components(components: Sequence[str]) -> OrderedSet:
"""Take in a list of components we want to load.
- filters out components we cannot load
- filters out components that have invalid/circular dependencies
- Will make sure the recorder component is loaded first
- Will ensure that all components that do not directly depend on
the group component will be loaded before the group component.
- returns an OrderedSet load order.
- Makes sure MQTT eventstream is available for publish before
components start updating states.
Async friendly.
"""
_check_prepared()
load_order = OrderedSet()
# Sort the list of modules on if they depend on group component or not.
# Components that do not depend on the group usually set up states.
# Components that depend on group usually use states in their setup.
for comp_load_order in sorted((load_order_component(component)
for component in components),
key=lambda order: 'group' in order):
load_order.update(comp_load_order)
# Push some to first place in load order
for comp in ('mqtt_eventstream', 'mqtt', 'logger',
'recorder', 'introduction'):
if comp in load_order:
load_order.promote(comp)
return load_order
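# For example (hypothetical components with no extra dependencies):
# load_order_components(['group', 'light', 'mqtt']) orders 'light' and 'mqtt'
# before 'group', and then promotes 'mqtt' to the front of the OrderedSet.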
def load_order_component(comp_name: str) -> OrderedSet:
"""Return an OrderedSet of components in the correct order of loading.
Raises HomeAssistantError if a circular dependency is detected.
Returns an empty list if component could not be loaded.
Async friendly.
"""
return _load_order_component(comp_name, OrderedSet(), set())
def _load_order_component(comp_name: str, load_order: OrderedSet,
loading: Set) -> OrderedSet:
"""Recursive function to get load order of components.
Async friendly.
"""
component = get_component(comp_name)
# If None it does not exist, error already thrown by get_component.
if component is None:
return OrderedSet()
loading.add(comp_name)
for dependency in getattr(component, 'DEPENDENCIES', []):
# Check not already loaded
if dependency in load_order:
continue
# If we are already loading it, we have a circular dependency.
if dependency in loading:
_LOGGER.error('Circular dependency detected: %s -> %s',
comp_name, dependency)
return OrderedSet()
dep_load_order = _load_order_component(dependency, load_order, loading)
# length == 0 means error loading dependency or children
if len(dep_load_order) == 0:
_LOGGER.error('Error loading %s dependency: %s',
comp_name, dependency)
return OrderedSet()
load_order.update(dep_load_order)
load_order.add(comp_name)
loading.remove(comp_name)
return load_order
def _check_prepared() -> None:
"""Issue a warning if loader.prepare() has never been called.
Async friendly.
"""
if not PREPARED:
_LOGGER.warning((
"You did not call loader.prepare() yet. "
"Certain functionality might not be working."))
| apache-2.0 |
eickenberg/scikit-learn | sklearn/metrics/tests/test_regression.py | 5 | 2825 | from __future__ import division, print_function
import numpy as np
from itertools import product
from sklearn.utils.testing import assert_raises
from sklearn.utils.testing import assert_equal
from sklearn.utils.testing import assert_almost_equal
from sklearn.utils.testing import assert_array_equal
from sklearn.metrics import explained_variance_score
from sklearn.metrics import mean_absolute_error
from sklearn.metrics import mean_squared_error
from sklearn.metrics import r2_score
from sklearn.metrics.regression import _check_reg_targets
def test_regression_metrics(n_samples=50):
y_true = np.arange(n_samples)
y_pred = y_true + 1
assert_almost_equal(mean_squared_error(y_true, y_pred), 1.)
assert_almost_equal(mean_absolute_error(y_true, y_pred), 1.)
assert_almost_equal(r2_score(y_true, y_pred), 0.995, 2)
assert_almost_equal(explained_variance_score(y_true, y_pred), 1.)
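# (Note: with y_pred = y_true + 1, every per-sample error is exactly 1, so the
# mean squared and mean absolute errors asserted above are both 1.)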
def test_multioutput_regression():
y_true = np.array([[1, 0, 0, 1], [0, 1, 1, 1], [1, 1, 0, 1]])
y_pred = np.array([[0, 0, 0, 1], [1, 0, 1, 1], [0, 0, 0, 1]])
error = mean_squared_error(y_true, y_pred)
assert_almost_equal(error, (1. / 3 + 2. / 3 + 2. / 3) / 4.)
# mean_absolute_error and mean_squared_error are equal because
# it is a binary problem.
error = mean_absolute_error(y_true, y_pred)
assert_almost_equal(error, (1. / 3 + 2. / 3 + 2. / 3) / 4.)
error = r2_score(y_true, y_pred)
assert_almost_equal(error, 1 - 5. / 2)
def test_regression_metrics_at_limits():
assert_almost_equal(mean_squared_error([0.], [0.]), 0.00, 2)
assert_almost_equal(mean_absolute_error([0.], [0.]), 0.00, 2)
assert_almost_equal(explained_variance_score([0.], [0.]), 1.00, 2)
assert_almost_equal(r2_score([0., 1], [0., 1]), 1.00, 2)
def test__check_reg_targets():
# All of length 3
EXAMPLES = [
("continuous", [1, 2, 3], 1),
("continuous", [[1], [2], [3]], 1),
("continuous-multioutput", [[1, 1], [2, 2], [3, 1]], 2),
("continuous-multioutput", [[5, 1], [4, 2], [3, 1]], 2),
("continuous-multioutput", [[1, 3, 4], [2, 2, 2], [3, 1, 1]], 3),
]
for (type1, y1, n_out1), (type2, y2, n_out2) in product(EXAMPLES,
repeat=2):
if type1 == type2 and n_out1 == n_out2:
y_type, y_check1, y_check2 = _check_reg_targets(y1, y2)
assert_equal(type1, y_type)
if type1 == 'continuous':
assert_array_equal(y_check1, np.reshape(y1, (-1, 1)))
assert_array_equal(y_check2, np.reshape(y2, (-1, 1)))
else:
assert_array_equal(y_check1, y1)
assert_array_equal(y_check2, y2)
else:
assert_raises(ValueError, _check_reg_targets, y1, y2)
| bsd-3-clause |
papados/ordersys | Lib/_abcoll.py | 218 | 14672 | # Copyright 2007 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.
"""Abstract Base Classes (ABCs) for collections, according to PEP 3119.
DON'T USE THIS MODULE DIRECTLY! The classes here should be imported
via collections; they are defined here only to alleviate certain
bootstrapping issues. Unit tests are in test_collections.
"""
from abc import ABCMeta, abstractmethod
import sys
__all__ = ["Hashable", "Iterable", "Iterator",
"Sized", "Container", "Callable",
"Set", "MutableSet",
"Mapping", "MutableMapping",
"MappingView", "KeysView", "ItemsView", "ValuesView",
"Sequence", "MutableSequence",
]
### ONE-TRICK PONIES ###
def _hasattr(C, attr):
try:
return any(attr in B.__dict__ for B in C.__mro__)
except AttributeError:
# Old-style class
return hasattr(C, attr)
class Hashable:
__metaclass__ = ABCMeta
@abstractmethod
def __hash__(self):
return 0
@classmethod
def __subclasshook__(cls, C):
if cls is Hashable:
try:
for B in C.__mro__:
if "__hash__" in B.__dict__:
if B.__dict__["__hash__"]:
return True
break
except AttributeError:
# Old-style class
if getattr(C, "__hash__", None):
return True
return NotImplemented
class Iterable:
__metaclass__ = ABCMeta
@abstractmethod
def __iter__(self):
while False:
yield None
@classmethod
def __subclasshook__(cls, C):
if cls is Iterable:
if _hasattr(C, "__iter__"):
return True
return NotImplemented
Iterable.register(str)
class Iterator(Iterable):
@abstractmethod
def next(self):
raise StopIteration
def __iter__(self):
return self
@classmethod
def __subclasshook__(cls, C):
if cls is Iterator:
if _hasattr(C, "next") and _hasattr(C, "__iter__"):
return True
return NotImplemented
class Sized:
__metaclass__ = ABCMeta
@abstractmethod
def __len__(self):
return 0
@classmethod
def __subclasshook__(cls, C):
if cls is Sized:
if _hasattr(C, "__len__"):
return True
return NotImplemented
class Container:
__metaclass__ = ABCMeta
@abstractmethod
def __contains__(self, x):
return False
@classmethod
def __subclasshook__(cls, C):
if cls is Container:
if _hasattr(C, "__contains__"):
return True
return NotImplemented
class Callable:
__metaclass__ = ABCMeta
@abstractmethod
def __call__(self, *args, **kwds):
return False
@classmethod
def __subclasshook__(cls, C):
if cls is Callable:
if _hasattr(C, "__call__"):
return True
return NotImplemented
### SETS ###
class Set(Sized, Iterable, Container):
"""A set is a finite, iterable container.
This class provides concrete generic implementations of all
methods except for __contains__, __iter__ and __len__.
To override the comparisons (presumably for speed, as the
semantics are fixed), all you have to do is redefine __le__ and
then the other operations will automatically follow suit.
"""
def __le__(self, other):
if not isinstance(other, Set):
return NotImplemented
if len(self) > len(other):
return False
for elem in self:
if elem not in other:
return False
return True
def __lt__(self, other):
if not isinstance(other, Set):
return NotImplemented
return len(self) < len(other) and self.__le__(other)
def __gt__(self, other):
if not isinstance(other, Set):
return NotImplemented
return other < self
def __ge__(self, other):
if not isinstance(other, Set):
return NotImplemented
return other <= self
def __eq__(self, other):
if not isinstance(other, Set):
return NotImplemented
return len(self) == len(other) and self.__le__(other)
def __ne__(self, other):
return not (self == other)
@classmethod
def _from_iterable(cls, it):
'''Construct an instance of the class from any iterable input.
Must override this method if the class constructor signature
does not accept an iterable for an input.
'''
return cls(it)
def __and__(self, other):
if not isinstance(other, Iterable):
return NotImplemented
return self._from_iterable(value for value in other if value in self)
def isdisjoint(self, other):
for value in other:
if value in self:
return False
return True
def __or__(self, other):
if not isinstance(other, Iterable):
return NotImplemented
chain = (e for s in (self, other) for e in s)
return self._from_iterable(chain)
def __sub__(self, other):
if not isinstance(other, Set):
if not isinstance(other, Iterable):
return NotImplemented
other = self._from_iterable(other)
return self._from_iterable(value for value in self
if value not in other)
def __xor__(self, other):
if not isinstance(other, Set):
if not isinstance(other, Iterable):
return NotImplemented
other = self._from_iterable(other)
return (self - other) | (other - self)
# Sets are not hashable by default, but subclasses can change this
__hash__ = None
def _hash(self):
"""Compute the hash value of a set.
Note that we don't define __hash__: not all sets are hashable.
But if you define a hashable set type, its __hash__ should
call this function.
        This must be compatible with __eq__.
All sets ought to compare equal if they contain the same
elements, regardless of how they are implemented, and
regardless of the order of the elements; so there's not much
freedom for __eq__ or __hash__. We match the algorithm used
by the built-in frozenset type.
"""
MAX = sys.maxint
MASK = 2 * MAX + 1
n = len(self)
h = 1927868237 * (n + 1)
h &= MASK
for x in self:
hx = hash(x)
h ^= (hx ^ (hx << 16) ^ 89869747) * 3644798167
h &= MASK
h = h * 69069 + 907133923
h &= MASK
if h > MAX:
h -= MASK + 1
if h == -1:
h = 590923713
return h
Set.register(frozenset)
class MutableSet(Set):
@abstractmethod
def add(self, value):
"""Add an element."""
raise NotImplementedError
@abstractmethod
def discard(self, value):
"""Remove an element. Do not raise an exception if absent."""
raise NotImplementedError
def remove(self, value):
"""Remove an element. If not a member, raise a KeyError."""
if value not in self:
raise KeyError(value)
self.discard(value)
def pop(self):
"""Return the popped value. Raise KeyError if empty."""
it = iter(self)
try:
value = next(it)
except StopIteration:
raise KeyError
self.discard(value)
return value
def clear(self):
"""This is slow (creates N new iterators!) but effective."""
try:
while True:
self.pop()
except KeyError:
pass
def __ior__(self, it):
for value in it:
self.add(value)
return self
def __iand__(self, it):
for value in (self - it):
self.discard(value)
return self
def __ixor__(self, it):
if it is self:
self.clear()
else:
if not isinstance(it, Set):
it = self._from_iterable(it)
for value in it:
if value in self:
self.discard(value)
else:
self.add(value)
return self
def __isub__(self, it):
if it is self:
self.clear()
else:
for value in it:
self.discard(value)
return self
MutableSet.register(set)
### MAPPINGS ###
class Mapping(Sized, Iterable, Container):
@abstractmethod
def __getitem__(self, key):
raise KeyError
def get(self, key, default=None):
try:
return self[key]
except KeyError:
return default
def __contains__(self, key):
try:
self[key]
except KeyError:
return False
else:
return True
def iterkeys(self):
return iter(self)
def itervalues(self):
for key in self:
yield self[key]
def iteritems(self):
for key in self:
yield (key, self[key])
def keys(self):
return list(self)
def items(self):
return [(key, self[key]) for key in self]
def values(self):
return [self[key] for key in self]
# Mappings are not hashable by default, but subclasses can change this
__hash__ = None
def __eq__(self, other):
if not isinstance(other, Mapping):
return NotImplemented
return dict(self.items()) == dict(other.items())
def __ne__(self, other):
return not (self == other)
class MappingView(Sized):
def __init__(self, mapping):
self._mapping = mapping
def __len__(self):
return len(self._mapping)
def __repr__(self):
return '{0.__class__.__name__}({0._mapping!r})'.format(self)
class KeysView(MappingView, Set):
@classmethod
def _from_iterable(self, it):
return set(it)
def __contains__(self, key):
return key in self._mapping
def __iter__(self):
for key in self._mapping:
yield key
class ItemsView(MappingView, Set):
@classmethod
def _from_iterable(self, it):
return set(it)
def __contains__(self, item):
key, value = item
try:
v = self._mapping[key]
except KeyError:
return False
else:
return v == value
def __iter__(self):
for key in self._mapping:
yield (key, self._mapping[key])
class ValuesView(MappingView):
def __contains__(self, value):
for key in self._mapping:
if value == self._mapping[key]:
return True
return False
def __iter__(self):
for key in self._mapping:
yield self._mapping[key]
class MutableMapping(Mapping):
@abstractmethod
def __setitem__(self, key, value):
raise KeyError
@abstractmethod
def __delitem__(self, key):
raise KeyError
__marker = object()
def pop(self, key, default=__marker):
try:
value = self[key]
except KeyError:
if default is self.__marker:
raise
return default
else:
del self[key]
return value
def popitem(self):
try:
key = next(iter(self))
except StopIteration:
raise KeyError
value = self[key]
del self[key]
return key, value
def clear(self):
try:
while True:
self.popitem()
except KeyError:
pass
def update(*args, **kwds):
if len(args) > 2:
raise TypeError("update() takes at most 2 positional "
"arguments ({} given)".format(len(args)))
elif not args:
raise TypeError("update() takes at least 1 argument (0 given)")
self = args[0]
other = args[1] if len(args) >= 2 else ()
if isinstance(other, Mapping):
for key in other:
self[key] = other[key]
elif hasattr(other, "keys"):
for key in other.keys():
self[key] = other[key]
else:
for key, value in other:
self[key] = value
for key, value in kwds.items():
self[key] = value
def setdefault(self, key, default=None):
try:
return self[key]
except KeyError:
self[key] = default
return default
MutableMapping.register(dict)
### SEQUENCES ###
class Sequence(Sized, Iterable, Container):
"""All the operations on a read-only sequence.
Concrete subclasses must override __new__ or __init__,
__getitem__, and __len__.
"""
@abstractmethod
def __getitem__(self, index):
raise IndexError
def __iter__(self):
i = 0
try:
while True:
v = self[i]
yield v
i += 1
except IndexError:
return
def __contains__(self, value):
for v in self:
if v == value:
return True
return False
def __reversed__(self):
for i in reversed(range(len(self))):
yield self[i]
def index(self, value):
for i, v in enumerate(self):
if v == value:
return i
raise ValueError
def count(self, value):
return sum(1 for v in self if v == value)
Sequence.register(tuple)
Sequence.register(basestring)
Sequence.register(buffer)
Sequence.register(xrange)
class MutableSequence(Sequence):
@abstractmethod
def __setitem__(self, index, value):
raise IndexError
@abstractmethod
def __delitem__(self, index):
raise IndexError
@abstractmethod
def insert(self, index, value):
raise IndexError
def append(self, value):
self.insert(len(self), value)
def reverse(self):
n = len(self)
for i in range(n//2):
self[i], self[n-i-1] = self[n-i-1], self[i]
def extend(self, values):
for v in values:
self.append(v)
def pop(self, index=-1):
v = self[index]
del self[index]
return v
def remove(self, value):
del self[self.index(value)]
def __iadd__(self, values):
self.extend(values)
return self
MutableSequence.register(list)
| unlicense |
crichardson17/starburst_atlas | SFH_comparison/data/Padova_inst/padova_inst_0/fullgrid/peaks_reader.py | 1 | 5057 | import csv
import matplotlib.pyplot as plt
from numpy import *
import scipy.interpolate
import math
from pylab import *
from matplotlib.ticker import MultipleLocator, FormatStrFormatter
import matplotlib.patches as patches
from matplotlib.path import Path
import os
# ---------------------------------------------------
headerloc = "/Users/helen/Documents/Elon/Thesis_Research/github_repo/starburst_atlas/headers_dir/headers.txt"
# ------------------------------------------------------------------------------------------------------
#data files' names from source directory constructed here. default source directory is working directory
numFiles = 3 #change this if you have more/less files
gridFiles = [None]*numFiles
emissionFiles = [None]*numFiles
for i in range(numFiles):
for file in os.listdir('.'):
if file.endswith("{:d}.grd".format(i+1)):
gridFiles[i] = file
#keep track of all the files you'll be importing by printing
#print file
if file.endswith("{:d}.txt".format(i+1)):
emissionFiles[i] = file
#keep track of all the files you'll be importing by printing
#print file
print("File names constructed")
# ---------------------------------------------------
#this is where the grid information (phi and hdens) is read in and saved to grid.
print("Beginning file import")
for i in range(numFiles):
gridI = [];
with open(gridFiles[i], 'rb') as f:
csvReader = csv.reader(f, delimiter='\t')
for row in csvReader:
gridI.append(row)
gridI = asarray(gridI)
gridI = gridI[1:,6:8]
if ( i == 0 ):
grid = gridI
else :
grid = concatenate((grid,gridI))
for i in range(numFiles):
emissionLineI = [];
with open(emissionFiles[i], 'rb') as f:
csvReader = csv.reader(f, delimiter='\t')
headers = csvReader.next()
for row in csvReader:
emissionLineI.append(row)
emissionLineI = asarray(emissionLineI)
emissionLineI = emissionLineI[:,1:]
if ( i == 0 ):
Emissionlines = emissionLineI
else :
Emissionlines = concatenate((Emissionlines,emissionLineI))
hdens_values = grid[:,1]
phi_values = grid[:,0]
print("Import files complete")
#To fix when hdens > 10
#many of my grids were run off with hdens up to 12 so we needed to cut off part of the data
#first create temporary arrays
print("modifications begun")
hdens_values_2 = empty(shape=[0, 1])
phi_values_2 = empty(shape=[0, 1])
Emissionlines_2 = empty(shape=[0, len(Emissionlines[0,:])])
#save data in range desired to temp arrays
for i in range(len(hdens_values)):
if (float(hdens_values[i]) < 6.100) & (float(phi_values[i]) < 17.100) :
hdens_values_2 = append(hdens_values_2, hdens_values[i])
phi_values_2 = append(phi_values_2, phi_values[i])
Emissionlines_2 = vstack([Emissionlines_2, Emissionlines[i,:]])
#overwrite old arrays
hdens_values = hdens_values_2
phi_values = phi_values_2
Emissionlines = Emissionlines_2
print("modifications complete")
# ---------------------------------------------------
# ---------------------------------------------------
#there are the emission line names properly formatted
print("Importing headers from header file")
headersFile = open(headerloc,'r')
headers = headersFile.read().splitlines()
headersFile.close()
# ---------------------------------------------------
#constructing grid by scaling
#select the scaling factor
#for 1215
#incident = Emissionlines[1:,4]
concatenated_data = zeros((len(Emissionlines),len(Emissionlines[0])))
max_values = zeros((len(concatenated_data[0]),4))
#for 4860
incident = concatenated_data[:,57]
#take the ratio of incident and all the lines and put it all in an array concatenated_data
for i in range(len(Emissionlines)):
for j in range(len(Emissionlines[0])):
if math.log(4860.*(float(Emissionlines[i,j])/float(Emissionlines[i,57])), 10) > 0:
concatenated_data[i,j] = math.log(4860.*(float(Emissionlines[i,j])/float(Emissionlines[i,57])), 10)
else:
            concatenated_data[i,j] = 0
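# i.e. each stored entry is log10(4860 * F(line) / F(Hbeta 4860)); non-positive
# values are left at zero because the array was zero-initialized.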
# for 1215
#for i in range(len(Emissionlines)):
# for j in range(len(Emissionlines[0])):
# if math.log(1215.*(float(Emissionlines[i,j])/float(Emissionlines[i,4])), 10) > 0:
# concatenated_data[i,j] = math.log(1215.*(float(Emissionlines[i,j])/float(Emissionlines[i,4])), 10)
# else:
# concatenated_data[i,j] == 0
# ---------------------------------------------------
#find the maxima to plot onto the contour plots
for j in range(len(concatenated_data[0])):
max_values[j,0] = max(concatenated_data[:,j])
max_values[j,1] = argmax(concatenated_data[:,j], axis = 0)
max_values[j,2] = hdens_values[max_values[j,1]]
max_values[j,3] = phi_values[max_values[j,1]]
#to round off the maxima
max_values[:,0] = [ '%.1f' % elem for elem in max_values[:,0] ]
print "data arranged"
# ---------------------------------------------------
#Creating the grid to interpolate with for contours.
gridarray = zeros((len(concatenated_data),2))
gridarray[:,0] = hdens_values
gridarray[:,1] = phi_values
x = gridarray[:,0]
y = gridarray[:,1]
# ---------------------------------------------------
savetxt('peaks_Padova_inst_0', max_values, delimiter='\t')
| gpl-2.0 |
maggrey/cherrymusic | cherrymusicserver/configuration.py | 4 | 37944 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# CherryMusic - a standalone music server
# Copyright (c) 2012 - 2014 Tom Wallroth & Tilman Boerner
#
# Project page:
# http://fomori.org/cherrymusic/
# Sources on github:
# http://github.com/devsnd/cherrymusic/
#
# CherryMusic is based on
# jPlayer (GPL/MIT license) http://www.jplayer.org/
# CherryPy (BSD license) http://www.cherrypy.org/
#
# licensed under GNU GPL version 3 (or later)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
#
#python 2.6+ backward compability
from __future__ import unicode_literals
from io import open
import itertools
import os
import re
import weakref
from collections import Mapping, namedtuple
from backport.collections import OrderedDict
from backport import callable
from cherrymusicserver import util
from cherrymusicserver import log as logging
def _validate_basedir(basedir):
if not basedir:
raise ValueError('basedir must be set')
if not os.path.isabs(basedir):
raise ValueError('basedir must be absolute path: {basedir}'.format(basedir=basedir))
if not os.path.exists(basedir):
raise ValueError("basedir must exist: {basedir}".format(basedir=basedir))
if not os.path.isdir(basedir):
raise ValueError("basedir must be a directory: {basedir}".format(basedir=basedir))
return True
def from_defaults():
'''load default configuration. must work if path to standard config file is unknown.'''
c = ConfigBuilder()
with c['media.basedir'] as basedir:
basedir.value = None
basedir.valid = _validate_basedir
# i18n: Don't mind whitespace - string will be re-wrapped automatically. Use blank lines to separate paragraphs.
basedir.doc = _("""
BASEDIR specifies where the media that should be
served is located. It must be an absolute path, e.g.
BASEDIR=/absolute/path/to/media.
Links: If your operating system supports them,
you can use symlinks directly in BASEDIR. Links to
directories which contain BASEDIR will be ignored,
just like all links not directly in, but in sublevels
of BASEDIR. This is to guard against the adverse
effects of link cycles.
""")
with c['media.transcode'] as transcode:
transcode.value = False
# i18n: Don't mind whitespace - string will be re-wrapped automatically. Use blank lines to separate paragraphs.
transcode.doc = _("""
TRANSCODE (experimental!) enables automatic live transcoding
of the media to be able to listen to every format on every device.
This requires you to have the appropriate codecs installed.
Please note that transcoding will significantly increase the stress on the CPU!
""")
with c['media.fetch_album_art'] as fetch:
fetch.value = False
# i18n: Don't mind whitespace - string will be re-wrapped automatically. Use blank lines to separate paragraphs.
fetch.doc = _("""
Tries to fetch the album cover from various locations in the web,
if no image is found locally. By default it will be fetched from amazon.
They will be shown next to folders that qualify as a possible
album.
""")
with c['media.show_subfolder_count'] as subfoldercount:
subfoldercount.value = True
# i18n: Don't mind whitespace - string will be re-wrapped automatically. Use blank lines to separate paragraphs.
subfoldercount.doc = _("""
Show the number of sub-folders and tracks contained
in any folder. This will increase the stress for the
server, so if you're running CherryMusic on a 386DX II
or similar, it is recommended to deactivate this feature.
""")
with c['media.maximum_download_size'] as maxdl:
maxdl.value = 1024*1024*250
# i18n: Don't mind whitespace - string will be re-wrapped automatically. Use blank lines to separate paragraphs.
maxdl.doc = _("""
Maximum size in bytes of all files to be downloaded in one zipfile.
Defaults to {default_value} {default_unit}.
""".format(default_value='250', default_unit=_('megabytes')))
with c['search.maxresults'] as maxresults:
maxresults.value = 20
# i18n: Don't mind whitespace - string will be re-wrapped automatically. Use blank lines to separate paragraphs.
maxresults.doc = _("""
MAXRESULTS sets the maximum amount of search results
to be displayed. If MAXRESULTS is set to a higher value,
the search will take longer, but will also be more accurate.
""")
with c['search.load_file_db_into_memory'] as memory:
memory.value = False
# i18n: Don't mind whitespace - string will be re-wrapped automatically. Use blank lines to separate paragraphs.
memory.doc = _("""
This will load parts of the database into memory for improved
performance. This option should only be used on systems with
sufficient memory, because it will hurt the performance otherwise.
""")
with c['browser.maxshowfiles'] as maxshowfiles:
maxshowfiles.value = 100
# i18n: Don't mind whitespace - string will be re-wrapped automatically. Use blank lines to separate paragraphs.
maxshowfiles.doc = _('''
MAXSHOWFILES specifies how many files and folders should
be shown at the same time. E.g. if you open a folder
with more than MAXSHOWFILES, the files will be grouped
according to the first letter in their name.
100 is a good value, as a CD can have up to 99 tracks.
''')
with c['browser.pure_database_lookup'] as pure_database_lookup:
pure_database_lookup.value = False
# i18n: Don't mind whitespace - string will be re-wrapped automatically. Use blank lines to separate paragraphs.
pure_database_lookup.doc = _("""
Only use the media database, never the filesystem, for content
lookups in browser and search. Useful if the media files reside
on an external hard drive or behind a slow network connection.
""")
with c['server.port'] as port:
port.value = 8080
# i18n: Don't mind whitespace - string will be re-wrapped automatically. Use blank lines to separate paragraphs.
port.doc = _('The port the server will listen to.')
with c['server.ipv6_enabled'] as ipv6:
ipv6.value = False
# i18n: Don't mind whitespace - string will be re-wrapped automatically. Use blank lines to separate paragraphs.
ipv6.doc = _("""When set to true, the server will listen on a IPv6
socket instead of IPv4""")
with c['server.localhost_only'] as localhost_only:
localhost_only.value = False
# i18n: Don't mind whitespace - string will be re-wrapped automatically. Use blank lines to separate paragraphs.
localhost_only.doc = _('''
When localhost_only is set to true, the server will not
be visible in the network and only play music on the
same computer it is running on.
Activating this option binds the server to IP 127.0.0.1 or
[::1], depending on whether server.ipv6_enabled is true.
The server should also be reachable as "localhost" in any
case.
''')
with c['server.rootpath'] as rootpath:
rootpath.value = '/'
# i18n: Don't mind whitespace - string will be re-wrapped automatically. Use blank lines to separate paragraphs.
rootpath.doc = _('''
The path cherrymusic will be available on. Normally
you'll want to leave it as '/', so that CherryMusic is
available under e.g. localhost:8080. You might want to
change the path if CherryMusic runs behind a reverse
proxy. Changing it to '/cherrymusic' will make it available
under e.g. localhost:8080/cherrymusic
''')
with c['server.localhost_auto_login'] as localhost_auto_login:
localhost_auto_login.value = False
# i18n: Don't mind whitespace - string will be re-wrapped automatically. Use blank lines to separate paragraphs.
localhost_auto_login.doc = _('''
When localhost_auto_login is set to "True", the server will
not ask for credentials when using it locally. The user will
be automatically logged in as admin.
''')
with c['server.permit_remote_admin_login'] as permit_remote_admin_login:
permit_remote_admin_login.value = True
# i18n: Don't mind whitespace - string will be re-wrapped automatically. Use blank lines to separate paragraphs.
permit_remote_admin_login.doc = _('''
When permit_remote_admin_login is set to "False", admin users
may only log in from the computer cherrymusic is currently
running on. This can improve security.
''')
with c['server.keep_session_in_ram'] as keep_session_in_ram:
keep_session_in_ram.value = False
# i18n: Don't mind whitespace - string will be re-wrapped automatically. Use blank lines to separate paragraphs.
keep_session_in_ram.doc = _('''
Will keep the user sessions in RAM instead of a file in the
configuration directory. This means, that any unsaved
playlists will be lost when the server is restarted.
''')
with c['server.ssl_enabled'] as ssl_enabled:
ssl_enabled.value = False
# i18n: Don't mind whitespace - string will be re-wrapped automatically. Use blank lines to separate paragraphs.
ssl_enabled.doc = _('''
The following options allow you to use cherrymusic with
https encryption. If ssl_enabled is set to "False", all other
ssl options will be ommited.
''')
with c['server.ssl_port'] as ssl_port:
ssl_port.value = 8443
# i18n: Don't mind whitespace - string will be re-wrapped automatically. Use blank lines to separate paragraphs.
ssl_port.doc = _('''
The port that will listen to SSL encrypted requests. If
ssl_enabled is set to "True", all unencrypted HTTP requests
will be redirected to this port.
''')
with c['server.ssl_certificate'] as ssl_certificate:
ssl_certificate.value = 'certs/server.crt'
# i18n: Don't mind whitespace - string will be re-wrapped automatically. Use blank lines to separate paragraphs.
ssl_certificate.doc = _('''
The SSL certiticate sent to the client to verify the
server's authenticity. A relative path is relative to the
location of the CherryMusic configuration file.
''')
with c['server.ssl_private_key'] as ssl_private_key:
ssl_private_key.value = 'certs/server.key'
# i18n: Don't mind whitespace - string will be re-wrapped automatically. Use blank lines to separate paragraphs.
ssl_private_key.doc = _('''
SSL private key file used by the server to decrypt and sign
secure communications. Keep this one secret! A relative
path is relative to the location of the CherryMusic
configuration file.
''')
with c['general.update_notification'] as update_notification:
update_notification.value = True
# i18n: Don't mind whitespace - string will be re-wrapped automatically. Use blank lines to separate paragraphs.
update_notification.doc = _('''
Notify admins about available security and feature updates.
''')
return c.to_configuration()
def from_configparser(filepath):
"""Have an ini file that the python configparser can understand? Pass the filepath
to this function, and a matching Configuration will magically be returned."""
if not os.path.exists(filepath):
logging.error(_('configuration file not found: %(filepath)s'), {'filepath':filepath})
return None
if not os.path.isfile(filepath):
logging.error(_('configuration path is not a file: %(filepath)s'), {'filepath':filepath})
return None
try:
from configparser import ConfigParser
except ImportError:
from backport.configparser import ConfigParser
cfgp = ConfigParser()
with open(filepath, encoding='utf-8') as fp:
cfgp.readfp(fp)
dic = OrderedDict()
for section_name in cfgp.sections():
if 'DEFAULT' == section_name:
section_name = ''
for name, value in cfgp.items(section_name):
value += '' # inner workaround for python 2.6+
# transforms ascii str to unicode because
# of unicode_literals import
dic[Key(section_name) + name] = value
return Configuration.from_mapping(dic)
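# A matching ini file might look like this (values purely illustrative):
#
#   [media]
#   basedir = /absolute/path/to/media
#   transcode = False
#
# which maps to the keys 'media.basedir' and 'media.transcode'.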
def write_to_file(cfg, filepath):
""" Write a configuration to the given file so that it's readable by
configparser.
"""
with open(filepath, mode='w', encoding='utf-8') as f:
def printf(s):
f.write(s + os.linesep)
lastsection = None
for prop in cfg.to_properties():
if prop.hidden:
continue
key, value, doc = (Key(prop.key), prop.value, prop.doc)
section, subkey = str(key.head), str(key.tail)
if section != lastsection:
lastsection = section
printf('%s[%s]' % (os.linesep, section,))
if doc:
printf('')
lines = util.phrase_to_lines(doc)
for line in lines:
printf('; %s' % (line,))
printf('%s = %s' % (subkey, value))
def from_dict(mapping):
'''Alias for :meth:`Configuration.from_mapping`.'''
return Configuration.from_mapping(mapping)
def from_list(properties):
'''Alias for :meth:`Configuration.from_properties`.'''
return Configuration.from_properties(properties)
def to_list(cfg):
'''Alias for :meth:`Configuration.to_properties`.'''
return cfg.to_properties()
class ConfigError(Exception):
"""Base class for configuration errors."""
def __init__(self, key, value=None, msg='', detail=''):
self.key = key
self.value = value
self.msg = msg % {'key': key, 'value': value}
self.detail = detail % {'key': key, 'value': value}
Exception.__init__(self, self.key, self.value, self.msg, self.detail)
def __repr__(self):
return "{cls}: {msg}, key:{key} value:{val}, {detail}".format(
cls=self.__class__.__name__,
key=repr(self.key),
val=repr(self.value),
msg=self.msg,
detail=self.detail,
)
def __str__(self):
detail = self.detail.strip() if hasattr(self, 'detail') else ''
if detail:
detail = ' ({0})'.format(detail)
return '{0}: {1}{2}'.format(self.__class__.__name__, self.msg, detail)
class ConfigNamingError(ConfigError):
"""Something is wrong with the name ('Key') of a config Property."""
def __init__(self, key, detail=''):
ConfigError.__init__(self, key, None,
'invalid key name: %(key)r', detail)
class ConfigKeyError(ConfigError, KeyError):
""" A config key does not exist. """
def __init__(self, key, detail=''):
ConfigError.__init__(self, key, None,
'key does not exist: %(key)r', detail)
class ConfigValueError(ConfigError, ValueError):
"""A configuration property does not accept a value."""
def __init__(self, key, value, detail=''):
ConfigError.__init__(self, key, value,
'invalid value: %(value)r', detail)
class ConfigWriteError(ConfigError):
"""Error while trying to change an existing configuration property."""
def __init__(self, key, value, detail=''):
ConfigError.__init__(self, key, value,
"can't write to %(key)s", detail)
def raising_error_handler(e):
"Simply raise the active exception."
raise
class error_collector(object):
""" Callable that can be used to collect errors of Configuration operations
instead of raising them.
"""
def __init__(self):
self.errors = []
def __call__(self, error):
self.errors.append(error)
def __len__(self):
return len(self.errors)
def __iter__(self):
return iter(self.errors)
class Key(object):
""" A hierarchical property name; alphanumerical and caseless.
Keys parts can contain ASCII letters, digits and `_`; they must start
with a letter and be separated by a `.`.
"""
_sep = '.'
_re = re.compile(r'^({name}({sep}{name})*)?$'.format(
name=r'[A-Za-z][A-Za-z0-9_]*',
sep=_sep,
))
def __init__(self, name=None):
""" name : Key or str
`None` means ''
"""
if None is name:
name = ''
elif isinstance(name, Key):
name = name._str
elif not isinstance(name, (str, type(''))):
raise ConfigNamingError(name, 'name must be a Key, str or unicode (is {type!r})'.format(type=type(name)))
elif not self._re.match(name):
raise ConfigNamingError(
                name, 'Key parts must only contain the characters [A-Za-z0-9_],'
                ' start with a letter and be separated by a {separator}'.format(separator=self._sep))
name += '' # inner workaround for python 2.6+
# transforms ascii str to unicode because
# of unicode_literals import
self._str = name.lower()
def __repr__(self):
return '{0}({1!r})'.format(self.__class__.__name__, self._str)
def __str__(self):
return self._str
def __iter__(self):
"""Iterate over hierarchical key parts,"""
return iter(map(Key, self._str.split(self._sep)))
def __len__(self):
"""The number of non-empty hierarchical parts in this Key."""
return self._str.count(self._sep) + 1 if self._str else 0
def __add__(self, other):
"""Append something that can become a Key to a copy of this Key."""
other = Key(other)
if self and other:
return self._sep.join((self._str, other._str))
return Key(self or other)
def __radd__(self, other):
"""Make a Key of the left operand and add a copy of this key to it."""
return Key(other) + self
def __hash__(self):
return hash(self.normal)
def __eq__(self, other):
return self.normal == Key(other).normal
def __ne__(self, other):
return not (self == other)
@property
def parent(self):
""" This Key without its last hierarchical part; evaluates to `False`
if there are less than two parts in this Key.
"""
lastsep = self._str.rfind(self._sep)
if lastsep >= 0:
return Key(self._str[:lastsep])
return Key()
@property
def head(self):
""" The first hierarchical part of this Key."""
firstsep = self._str.find(self._sep)
if firstsep >= 0:
return Key(self._str[:firstsep])
return self
@property
def tail(self):
""" This key without its last hierarchical part; evaluates to `False`
if there are less than two parts in this Key.
"""
firstsep = self._str.find(self._sep)
if firstsep >= 0:
return Key(self._str[firstsep + 1:])
return Key()
@property
def normal(self):
"""The normal, hashable form of this Key to compare against."""
return self._str
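# Key behaviour in brief (sketch): Key('Server') + 'Port' equals Key('server.port'),
# Key('server.port').parent equals Key('server'), and comparisons are caseless.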
class _PropertyMap(Mapping):
""" A map of keys to corresponding Properties; immutable, but can generate
updated copies of itself. Certain unset property attributes are
inherited from the property with the closest parent key. These
inherited attributes are: ``valid``, ``readonly`` and ``hidden``.
Uses the Property.replace mechanic to update existing properties.
"""
def __init__(self, properties=()):
dic = OrderedDict((p.key, p) for p in properties)
sortedkeys = sorted(dic, key=lambda k: Key(k).normal)
inherit = _InheritanceViewer(dic)
for key in sortedkeys:
dic[key] = inherit.property_with_inherited_attributes(key)
self._dic = dic
def __repr__(self):
return '{%s}' % (', '.join(
'%r: %r' % (k, v) for k, v in self._dic.items()))
def __len__(self):
return len(self._dic)
def __contains__(self, key):
return key in self._dic
def __iter__(self):
return iter(self._dic)
def __getitem__(self, key):
try:
return self._dic[key]
except KeyError:
raise ConfigKeyError(key)
def replace(self, properties, on_error):
def getnew(prop):
return self[prop.key].replace(**prop.to_dict())
return self._copy_with_new_properties(getnew, properties, on_error)
def update(self, properties, on_error):
def getnew(prop):
try:
return self[prop.key].replace(**prop.to_dict())
except KeyError:
return prop
return self._copy_with_new_properties(getnew, properties, on_error)
def _copy_with_new_properties(self, getnew, properties, on_error):
newdic = OrderedDict(self._dic)
for prop in properties:
try:
newprop = getnew(prop)
except ConfigError as error:
on_error(error)
continue
newdic[newprop.key] = newprop
return self.__class__(newdic.values())
class Property(namedtuple('PropertyTuple', 'key value type valid readonly hidden doc')):
""" A configuration Property with attributes for key (name), value, type,
validation and doc(umentation); immutable.
Use :meth:`replace` to return a new Property with changed attributes.
Attribute values of `None` are considered *not set*, and are the
default. They also have a special meaning to :meth:`replace`.
key : str
A string that acts as this Property's identifier (name).
value :
Anything goes that fits possible type or validity constraints,
except for `dict`s (and mappings in general); use hierarchical
keys to express those.
type :
The desired value type to auto-cast to; factually a constraint to
possible values. If `None` or an empty string, the property value
will remain unchanged.
valid : str or callable
A validity constraint on the value, applied after `type`. A
*callable* value will be called and the result evaluated in
boolean context, to decide if a value is valid. A *str* value will
be interpreted as a regular expression which the whole
``str()`` form of a value will be matched against.
readonly : bool
        A readonly property will refuse any :meth:`replace` calls with a
:class:`ConfigWriteError`.
hidden : bool
Just a flag; interpretation is up to the user.
doc : str
A documentation string.
"""
def __new__(cls, key=None, value=None, type=None, valid=None, readonly=None,
hidden=None, doc=None):
try:
key = Key(key).normal
type = cls._get_valid_type(value, type)
valid = valid
value = cls._validate(valid, cls._to_type(type, value), type)
readonly = readonly
hidden = hidden
doc = doc
except ValueError as e:
raise ConfigValueError(key, value, detail=str(e))
return super(cls, cls).__new__(
cls, key, value, type, valid, readonly, hidden, doc)
@property
def _args(self):
"""The arguments needed to create this Property: ``(name, value)*``."""
for name in ('key', 'value', 'type', 'valid', 'readonly', 'hidden', 'doc'):
attr = getattr(self, name)
if attr is not None:
yield name, attr
def to_dict(self):
return dict(self._args)
def replace(self, **kwargs):
""" Return a new property as a copy of this property, with attributes
changed according to `kwargs`.
Generally, all attributes can be overridden if they are currently
unset (`None`). An exception is `value`, which will be overridden
by anything but `None`. Restrictions set by `type` and `valid`
apply.
"""
dic = self.to_dict()
dic.update(kwargs)
other = Property(**dic)
if self.key and other.key and self.key != other.key:
raise ConfigWriteError(self.key, other.key,
'new key must match old ({newkey!r} != {oldkey!r})'.format(
newkey=other.key, oldkey=self.key))
if self.readonly:
raise ConfigWriteError(self.key, other.value,
'is readonly ({value!r})'.format(value=self.value))
return Property(
key=self.key or other.key,
value=self._override_self('value', other),
type=self._override_other('type', other),
valid=self._override_other('valid', other),
readonly=self._override_other('readonly', other),
hidden=self._override_other('hidden', other),
doc=self._override_other('doc', other),
)
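    # e.g. (sketch): Property('a', 1).replace(value=2).value == 2, whereas
    # calling replace() on a readonly Property raises ConfigWriteError.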
def _override_self(self, attrname, other):
""" Select the value of an attribute from self or another instance,
with preference to other."""
return self.__select_with_preference(other, self, attrname)
def _override_other(self, attrname, other):
""" Select the value of an attribute from self or another instance,
with preference to self."""
return self.__select_with_preference(self, other, attrname)
@staticmethod
def __select_with_preference(preferred, alt, attrname):
""" Select one of the values of an attribute to two objects, preferring
the first unless it holds `None`.
"""
preference = getattr(preferred, attrname, None)
alternative = getattr(alt, attrname, None)
return alternative if preference is None else preference
@staticmethod
def _get_valid_type(value, type_):
""" Turn the type argument into something useful. """
if type_ in (None, ''):
if type(value) in (bool, int, float, str, type('')):
type_ = type(value)
else:
return None
typestr = type_.__name__ if isinstance(type_, type) else str(type_)
typestr += '' # inner workaround for python 2.6+
# transforms ascii str to unicode because
# of unicode_literals import
if not typestr in Transformers:
return None
return typestr
@staticmethod
def _to_type(type_, value):
if value is None:
return value
try:
return Transformers[type_](value)
except TransformError:
raise ValueError('cannot transform value to type %s' % (type_,))
@classmethod
def _validate(cls, valid, value, type_):
if value is None:
return value
validator = cls._validator(valid)
return cls._validate_single_value(validator, value)
@classmethod
def _validate_single_value(cls, validator, value):
if not validator(value):
raise ValueError(validator.__name__)
return value
@classmethod
def _validator(cls, valid):
if callable(valid):
return valid
if not valid:
return lambda _: True
return cls._regexvalidator(valid)
@staticmethod
def _regexvalidator(valid):
def regex_validator(value):
testvalue = '' if value is None else str(value)
testvalue += '' # python2.6 compatibility
exp = valid.strip().lstrip('^').rstrip('$').strip()
exp = '^' + exp + '$'
if not re.match(exp, testvalue):
raise ValueError('value string must match {0!r}, is {1!r}'.format(exp, testvalue))
return True
return regex_validator
class _PropertyModel(object):
""" Objects whose __dict__ can be used to create a Property from;
calling it with a ``key`` argument will yield a nested model.
"""
# as class member to keep children out of instance __dict__s
_children = weakref.WeakKeyDictionary()
@staticmethod
def to_property(model):
return Property(**model.__dict__)
@classmethod
def model_family_to_properties(cls, parent_model):
return (Property(**m.__dict__) for m in cls._family(parent_model))
@classmethod
def _makechild(cls, parent, key):
child = cls(Key(parent.key) + key)
cls._children[parent].append(child)
return child
@classmethod
def _family(cls, root):
yield root
for child in itertools.chain(*[cls._family(c) for c in cls._children[root]]):
yield child
def __init__(self, key=None):
self._children[self] = []
self.key = Key(key).normal
def __getitem__(self, key):
return self._makechild(self, key)
def __enter__(self):
return self
def __exit__(self, *args, **kwargs):
pass
class ConfigBuilder(object):
def __init__(self):
self.models = OrderedDict()
def __getitem__(self, key):
return self.models.setdefault(key, _PropertyModel(key))
def properties(self):
return itertools.chain(
*(_PropertyModel.model_family_to_properties(m) for m in self.models.values()))
def to_configuration(self):
return Configuration.from_properties(self.properties())
class Configuration(Mapping):
""" A mapping of keys to corresponding values, backed by a collection of
:class:`Property` objects.
Immutable; call :meth:`update` or :meth:`replace` with a mapping
argument to modify a copy of a configuration.
Unset Property attributes of ``valid``, ``readonly`` and ``hidden``
are overridden by those of a property with a "parent" key.
"""
@classmethod
def from_properties(cls, properties):
cfg = cls()
cfg.__propertymap = _PropertyMap(properties)
return cfg
def to_properties(self):
return self.__propertymap.values()
@classmethod
def from_mapping(cls, mapping):
properties = (Property(key, value) for key, value in mapping.items())
return cls.from_properties(properties)
def to_nested_dict(self):
d = {}
for key, value in self.items():
target = d
for part in Key(key):
target = target.setdefault(str(part), {})
if value is not None:
target[''] = self[key]
for key in self:
parent = None
target = d
for part in Key(key):
parent = target
target = target[str(part)]
if [''] == list(target):
parent[str(part)] = target.pop('')
return d
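    # Sketch: a configuration built from {'a.b': 1, 'a.c': 2} nests to
    # {'a': {'b': 1, 'c': 2}}.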
def __init__(self):
self.__propertymap = _PropertyMap()
def __repr__(self):
return '{0}({1})'.format(self.__class__.__name__,
tuple(self.__propertymap.values()))
def __contains__(self, key):
return key in self.__propertymap
def __len__(self):
return len(self.__propertymap)
def __iter__(self):
return iter(self.__propertymap)
def __getitem__(self, key):
return self.property(key).value
def property(self, key):
""" Return the property corresponding to the key argument or raise a
ConfigKeyError.
"""
return self.__propertymap[key]
def replace(self, mapping, on_error=raising_error_handler):
""" Return a copy of this configuration with some values replaced by
the corresponding values in the mapping argument; adding new keys
is not allowed.
Resulting ConfigErrors will be raised or passed to a callable
error handler, if given.
"""
return self._mutated_by(mapping, self.__propertymap.replace, on_error)
def update(self, mapping, on_error=raising_error_handler):
""" Return a copy of this configuration with some values replaced or
added corresponding to the values in the mapping argument.
Resulting ConfigErrors will be raised or passed to a callable
error handler, if given.
"""
return self._mutated_by(mapping, self.__propertymap.update, on_error)
def _mutated_by(self, mapping, mutator, on_error):
mutated = self.__class__()
properties = []
for key, value in mapping.items():
try:
properties.append(Property(key, value))
except ConfigError as e:
on_error(e)
mutated.__propertymap = mutator(properties, on_error)
return mutated
class _InheritanceViewer(object):
def __init__(self, propertymap):
self.propertymap = propertymap
def property_with_inherited_attributes(self, key):
property = self.propertymap[key]
model = _PropertyModel()
model.__dict__.update(property.to_dict())
self._inherit_attribute_if_not_set('valid', model)
self._inherit_attribute_if_not_set('readonly', model)
self._inherit_attribute_if_not_set('hidden', model)
return _PropertyModel.to_property(model)
def _inherit_attribute_if_not_set(self, attrname, model):
if getattr(model, attrname, None) is None:
key = Key(model.key).parent
value = None
while value is None and key:
try:
value = getattr(self.propertymap[key.normal], attrname, None)
except KeyError:
pass
key = key.parent
setattr(model, attrname, value)
Transformers = {}
def transformer(name, *more):
global Transformers # hell yeah!
def transformer_decorator(func):
Transformers[name] = func
for additional in more:
Transformers[additional] = func
return func
return transformer_decorator
class TransformError(Exception):
def __init__(self, transformername, val):
msg = ("Error while trying to parse value with transformer "
"'%s': %s" % (transformername, val))
super(self.__class__, self).__init__(msg)
@transformer(None)
def _identity(val=None):
return val
@transformer(name='bool')
def _to_bool_transformer(val=None):
if isinstance(val, (bool, int, float, complex, list, set, dict, tuple)):
return bool(val)
if isinstance(val, (type(''), str)):
if val.strip().lower() in ('yes', 'true', 'y', '1'):
return True
if val.strip().lower() in ('false', 'no', '', 'n', '0'):
return False
raise TransformError('bool', val)
@transformer('int')
def _to_int_transformer(val=None):
try:
return int(val)
except (TypeError, ValueError):
raise TransformError('int', val)
@transformer('float')
def _to_float_transformer(val=None):
try:
return float(val)
except (TypeError, ValueError):
raise TransformError('float', val)
@transformer('str', 'unicode')
def _to_str_transformer(val=None):
if val is None:
return ''
if isinstance(val, (str, type(''))):
return val.strip() + '' # inner workaround for python 2.6+
return str(val) + '' # transforms ascii str to unicode because
# of unicode_literals import
| gpl-3.0 |
bdh1011/wau | venv/lib/python2.7/site-packages/pandas/util/terminal.py | 16 | 3559 | """
get_terminal_size() -- return width and height of terminal as a tuple
code from:
http://stackoverflow.com/questions/566746/how-to-get-console-window-width-in-python
written by
Harco Kuppens (http://stackoverflow.com/users/825214/harco-kuppens)
It is mentioned in the stackoverflow response that this code works
on linux, os x, windows and cygwin (windows).
"""
from __future__ import print_function
import os
__all__ = ['get_terminal_size']
def get_terminal_size():
"""
Detect terminal size and return tuple = (width, height).
Only to be used when running in a terminal. Note that the IPython notebook,
IPython zmq frontends, or IDLE do not run in a terminal.
"""
import platform
current_os = platform.system()
tuple_xy = None
if current_os == 'Windows':
tuple_xy = _get_terminal_size_windows()
if tuple_xy is None:
tuple_xy = _get_terminal_size_tput()
# needed for Windows' Python in Cygwin's xterm!
if current_os == 'Linux' or \
current_os == 'Darwin' or \
current_os.startswith('CYGWIN'):
tuple_xy = _get_terminal_size_linux()
if tuple_xy is None:
tuple_xy = (80, 25) # default value
return tuple_xy
def _get_terminal_size_windows():
res = None
try:
from ctypes import windll, create_string_buffer
# stdin handle is -10
# stdout handle is -11
# stderr handle is -12
h = windll.kernel32.GetStdHandle(-12)
csbi = create_string_buffer(22)
res = windll.kernel32.GetConsoleScreenBufferInfo(h, csbi)
except:
return None
if res:
import struct
(bufx, bufy, curx, cury, wattr, left, top, right, bottom, maxx,
maxy) = struct.unpack("hhhhHhhhhhh", csbi.raw)
sizex = right - left + 1
sizey = bottom - top + 1
return sizex, sizey
else:
return None
def _get_terminal_size_tput():
# get terminal width
# src: http://stackoverflow.com/questions/263890/how-do-i-find-the-width
# -height-of-a-terminal-window
try:
import subprocess
proc = subprocess.Popen(["tput", "cols"],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE)
output = proc.communicate(input=None)
cols = int(output[0])
proc = subprocess.Popen(["tput", "lines"],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE)
output = proc.communicate(input=None)
rows = int(output[0])
return (cols, rows)
except:
return None
def _get_terminal_size_linux():
def ioctl_GWINSZ(fd):
try:
import fcntl
import termios
import struct
import os
cr = struct.unpack(
'hh', fcntl.ioctl(fd, termios.TIOCGWINSZ, '1234'))
except:
return None
return cr
cr = ioctl_GWINSZ(0) or ioctl_GWINSZ(1) or ioctl_GWINSZ(2)
if not cr:
try:
fd = os.open(os.ctermid(), os.O_RDONLY)
cr = ioctl_GWINSZ(fd)
os.close(fd)
except:
pass
if not cr or cr == (0, 0):
try:
from os import environ as env
cr = (env['LINES'], env['COLUMNS'])
except:
return None
return int(cr[1]), int(cr[0])
if __name__ == "__main__":
sizex, sizey = get_terminal_size()
print('width = %s height = %s' % (sizex, sizey))
| mit |
brad-kaiser/spark | examples/src/main/python/ml/naive_bayes_example.py | 123 | 2017 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import print_function
# $example on$
from pyspark.ml.classification import NaiveBayes
from pyspark.ml.evaluation import MulticlassClassificationEvaluator
# $example off$
from pyspark.sql import SparkSession
if __name__ == "__main__":
spark = SparkSession\
.builder\
.appName("NaiveBayesExample")\
.getOrCreate()
# $example on$
# Load training data
data = spark.read.format("libsvm") \
.load("data/mllib/sample_libsvm_data.txt")
# Split the data into train and test
splits = data.randomSplit([0.6, 0.4], 1234)
train = splits[0]
test = splits[1]
# create the trainer and set its parameters
nb = NaiveBayes(smoothing=1.0, modelType="multinomial")
# train the model
model = nb.fit(train)
# select example rows to display.
predictions = model.transform(test)
predictions.show()
# compute accuracy on the test set
evaluator = MulticlassClassificationEvaluator(labelCol="label", predictionCol="prediction",
metricName="accuracy")
accuracy = evaluator.evaluate(predictions)
print("Test set accuracy = " + str(accuracy))
# $example off$
spark.stop()
| apache-2.0 |
PandaWei/tp-qemu | qemu/tests/cpuinfo_query.py | 9 | 1051 | from autotest.client.shared import error, utils
from virttest import utils_misc
@error.context_aware
def run(test, params, env):
"""
cpuinfo query test:
1). run query cmd. e.g -cpu ?cpuid
2). check the expected info is included in the cmd output.
3). raise error if defined info is missing.
"""
qemu_binary = utils_misc.get_qemu_binary(params)
error.context("run query cmd")
qcmd = params.get("query_cmd")
if qcmd is None:
raise error.TestError("query cmd is missing, "
"please check query_cmd in the config file")
cmd = qemu_binary + qcmd
output = utils.system_output(cmd)
error.context("check if expected info is included in output of %s " % cmd)
cpuinfos = params.get("cpu_info", "Conroe").split(",")
missing = []
for cpuinfo in cpuinfos:
if cpuinfo not in output:
missing.append(cpuinfo)
if missing:
raise error.TestFail("%s is missing in the output\n %s" %
(", ".join(missing), output))
| gpl-2.0 |
barbour-em/osf.io | scripts/tests/test_osfstorage_oldels.py | 51 | 5506 | import mock
from modularodm import Q
from framework.auth import Auth
from tests.base import OsfTestCase
from tests.factories import ProjectFactory
from website.addons.osfstorage import model
from website.addons.osfstorage import utils
from website.addons.osfstorage import oldels
from scripts.osfstorage import migrate_from_oldels as migration
class TestMigrateOldels(OsfTestCase):
def setUp(self):
super(TestMigrateOldels, self).setUp()
with mock.patch.object(model.OsfStorageNodeSettings, 'on_add'):
self.project = ProjectFactory()
self.user = self.project.creator
self.auth = Auth(user=self.user)
self.node_settings = self.project.get_addon('osfstorage')
tree, _ = oldels.OsfStorageFileTree.get_or_create('', self.node_settings)
tree.save()
self.node_settings.file_tree = tree
self.node_settings.save()
def test_creates_root_node(self):
assert self.node_settings.root_node is None
migration.migrate_node_settings(self.node_settings, dry=False)
assert self.node_settings.root_node is not None
assert not self.node_settings._dirty
def test_creates_root_node_on_none_file_tree(self):
self.node_settings.file_tree = None
self.node_settings.save()
assert self.node_settings.root_node is None
assert self.node_settings.file_tree is None
migration.migrate_node_settings(self.node_settings, dry=False)
assert self.node_settings.root_node is not None
assert not self.node_settings._dirty
def test_migrates_files(self):
names = []
for num in range(10):
names.append('DEAR GOD! {} CARPNADOS'.format(num))
oldels.OsfStorageFileRecord.get_or_create(names[-1], self.node_settings)
assert len(self.node_settings.file_tree.children) == 10
migration.migrate_node_settings(self.node_settings, dry=False)
migration.migrate_children(self.node_settings, dry=False)
children = self.node_settings.root_node.children
assert not self.node_settings._dirty
assert self.node_settings.root_node is not None
assert not self.node_settings.root_node._dirty
assert len(children) == 10
for child in children:
names.remove(child.name)
assert len(names) == 0
def test_migrates_guids(self):
names = []
for num in range(10):
names.append('DEAR GOD! {} CARPNADOS'.format(num))
guid = model.OsfStorageGuidFile(node=self.project, path=names[-1])
guid.save()
oldels.OsfStorageFileRecord.get_or_create(names[-1], self.node_settings)
assert len(model.OsfStorageGuidFile.find()) == 10
migration.migrate_node_settings(self.node_settings, dry=False)
migration.migrate_children(self.node_settings, dry=False)
guids = model.OsfStorageGuidFile.find()
paths = [x.path for x in model.OsfStorageFileNode.find(Q('kind', 'eq', 'file') & Q('node_settings', 'eq', self.node_settings))]
assert len(guids) == 10
for guid in guids:
paths.remove(guid._path)
assert len(paths) == 0
def test_migrate_logs(self):
names = []
for num in range(10):
names.append('DEAR GOD! {} CARPNADOS'.format(num))
x, _ = oldels.OsfStorageFileRecord.get_or_create(names[-1], self.node_settings)
x.delete(None)
self.project.logs[-1].params['path'] = x.path
self.project.logs[-1].save()
if num % 2 == 0:
x.undelete(None)
self.project.logs[-1].params['path'] = x.path
self.project.logs[-1].save()
migration.migrate_node_settings(self.node_settings, dry=False)
migration.migrate_children(self.node_settings, dry=False)
for log in self.project.logs:
if log.action.startswith('osf_storage_file'):
path = log.params['_path']
node = self.node_settings.root_node.find_child_by_name(path.strip('/'))
assert node._id in log.params['_urls']['view']
assert node._id in log.params['_urls']['download']
@mock.patch('framework.analytics.session')
def test_migrate_download_counts(self, mock_session):
names = []
for index, num in enumerate(range(10)):
names.append('DEAR GOD$! ({})^ CARPNADOS'.format(num))
fobj, _ = oldels.OsfStorageFileRecord.get_or_create(names[-1], self.node_settings)
for _id in range(index):
fobj.create_version(self.user, {
'folder': '',
'bucket': '',
'service': 'buttfiles',
'object': '{}{}'.format(index, _id),
})
utils.update_analytics(self.project, fobj.path, _id + 1)
assert len(fobj.versions) == index
assert fobj.get_download_count() == index
assert len(self.node_settings.file_tree.children) == 10
migration.migrate_node_settings(self.node_settings, dry=False)
migration.migrate_children(self.node_settings, dry=False)
for index, child in enumerate(self.node_settings.root_node.children):
assert len(child.versions) == index
assert child.get_download_count() == index
for _id in range(index):
assert child.get_download_count(_id) == 1
| apache-2.0 |
pathompongoo/ThGovJobApp | env/lib/python2.7/site-packages/setuptools/tests/test_markerlib.py | 449 | 2506 | import os
import unittest
from setuptools.tests.py26compat import skipIf
try:
import ast
except ImportError:
pass
class TestMarkerlib(unittest.TestCase):
@skipIf('ast' not in globals(),
"ast not available (Python < 2.6?)")
def test_markers(self):
from _markerlib import interpret, default_environment, compile
os_name = os.name
self.assertTrue(interpret(""))
self.assertTrue(interpret("os.name != 'buuuu'"))
self.assertTrue(interpret("os_name != 'buuuu'"))
self.assertTrue(interpret("python_version > '1.0'"))
self.assertTrue(interpret("python_version < '5.0'"))
self.assertTrue(interpret("python_version <= '5.0'"))
self.assertTrue(interpret("python_version >= '1.0'"))
self.assertTrue(interpret("'%s' in os.name" % os_name))
self.assertTrue(interpret("'%s' in os_name" % os_name))
self.assertTrue(interpret("'buuuu' not in os.name"))
self.assertFalse(interpret("os.name == 'buuuu'"))
self.assertFalse(interpret("os_name == 'buuuu'"))
self.assertFalse(interpret("python_version < '1.0'"))
self.assertFalse(interpret("python_version > '5.0'"))
self.assertFalse(interpret("python_version >= '5.0'"))
self.assertFalse(interpret("python_version <= '1.0'"))
self.assertFalse(interpret("'%s' not in os.name" % os_name))
self.assertFalse(interpret("'buuuu' in os.name and python_version >= '5.0'"))
self.assertFalse(interpret("'buuuu' in os_name and python_version >= '5.0'"))
environment = default_environment()
environment['extra'] = 'test'
self.assertTrue(interpret("extra == 'test'", environment))
self.assertFalse(interpret("extra == 'doc'", environment))
def raises_nameError():
try:
interpret("python.version == '42'")
except NameError:
pass
else:
raise Exception("Expected NameError")
raises_nameError()
def raises_syntaxError():
try:
interpret("(x for x in (4,))")
except SyntaxError:
pass
else:
raise Exception("Expected SyntaxError")
raises_syntaxError()
statement = "python_version == '5'"
self.assertEqual(compile(statement).__doc__, statement)
| gpl-3.0 |
legalsylvain/OpenUpgrade | openerp/addons/base/tests/test_orm.py | 37 | 16485 | from collections import defaultdict
from openerp.tools import mute_logger
from openerp.tests import common
UID = common.ADMIN_USER_ID
DB = common.DB
class TestORM(common.TransactionCase):
""" test special behaviors of ORM CRUD functions
TODO: use real Exception types instead of Exception """
def setUp(self):
super(TestORM, self).setUp()
cr, uid = self.cr, self.uid
self.partner = self.registry('res.partner')
self.users = self.registry('res.users')
self.p1 = self.partner.name_create(cr, uid, 'W')[0]
self.p2 = self.partner.name_create(cr, uid, 'Y')[0]
self.ir_rule = self.registry('ir.rule')
# sample unprivileged user
employee_gid = self.ref('base.group_user')
self.uid2 = self.users.create(cr, uid, {'name': 'test user', 'login': 'test', 'groups_id': [(4, employee_gid)]})
@mute_logger('openerp.osv.orm')
def testAccessDeletedRecords(self):
""" Verify that accessing deleted records works as expected """
cr, uid, uid2, p1, p2 = self.cr, self.uid, self.uid2, self.p1, self.p2
self.partner.unlink(cr, uid, [p1])
# read() is expected to skip deleted records because our API is not
# transactional for a sequence of search()->read() performed from the
# client-side... a concurrent deletion could therefore cause spurious
# exceptions even when simply opening a list view!
# /!\ Using unprivileged user to detect former side effects of ir.rules!
self.assertEqual([{'id': p2, 'name': 'Y'}], self.partner.read(cr, uid2, [p1,p2], ['name']), "read() should skip deleted records")
self.assertEqual([], self.partner.read(cr, uid2, [p1], ['name']), "read() should skip deleted records")
# Deleting an already deleted record should be simply ignored
self.assertTrue(self.partner.unlink(cr, uid, [p1]), "Re-deleting should be a no-op")
# Updating an already deleted record should raise, even as admin
with self.assertRaises(Exception):
self.partner.write(cr, uid, [p1], {'name': 'foo'})
@mute_logger('openerp.osv.orm')
def testAccessFilteredRecords(self):
""" Verify that accessing filtered records works as expected for non-admin user """
cr, uid, uid2, p1, p2 = self.cr, self.uid, self.uid2, self.p1, self.p2
partner_model = self.registry('ir.model').search(cr, uid, [('model','=','res.partner')])[0]
self.ir_rule.create(cr, uid, {'name': 'Y is invisible',
'domain_force': [('id', '!=', p1)],
'model_id': partner_model})
# search as unprivileged user
partners = self.partner.search(cr, uid2, [])
self.assertFalse(p1 in partners, "W should not be visible...")
self.assertTrue(p2 in partners, "... but Y should be visible")
# read as unprivileged user
with self.assertRaises(Exception):
self.partner.read(cr, uid2, [p1], ['name'])
# write as unprivileged user
with self.assertRaises(Exception):
self.partner.write(cr, uid2, [p1], {'name': 'foo'})
# unlink as unprivileged user
with self.assertRaises(Exception):
self.partner.unlink(cr, uid2, [p1])
# Prepare mixed case
self.partner.unlink(cr, uid, [p2])
# read mixed records: some deleted and some filtered
with self.assertRaises(Exception):
self.partner.read(cr, uid2, [p1,p2], ['name'])
# delete mixed records: some deleted and some filtered
with self.assertRaises(Exception):
self.partner.unlink(cr, uid2, [p1,p2])
@mute_logger('openerp.osv.orm')
def test_search_read(self):
# simple search_read
self.partner.create(self.cr, UID, {'name': 'MyPartner1'})
found = self.partner.search_read(self.cr, UID, [['name', '=', 'MyPartner1']], ['name'])
self.assertEqual(len(found), 1)
self.assertEqual(found[0]['name'], 'MyPartner1')
self.assertTrue('id' in found[0])
# search_read correct order
self.partner.create(self.cr, UID, {'name': 'MyPartner2'})
found = self.partner.search_read(self.cr, UID, [['name', 'like', 'MyPartner']], ['name'], order="name")
self.assertEqual(len(found), 2)
self.assertEqual(found[0]['name'], 'MyPartner1')
self.assertEqual(found[1]['name'], 'MyPartner2')
found = self.partner.search_read(self.cr, UID, [['name', 'like', 'MyPartner']], ['name'], order="name desc")
self.assertEqual(len(found), 2)
self.assertEqual(found[0]['name'], 'MyPartner2')
self.assertEqual(found[1]['name'], 'MyPartner1')
# search_read that finds nothing
found = self.partner.search_read(self.cr, UID, [['name', '=', 'Does not exists']], ['name'])
self.assertEqual(len(found), 0)
def test_groupby_date(self):
partners = dict(
A='2012-11-19',
B='2012-12-17',
C='2012-12-31',
D='2013-01-07',
E='2013-01-14',
F='2013-01-28',
G='2013-02-11',
)
all_partners = []
partners_by_day = defaultdict(set)
partners_by_month = defaultdict(set)
partners_by_year = defaultdict(set)
for name, date in partners.items():
p = self.partner.create(self.cr, UID, dict(name=name, date=date))
all_partners.append(p)
partners_by_day[date].add(p)
partners_by_month[date.rsplit('-', 1)[0]].add(p)
partners_by_year[date.split('-', 1)[0]].add(p)
def read_group(interval, domain=None):
main_domain = [('id', 'in', all_partners)]
if domain:
domain = ['&'] + main_domain + domain
else:
domain = main_domain
rg = self.partner.read_group(self.cr, self.uid, domain, ['date'], 'date' + ':' + interval)
result = {}
for r in rg:
result[r['date:' + interval]] = set(self.partner.search(self.cr, self.uid, r['__domain']))
return result
self.assertEqual(len(read_group('day')), len(partners_by_day))
self.assertEqual(len(read_group('month')), len(partners_by_month))
self.assertEqual(len(read_group('year')), len(partners_by_year))
rg = self.partner.read_group(self.cr, self.uid, [('id', 'in', all_partners)],
['date'], ['date:month', 'date:day'], lazy=False)
self.assertEqual(len(rg), len(all_partners))
class TestInherits(common.TransactionCase):
""" test the behavior of the orm for models that use _inherits;
specifically: res.users, that inherits from res.partner
"""
def setUp(self):
super(TestInherits, self).setUp()
self.partner = self.registry('res.partner')
self.user = self.registry('res.users')
def test_create(self):
""" creating a user should automatically create a new partner """
partners_before = self.partner.search(self.cr, UID, [])
foo_id = self.user.create(self.cr, UID, {'name': 'Foo', 'login': 'foo', 'password': 'foo'})
foo = self.user.browse(self.cr, UID, foo_id)
self.assertNotIn(foo.partner_id.id, partners_before)
def test_create_with_ancestor(self):
""" creating a user with a specific 'partner_id' should not create a new partner """
par_id = self.partner.create(self.cr, UID, {'name': 'Foo'})
partners_before = self.partner.search(self.cr, UID, [])
foo_id = self.user.create(self.cr, UID, {'partner_id': par_id, 'login': 'foo', 'password': 'foo'})
partners_after = self.partner.search(self.cr, UID, [])
self.assertEqual(set(partners_before), set(partners_after))
foo = self.user.browse(self.cr, UID, foo_id)
self.assertEqual(foo.name, 'Foo')
self.assertEqual(foo.partner_id.id, par_id)
@mute_logger('openerp.osv.orm')
def test_read(self):
""" inherited fields should be read without any indirection """
foo_id = self.user.create(self.cr, UID, {'name': 'Foo', 'login': 'foo', 'password': 'foo'})
foo_values, = self.user.read(self.cr, UID, [foo_id])
partner_id = foo_values['partner_id'][0]
partner_values, = self.partner.read(self.cr, UID, [partner_id])
self.assertEqual(foo_values['name'], partner_values['name'])
foo = self.user.browse(self.cr, UID, foo_id)
self.assertEqual(foo.name, foo.partner_id.name)
@mute_logger('openerp.osv.orm')
def test_copy(self):
""" copying a user should automatically copy its partner, too """
foo_id = self.user.create(self.cr, UID, {'name': 'Foo', 'login': 'foo', 'password': 'foo'})
foo_before, = self.user.read(self.cr, UID, [foo_id])
bar_id = self.user.copy(self.cr, UID, foo_id, {'login': 'bar', 'password': 'bar'})
foo_after, = self.user.read(self.cr, UID, [foo_id])
self.assertEqual(foo_before, foo_after)
foo, bar = self.user.browse(self.cr, UID, [foo_id, bar_id])
self.assertEqual(bar.login, 'bar')
self.assertNotEqual(foo.id, bar.id)
self.assertNotEqual(foo.partner_id.id, bar.partner_id.id)
@mute_logger('openerp.osv.orm')
def test_copy_with_ancestor(self):
""" copying a user with 'parent_id' in defaults should not duplicate the partner """
foo_id = self.user.create(self.cr, UID, {'name': 'Foo', 'login': 'foo', 'password': 'foo'})
par_id = self.partner.create(self.cr, UID, {'name': 'Bar'})
foo_before, = self.user.read(self.cr, UID, [foo_id])
partners_before = self.partner.search(self.cr, UID, [])
bar_id = self.user.copy(self.cr, UID, foo_id, {'partner_id': par_id, 'login': 'bar'})
foo_after, = self.user.read(self.cr, UID, [foo_id])
partners_after = self.partner.search(self.cr, UID, [])
self.assertEqual(foo_before, foo_after)
self.assertEqual(set(partners_before), set(partners_after))
foo, bar = self.user.browse(self.cr, UID, [foo_id, bar_id])
self.assertNotEqual(foo.id, bar.id)
self.assertEqual(bar.partner_id.id, par_id)
self.assertEqual(bar.login, 'bar', "login is given from copy parameters")
self.assertEqual(bar.password, foo.password, "password is given from original record")
self.assertEqual(bar.name, 'Bar', "name is given from specific partner")
CREATE = lambda values: (0, False, values)
UPDATE = lambda id, values: (1, id, values)
DELETE = lambda id: (2, id, False)
FORGET = lambda id: (3, id, False)
LINK_TO = lambda id: (4, id, False)
DELETE_ALL = lambda: (5, False, False)
REPLACE_WITH = lambda ids: (6, False, ids)
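# A hedged illustration (not part of the original tests) of how these command
# tuples are consumed when writing a one2many field; parent_id and child_id
# below are placeholder record ids:
#
#   self.partner.write(cr, UID, [parent_id], {'child_ids': [
#       CREATE({'name': 'new child'}),          # -> (0, False, values)
#       UPDATE(child_id, {'name': 'renamed'}),  # -> (1, id, values)
#       LINK_TO(other_child_id),                # -> (4, id, False)
#   ]})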
def sorted_by_id(list_of_dicts):
"sort dictionaries by their 'id' field; useful for comparisons"
return sorted(list_of_dicts, key=lambda d: d.get('id'))
class TestO2MSerialization(common.TransactionCase):
""" test the orm method 'write' on one2many fields """
def setUp(self):
super(TestO2MSerialization, self).setUp()
self.partner = self.registry('res.partner')
def test_no_command(self):
" empty list of commands yields an empty list of records "
results = self.partner.resolve_2many_commands(
self.cr, UID, 'child_ids', [])
self.assertEqual(results, [])
def test_CREATE_commands(self):
" returns the VALUES dict as-is "
values = [{'foo': 'bar'}, {'foo': 'baz'}, {'foo': 'baq'}]
results = self.partner.resolve_2many_commands(
self.cr, UID, 'child_ids', map(CREATE, values))
self.assertEqual(results, values)
def test_LINK_TO_command(self):
" reads the records from the database, records are returned with their ids. "
ids = [
self.partner.create(self.cr, UID, {'name': 'foo'}),
self.partner.create(self.cr, UID, {'name': 'bar'}),
self.partner.create(self.cr, UID, {'name': 'baz'})
]
commands = map(LINK_TO, ids)
results = self.partner.resolve_2many_commands(
self.cr, UID, 'child_ids', commands, ['name'])
self.assertEqual(sorted_by_id(results), sorted_by_id([
{'id': ids[0], 'name': 'foo'},
{'id': ids[1], 'name': 'bar'},
{'id': ids[2], 'name': 'baz'}
]))
def test_bare_ids_command(self):
" same as the equivalent LINK_TO commands "
ids = [
self.partner.create(self.cr, UID, {'name': 'foo'}),
self.partner.create(self.cr, UID, {'name': 'bar'}),
self.partner.create(self.cr, UID, {'name': 'baz'})
]
results = self.partner.resolve_2many_commands(
self.cr, UID, 'child_ids', ids, ['name'])
self.assertEqual(sorted_by_id(results), sorted_by_id([
{'id': ids[0], 'name': 'foo'},
{'id': ids[1], 'name': 'bar'},
{'id': ids[2], 'name': 'baz'}
]))
def test_UPDATE_command(self):
" take the in-db records and merge the provided information in "
id_foo = self.partner.create(self.cr, UID, {'name': 'foo'})
id_bar = self.partner.create(self.cr, UID, {'name': 'bar'})
id_baz = self.partner.create(self.cr, UID, {'name': 'baz', 'city': 'tag'})
results = self.partner.resolve_2many_commands(
self.cr, UID, 'child_ids', [
LINK_TO(id_foo),
UPDATE(id_bar, {'name': 'qux', 'city': 'tagtag'}),
UPDATE(id_baz, {'name': 'quux'})
], ['name', 'city'])
self.assertEqual(sorted_by_id(results), sorted_by_id([
{'id': id_foo, 'name': 'foo', 'city': False},
{'id': id_bar, 'name': 'qux', 'city': 'tagtag'},
{'id': id_baz, 'name': 'quux', 'city': 'tag'}
]))
def test_DELETE_command(self):
" deleted records are not returned at all. "
ids = [
self.partner.create(self.cr, UID, {'name': 'foo'}),
self.partner.create(self.cr, UID, {'name': 'bar'}),
self.partner.create(self.cr, UID, {'name': 'baz'})
]
commands = [DELETE(ids[0]), DELETE(ids[1]), DELETE(ids[2])]
results = self.partner.resolve_2many_commands(
self.cr, UID, 'child_ids', commands, ['name'])
self.assertEqual(results, [])
def test_mixed_commands(self):
ids = [
self.partner.create(self.cr, UID, {'name': name})
for name in ['NObar', 'baz', 'qux', 'NOquux', 'NOcorge', 'garply']
]
results = self.partner.resolve_2many_commands(
self.cr, UID, 'child_ids', [
CREATE({'name': 'foo'}),
UPDATE(ids[0], {'name': 'bar'}),
LINK_TO(ids[1]),
DELETE(ids[2]),
UPDATE(ids[3], {'name': 'quux',}),
UPDATE(ids[4], {'name': 'corge'}),
CREATE({'name': 'grault'}),
LINK_TO(ids[5])
], ['name'])
self.assertEqual(sorted_by_id(results), sorted_by_id([
{'name': 'foo'},
{'id': ids[0], 'name': 'bar'},
{'id': ids[1], 'name': 'baz'},
{'id': ids[3], 'name': 'quux'},
{'id': ids[4], 'name': 'corge'},
{'name': 'grault'},
{'id': ids[5], 'name': 'garply'}
]))
def test_LINK_TO_pairs(self):
"LINK_TO commands can be written as pairs, instead of triplets"
ids = [
self.partner.create(self.cr, UID, {'name': 'foo'}),
self.partner.create(self.cr, UID, {'name': 'bar'}),
self.partner.create(self.cr, UID, {'name': 'baz'})
]
commands = map(lambda id: (4, id), ids)
results = self.partner.resolve_2many_commands(
self.cr, UID, 'child_ids', commands, ['name'])
self.assertEqual(sorted_by_id(results), sorted_by_id([
{'id': ids[0], 'name': 'foo'},
{'id': ids[1], 'name': 'bar'},
{'id': ids[2], 'name': 'baz'}
]))
def test_singleton_commands(self):
"DELETE_ALL can appear as a singleton"
results = self.partner.resolve_2many_commands(
self.cr, UID, 'child_ids', [DELETE_ALL()], ['name'])
self.assertEqual(results, [])
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
hahalml/bigcouch | couchjs/scons/scons-local-2.0.1/SCons/Platform/__init__.py | 61 | 9379 | """SCons.Platform
SCons platform selection.
This looks for modules that define a callable object that can modify a
construction environment as appropriate for a given platform.
Note that we take a more simplistic view of "platform" than Python does.
We're looking for a single string that determines a set of
tool-independent variables with which to initialize a construction
environment. Consequently, we'll examine both sys.platform and os.name
(and anything else that might come into play) in order to return some
specification which is unique enough for our purposes.
Note that because this subsystem just *selects* a callable that can
modify a construction environment, it's possible for people to define
their own "platform specification" in an arbitrary callable function.
No one needs to use or tie in to this subsystem in order to roll
their own platform definition.
"""
#
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Platform/__init__.py 5134 2010/08/16 23:02:40 bdeegan"
import SCons.compat
import imp
import os
import sys
import tempfile
import SCons.Errors
import SCons.Subst
import SCons.Tool
def platform_default():
"""Return the platform string for our execution environment.
The returned value should map to one of the SCons/Platform/*.py
files. Since we're architecture independent, though, we don't
care about the machine architecture.
"""
osname = os.name
if osname == 'java':
osname = os._osType
if osname == 'posix':
if sys.platform == 'cygwin':
return 'cygwin'
elif sys.platform.find('irix') != -1:
return 'irix'
elif sys.platform.find('sunos') != -1:
return 'sunos'
elif sys.platform.find('hp-ux') != -1:
return 'hpux'
elif sys.platform.find('aix') != -1:
return 'aix'
elif sys.platform.find('darwin') != -1:
return 'darwin'
else:
return 'posix'
elif os.name == 'os2':
return 'os2'
else:
return sys.platform
def platform_module(name = platform_default()):
"""Return the imported module for the platform.
This looks for a module name that matches the specified argument.
If the name is unspecified, we fetch the appropriate default for
our execution environment.
"""
full_name = 'SCons.Platform.' + name
if full_name not in sys.modules:
if os.name == 'java':
eval(full_name)
else:
try:
file, path, desc = imp.find_module(name,
sys.modules['SCons.Platform'].__path__)
try:
mod = imp.load_module(full_name, file, path, desc)
finally:
if file:
file.close()
except ImportError:
try:
import zipimport
importer = zipimport.zipimporter( sys.modules['SCons.Platform'].__path__[0] )
mod = importer.load_module(full_name)
except ImportError:
raise SCons.Errors.UserError("No platform named '%s'" % name)
setattr(SCons.Platform, name, mod)
return sys.modules[full_name]
def DefaultToolList(platform, env):
"""Select a default tool list for the specified platform.
"""
return SCons.Tool.tool_list(platform, env)
class PlatformSpec(object):
def __init__(self, name, generate):
self.name = name
self.generate = generate
def __call__(self, *args, **kw):
return self.generate(*args, **kw)
def __str__(self):
return self.name
class TempFileMunge(object):
"""A callable class. You can set an Environment variable to this,
then call it with a string argument, then it will perform temporary
file substitution on it. This is used to circumvent the long command
line limitation.
Example usage:
env["TEMPFILE"] = TempFileMunge
env["LINKCOM"] = "${TEMPFILE('$LINK $TARGET $SOURCES')}"
By default, the name of the temporary file used begins with a
prefix of '@'. This may be configured for other tool chains by
setting '$TEMPFILEPREFIX'.
env["TEMPFILEPREFIX"] = '-@' # diab compiler
env["TEMPFILEPREFIX"] = '-via' # arm tool chain
"""
def __init__(self, cmd):
self.cmd = cmd
def __call__(self, target, source, env, for_signature):
if for_signature:
# If we're being called for signature calculation, it's
# because we're being called by the string expansion in
# Subst.py, which has the logic to strip any $( $) that
# may be in the command line we squirreled away. So we
# just return the raw command line and let the upper
# string substitution layers do their thing.
return self.cmd
# Now we're actually being called because someone is actually
# going to try to execute the command, so we have to do our
# own expansion.
cmd = env.subst_list(self.cmd, SCons.Subst.SUBST_CMD, target, source)[0]
try:
maxline = int(env.subst('$MAXLINELENGTH'))
except ValueError:
maxline = 2048
length = 0
for c in cmd:
length += len(c)
if length <= maxline:
return self.cmd
# We do a normpath because mktemp() has what appears to be
# a bug in Windows that will use a forward slash as a path
# delimiter. Windows's link mistakes that for a command line
# switch and barfs.
#
# We use the .lnk suffix for the benefit of the Phar Lap
# linkloc linker, which likes to append an .lnk suffix if
# none is given.
(fd, tmp) = tempfile.mkstemp('.lnk', text=True)
native_tmp = SCons.Util.get_native_path(os.path.normpath(tmp))
if env['SHELL'] and env['SHELL'] == 'sh':
# The sh shell will try to escape the backslashes in the
# path, so unescape them.
native_tmp = native_tmp.replace('\\', r'\\\\')
# In Cygwin, we want to use rm to delete the temporary
# file, because del does not exist in the sh shell.
rm = env.Detect('rm') or 'del'
else:
# Don't use 'rm' if the shell is not sh, because rm won't
# work with the Windows shells (cmd.exe or command.com) or
# Windows path names.
rm = 'del'
prefix = env.subst('$TEMPFILEPREFIX')
if not prefix:
prefix = '@'
args = list(map(SCons.Subst.quote_spaces, cmd[1:]))
os.write(fd, " ".join(args) + "\n")
os.close(fd)
# XXX Using the SCons.Action.print_actions value directly
# like this is bogus, but expedient. This class should
# really be rewritten as an Action that defines the
# __call__() and strfunction() methods and lets the
# normal action-execution logic handle whether or not to
# print/execute the action. The problem, though, is all
# of that is decided before we execute this method as
# part of expanding the $TEMPFILE construction variable.
# Consequently, refactoring this will have to wait until
# we get more flexible with allowing Actions to exist
# independently and get strung together arbitrarily like
# Ant tasks. In the meantime, it's going to be more
# user-friendly to not let obsession with architectural
# purity get in the way of just being helpful, so we'll
# reach into SCons.Action directly.
if SCons.Action.print_actions:
print("Using tempfile "+native_tmp+" for command line:\n"+
str(cmd[0]) + " " + " ".join(args))
return [ cmd[0], prefix + native_tmp + '\n' + rm, native_tmp ]
def Platform(name = platform_default()):
"""Select a canned Platform specification.
"""
module = platform_module(name)
spec = PlatformSpec(name, module.generate)
return spec
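# A hedged usage sketch (illustrative, not part of the original module): the
# returned PlatformSpec is normally handed to a construction environment,
# whose initialization calls spec.generate(env) to seed tool-independent
# variables before tools are set up.
#
#   import SCons.Environment
#   env = SCons.Environment.Environment(platform=Platform('posix'))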
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| apache-2.0 |
denisff/python-for-android | python3-alpha/python3-src/Tools/scripts/untabify.py | 49 | 1298 | #! /usr/bin/env python3
"Replace tabs with spaces in argument files. Print names of changed files."
import os
import sys
import getopt
import tokenize
def main():
tabsize = 8
try:
opts, args = getopt.getopt(sys.argv[1:], "t:")
if not args:
raise getopt.error("At least one file argument required")
except getopt.error as msg:
print(msg)
print("usage:", sys.argv[0], "[-t tabwidth] file ...")
return
for optname, optvalue in opts:
if optname == '-t':
tabsize = int(optvalue)
for filename in args:
process(filename, tabsize)
def process(filename, tabsize, verbose=True):
try:
with tokenize.open(filename) as f:
text = f.read()
encoding = f.encoding
except IOError as msg:
print("%r: I/O error: %s" % (filename, msg))
return
newtext = text.expandtabs(tabsize)
if newtext == text:
return
backup = filename + "~"
try:
os.unlink(backup)
except os.error:
pass
try:
os.rename(filename, backup)
except os.error:
pass
with open(filename, "w", encoding=encoding) as f:
f.write(newtext)
if verbose:
print(filename)
if __name__ == '__main__':
main()
| apache-2.0 |
PythonNut/servo | tests/wpt/web-platform-tests/tools/html5lib/html5lib/filters/inject_meta_charset.py | 1730 | 2746 | from __future__ import absolute_import, division, unicode_literals
from . import _base
class Filter(_base.Filter):
def __init__(self, source, encoding):
_base.Filter.__init__(self, source)
self.encoding = encoding
def __iter__(self):
state = "pre_head"
meta_found = (self.encoding is None)
pending = []
for token in _base.Filter.__iter__(self):
type = token["type"]
if type == "StartTag":
if token["name"].lower() == "head":
state = "in_head"
elif type == "EmptyTag":
if token["name"].lower() == "meta":
# replace charset with actual encoding
has_http_equiv_content_type = False
for (namespace, name), value in token["data"].items():
if namespace is not None:
continue
elif name.lower() == 'charset':
token["data"][(namespace, name)] = self.encoding
meta_found = True
break
elif name == 'http-equiv' and value.lower() == 'content-type':
has_http_equiv_content_type = True
else:
if has_http_equiv_content_type and (None, "content") in token["data"]:
token["data"][(None, "content")] = 'text/html; charset=%s' % self.encoding
meta_found = True
elif token["name"].lower() == "head" and not meta_found:
# insert meta into empty head
yield {"type": "StartTag", "name": "head",
"data": token["data"]}
yield {"type": "EmptyTag", "name": "meta",
"data": {(None, "charset"): self.encoding}}
yield {"type": "EndTag", "name": "head"}
meta_found = True
continue
elif type == "EndTag":
if token["name"].lower() == "head" and pending:
# insert meta into head (if necessary) and flush pending queue
yield pending.pop(0)
if not meta_found:
yield {"type": "EmptyTag", "name": "meta",
"data": {(None, "charset"): self.encoding}}
while pending:
yield pending.pop(0)
meta_found = True
state = "post_head"
if state == "in_head":
pending.append(token)
else:
yield token
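# A minimal, hedged usage sketch appended for illustration (not part of the
# original module). It assumes the usual html5lib pipeline of parser ->
# tree walker -> filter -> serializer; the serializer option shown is
# illustrative.
if __name__ == '__main__':
    import html5lib
    from html5lib.serializer import HTMLSerializer

    dom = html5lib.parse('<html><head></head><body>hi</body></html>')
    walker = html5lib.getTreeWalker('etree')
    # Inject <meta charset="utf-8"> into the token stream before serializing.
    stream = Filter(walker(dom), 'utf-8')
    print(HTMLSerializer(omit_optional_tags=False).render(stream))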
| mpl-2.0 |
franklai/lyric-get | lyric_engine/modules/evesta.py | 1 | 2169 | import logging
from utils import common
from utils.lyric_base import LyricBase
site_class = 'Evesta'
site_index = 'evesta'
site_keyword = 'evesta'
site_url = 'http://lyric.evesta.jp'
test_url = 'http://lyric.evesta.jp/l7bb423.html'
class Evesta(LyricBase):
def parse_page(self):
url = self.url
html = self.get_html(url)
if not html:
logging.info('Failed to get html of url [%s]', url)
return False
if not self.parse_lyric(html):
logging.info('Failed to get lyric of url [%s]', url)
return False
if not self.parse_song_info(html):
logging.info('Failed to get song info of url [%s]', url)
return True
def get_html(self, url):
html = common.get_url_content(url)
if not html:
return False
return html
def parse_lyric(self, html):
html = html.replace("\r\n", "")
prefix = '<div id="lyricbody">'
suffix = "</div>"
lyric = common.find_string_by_prefix_suffix(
html, prefix, suffix, False)
if not lyric:
logging.info("Failed to parse lyric from html [%s]", html)
return False
lyric = lyric.replace("<br>", "\n")
lyric = lyric.strip()
lyric = common.unicode2string(lyric)
lyric = common.half2full(lyric)
self.lyric = lyric
return True
def parse_song_info(self, html):
prefix = '<div id="lyrictitle">'
suffix = "</div>"
block = common.find_string_by_prefix_suffix(
html, prefix, suffix, False)
patterns = {
"title": r"<h1>(.*?) 歌詞</h1>",
"artist": r">歌:(.*?)</p>",
"lyricist": r">作詞:(.*?)</p>",
"composer": r">作曲:(.*?)</p>"
}
self.set_attr(patterns, block)
return True
def get_lyric(url):
obj = Evesta(url)
return obj.get()
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG)
url = test_url
full = get_lyric(url)
if not full:
print('Failed to get lyric')
exit()
print(full)
| mit |
fdupoux/ansible | contrib/inventory/apache-libcloud.py | 151 | 11756 | #!/usr/bin/env python
# (c) 2013, Sebastien Goasguen <[email protected]>
#
# This file is part of Ansible,
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
######################################################################
'''
Apache Libcloud generic external inventory script
=================================
Generates inventory that Ansible can understand by making API request to
Cloud providers using the Apache libcloud library.
This script also assumes there is a libcloud.ini file alongside it
'''
import sys
import os
import argparse
import re
from time import time
import ConfigParser
from six import iteritems, string_types
from libcloud.compute.types import Provider
from libcloud.compute.providers import get_driver
import libcloud.security as sec
try:
import json
except ImportError:
import simplejson as json
class LibcloudInventory(object):
def __init__(self):
''' Main execution path '''
# Inventory grouped by instance IDs, tags, security groups, regions,
# and availability zones
self.inventory = {}
# Index of hostname (address) to instance ID
self.index = {}
# Read settings and parse CLI arguments
self.read_settings()
self.parse_cli_args()
# Cache
if self.args.refresh_cache:
self.do_api_calls_update_cache()
elif not self.is_cache_valid():
self.do_api_calls_update_cache()
# Data to print
if self.args.host:
data_to_print = self.get_host_info()
elif self.args.list:
# Display list of instances for inventory
if len(self.inventory) == 0:
data_to_print = self.get_inventory_from_cache()
else:
data_to_print = self.json_format_dict(self.inventory, True)
print(data_to_print)
def is_cache_valid(self):
''' Determines if the cache files have expired, or if it is still valid '''
if os.path.isfile(self.cache_path_cache):
mod_time = os.path.getmtime(self.cache_path_cache)
current_time = time()
if (mod_time + self.cache_max_age) > current_time:
if os.path.isfile(self.cache_path_index):
return True
return False
def read_settings(self):
''' Reads the settings from the libcloud.ini file '''
config = ConfigParser.SafeConfigParser()
libcloud_default_ini_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'libcloud.ini')
libcloud_ini_path = os.environ.get('LIBCLOUD_INI_PATH', libcloud_default_ini_path)
config.read(libcloud_ini_path)
if not config.has_section('driver'):
raise ValueError('libcloud.ini file must contain a [driver] section')
if config.has_option('driver', 'provider'):
self.provider = config.get('driver','provider')
else:
raise ValueError('libcloud.ini does not have a provider defined')
if config.has_option('driver', 'key'):
self.key = config.get('driver','key')
else:
raise ValueError('libcloud.ini does not have a key defined')
if config.has_option('driver', 'secret'):
self.secret = config.get('driver','secret')
else:
raise ValueError('libcloud.ini does not have a secret defined')
# Optional driver settings; default them so the unconditional driver
# construction below does not reference undefined attributes.
self.host = None
self.secure = True
self.path = None
if config.has_option('driver', 'host'):
self.host = config.get('driver', 'host')
if config.has_option('driver', 'secure'):
self.secure = config.getboolean('driver', 'secure')
if config.has_option('driver', 'verify_ssl_cert'):
self.verify_ssl_cert = config.get('driver', 'verify_ssl_cert')
if config.has_option('driver', 'port'):
self.port = config.get('driver', 'port')
if config.has_option('driver', 'path'):
self.path = config.get('driver', 'path')
if config.has_option('driver', 'api_version'):
self.api_version = config.get('driver', 'api_version')
Driver = get_driver(getattr(Provider, self.provider))
self.conn = Driver(key=self.key, secret=self.secret, secure=self.secure,
host=self.host, path=self.path)
# Cache related
cache_path = config.get('cache', 'cache_path')
self.cache_path_cache = cache_path + "/ansible-libcloud.cache"
self.cache_path_index = cache_path + "/ansible-libcloud.index"
self.cache_max_age = config.getint('cache', 'cache_max_age')
def parse_cli_args(self):
'''
Command line argument processing
'''
parser = argparse.ArgumentParser(description='Produce an Ansible Inventory file based on libcloud supported providers')
parser.add_argument('--list', action='store_true', default=True,
help='List instances (default: True)')
parser.add_argument('--host', action='store',
help='Get all the variables about a specific instance')
parser.add_argument('--refresh-cache', action='store_true', default=False,
help='Force refresh of cache by making API requests to libcloud supported providers (default: False - use cache files)')
self.args = parser.parse_args()
def do_api_calls_update_cache(self):
'''
Do API calls to a location, and save data in cache files
'''
self.get_nodes()
self.write_to_cache(self.inventory, self.cache_path_cache)
self.write_to_cache(self.index, self.cache_path_index)
def get_nodes(self):
'''
Gets the list of all nodes
'''
for node in self.conn.list_nodes():
self.add_node(node)
def get_node(self, node_id):
'''
Gets details about a specific node
'''
return [node for node in self.conn.list_nodes() if node.id == node_id][0]
def add_node(self, node):
'''
Adds a node to the inventory and index, as long as it is
addressable
'''
# Only want running instances
if node.state != 0:
return
# Select the best destination address
dest = node.public_ips[0] if node.public_ips else None
if not dest:
# Skip instances we cannot address (e.g. private VPC subnet)
return
# Add to index
self.index[dest] = node.id
# Inventory: Group by instance ID (always a group of 1)
self.inventory[node.name] = [dest]
'''
# Inventory: Group by region
self.push(self.inventory, region, dest)
# Inventory: Group by availability zone
self.push(self.inventory, node.placement, dest)
# Inventory: Group by instance type
self.push(self.inventory, self.to_safe('type_' + node.instance_type), dest)
'''
# Inventory: Group by key pair
if node.extra.get('key_name'):
self.push(self.inventory, self.to_safe('key_' + node.extra['key_name']), dest)
# Inventory: Group by security group, quick thing to handle single sg
if node.extra.get('security_group'):
self.push(self.inventory, self.to_safe('sg_' + node.extra['security_group'][0]), dest)
# Inventory: Group by tag
if node.extra.get('tags'):
for tagkey in node.extra['tags'].keys():
self.push(self.inventory, self.to_safe('tag_' + tagkey + '_' + node.extra['tags'][tagkey]), dest)
def get_host_info(self):
'''
Get variables about a specific host
'''
if len(self.index) == 0:
# Need to load index from cache
self.load_index_from_cache()
if self.args.host not in self.index:
# try updating the cache
self.do_api_calls_update_cache()
if self.args.host not in self.index:
# host might not exist anymore
return self.json_format_dict({}, True)
node_id = self.index[self.args.host]
node = self.get_node(node_id)
instance_vars = {}
for key in vars(node):
value = getattr(node, key)
key = self.to_safe('ec2_' + key)
# Handle complex types
if isinstance(value, (int, bool)):
instance_vars[key] = value
elif isinstance(value, string_types):
instance_vars[key] = value.strip()
elif value is None:
instance_vars[key] = ''
elif key == 'ec2_region':
instance_vars[key] = value.name
elif key == 'ec2_tags':
for k, v in iteritems(value):
key = self.to_safe('ec2_tag_' + k)
instance_vars[key] = v
elif key == 'ec2_groups':
group_ids = []
group_names = []
for group in value:
group_ids.append(group.id)
group_names.append(group.name)
instance_vars["ec2_security_group_ids"] = ','.join(group_ids)
instance_vars["ec2_security_group_names"] = ','.join(group_names)
else:
pass
# TODO Product codes if someone finds them useful
#print(key)
#print(type(value))
#print(value)
return self.json_format_dict(instance_vars, True)
def push(self, my_dict, key, element):
'''
Pushed an element onto an array that may not have been defined in
the dict
'''
if key in my_dict:
my_dict[key].append(element)
else:
my_dict[key] = [element]
def get_inventory_from_cache(self):
'''
Reads the inventory from the cache file and returns it as a JSON
object
'''
cache = open(self.cache_path_cache, 'r')
json_inventory = cache.read()
return json_inventory
def load_index_from_cache(self):
'''
Reads the index from the cache file sets self.index
'''
cache = open(self.cache_path_index, 'r')
json_index = cache.read()
self.index = json.loads(json_index)
def write_to_cache(self, data, filename):
'''
Writes data in JSON format to a file
'''
json_data = self.json_format_dict(data, True)
cache = open(filename, 'w')
cache.write(json_data)
cache.close()
def to_safe(self, word):
'''
Converts 'bad' characters in a string to underscores so they can be
used as Ansible groups
'''
return re.sub("[^A-Za-z0-9\-]", "_", word)
def json_format_dict(self, data, pretty=False):
'''
Converts a dict to a JSON object and dumps it as a formatted
string
'''
if pretty:
return json.dumps(data, sort_keys=True, indent=2)
else:
return json.dumps(data)
def main():
LibcloudInventory()
if __name__ == '__main__':
main()
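# A hedged example of the libcloud.ini this script expects (all values are
# illustrative placeholders; read_settings() above requires provider/key/secret
# in [driver] and reads both [cache] entries unconditionally):
#
#   [driver]
#   provider = EC2_US_EAST
#   key = YOUR_ACCESS_KEY
#   secret = YOUR_SECRET_KEY
#
#   [cache]
#   cache_path = /tmp
#   cache_max_age = 300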
| gpl-3.0 |
pyfa-org/eos | tests/integration/source_switch/source_switch/test_side_effect.py | 1 | 5038 | # ==============================================================================
# Copyright (C) 2011 Diego Duclos
# Copyright (C) 2011-2018 Anton Vorobyov
#
# This file is part of Eos.
#
# Eos is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Eos is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Eos. If not, see <http://www.gnu.org/licenses/>.
# ==============================================================================
from eos import Booster
from eos import Fit
from eos.const.eve import EffectCategoryId
from tests.integration.source_switch.testcase import SourceSwitchTestCase
class TestSourceSwitchSideEffect(SourceSwitchTestCase):
def test_persistence(self):
# Here we check that when the item type no longer has the effect whose
# status was changed, everything runs as expected, and that when this
# effect appears again its changed status is preserved
# Setup
chance_attr1_id = self.allocate_attr_id('src1', 'src2')
self.mkattr(src='src1', attr_id=chance_attr1_id)
self.mkattr(src='src2', attr_id=chance_attr1_id)
chance_attr2 = self.mkattr(src='src1')
chance_attr3 = self.mkattr(src='src1')
# 1st effect exists as side-effect in both sources
effect1_id = self.allocate_effect_id('src1', 'src2')
effect1_src1 = self.mkeffect(
src='src1',
effect_id=effect1_id,
category_id=EffectCategoryId.passive,
fitting_usage_chance_attr_id=chance_attr1_id)
effect1_src2 = self.mkeffect(
src='src2',
effect_id=effect1_id,
category_id=EffectCategoryId.passive,
fitting_usage_chance_attr_id=chance_attr1_id)
# 2nd effect exists as side-effect in src1, and as regular effect in
# src2
effect2_id = self.allocate_effect_id('src1', 'src2')
effect2_src1 = self.mkeffect(
src='src1',
effect_id=effect2_id,
category_id=EffectCategoryId.passive,
fitting_usage_chance_attr_id=chance_attr2.id)
effect2_src2 = self.mkeffect(
src='src2',
effect_id=effect2_id,
category_id=EffectCategoryId.passive)
# 3rd effect exists as side-effect in src1 and doesn't exist in src2 at
# all
effect3_id = self.allocate_effect_id('src1', 'src2')
effect3_src1 = self.mkeffect(
src='src1',
effect_id=effect3_id,
category_id=EffectCategoryId.passive,
fitting_usage_chance_attr_id=chance_attr3.id)
item_type_id = self.allocate_type_id('src1', 'src2')
self.mktype(
src='src1',
type_id=item_type_id,
attrs={
chance_attr1_id: 0.2,
chance_attr2.id: 0.3,
chance_attr3.id: 0.4},
effects=(effect1_src1, effect2_src1, effect3_src1))
self.mktype(
src='src2',
type_id=item_type_id,
attrs={chance_attr1_id: 0.7},
effects=(effect1_src2, effect2_src2))
fit = Fit()
item = Booster(item_type_id)
fit.boosters.add(item)
item.set_side_effect_status(effect1_id, True)
item.set_side_effect_status(effect2_id, True)
item.set_side_effect_status(effect3_id, True)
# Action
fit.solar_system.source = 'src2'
# Verification
side_effects = item.side_effects
self.assertEqual(len(side_effects), 1)
self.assertIn(effect1_id, side_effects)
side_effect1 = side_effects[effect1_id]
self.assertAlmostEqual(side_effect1.chance, 0.7)
self.assertIs(side_effect1.status, True)
# Action
fit.solar_system.source = 'src1'
# Verification
side_effects = item.side_effects
self.assertEqual(len(side_effects), 3)
self.assertIn(effect1_id, side_effects)
side_effect1 = side_effects[effect1_id]
self.assertAlmostEqual(side_effect1.chance, 0.2)
self.assertIs(side_effect1.status, True)
self.assertIn(effect2_id, side_effects)
side_effect2 = side_effects[effect2_id]
self.assertAlmostEqual(side_effect2.chance, 0.3)
self.assertIs(side_effect2.status, True)
self.assertIn(effect3_id, side_effects)
side_effect3 = side_effects[effect3_id]
self.assertAlmostEqual(side_effect3.chance, 0.4)
self.assertIs(side_effect3.status, True)
# Cleanup
self.assert_solsys_buffers_empty(fit.solar_system)
self.assert_log_entries(0)
| lgpl-3.0 |
x303597316/hue | desktop/core/ext-py/Django-1.6.10/django/contrib/gis/maps/google/gmap.py | 174 | 9102 | from django.conf import settings
from django.template.loader import render_to_string
from django.utils.html import format_html
from django.utils.safestring import mark_safe
from django.utils.six.moves import xrange
from django.contrib.gis.maps.google.overlays import GPolygon, GPolyline, GMarker
class GoogleMapException(Exception):
pass
# The default Google Maps URL (for the API javascript)
# TODO: Internationalize for Japan, UK, etc.
GOOGLE_MAPS_URL='http://maps.google.com/maps?file=api&v=%s&key='
class GoogleMap(object):
"A class for generating Google Maps JavaScript."
# String constants
onunload = mark_safe('onunload="GUnload()"') # Cleans up after Google Maps
vml_css = mark_safe('v\:* {behavior:url(#default#VML);}') # CSS for IE VML
xmlns = mark_safe('xmlns:v="urn:schemas-microsoft-com:vml"') # XML Namespace (for IE VML).
def __init__(self, key=None, api_url=None, version=None,
center=None, zoom=None, dom_id='map',
kml_urls=[], polylines=None, polygons=None, markers=None,
template='gis/google/google-map.js',
js_module='geodjango',
extra_context={}):
# The Google Maps API Key defined in the settings will be used
# if not passed in as a parameter. The use of an API key is
# _required_.
if not key:
try:
self.key = settings.GOOGLE_MAPS_API_KEY
except AttributeError:
raise GoogleMapException('Google Maps API Key not found (try adding GOOGLE_MAPS_API_KEY to your settings).')
else:
self.key = key
# Getting the Google Maps API version, defaults to using the latest ("2.x"),
# this is not necessarily the most stable.
if not version:
self.version = getattr(settings, 'GOOGLE_MAPS_API_VERSION', '2.x')
else:
self.version = version
# Can specify the API URL in the `api_url` keyword.
if not api_url:
self.api_url = getattr(settings, 'GOOGLE_MAPS_URL', GOOGLE_MAPS_URL) % self.version
else:
self.api_url = api_url
# Setting the DOM id of the map, the load function, the JavaScript
# template, and the KML URLs array.
self.dom_id = dom_id
self.extra_context = extra_context
self.js_module = js_module
self.template = template
self.kml_urls = kml_urls
# Does the user want any GMarker, GPolygon, and/or GPolyline overlays?
overlay_info = [[GMarker, markers, 'markers'],
[GPolygon, polygons, 'polygons'],
[GPolyline, polylines, 'polylines']]
for overlay_class, overlay_list, varname in overlay_info:
setattr(self, varname, [])
if overlay_list:
for overlay in overlay_list:
if isinstance(overlay, overlay_class):
getattr(self, varname).append(overlay)
else:
getattr(self, varname).append(overlay_class(overlay))
# If GMarker, GPolygons, and/or GPolylines are used the zoom will be
# automatically calculated via the Google Maps API. If both a zoom
# level and a center coordinate are provided with polygons/polylines,
# no automatic determination will occur.
self.calc_zoom = False
if self.polygons or self.polylines or self.markers:
if center is None or zoom is None:
self.calc_zoom = True
# Defaults for the zoom level and center coordinates if the zoom
# is not automatically calculated.
if zoom is None: zoom = 4
self.zoom = zoom
if center is None: center = (0, 0)
self.center = center
def render(self):
"""
Generates the JavaScript necessary for displaying this Google Map.
"""
params = {'calc_zoom' : self.calc_zoom,
'center' : self.center,
'dom_id' : self.dom_id,
'js_module' : self.js_module,
'kml_urls' : self.kml_urls,
'zoom' : self.zoom,
'polygons' : self.polygons,
'polylines' : self.polylines,
'icons': self.icons,
'markers' : self.markers,
}
params.update(self.extra_context)
return render_to_string(self.template, params)
@property
def body(self):
"Returns HTML body tag for loading and unloading Google Maps javascript."
return format_html('<body {0} {1}>', self.onload, self.onunload)
@property
def onload(self):
"Returns the `onload` HTML <body> attribute."
return format_html('onload="{0}.{1}_load()"', self.js_module, self.dom_id)
@property
def api_script(self):
"Returns the <script> tag for the Google Maps API javascript."
return format_html('<script src="{0}{1}" type="text/javascript"></script>',
self.api_url, self.key)
@property
def js(self):
"Returns only the generated Google Maps JavaScript (no <script> tags)."
return self.render()
@property
def scripts(self):
"Returns all <script></script> tags required with Google Maps JavaScript."
return format_html('{0}\n <script type="text/javascript">\n//<![CDATA[\n{1}//]]>\n </script>',
self.api_script, mark_safe(self.js))
@property
def style(self):
"Returns additional CSS styling needed for Google Maps on IE."
return format_html('<style type="text/css">{0}</style>', self.vml_css)
@property
def xhtml(self):
"Returns XHTML information needed for IE VML overlays."
return format_html('<html xmlns="http://www.w3.org/1999/xhtml" {0}>', self.xmlns)
@property
def icons(self):
"Returns a sequence of GIcon objects in this map."
return set([marker.icon for marker in self.markers if marker.icon])
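# A minimal usage sketch (illustrative only; the view, the template name and
# the context variable below are assumptions of this sketch, while the
# GoogleMap attributes it relies on, such as scripts, style, body and dom_id,
# are the ones defined above):
#
#   from django.shortcuts import render_to_response
#
#   def map_view(request):
#       gmap = GoogleMap(center=(46.5, 6.6), zoom=8, dom_id='map')
#       return render_to_response('map.html', {'google': gmap})
#
# map.html would then place {{ google.scripts }} and {{ google.style }} in
# <head>, use {{ google.body }} as its <body> tag, and include a
# <div id="{{ google.dom_id }}"></div> element where the map should render.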
class GoogleMapSet(GoogleMap):
def __init__(self, *args, **kwargs):
"""
A class for generating sets of Google Maps that will be shown on the
same page together.
Example:
gmapset = GoogleMapSet( GoogleMap( ... ), GoogleMap( ... ) )
gmapset = GoogleMapSet( [ gmap1, gmap2] )
"""
# The `google-multi.js` template is used instead of `google-single.js`
# by default.
template = kwargs.pop('template', 'gis/google/google-multi.js')
# This is the template used to generate the GMap load JavaScript for
# each map in the set.
self.map_template = kwargs.pop('map_template', 'gis/google/google-single.js')
# Running GoogleMap.__init__(), and resetting the template
# value with default obtained above.
super(GoogleMapSet, self).__init__(**kwargs)
self.template = template
        # If a tuple/list is passed in as the first element of args, assume
        # it contains the maps for this set.
if isinstance(args[0], (tuple, list)):
self.maps = args[0]
else:
self.maps = args
# Generating DOM ids for each of the maps in the set.
self.dom_ids = ['map%d' % i for i in xrange(len(self.maps))]
def load_map_js(self):
"""
Returns JavaScript containing all of the loading routines for each
map in this set.
"""
result = []
for dom_id, gmap in zip(self.dom_ids, self.maps):
            # Back up the GoogleMap DOM id and template attributes.  They
            # are overridden on each GoogleMap instance in the set so that
            # only the loading JavaScript (and not the header variables) is
            # used with the generated DOM ids.
tmp = (gmap.template, gmap.dom_id)
gmap.template = self.map_template
gmap.dom_id = dom_id
result.append(gmap.js)
# Restoring the backup values.
gmap.template, gmap.dom_id = tmp
return mark_safe(''.join(result))
def render(self):
"""
Generates the JavaScript for the collection of Google Maps in
this set.
"""
params = {'js_module' : self.js_module,
'dom_ids' : self.dom_ids,
'load_map_js' : self.load_map_js(),
'icons' : self.icons,
}
params.update(self.extra_context)
return render_to_string(self.template, params)
@property
def onload(self):
"Returns the `onload` HTML <body> attribute."
# Overloaded to use the `load` function defined in the
# `google-multi.js`, which calls the load routines for
# each one of the individual maps in the set.
return mark_safe('onload="%s.load()"' % self.js_module)
@property
def icons(self):
"Returns a sequence of all icons in each map of the set."
icons = set()
for map in self.maps: icons |= map.icons
return icons
| apache-2.0 |
bennojoy/ansible | test/units/playbook/test_playbook.py | 17 | 2231 | # (c) 2012-2014, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.compat.tests import unittest
from ansible.compat.tests.mock import patch, MagicMock
from ansible.errors import AnsibleError, AnsibleParserError
from ansible.playbook import Playbook
from ansible.vars import VariableManager
from units.mock.loader import DictDataLoader
class TestPlaybook(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_empty_playbook(self):
fake_loader = DictDataLoader({})
p = Playbook(loader=fake_loader)
def test_basic_playbook(self):
fake_loader = DictDataLoader({
"test_file.yml":"""
- hosts: all
""",
})
p = Playbook.load("test_file.yml", loader=fake_loader)
plays = p.get_plays()
def test_bad_playbook_files(self):
fake_loader = DictDataLoader({
# represents a playbook which is not a list of plays
"bad_list.yml": """
foo: bar
""",
# represents a playbook where a play entry is mis-formatted
"bad_entry.yml": """
-
- "This should be a mapping..."
""",
})
vm = VariableManager()
self.assertRaises(AnsibleParserError, Playbook.load, "bad_list.yml", vm, fake_loader)
self.assertRaises(AnsibleParserError, Playbook.load, "bad_entry.yml", vm, fake_loader)
| gpl-3.0 |
mshafiq9/django | tests/utils_tests/test_feedgenerator.py | 163 | 4306 | from __future__ import unicode_literals
import datetime
import unittest
from django.utils import feedgenerator
from django.utils.timezone import get_fixed_timezone
class FeedgeneratorTest(unittest.TestCase):
"""
Tests for the low-level syndication feed framework.
"""
def test_get_tag_uri(self):
"""
Test get_tag_uri() correctly generates TagURIs.
"""
self.assertEqual(
feedgenerator.get_tag_uri('http://example.org/foo/bar#headline', datetime.date(2004, 10, 25)),
'tag:example.org,2004-10-25:/foo/bar/headline')
def test_get_tag_uri_with_port(self):
"""
Test that get_tag_uri() correctly generates TagURIs from URLs with port
numbers.
"""
self.assertEqual(
feedgenerator.get_tag_uri('http://www.example.org:8000/2008/11/14/django#headline', datetime.datetime(2008, 11, 14, 13, 37, 0)),
'tag:www.example.org,2008-11-14:/2008/11/14/django/headline')
def test_rfc2822_date(self):
"""
Test rfc2822_date() correctly formats datetime objects.
"""
self.assertEqual(
feedgenerator.rfc2822_date(datetime.datetime(2008, 11, 14, 13, 37, 0)),
"Fri, 14 Nov 2008 13:37:00 -0000"
)
def test_rfc2822_date_with_timezone(self):
"""
Test rfc2822_date() correctly formats datetime objects with tzinfo.
"""
self.assertEqual(
feedgenerator.rfc2822_date(datetime.datetime(2008, 11, 14, 13, 37, 0, tzinfo=get_fixed_timezone(60))),
"Fri, 14 Nov 2008 13:37:00 +0100"
)
def test_rfc2822_date_without_time(self):
"""
Test rfc2822_date() correctly formats date objects.
"""
self.assertEqual(
feedgenerator.rfc2822_date(datetime.date(2008, 11, 14)),
"Fri, 14 Nov 2008 00:00:00 -0000"
)
def test_rfc3339_date(self):
"""
Test rfc3339_date() correctly formats datetime objects.
"""
self.assertEqual(
feedgenerator.rfc3339_date(datetime.datetime(2008, 11, 14, 13, 37, 0)),
"2008-11-14T13:37:00Z"
)
def test_rfc3339_date_with_timezone(self):
"""
Test rfc3339_date() correctly formats datetime objects with tzinfo.
"""
self.assertEqual(
feedgenerator.rfc3339_date(datetime.datetime(2008, 11, 14, 13, 37, 0, tzinfo=get_fixed_timezone(120))),
"2008-11-14T13:37:00+02:00"
)
def test_rfc3339_date_without_time(self):
"""
Test rfc3339_date() correctly formats date objects.
"""
self.assertEqual(
feedgenerator.rfc3339_date(datetime.date(2008, 11, 14)),
"2008-11-14T00:00:00Z"
)
def test_atom1_mime_type(self):
"""
Test to make sure Atom MIME type has UTF8 Charset parameter set
"""
atom_feed = feedgenerator.Atom1Feed("title", "link", "description")
self.assertEqual(
atom_feed.content_type, "application/atom+xml; charset=utf-8"
)
def test_rss_mime_type(self):
"""
Test to make sure RSS MIME type has UTF8 Charset parameter set
"""
rss_feed = feedgenerator.Rss201rev2Feed("title", "link", "description")
self.assertEqual(
rss_feed.content_type, "application/rss+xml; charset=utf-8"
)
# Two regression tests for #14202
def test_feed_without_feed_url_gets_rendered_without_atom_link(self):
feed = feedgenerator.Rss201rev2Feed('title', '/link/', 'descr')
self.assertEqual(feed.feed['feed_url'], None)
feed_content = feed.writeString('utf-8')
self.assertNotIn('<atom:link', feed_content)
self.assertNotIn('href="/feed/"', feed_content)
self.assertNotIn('rel="self"', feed_content)
def test_feed_with_feed_url_gets_rendered_with_atom_link(self):
feed = feedgenerator.Rss201rev2Feed('title', '/link/', 'descr', feed_url='/feed/')
self.assertEqual(feed.feed['feed_url'], '/feed/')
feed_content = feed.writeString('utf-8')
self.assertIn('<atom:link', feed_content)
self.assertIn('href="/feed/"', feed_content)
self.assertIn('rel="self"', feed_content)
| bsd-3-clause |
kyvinh/home-assistant | homeassistant/components/binary_sensor/modbus.py | 28 | 1774 | """
Support for Modbus Coil sensors.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/binary_sensor.modbus/
"""
import logging
import voluptuous as vol
import homeassistant.components.modbus as modbus
from homeassistant.const import CONF_NAME
from homeassistant.components.binary_sensor import BinarySensorDevice
from homeassistant.helpers import config_validation as cv
from homeassistant.components.sensor import PLATFORM_SCHEMA
_LOGGER = logging.getLogger(__name__)
DEPENDENCIES = ['modbus']
CONF_COIL = "coil"
CONF_COILS = "coils"
CONF_SLAVE = "slave"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Required(CONF_COILS): [{
vol.Required(CONF_COIL): cv.positive_int,
vol.Required(CONF_NAME): cv.string,
vol.Optional(CONF_SLAVE): cv.positive_int
}]
})
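# An illustrative configuration.yaml snippet matching PLATFORM_SCHEMA above
# (the binary_sensor/platform layout is an assumption of this sketch; the
# coils, name, slave and coil keys come directly from the schema):
#
#   binary_sensor:
#     - platform: modbus
#       coils:
#         - name: Switch Sensor
#           slave: 1
#           coil: 100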
def setup_platform(hass, config, add_devices, discovery_info=None):
"""Setup Modbus binary sensors."""
sensors = []
for coil in config.get(CONF_COILS):
sensors.append(ModbusCoilSensor(
coil.get(CONF_NAME),
coil.get(CONF_SLAVE),
coil.get(CONF_COIL)))
add_devices(sensors)
class ModbusCoilSensor(BinarySensorDevice):
"""Modbus coil sensor."""
def __init__(self, name, slave, coil):
"""Initialize the modbus coil sensor."""
self._name = name
self._slave = int(slave) if slave else None
self._coil = int(coil)
self._value = None
@property
def is_on(self):
"""Return the state of the sensor."""
return self._value
def update(self):
"""Update the state of the sensor."""
result = modbus.HUB.read_coils(self._slave, self._coil, 1)
self._value = result.bits[0]
| apache-2.0 |
astrocatalogs/supernovae | tasks/ptf.py | 1 | 3899 | """Import tasks for the Palomar Transient Factory (PTF).
"""
import os
from astrocats.catalog.utils import is_number, pbar
from bs4 import BeautifulSoup
from ..supernova import SUPERNOVA
def do_ptf(catalog):
# response =
# urllib.request.urlopen('http://wiserep.weizmann.ac.il/objects/list')
# bs = BeautifulSoup(response, 'html5lib')
# select = bs.find('select', {'name': 'objid'})
# options = select.findAll('option')
# for option in options:
# print(option.text)
# name = option.text
# if ((name.startswith('PTF') and is_number(name[3:5])) or
# name.startswith('PTFS') or name.startswith('iPTF')):
# name = catalog.add_entry(name)
task_str = catalog.get_current_task_str()
html = catalog.load_url('http://wiserep1.weizmann.ac.il/spectra/update',
os.path.join(catalog.get_current_task_repo(),
'PTF/update.html'))
bs = BeautifulSoup(html, 'html5lib')
select = bs.find('select', {'name': 'objid'})
options = select.findAll('option')
for option in pbar(options, task_str):
name = option.text
if (((name.startswith('PTF') and is_number(name[3:5])) or
name.startswith('PTFS') or name.startswith('iPTF'))):
if '(' in name:
alias = name.split('(')[0].strip(' ')
name = name.split('(')[-1].strip(') ').replace('sn', 'SN')
if name == 'SNiauname': # A misentered entry
continue
name, source = catalog.new_entry(
name, bibcode='2012PASP..124..668Y')
catalog.entries[name].add_quantity(SUPERNOVA.ALIAS, alias,
source)
else:
# name = catalog.add_entry(name)
name, source = catalog.new_entry(
name, bibcode='2012PASP..124..668Y')
with open(
os.path.join(catalog.get_current_task_repo(),
'PTF/old-ptf-events.csv')) as f:
for suffix in pbar(f.read().splitlines(), task_str):
name = catalog.add_entry('PTF' + suffix)
with open(
os.path.join(catalog.get_current_task_repo(),
'PTF/perly-2016.csv')) as f:
for row in pbar(f.read().splitlines(), task_str):
cols = [x.strip() for x in row.split(',')]
alias = ''
if cols[8]:
name = cols[8]
alias = 'PTF' + cols[0]
else:
name = 'PTF' + cols[0]
name = catalog.add_entry(name)
source = catalog.entries[name].add_source(
bibcode='2016ApJ...830...13P')
catalog.entries[name].add_quantity(SUPERNOVA.ALIAS, name, source)
if alias:
catalog.entries[name].add_quantity(SUPERNOVA.ALIAS, alias,
source)
catalog.entries[name].add_quantity(SUPERNOVA.RA, cols[1], source)
catalog.entries[name].add_quantity(SUPERNOVA.DEC, cols[2], source)
catalog.entries[name].add_quantity(SUPERNOVA.CLAIMED_TYPE,
'SLSN-' + cols[3], source)
catalog.entries[name].add_quantity(
SUPERNOVA.REDSHIFT, cols[4], source, kind='spectroscopic')
maxdate = cols[6].replace('-', '/')
upl = maxdate.startswith('<')
catalog.entries[name].add_quantity(
SUPERNOVA.MAX_DATE,
maxdate.lstrip('<'),
source,
upperlimit=upl)
catalog.entries[name].add_quantity(
SUPERNOVA.EBV, cols[7], source, kind='spectroscopic')
name = catalog.add_entry('PTF' + suffix)
catalog.journal_entries()
return
| mit |
michalliu/OpenWrt-Firefly-Libraries | staging_dir/target-mipsel_1004kc+dsp_uClibc-0.9.33.2/usr/lib/python2.7/idlelib/ReplaceDialog.py | 28 | 6641 | from Tkinter import *
from idlelib import SearchEngine
from idlelib.SearchDialogBase import SearchDialogBase
import re
def replace(text):
root = text._root()
engine = SearchEngine.get(root)
if not hasattr(engine, "_replacedialog"):
engine._replacedialog = ReplaceDialog(root, engine)
dialog = engine._replacedialog
dialog.open(text)
class ReplaceDialog(SearchDialogBase):
title = "Replace Dialog"
icon = "Replace"
def __init__(self, root, engine):
SearchDialogBase.__init__(self, root, engine)
self.replvar = StringVar(root)
def open(self, text):
SearchDialogBase.open(self, text)
try:
first = text.index("sel.first")
except TclError:
first = None
try:
last = text.index("sel.last")
except TclError:
last = None
first = first or text.index("insert")
last = last or first
self.show_hit(first, last)
self.ok = 1
def create_entries(self):
SearchDialogBase.create_entries(self)
self.replent = self.make_entry("Replace with:", self.replvar)[0]
def create_command_buttons(self):
SearchDialogBase.create_command_buttons(self)
self.make_button("Find", self.find_it)
self.make_button("Replace", self.replace_it)
self.make_button("Replace+Find", self.default_command, 1)
self.make_button("Replace All", self.replace_all)
def find_it(self, event=None):
self.do_find(0)
def replace_it(self, event=None):
if self.do_find(self.ok):
self.do_replace()
def default_command(self, event=None):
if self.do_find(self.ok):
if self.do_replace(): # Only find next match if replace succeeded.
                                    # A bad re can cause it to fail.
self.do_find(0)
def _replace_expand(self, m, repl):
""" Helper function for expanding a regular expression
in the replace field, if needed. """
if self.engine.isre():
try:
new = m.expand(repl)
except re.error:
self.engine.report_error(repl, 'Invalid Replace Expression')
new = None
else:
new = repl
return new
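    # Illustration of the expansion handled above (plain re semantics, shown
    # here for reference and not part of the original dialog): with pattern
    # r'(\w+), (\w+)' matched against 'Doe, John', a replace field of
    # r'\2 \1' makes m.expand() return 'John Doe'; when the regex option is
    # off, the replace field is taken literally instead.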
def replace_all(self, event=None):
prog = self.engine.getprog()
if not prog:
return
repl = self.replvar.get()
text = self.text
res = self.engine.search_text(text, prog)
if not res:
text.bell()
return
text.tag_remove("sel", "1.0", "end")
text.tag_remove("hit", "1.0", "end")
line = res[0]
col = res[1].start()
if self.engine.iswrap():
line = 1
col = 0
ok = 1
first = last = None
        # XXX ought to replace circularly, instead of top-to-bottom, when wrapping
text.undo_block_start()
while 1:
res = self.engine.search_forward(text, prog, line, col, 0, ok)
if not res:
break
line, m = res
chars = text.get("%d.0" % line, "%d.0" % (line+1))
orig = m.group()
new = self._replace_expand(m, repl)
if new is None:
break
i, j = m.span()
first = "%d.%d" % (line, i)
last = "%d.%d" % (line, j)
if new == orig:
text.mark_set("insert", last)
else:
text.mark_set("insert", first)
if first != last:
text.delete(first, last)
if new:
text.insert(first, new)
col = i + len(new)
ok = 0
text.undo_block_stop()
if first and last:
self.show_hit(first, last)
self.close()
def do_find(self, ok=0):
if not self.engine.getprog():
return False
text = self.text
res = self.engine.search_text(text, None, ok)
if not res:
text.bell()
return False
line, m = res
i, j = m.span()
first = "%d.%d" % (line, i)
last = "%d.%d" % (line, j)
self.show_hit(first, last)
self.ok = 1
return True
def do_replace(self):
prog = self.engine.getprog()
if not prog:
return False
text = self.text
try:
first = pos = text.index("sel.first")
last = text.index("sel.last")
except TclError:
pos = None
if not pos:
first = last = pos = text.index("insert")
line, col = SearchEngine.get_line_col(pos)
chars = text.get("%d.0" % line, "%d.0" % (line+1))
m = prog.match(chars, col)
        if not m:  # no match at the cursor/selection; nothing to replace
return False
new = self._replace_expand(m, self.replvar.get())
if new is None:
return False
text.mark_set("insert", first)
text.undo_block_start()
if m.group():
text.delete(first, last)
if new:
text.insert(first, new)
text.undo_block_stop()
self.show_hit(first, text.index("insert"))
self.ok = 0
return True
def show_hit(self, first, last):
text = self.text
text.mark_set("insert", first)
text.tag_remove("sel", "1.0", "end")
text.tag_add("sel", first, last)
text.tag_remove("hit", "1.0", "end")
if first == last:
text.tag_add("hit", first)
else:
text.tag_add("hit", first, last)
text.see("insert")
text.update_idletasks()
def close(self, event=None):
SearchDialogBase.close(self, event)
self.text.tag_remove("hit", "1.0", "end")
def _replace_dialog(parent):
root = Tk()
root.title("Test ReplaceDialog")
width, height, x, y = list(map(int, re.split('[x+]', parent.geometry())))
root.geometry("+%d+%d"%(x, y + 150))
# mock undo delegator methods
def undo_block_start():
pass
def undo_block_stop():
pass
text = Text(root)
text.undo_block_start = undo_block_start
text.undo_block_stop = undo_block_stop
text.pack()
text.insert("insert","This is a sample string.\n"*10)
def show_replace():
text.tag_add(SEL, "1.0", END)
replace(text)
text.tag_remove(SEL, "1.0", END)
button = Button(root, text="Replace", command=show_replace)
button.pack()
if __name__ == '__main__':
from idlelib.idle_test.htest import run
run(_replace_dialog)
| gpl-2.0 |
neilLasrado/frappe | frappe/website/doctype/blog_post/test_blog_post.py | 15 | 1037 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe
import unittest
from frappe.tests.test_website import set_request
from frappe.website.render import render
class TestBlogPost(unittest.TestCase):
def test_generator_view(self):
pages = frappe.get_all('Blog Post', fields=['name', 'route'],
filters={'published': 1, 'route': ('!=', '')}, limit =1)
set_request(path=pages[0].route)
response = render()
		self.assertEqual(response.status_code, 200)
html = response.get_data().decode()
self.assertTrue('<article class="blog-content" itemscope itemtype="http://schema.org/BlogPosting">' in html)
def test_generator_not_found(self):
pages = frappe.get_all('Blog Post', fields=['name', 'route'],
filters={'published': 0}, limit =1)
frappe.db.set_value('Blog Post', pages[0].name, 'route', 'test-route-000')
set_request(path='test-route-000')
response = render()
		self.assertEqual(response.status_code, 404)
| mit |
yuquanshan/customizedMesos | src/python/cli/src/mesos/http.py | 21 | 1629 | #!/usr/bin/env python
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Helper for doing an HTTP GET given a PID, a path, and a query dict.
# For example:
#
# get('[email protected]:123',
# '/endpoint',
# {'first': 'ben',
# 'last': 'hindman'})
#
# Would yield: http://1.2.3.4:123/endpoint?first=ben&last=hindman
# (urllib2.quote() leaves plain alphanumeric values untouched).
#
# Note that you can also pass an IP:port (or hostname:port) for 'pid'
# (i.e., you can omit the ID component of the PID, e.g., 'foo@').
def get(pid, path, query=None):
import urllib2
from contextlib import closing
url = 'http://' + pid[(pid.find('@') + 1):] + path
if query is not None and len(query) > 0:
url += '?' + '&'.join(
['%s=%s' % (urllib2.quote(str(key)), urllib2.quote(str(value)))
for (key, value) in query.items()])
with closing(urllib2.urlopen(url)) as file:
return file.read()
| apache-2.0 |
broferek/ansible | lib/ansible/modules/cloud/cloudstack/cs_physical_network.py | 13 | 14676 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# (c) 2017, Netservers Ltd. <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: cs_physical_network
short_description: Manages physical networks on Apache CloudStack based clouds.
description:
    - Create, update and remove physical networks.
    - Enable and disable Network Service Providers.
    - Enable Internal LoadBalancer and VPC/VirtualRouter elements as required.
version_added: "2.8"
author:
- Netservers Ltd. (@netservers)
- Patryk Cichy (@PatTheSilent)
options:
name:
description:
- Name of the physical network.
required: true
aliases:
- physical_network
type: str
zone:
description:
- Name of the zone in which the network belongs.
- If not set, default zone is used.
type: str
broadcast_domain_range:
description:
- broadcast domain range for the physical network[Pod or Zone].
choices: [ POD, ZONE ]
type: str
domain:
description:
- Domain the network is owned by.
type: str
isolation_method:
description:
- Isolation method for the physical network.
choices: [ VLAN, GRE, L3 ]
type: str
network_speed:
description:
- The speed for the physical network.
choices: [1G, 10G]
type: str
tags:
description:
- A tag to identify this network.
- Physical networks support only one tag.
- To remove an existing tag pass an empty string.
aliases:
- tag
type: str
vlan:
description:
- The VLAN/VNI Ranges of the physical network.
type: str
nsps_enabled:
description:
- List of Network Service Providers to enable.
type: list
nsps_disabled:
description:
- List of Network Service Providers to disable.
type: list
state:
description:
- State of the physical network.
default: present
type: str
choices: [ present, absent, disabled, enabled ]
poll_async:
description:
- Poll async jobs until job has finished.
default: yes
type: bool
extends_documentation_fragment: cloudstack
'''
EXAMPLES = '''
- name: Ensure a network is present
cs_physical_network:
name: net01
zone: zone01
isolation_method: VLAN
broadcast_domain_range: ZONE
delegate_to: localhost
- name: Set a tag on a network
cs_physical_network:
name: net01
tag: overlay
delegate_to: localhost
- name: Remove tag on a network
cs_physical_network:
name: net01
tag: ""
delegate_to: localhost
- name: Ensure a network is enabled with specific nsps enabled
cs_physical_network:
name: net01
zone: zone01
isolation_method: VLAN
vlan: 100-200,300-400
broadcast_domain_range: ZONE
state: enabled
nsps_enabled:
- virtualrouter
- internallbvm
- vpcvirtualrouter
delegate_to: localhost
- name: Ensure a network is disabled
cs_physical_network:
name: net01
zone: zone01
state: disabled
delegate_to: localhost
- name: Ensure a network is enabled
cs_physical_network:
name: net01
zone: zone01
state: enabled
delegate_to: localhost
- name: Ensure a network is absent
cs_physical_network:
name: net01
zone: zone01
state: absent
delegate_to: localhost
'''
RETURN = '''
---
id:
description: UUID of the network.
returned: success
type: str
sample: 3f8f25cd-c498-443f-9058-438cfbcbff50
name:
description: Name of the network.
returned: success
type: str
sample: net01
state:
description: State of the network [Enabled/Disabled].
returned: success
type: str
sample: Enabled
broadcast_domain_range:
description: broadcastdomainrange of the network [POD / ZONE].
returned: success
type: str
sample: ZONE
isolation_method:
description: isolationmethod of the network [VLAN/GRE/L3].
returned: success
type: str
sample: VLAN
network_speed:
description: networkspeed of the network [1G/10G].
returned: success
type: str
sample: 1G
zone:
description: Name of zone the physical network is in.
returned: success
type: str
sample: ch-gva-2
domain:
description: Name of domain the network is in.
returned: success
type: str
sample: domain1
nsps:
description: list of enabled or disabled Network Service Providers
type: complex
returned: on enabling/disabling of Network Service Providers
contains:
enabled:
description: list of Network Service Providers that were enabled
returned: on Network Service Provider enabling
type: list
sample:
- virtualrouter
disabled:
description: list of Network Service Providers that were disabled
returned: on Network Service Provider disabling
type: list
sample:
- internallbvm
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.cloudstack import (
AnsibleCloudStack,
cs_argument_spec,
cs_required_together,
)
class AnsibleCloudStackPhysicalNetwork(AnsibleCloudStack):
def __init__(self, module):
super(AnsibleCloudStackPhysicalNetwork, self).__init__(module)
self.returns = {
'isolationmethods': 'isolation_method',
'broadcastdomainrange': 'broadcast_domain_range',
'networkspeed': 'network_speed',
'vlan': 'vlan',
'tags': 'tags',
}
self.nsps = []
self.vrouters = None
self.loadbalancers = None
def _get_common_args(self):
args = {
'name': self.module.params.get('name'),
'isolationmethods': self.module.params.get('isolation_method'),
'broadcastdomainrange': self.module.params.get('broadcast_domain_range'),
'networkspeed': self.module.params.get('network_speed'),
'tags': self.module.params.get('tags'),
'vlan': self.module.params.get('vlan'),
}
state = self.module.params.get('state')
if state in ['enabled', 'disabled']:
args['state'] = state.capitalize()
return args
def get_physical_network(self, key=None):
physical_network = self.module.params.get('name')
if self.physical_network:
return self._get_by_key(key, self.physical_network)
args = {
'zoneid': self.get_zone(key='id')
}
physical_networks = self.query_api('listPhysicalNetworks', **args)
if physical_networks:
for net in physical_networks['physicalnetwork']:
if physical_network.lower() in [net['name'].lower(), net['id']]:
self.physical_network = net
self.result['physical_network'] = net['name']
break
return self._get_by_key(key, self.physical_network)
def get_nsp(self, name=None):
if not self.nsps:
args = {
'physicalnetworkid': self.get_physical_network(key='id')
}
res = self.query_api('listNetworkServiceProviders', **args)
self.nsps = res['networkserviceprovider']
names = []
for nsp in self.nsps:
names.append(nsp['name'])
if nsp['name'].lower() == name.lower():
return nsp
self.module.fail_json(msg="Failed: '{0}' not in network service providers list '[{1}]'".format(name, names))
def update_nsp(self, name=None, state=None, service_list=None):
nsp = self.get_nsp(name)
if not service_list and nsp['state'] == state:
return nsp
args = {
'id': nsp['id'],
'servicelist': service_list,
'state': state
}
if not self.module.check_mode:
res = self.query_api('updateNetworkServiceProvider', **args)
poll_async = self.module.params.get('poll_async')
if poll_async:
nsp = self.poll_job(res, 'networkserviceprovider')
self.result['changed'] = True
return nsp
def get_vrouter_element(self, nsp_name='virtualrouter'):
nsp = self.get_nsp(nsp_name)
nspid = nsp['id']
if self.vrouters is None:
self.vrouters = dict()
            res = self.query_api('listVirtualRouterElements')
for vrouter in res['virtualrouterelement']:
self.vrouters[vrouter['nspid']] = vrouter
if nspid not in self.vrouters:
self.module.fail_json(msg="Failed: No VirtualRouterElement found for nsp '%s'" % nsp_name)
return self.vrouters[nspid]
def get_loadbalancer_element(self, nsp_name='internallbvm'):
nsp = self.get_nsp(nsp_name)
nspid = nsp['id']
if self.loadbalancers is None:
self.loadbalancers = dict()
            res = self.query_api('listInternalLoadBalancerElements')
for loadbalancer in res['internalloadbalancerelement']:
self.loadbalancers[loadbalancer['nspid']] = loadbalancer
if nspid not in self.loadbalancers:
self.module.fail_json(msg="Failed: No Loadbalancer found for nsp '%s'" % nsp_name)
return self.loadbalancers[nspid]
def set_vrouter_element_state(self, enabled, nsp_name='virtualrouter'):
vrouter = self.get_vrouter_element(nsp_name)
if vrouter['enabled'] == enabled:
return vrouter
args = {
'id': vrouter['id'],
'enabled': enabled
}
if not self.module.check_mode:
res = self.query_api('configureVirtualRouterElement', **args)
poll_async = self.module.params.get('poll_async')
if poll_async:
vrouter = self.poll_job(res, 'virtualrouterelement')
self.result['changed'] = True
return vrouter
def set_loadbalancer_element_state(self, enabled, nsp_name='internallbvm'):
loadbalancer = self.get_loadbalancer_element(nsp_name=nsp_name)
if loadbalancer['enabled'] == enabled:
return loadbalancer
args = {
'id': loadbalancer['id'],
'enabled': enabled
}
if not self.module.check_mode:
res = self.query_api('configureInternalLoadBalancerElement', **args)
poll_async = self.module.params.get('poll_async')
if poll_async:
loadbalancer = self.poll_job(res, 'internalloadbalancerelement')
self.result['changed'] = True
return loadbalancer
def present_network(self):
network = self.get_physical_network()
if network:
network = self._update_network()
else:
network = self._create_network()
return network
def _create_network(self):
self.result['changed'] = True
args = dict(zoneid=self.get_zone(key='id'))
args.update(self._get_common_args())
if self.get_domain(key='id'):
args['domainid'] = self.get_domain(key='id')
if not self.module.check_mode:
resource = self.query_api('createPhysicalNetwork', **args)
poll_async = self.module.params.get('poll_async')
if poll_async:
self.network = self.poll_job(resource, 'physicalnetwork')
return self.network
def _update_network(self):
network = self.get_physical_network()
args = dict(id=network['id'])
args.update(self._get_common_args())
if self.has_changed(args, network):
self.result['changed'] = True
if not self.module.check_mode:
resource = self.query_api('updatePhysicalNetwork', **args)
poll_async = self.module.params.get('poll_async')
if poll_async:
self.physical_network = self.poll_job(resource, 'physicalnetwork')
return self.physical_network
def absent_network(self):
physical_network = self.get_physical_network()
if physical_network:
self.result['changed'] = True
args = {
'id': physical_network['id'],
}
if not self.module.check_mode:
resource = self.query_api('deletePhysicalNetwork', **args)
poll_async = self.module.params.get('poll_async')
if poll_async:
self.poll_job(resource, 'success')
return physical_network
def main():
argument_spec = cs_argument_spec()
argument_spec.update(dict(
name=dict(required=True, aliases=['physical_network']),
zone=dict(),
domain=dict(),
vlan=dict(),
nsps_disabled=dict(type='list'),
nsps_enabled=dict(type='list'),
network_speed=dict(choices=['1G', '10G']),
broadcast_domain_range=dict(choices=['POD', 'ZONE']),
isolation_method=dict(choices=['VLAN', 'GRE', 'L3']),
state=dict(choices=['present', 'enabled', 'disabled', 'absent'], default='present'),
tags=dict(aliases=['tag']),
poll_async=dict(type='bool', default=True),
))
module = AnsibleModule(
argument_spec=argument_spec,
required_together=cs_required_together(),
supports_check_mode=True
)
acs_network = AnsibleCloudStackPhysicalNetwork(module)
state = module.params.get('state')
nsps_disabled = module.params.get('nsps_disabled', [])
nsps_enabled = module.params.get('nsps_enabled', [])
if state in ['absent']:
network = acs_network.absent_network()
else:
network = acs_network.present_network()
if nsps_disabled is not None:
for name in nsps_disabled:
acs_network.update_nsp(name=name, state='Disabled')
if nsps_enabled is not None:
for nsp_name in nsps_enabled:
if nsp_name.lower() in ['virtualrouter', 'vpcvirtualrouter']:
acs_network.set_vrouter_element_state(enabled=True, nsp_name=nsp_name)
elif nsp_name.lower() == 'internallbvm':
acs_network.set_loadbalancer_element_state(enabled=True, nsp_name=nsp_name)
acs_network.update_nsp(name=nsp_name, state='Enabled')
result = acs_network.get_result(network)
if nsps_enabled:
result['nsps_enabled'] = nsps_enabled
if nsps_disabled:
result['nsps_disabled'] = nsps_disabled
module.exit_json(**result)
if __name__ == '__main__':
main()
| gpl-3.0 |
chrisjaquet/FreeCAD | src/Mod/Sandbox/Init.py | 54 | 1955 | # FreeCAD init script of the Sandbox module
# (c) 2001 Juergen Riegel
#***************************************************************************
#* (c) Juergen Riegel ([email protected]) 2002 *
#* *
#* This file is part of the FreeCAD CAx development system. *
#* *
#* This program is free software; you can redistribute it and/or modify *
#* it under the terms of the GNU Lesser General Public License (LGPL) *
#* as published by the Free Software Foundation; either version 2 of *
#* the License, or (at your option) any later version. *
#* for detail see the LICENCE text file. *
#* *
#* FreeCAD is distributed in the hope that it will be useful, *
#* but WITHOUT ANY WARRANTY; without even the implied warranty of *
#* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
#* GNU Lesser General Public License for more details. *
#* *
#* You should have received a copy of the GNU Library General Public *
#* License along with FreeCAD; if not, write to the Free Software *
#* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 *
#* USA *
#* *
#* Juergen Riegel 2002 *
#***************************************************************************/
import FreeCAD
FreeCAD.addExportType("DRAWEXE script (*.tcl)","exportDRAWEXE")
| lgpl-2.1 |
dineshram/linux-media-si4713USBDriver | Documentation/target/tcm_mod_builder.py | 2358 | 40707 | #!/usr/bin/python
# The TCM v4 multi-protocol fabric module generation script for drivers/target/$NEW_MOD
#
# Copyright (c) 2010 Rising Tide Systems
# Copyright (c) 2010 Linux-iSCSI.org
#
# Author: [email protected]
#
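# Example invocation (the module name and the -m/-p flags here are an
# assumption of this sketch, mirroring how the optparse options for this
# generator are typically wired up; adjust to the options actually defined
# in the script's main section):
#
#   ./tcm_mod_builder.py -m tcm_nab5000 -p iSCSI
#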
import os, sys
import subprocess as sub
import string
import re
import optparse
tcm_dir = ""
fabric_ops = []
fabric_mod_dir = ""
fabric_mod_port = ""
fabric_mod_init_port = ""
def tcm_mod_err(msg):
print msg
sys.exit(1)
def tcm_mod_create_module_subdir(fabric_mod_dir_var):
if os.path.isdir(fabric_mod_dir_var) == True:
return 1
print "Creating fabric_mod_dir: " + fabric_mod_dir_var
ret = os.mkdir(fabric_mod_dir_var)
if ret:
tcm_mod_err("Unable to mkdir " + fabric_mod_dir_var)
return
def tcm_mod_build_FC_include(fabric_mod_dir_var, fabric_mod_name):
global fabric_mod_port
global fabric_mod_init_port
buf = ""
f = fabric_mod_dir_var + "/" + fabric_mod_name + "_base.h"
print "Writing file: " + f
p = open(f, 'w');
if not p:
tcm_mod_err("Unable to open file: " + f)
buf = "#define " + fabric_mod_name.upper() + "_VERSION \"v0.1\"\n"
buf += "#define " + fabric_mod_name.upper() + "_NAMELEN 32\n"
buf += "\n"
buf += "struct " + fabric_mod_name + "_nacl {\n"
buf += " /* Binary World Wide unique Port Name for FC Initiator Nport */\n"
buf += " u64 nport_wwpn;\n"
buf += " /* ASCII formatted WWPN for FC Initiator Nport */\n"
buf += " char nport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
buf += " /* Returned by " + fabric_mod_name + "_make_nodeacl() */\n"
buf += " struct se_node_acl se_node_acl;\n"
buf += "};\n"
buf += "\n"
buf += "struct " + fabric_mod_name + "_tpg {\n"
buf += " /* FC lport target portal group tag for TCM */\n"
buf += " u16 lport_tpgt;\n"
buf += " /* Pointer back to " + fabric_mod_name + "_lport */\n"
buf += " struct " + fabric_mod_name + "_lport *lport;\n"
buf += " /* Returned by " + fabric_mod_name + "_make_tpg() */\n"
buf += " struct se_portal_group se_tpg;\n"
buf += "};\n"
buf += "\n"
buf += "struct " + fabric_mod_name + "_lport {\n"
buf += " /* SCSI protocol the lport is providing */\n"
buf += " u8 lport_proto_id;\n"
buf += " /* Binary World Wide unique Port Name for FC Target Lport */\n"
buf += " u64 lport_wwpn;\n"
buf += " /* ASCII formatted WWPN for FC Target Lport */\n"
buf += " char lport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
buf += " /* Returned by " + fabric_mod_name + "_make_lport() */\n"
buf += " struct se_wwn lport_wwn;\n"
buf += "};\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
fabric_mod_port = "lport"
fabric_mod_init_port = "nport"
return
def tcm_mod_build_SAS_include(fabric_mod_dir_var, fabric_mod_name):
global fabric_mod_port
global fabric_mod_init_port
buf = ""
f = fabric_mod_dir_var + "/" + fabric_mod_name + "_base.h"
print "Writing file: " + f
p = open(f, 'w');
if not p:
tcm_mod_err("Unable to open file: " + f)
buf = "#define " + fabric_mod_name.upper() + "_VERSION \"v0.1\"\n"
buf += "#define " + fabric_mod_name.upper() + "_NAMELEN 32\n"
buf += "\n"
buf += "struct " + fabric_mod_name + "_nacl {\n"
buf += " /* Binary World Wide unique Port Name for SAS Initiator port */\n"
buf += " u64 iport_wwpn;\n"
buf += " /* ASCII formatted WWPN for Sas Initiator port */\n"
buf += " char iport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
buf += " /* Returned by " + fabric_mod_name + "_make_nodeacl() */\n"
buf += " struct se_node_acl se_node_acl;\n"
buf += "};\n\n"
buf += "struct " + fabric_mod_name + "_tpg {\n"
buf += " /* SAS port target portal group tag for TCM */\n"
buf += " u16 tport_tpgt;\n"
buf += " /* Pointer back to " + fabric_mod_name + "_tport */\n"
buf += " struct " + fabric_mod_name + "_tport *tport;\n"
buf += " /* Returned by " + fabric_mod_name + "_make_tpg() */\n"
buf += " struct se_portal_group se_tpg;\n"
buf += "};\n\n"
buf += "struct " + fabric_mod_name + "_tport {\n"
buf += " /* SCSI protocol the tport is providing */\n"
buf += " u8 tport_proto_id;\n"
buf += " /* Binary World Wide unique Port Name for SAS Target port */\n"
buf += " u64 tport_wwpn;\n"
buf += " /* ASCII formatted WWPN for SAS Target port */\n"
buf += " char tport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
buf += " /* Returned by " + fabric_mod_name + "_make_tport() */\n"
buf += " struct se_wwn tport_wwn;\n"
buf += "};\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
fabric_mod_port = "tport"
fabric_mod_init_port = "iport"
return
def tcm_mod_build_iSCSI_include(fabric_mod_dir_var, fabric_mod_name):
global fabric_mod_port
global fabric_mod_init_port
buf = ""
f = fabric_mod_dir_var + "/" + fabric_mod_name + "_base.h"
print "Writing file: " + f
p = open(f, 'w');
if not p:
tcm_mod_err("Unable to open file: " + f)
buf = "#define " + fabric_mod_name.upper() + "_VERSION \"v0.1\"\n"
buf += "#define " + fabric_mod_name.upper() + "_NAMELEN 32\n"
buf += "\n"
buf += "struct " + fabric_mod_name + "_nacl {\n"
buf += " /* ASCII formatted InitiatorName */\n"
buf += " char iport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
buf += " /* Returned by " + fabric_mod_name + "_make_nodeacl() */\n"
buf += " struct se_node_acl se_node_acl;\n"
buf += "};\n\n"
buf += "struct " + fabric_mod_name + "_tpg {\n"
buf += " /* iSCSI target portal group tag for TCM */\n"
buf += " u16 tport_tpgt;\n"
buf += " /* Pointer back to " + fabric_mod_name + "_tport */\n"
buf += " struct " + fabric_mod_name + "_tport *tport;\n"
buf += " /* Returned by " + fabric_mod_name + "_make_tpg() */\n"
buf += " struct se_portal_group se_tpg;\n"
buf += "};\n\n"
buf += "struct " + fabric_mod_name + "_tport {\n"
buf += " /* SCSI protocol the tport is providing */\n"
buf += " u8 tport_proto_id;\n"
buf += " /* ASCII formatted TargetName for IQN */\n"
buf += " char tport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
buf += " /* Returned by " + fabric_mod_name + "_make_tport() */\n"
buf += " struct se_wwn tport_wwn;\n"
buf += "};\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
fabric_mod_port = "tport"
fabric_mod_init_port = "iport"
return
def tcm_mod_build_base_includes(proto_ident, fabric_mod_dir_val, fabric_mod_name):
if proto_ident == "FC":
tcm_mod_build_FC_include(fabric_mod_dir_val, fabric_mod_name)
elif proto_ident == "SAS":
tcm_mod_build_SAS_include(fabric_mod_dir_val, fabric_mod_name)
elif proto_ident == "iSCSI":
tcm_mod_build_iSCSI_include(fabric_mod_dir_val, fabric_mod_name)
else:
print "Unsupported proto_ident: " + proto_ident
sys.exit(1)
return
def tcm_mod_build_configfs(proto_ident, fabric_mod_dir_var, fabric_mod_name):
buf = ""
f = fabric_mod_dir_var + "/" + fabric_mod_name + "_configfs.c"
print "Writing file: " + f
p = open(f, 'w');
if not p:
tcm_mod_err("Unable to open file: " + f)
buf = "#include <linux/module.h>\n"
buf += "#include <linux/moduleparam.h>\n"
buf += "#include <linux/version.h>\n"
buf += "#include <generated/utsrelease.h>\n"
buf += "#include <linux/utsname.h>\n"
buf += "#include <linux/init.h>\n"
buf += "#include <linux/slab.h>\n"
buf += "#include <linux/kthread.h>\n"
buf += "#include <linux/types.h>\n"
buf += "#include <linux/string.h>\n"
buf += "#include <linux/configfs.h>\n"
buf += "#include <linux/ctype.h>\n"
buf += "#include <asm/unaligned.h>\n\n"
buf += "#include <target/target_core_base.h>\n"
buf += "#include <target/target_core_fabric.h>\n"
buf += "#include <target/target_core_fabric_configfs.h>\n"
buf += "#include <target/target_core_configfs.h>\n"
buf += "#include <target/configfs_macros.h>\n\n"
buf += "#include \"" + fabric_mod_name + "_base.h\"\n"
buf += "#include \"" + fabric_mod_name + "_fabric.h\"\n\n"
buf += "/* Local pointer to allocated TCM configfs fabric module */\n"
buf += "struct target_fabric_configfs *" + fabric_mod_name + "_fabric_configfs;\n\n"
buf += "static struct se_node_acl *" + fabric_mod_name + "_make_nodeacl(\n"
buf += " struct se_portal_group *se_tpg,\n"
buf += " struct config_group *group,\n"
buf += " const char *name)\n"
buf += "{\n"
buf += " struct se_node_acl *se_nacl, *se_nacl_new;\n"
buf += " struct " + fabric_mod_name + "_nacl *nacl;\n"
if proto_ident == "FC" or proto_ident == "SAS":
buf += " u64 wwpn = 0;\n"
buf += " u32 nexus_depth;\n\n"
buf += " /* " + fabric_mod_name + "_parse_wwn(name, &wwpn, 1) < 0)\n"
buf += " return ERR_PTR(-EINVAL); */\n"
buf += " se_nacl_new = " + fabric_mod_name + "_alloc_fabric_acl(se_tpg);\n"
buf += " if (!se_nacl_new)\n"
buf += " return ERR_PTR(-ENOMEM);\n"
buf += "//#warning FIXME: Hardcoded nexus depth in " + fabric_mod_name + "_make_nodeacl()\n"
buf += " nexus_depth = 1;\n"
buf += " /*\n"
buf += " * se_nacl_new may be released by core_tpg_add_initiator_node_acl()\n"
buf += " * when converting a NodeACL from demo mode -> explict\n"
buf += " */\n"
buf += " se_nacl = core_tpg_add_initiator_node_acl(se_tpg, se_nacl_new,\n"
buf += " name, nexus_depth);\n"
buf += " if (IS_ERR(se_nacl)) {\n"
buf += " " + fabric_mod_name + "_release_fabric_acl(se_tpg, se_nacl_new);\n"
buf += " return se_nacl;\n"
buf += " }\n"
buf += " /*\n"
buf += " * Locate our struct " + fabric_mod_name + "_nacl and set the FC Nport WWPN\n"
buf += " */\n"
buf += " nacl = container_of(se_nacl, struct " + fabric_mod_name + "_nacl, se_node_acl);\n"
if proto_ident == "FC" or proto_ident == "SAS":
buf += " nacl->" + fabric_mod_init_port + "_wwpn = wwpn;\n"
buf += " /* " + fabric_mod_name + "_format_wwn(&nacl->" + fabric_mod_init_port + "_name[0], " + fabric_mod_name.upper() + "_NAMELEN, wwpn); */\n\n"
buf += " return se_nacl;\n"
buf += "}\n\n"
buf += "static void " + fabric_mod_name + "_drop_nodeacl(struct se_node_acl *se_acl)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_nacl *nacl = container_of(se_acl,\n"
buf += " struct " + fabric_mod_name + "_nacl, se_node_acl);\n"
buf += " core_tpg_del_initiator_node_acl(se_acl->se_tpg, se_acl, 1);\n"
buf += " kfree(nacl);\n"
buf += "}\n\n"
buf += "static struct se_portal_group *" + fabric_mod_name + "_make_tpg(\n"
buf += " struct se_wwn *wwn,\n"
buf += " struct config_group *group,\n"
buf += " const char *name)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + "*" + fabric_mod_port + " = container_of(wwn,\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + ", " + fabric_mod_port + "_wwn);\n\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg;\n"
buf += " unsigned long tpgt;\n"
buf += " int ret;\n\n"
buf += " if (strstr(name, \"tpgt_\") != name)\n"
buf += " return ERR_PTR(-EINVAL);\n"
buf += " if (strict_strtoul(name + 5, 10, &tpgt) || tpgt > UINT_MAX)\n"
buf += " return ERR_PTR(-EINVAL);\n\n"
buf += " tpg = kzalloc(sizeof(struct " + fabric_mod_name + "_tpg), GFP_KERNEL);\n"
buf += " if (!tpg) {\n"
buf += " printk(KERN_ERR \"Unable to allocate struct " + fabric_mod_name + "_tpg\");\n"
buf += " return ERR_PTR(-ENOMEM);\n"
buf += " }\n"
buf += " tpg->" + fabric_mod_port + " = " + fabric_mod_port + ";\n"
buf += " tpg->" + fabric_mod_port + "_tpgt = tpgt;\n\n"
buf += " ret = core_tpg_register(&" + fabric_mod_name + "_fabric_configfs->tf_ops, wwn,\n"
buf += " &tpg->se_tpg, (void *)tpg,\n"
buf += " TRANSPORT_TPG_TYPE_NORMAL);\n"
buf += " if (ret < 0) {\n"
buf += " kfree(tpg);\n"
buf += " return NULL;\n"
buf += " }\n"
buf += " return &tpg->se_tpg;\n"
buf += "}\n\n"
buf += "static void " + fabric_mod_name + "_drop_tpg(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n\n"
buf += " core_tpg_deregister(se_tpg);\n"
buf += " kfree(tpg);\n"
buf += "}\n\n"
buf += "static struct se_wwn *" + fabric_mod_name + "_make_" + fabric_mod_port + "(\n"
buf += " struct target_fabric_configfs *tf,\n"
buf += " struct config_group *group,\n"
buf += " const char *name)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + ";\n"
if proto_ident == "FC" or proto_ident == "SAS":
buf += " u64 wwpn = 0;\n\n"
buf += " /* if (" + fabric_mod_name + "_parse_wwn(name, &wwpn, 1) < 0)\n"
buf += " return ERR_PTR(-EINVAL); */\n\n"
buf += " " + fabric_mod_port + " = kzalloc(sizeof(struct " + fabric_mod_name + "_" + fabric_mod_port + "), GFP_KERNEL);\n"
buf += " if (!" + fabric_mod_port + ") {\n"
buf += " printk(KERN_ERR \"Unable to allocate struct " + fabric_mod_name + "_" + fabric_mod_port + "\");\n"
buf += " return ERR_PTR(-ENOMEM);\n"
buf += " }\n"
if proto_ident == "FC" or proto_ident == "SAS":
buf += " " + fabric_mod_port + "->" + fabric_mod_port + "_wwpn = wwpn;\n"
buf += " /* " + fabric_mod_name + "_format_wwn(&" + fabric_mod_port + "->" + fabric_mod_port + "_name[0], " + fabric_mod_name.upper() + "_NAMELEN, wwpn); */\n\n"
buf += " return &" + fabric_mod_port + "->" + fabric_mod_port + "_wwn;\n"
buf += "}\n\n"
buf += "static void " + fabric_mod_name + "_drop_" + fabric_mod_port + "(struct se_wwn *wwn)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = container_of(wwn,\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + ", " + fabric_mod_port + "_wwn);\n"
buf += " kfree(" + fabric_mod_port + ");\n"
buf += "}\n\n"
buf += "static ssize_t " + fabric_mod_name + "_wwn_show_attr_version(\n"
buf += " struct target_fabric_configfs *tf,\n"
buf += " char *page)\n"
buf += "{\n"
buf += " return sprintf(page, \"" + fabric_mod_name.upper() + " fabric module %s on %s/%s\"\n"
buf += " \"on \"UTS_RELEASE\"\\n\", " + fabric_mod_name.upper() + "_VERSION, utsname()->sysname,\n"
buf += " utsname()->machine);\n"
buf += "}\n\n"
buf += "TF_WWN_ATTR_RO(" + fabric_mod_name + ", version);\n\n"
buf += "static struct configfs_attribute *" + fabric_mod_name + "_wwn_attrs[] = {\n"
buf += " &" + fabric_mod_name + "_wwn_version.attr,\n"
buf += " NULL,\n"
buf += "};\n\n"
buf += "static struct target_core_fabric_ops " + fabric_mod_name + "_ops = {\n"
buf += " .get_fabric_name = " + fabric_mod_name + "_get_fabric_name,\n"
buf += " .get_fabric_proto_ident = " + fabric_mod_name + "_get_fabric_proto_ident,\n"
buf += " .tpg_get_wwn = " + fabric_mod_name + "_get_fabric_wwn,\n"
buf += " .tpg_get_tag = " + fabric_mod_name + "_get_tag,\n"
buf += " .tpg_get_default_depth = " + fabric_mod_name + "_get_default_depth,\n"
buf += " .tpg_get_pr_transport_id = " + fabric_mod_name + "_get_pr_transport_id,\n"
buf += " .tpg_get_pr_transport_id_len = " + fabric_mod_name + "_get_pr_transport_id_len,\n"
buf += " .tpg_parse_pr_out_transport_id = " + fabric_mod_name + "_parse_pr_out_transport_id,\n"
buf += " .tpg_check_demo_mode = " + fabric_mod_name + "_check_false,\n"
buf += " .tpg_check_demo_mode_cache = " + fabric_mod_name + "_check_true,\n"
buf += " .tpg_check_demo_mode_write_protect = " + fabric_mod_name + "_check_true,\n"
buf += " .tpg_check_prod_mode_write_protect = " + fabric_mod_name + "_check_false,\n"
buf += " .tpg_alloc_fabric_acl = " + fabric_mod_name + "_alloc_fabric_acl,\n"
buf += " .tpg_release_fabric_acl = " + fabric_mod_name + "_release_fabric_acl,\n"
buf += " .tpg_get_inst_index = " + fabric_mod_name + "_tpg_get_inst_index,\n"
buf += " .release_cmd = " + fabric_mod_name + "_release_cmd,\n"
buf += " .shutdown_session = " + fabric_mod_name + "_shutdown_session,\n"
buf += " .close_session = " + fabric_mod_name + "_close_session,\n"
buf += " .stop_session = " + fabric_mod_name + "_stop_session,\n"
buf += " .fall_back_to_erl0 = " + fabric_mod_name + "_reset_nexus,\n"
buf += " .sess_logged_in = " + fabric_mod_name + "_sess_logged_in,\n"
buf += " .sess_get_index = " + fabric_mod_name + "_sess_get_index,\n"
buf += " .sess_get_initiator_sid = NULL,\n"
buf += " .write_pending = " + fabric_mod_name + "_write_pending,\n"
buf += " .write_pending_status = " + fabric_mod_name + "_write_pending_status,\n"
buf += " .set_default_node_attributes = " + fabric_mod_name + "_set_default_node_attrs,\n"
buf += " .get_task_tag = " + fabric_mod_name + "_get_task_tag,\n"
buf += " .get_cmd_state = " + fabric_mod_name + "_get_cmd_state,\n"
buf += " .queue_data_in = " + fabric_mod_name + "_queue_data_in,\n"
buf += " .queue_status = " + fabric_mod_name + "_queue_status,\n"
buf += " .queue_tm_rsp = " + fabric_mod_name + "_queue_tm_rsp,\n"
buf += " .is_state_remove = " + fabric_mod_name + "_is_state_remove,\n"
buf += " /*\n"
buf += " * Setup function pointers for generic logic in target_core_fabric_configfs.c\n"
buf += " */\n"
buf += " .fabric_make_wwn = " + fabric_mod_name + "_make_" + fabric_mod_port + ",\n"
buf += " .fabric_drop_wwn = " + fabric_mod_name + "_drop_" + fabric_mod_port + ",\n"
buf += " .fabric_make_tpg = " + fabric_mod_name + "_make_tpg,\n"
buf += " .fabric_drop_tpg = " + fabric_mod_name + "_drop_tpg,\n"
buf += " .fabric_post_link = NULL,\n"
buf += " .fabric_pre_unlink = NULL,\n"
buf += " .fabric_make_np = NULL,\n"
buf += " .fabric_drop_np = NULL,\n"
buf += " .fabric_make_nodeacl = " + fabric_mod_name + "_make_nodeacl,\n"
buf += " .fabric_drop_nodeacl = " + fabric_mod_name + "_drop_nodeacl,\n"
buf += "};\n\n"
buf += "static int " + fabric_mod_name + "_register_configfs(void)\n"
buf += "{\n"
buf += " struct target_fabric_configfs *fabric;\n"
buf += " int ret;\n\n"
buf += " printk(KERN_INFO \"" + fabric_mod_name.upper() + " fabric module %s on %s/%s\"\n"
buf += " \" on \"UTS_RELEASE\"\\n\"," + fabric_mod_name.upper() + "_VERSION, utsname()->sysname,\n"
buf += " utsname()->machine);\n"
buf += " /*\n"
buf += " * Register the top level struct config_item_type with TCM core\n"
buf += " */\n"
buf += " fabric = target_fabric_configfs_init(THIS_MODULE, \"" + fabric_mod_name[4:] + "\");\n"
buf += " if (IS_ERR(fabric)) {\n"
buf += " printk(KERN_ERR \"target_fabric_configfs_init() failed\\n\");\n"
buf += " return PTR_ERR(fabric);\n"
buf += " }\n"
buf += " /*\n"
buf += " * Setup fabric->tf_ops from our local " + fabric_mod_name + "_ops\n"
buf += " */\n"
buf += " fabric->tf_ops = " + fabric_mod_name + "_ops;\n"
buf += " /*\n"
buf += " * Setup default attribute lists for various fabric->tf_cit_tmpl\n"
buf += " */\n"
buf += " TF_CIT_TMPL(fabric)->tfc_wwn_cit.ct_attrs = " + fabric_mod_name + "_wwn_attrs;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_base_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_attrib_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_param_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_np_base_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_base_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_attrib_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_auth_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_param_cit.ct_attrs = NULL;\n"
buf += " /*\n"
buf += " * Register the fabric for use within TCM\n"
buf += " */\n"
buf += " ret = target_fabric_configfs_register(fabric);\n"
buf += " if (ret < 0) {\n"
buf += " printk(KERN_ERR \"target_fabric_configfs_register() failed\"\n"
buf += " \" for " + fabric_mod_name.upper() + "\\n\");\n"
buf += " return ret;\n"
buf += " }\n"
buf += " /*\n"
buf += " * Setup our local pointer to *fabric\n"
buf += " */\n"
buf += " " + fabric_mod_name + "_fabric_configfs = fabric;\n"
buf += " printk(KERN_INFO \"" + fabric_mod_name.upper() + "[0] - Set fabric -> " + fabric_mod_name + "_fabric_configfs\\n\");\n"
buf += " return 0;\n"
buf += "};\n\n"
buf += "static void __exit " + fabric_mod_name + "_deregister_configfs(void)\n"
buf += "{\n"
buf += " if (!" + fabric_mod_name + "_fabric_configfs)\n"
buf += " return;\n\n"
buf += " target_fabric_configfs_deregister(" + fabric_mod_name + "_fabric_configfs);\n"
buf += " " + fabric_mod_name + "_fabric_configfs = NULL;\n"
buf += " printk(KERN_INFO \"" + fabric_mod_name.upper() + "[0] - Cleared " + fabric_mod_name + "_fabric_configfs\\n\");\n"
buf += "};\n\n"
buf += "static int __init " + fabric_mod_name + "_init(void)\n"
buf += "{\n"
buf += " int ret;\n\n"
buf += " ret = " + fabric_mod_name + "_register_configfs();\n"
buf += " if (ret < 0)\n"
buf += " return ret;\n\n"
buf += " return 0;\n"
buf += "};\n\n"
buf += "static void __exit " + fabric_mod_name + "_exit(void)\n"
buf += "{\n"
buf += " " + fabric_mod_name + "_deregister_configfs();\n"
buf += "};\n\n"
buf += "MODULE_DESCRIPTION(\"" + fabric_mod_name.upper() + " series fabric driver\");\n"
buf += "MODULE_LICENSE(\"GPL\");\n"
buf += "module_init(" + fabric_mod_name + "_init);\n"
buf += "module_exit(" + fabric_mod_name + "_exit);\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
return
def tcm_mod_scan_fabric_ops(tcm_dir):
fabric_ops_api = tcm_dir + "include/target/target_core_fabric.h"
print "Using tcm_mod_scan_fabric_ops: " + fabric_ops_api
process_fo = 0;
p = open(fabric_ops_api, 'r')
line = p.readline()
while line:
if process_fo == 0 and re.search('struct target_core_fabric_ops {', line):
line = p.readline()
continue
if process_fo == 0:
process_fo = 1;
line = p.readline()
# Search for function pointer
if not re.search('\(\*', line):
continue
fabric_ops.append(line.rstrip())
continue
line = p.readline()
# Search for function pointer
if not re.search('\(\*', line):
continue
fabric_ops.append(line.rstrip())
p.close()
return
def tcm_mod_dump_fabric_ops(proto_ident, fabric_mod_dir_var, fabric_mod_name):
buf = ""
bufi = ""
f = fabric_mod_dir_var + "/" + fabric_mod_name + "_fabric.c"
print "Writing file: " + f
p = open(f, 'w')
if not p:
tcm_mod_err("Unable to open file: " + f)
fi = fabric_mod_dir_var + "/" + fabric_mod_name + "_fabric.h"
print "Writing file: " + fi
pi = open(fi, 'w')
if not pi:
tcm_mod_err("Unable to open file: " + fi)
buf = "#include <linux/slab.h>\n"
buf += "#include <linux/kthread.h>\n"
buf += "#include <linux/types.h>\n"
buf += "#include <linux/list.h>\n"
buf += "#include <linux/types.h>\n"
buf += "#include <linux/string.h>\n"
buf += "#include <linux/ctype.h>\n"
buf += "#include <asm/unaligned.h>\n"
buf += "#include <scsi/scsi.h>\n"
buf += "#include <scsi/scsi_host.h>\n"
buf += "#include <scsi/scsi_device.h>\n"
buf += "#include <scsi/scsi_cmnd.h>\n"
buf += "#include <scsi/libfc.h>\n\n"
buf += "#include <target/target_core_base.h>\n"
buf += "#include <target/target_core_fabric.h>\n"
buf += "#include <target/target_core_configfs.h>\n\n"
buf += "#include \"" + fabric_mod_name + "_base.h\"\n"
buf += "#include \"" + fabric_mod_name + "_fabric.h\"\n\n"
buf += "int " + fabric_mod_name + "_check_true(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " return 1;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_check_true(struct se_portal_group *);\n"
buf += "int " + fabric_mod_name + "_check_false(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_check_false(struct se_portal_group *);\n"
total_fabric_ops = len(fabric_ops)
i = 0
while i < total_fabric_ops:
fo = fabric_ops[i]
i += 1
# print "fabric_ops: " + fo
if re.search('get_fabric_name', fo):
buf += "char *" + fabric_mod_name + "_get_fabric_name(void)\n"
buf += "{\n"
buf += " return \"" + fabric_mod_name[4:] + "\";\n"
buf += "}\n\n"
bufi += "char *" + fabric_mod_name + "_get_fabric_name(void);\n"
continue
if re.search('get_fabric_proto_ident', fo):
buf += "u8 " + fabric_mod_name + "_get_fabric_proto_ident(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
buf += " u8 proto_id;\n\n"
buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
if proto_ident == "FC":
buf += " case SCSI_PROTOCOL_FCP:\n"
buf += " default:\n"
buf += " proto_id = fc_get_fabric_proto_ident(se_tpg);\n"
buf += " break;\n"
elif proto_ident == "SAS":
buf += " case SCSI_PROTOCOL_SAS:\n"
buf += " default:\n"
buf += " proto_id = sas_get_fabric_proto_ident(se_tpg);\n"
buf += " break;\n"
elif proto_ident == "iSCSI":
buf += " case SCSI_PROTOCOL_ISCSI:\n"
buf += " default:\n"
buf += " proto_id = iscsi_get_fabric_proto_ident(se_tpg);\n"
buf += " break;\n"
buf += " }\n\n"
buf += " return proto_id;\n"
buf += "}\n\n"
bufi += "u8 " + fabric_mod_name + "_get_fabric_proto_ident(struct se_portal_group *);\n"
if re.search('get_wwn', fo):
buf += "char *" + fabric_mod_name + "_get_fabric_wwn(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n\n"
buf += " return &" + fabric_mod_port + "->" + fabric_mod_port + "_name[0];\n"
buf += "}\n\n"
bufi += "char *" + fabric_mod_name + "_get_fabric_wwn(struct se_portal_group *);\n"
if re.search('get_tag', fo):
buf += "u16 " + fabric_mod_name + "_get_tag(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " return tpg->" + fabric_mod_port + "_tpgt;\n"
buf += "}\n\n"
bufi += "u16 " + fabric_mod_name + "_get_tag(struct se_portal_group *);\n"
if re.search('get_default_depth', fo):
buf += "u32 " + fabric_mod_name + "_get_default_depth(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " return 1;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_get_default_depth(struct se_portal_group *);\n"
if re.search('get_pr_transport_id\)\(', fo):
buf += "u32 " + fabric_mod_name + "_get_pr_transport_id(\n"
buf += " struct se_portal_group *se_tpg,\n"
buf += " struct se_node_acl *se_nacl,\n"
buf += " struct t10_pr_registration *pr_reg,\n"
buf += " int *format_code,\n"
buf += " unsigned char *buf)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
buf += " int ret = 0;\n\n"
buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
if proto_ident == "FC":
buf += " case SCSI_PROTOCOL_FCP:\n"
buf += " default:\n"
buf += " ret = fc_get_pr_transport_id(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code, buf);\n"
buf += " break;\n"
elif proto_ident == "SAS":
buf += " case SCSI_PROTOCOL_SAS:\n"
buf += " default:\n"
buf += " ret = sas_get_pr_transport_id(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code, buf);\n"
buf += " break;\n"
elif proto_ident == "iSCSI":
buf += " case SCSI_PROTOCOL_ISCSI:\n"
buf += " default:\n"
buf += " ret = iscsi_get_pr_transport_id(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code, buf);\n"
buf += " break;\n"
buf += " }\n\n"
buf += " return ret;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_get_pr_transport_id(struct se_portal_group *,\n"
bufi += " struct se_node_acl *, struct t10_pr_registration *,\n"
bufi += " int *, unsigned char *);\n"
if re.search('get_pr_transport_id_len\)\(', fo):
buf += "u32 " + fabric_mod_name + "_get_pr_transport_id_len(\n"
buf += " struct se_portal_group *se_tpg,\n"
buf += " struct se_node_acl *se_nacl,\n"
buf += " struct t10_pr_registration *pr_reg,\n"
buf += " int *format_code)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
buf += " int ret = 0;\n\n"
buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
if proto_ident == "FC":
buf += " case SCSI_PROTOCOL_FCP:\n"
buf += " default:\n"
buf += " ret = fc_get_pr_transport_id_len(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code);\n"
buf += " break;\n"
elif proto_ident == "SAS":
buf += " case SCSI_PROTOCOL_SAS:\n"
buf += " default:\n"
buf += " ret = sas_get_pr_transport_id_len(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code);\n"
buf += " break;\n"
elif proto_ident == "iSCSI":
buf += " case SCSI_PROTOCOL_ISCSI:\n"
buf += " default:\n"
buf += " ret = iscsi_get_pr_transport_id_len(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code);\n"
buf += " break;\n"
buf += " }\n\n"
buf += " return ret;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_get_pr_transport_id_len(struct se_portal_group *,\n"
bufi += " struct se_node_acl *, struct t10_pr_registration *,\n"
bufi += " int *);\n"
if re.search('parse_pr_out_transport_id\)\(', fo):
buf += "char *" + fabric_mod_name + "_parse_pr_out_transport_id(\n"
buf += " struct se_portal_group *se_tpg,\n"
buf += " const char *buf,\n"
buf += " u32 *out_tid_len,\n"
buf += " char **port_nexus_ptr)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
buf += " char *tid = NULL;\n\n"
buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
if proto_ident == "FC":
buf += " case SCSI_PROTOCOL_FCP:\n"
buf += " default:\n"
buf += " tid = fc_parse_pr_out_transport_id(se_tpg, buf, out_tid_len,\n"
buf += " port_nexus_ptr);\n"
elif proto_ident == "SAS":
buf += " case SCSI_PROTOCOL_SAS:\n"
buf += " default:\n"
buf += " tid = sas_parse_pr_out_transport_id(se_tpg, buf, out_tid_len,\n"
buf += " port_nexus_ptr);\n"
elif proto_ident == "iSCSI":
buf += " case SCSI_PROTOCOL_ISCSI:\n"
buf += " default:\n"
buf += " tid = iscsi_parse_pr_out_transport_id(se_tpg, buf, out_tid_len,\n"
buf += " port_nexus_ptr);\n"
buf += " }\n\n"
buf += " return tid;\n"
buf += "}\n\n"
bufi += "char *" + fabric_mod_name + "_parse_pr_out_transport_id(struct se_portal_group *,\n"
bufi += " const char *, u32 *, char **);\n"
if re.search('alloc_fabric_acl\)\(', fo):
buf += "struct se_node_acl *" + fabric_mod_name + "_alloc_fabric_acl(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_nacl *nacl;\n\n"
buf += " nacl = kzalloc(sizeof(struct " + fabric_mod_name + "_nacl), GFP_KERNEL);\n"
buf += " if (!nacl) {\n"
buf += " printk(KERN_ERR \"Unable to allocate struct " + fabric_mod_name + "_nacl\\n\");\n"
buf += " return NULL;\n"
buf += " }\n\n"
buf += " return &nacl->se_node_acl;\n"
buf += "}\n\n"
bufi += "struct se_node_acl *" + fabric_mod_name + "_alloc_fabric_acl(struct se_portal_group *);\n"
if re.search('release_fabric_acl\)\(', fo):
buf += "void " + fabric_mod_name + "_release_fabric_acl(\n"
buf += " struct se_portal_group *se_tpg,\n"
buf += " struct se_node_acl *se_nacl)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_nacl *nacl = container_of(se_nacl,\n"
buf += " struct " + fabric_mod_name + "_nacl, se_node_acl);\n"
buf += " kfree(nacl);\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_release_fabric_acl(struct se_portal_group *,\n"
bufi += " struct se_node_acl *);\n"
if re.search('tpg_get_inst_index\)\(', fo):
buf += "u32 " + fabric_mod_name + "_tpg_get_inst_index(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " return 1;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_tpg_get_inst_index(struct se_portal_group *);\n"
if re.search('\*release_cmd\)\(', fo):
buf += "void " + fabric_mod_name + "_release_cmd(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return;\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_release_cmd(struct se_cmd *);\n"
if re.search('shutdown_session\)\(', fo):
buf += "int " + fabric_mod_name + "_shutdown_session(struct se_session *se_sess)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_shutdown_session(struct se_session *);\n"
if re.search('close_session\)\(', fo):
buf += "void " + fabric_mod_name + "_close_session(struct se_session *se_sess)\n"
buf += "{\n"
buf += " return;\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_close_session(struct se_session *);\n"
if re.search('stop_session\)\(', fo):
buf += "void " + fabric_mod_name + "_stop_session(struct se_session *se_sess, int sess_sleep , int conn_sleep)\n"
buf += "{\n"
buf += " return;\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_stop_session(struct se_session *, int, int);\n"
if re.search('fall_back_to_erl0\)\(', fo):
buf += "void " + fabric_mod_name + "_reset_nexus(struct se_session *se_sess)\n"
buf += "{\n"
buf += " return;\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_reset_nexus(struct se_session *);\n"
if re.search('sess_logged_in\)\(', fo):
buf += "int " + fabric_mod_name + "_sess_logged_in(struct se_session *se_sess)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_sess_logged_in(struct se_session *);\n"
if re.search('sess_get_index\)\(', fo):
buf += "u32 " + fabric_mod_name + "_sess_get_index(struct se_session *se_sess)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_sess_get_index(struct se_session *);\n"
if re.search('write_pending\)\(', fo):
buf += "int " + fabric_mod_name + "_write_pending(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_write_pending(struct se_cmd *);\n"
if re.search('write_pending_status\)\(', fo):
buf += "int " + fabric_mod_name + "_write_pending_status(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_write_pending_status(struct se_cmd *);\n"
if re.search('set_default_node_attributes\)\(', fo):
buf += "void " + fabric_mod_name + "_set_default_node_attrs(struct se_node_acl *nacl)\n"
buf += "{\n"
buf += " return;\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_set_default_node_attrs(struct se_node_acl *);\n"
if re.search('get_task_tag\)\(', fo):
buf += "u32 " + fabric_mod_name + "_get_task_tag(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_get_task_tag(struct se_cmd *);\n"
if re.search('get_cmd_state\)\(', fo):
buf += "int " + fabric_mod_name + "_get_cmd_state(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_get_cmd_state(struct se_cmd *);\n"
if re.search('queue_data_in\)\(', fo):
buf += "int " + fabric_mod_name + "_queue_data_in(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_queue_data_in(struct se_cmd *);\n"
if re.search('queue_status\)\(', fo):
buf += "int " + fabric_mod_name + "_queue_status(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_queue_status(struct se_cmd *);\n"
if re.search('queue_tm_rsp\)\(', fo):
buf += "int " + fabric_mod_name + "_queue_tm_rsp(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_queue_tm_rsp(struct se_cmd *);\n"
if re.search('is_state_remove\)\(', fo):
buf += "int " + fabric_mod_name + "_is_state_remove(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_is_state_remove(struct se_cmd *);\n"
	try:
		p.write(buf)
	except IOError:
		tcm_mod_err("Unable to write f: " + f)
	p.close()
	try:
		pi.write(bufi)
	except IOError:
		tcm_mod_err("Unable to write fi: " + fi)
	pi.close()
return
def tcm_mod_build_kbuild(fabric_mod_dir_var, fabric_mod_name):
buf = ""
f = fabric_mod_dir_var + "/Makefile"
print "Writing file: " + f
p = open(f, 'w')
if not p:
tcm_mod_err("Unable to open file: " + f)
buf += fabric_mod_name + "-objs := " + fabric_mod_name + "_fabric.o \\\n"
buf += " " + fabric_mod_name + "_configfs.o\n"
buf += "obj-$(CONFIG_" + fabric_mod_name.upper() + ") += " + fabric_mod_name + ".o\n"
	try:
		p.write(buf)
	except IOError:
		tcm_mod_err("Unable to write f: " + f)
	p.close()
return
def tcm_mod_build_kconfig(fabric_mod_dir_var, fabric_mod_name):
buf = ""
f = fabric_mod_dir_var + "/Kconfig"
print "Writing file: " + f
p = open(f, 'w')
if not p:
tcm_mod_err("Unable to open file: " + f)
buf = "config " + fabric_mod_name.upper() + "\n"
buf += " tristate \"" + fabric_mod_name.upper() + " fabric module\"\n"
buf += " depends on TARGET_CORE && CONFIGFS_FS\n"
buf += " default n\n"
buf += " ---help---\n"
buf += " Say Y here to enable the " + fabric_mod_name.upper() + " fabric module\n"
	try:
		p.write(buf)
	except IOError:
		tcm_mod_err("Unable to write f: " + f)
	p.close()
return
def tcm_mod_add_kbuild(tcm_dir, fabric_mod_name):
buf = "obj-$(CONFIG_" + fabric_mod_name.upper() + ") += " + fabric_mod_name.lower() + "/\n"
kbuild = tcm_dir + "/drivers/target/Makefile"
f = open(kbuild, 'a')
f.write(buf)
f.close()
return
def tcm_mod_add_kconfig(tcm_dir, fabric_mod_name):
buf = "source \"drivers/target/" + fabric_mod_name.lower() + "/Kconfig\"\n"
kconfig = tcm_dir + "/drivers/target/Kconfig"
f = open(kconfig, 'a')
f.write(buf)
f.close()
return
def main(modname, proto_ident):
# proto_ident = "FC"
# proto_ident = "SAS"
# proto_ident = "iSCSI"
	tcm_dir = os.getcwd()
tcm_dir += "/../../"
print "tcm_dir: " + tcm_dir
fabric_mod_name = modname
fabric_mod_dir = tcm_dir + "drivers/target/" + fabric_mod_name
print "Set fabric_mod_name: " + fabric_mod_name
print "Set fabric_mod_dir: " + fabric_mod_dir
print "Using proto_ident: " + proto_ident
if proto_ident != "FC" and proto_ident != "SAS" and proto_ident != "iSCSI":
print "Unsupported proto_ident: " + proto_ident
sys.exit(1)
ret = tcm_mod_create_module_subdir(fabric_mod_dir)
if ret:
print "tcm_mod_create_module_subdir() failed because module already exists!"
sys.exit(1)
tcm_mod_build_base_includes(proto_ident, fabric_mod_dir, fabric_mod_name)
tcm_mod_scan_fabric_ops(tcm_dir)
tcm_mod_dump_fabric_ops(proto_ident, fabric_mod_dir, fabric_mod_name)
tcm_mod_build_configfs(proto_ident, fabric_mod_dir, fabric_mod_name)
tcm_mod_build_kbuild(fabric_mod_dir, fabric_mod_name)
tcm_mod_build_kconfig(fabric_mod_dir, fabric_mod_name)
input = raw_input("Would you like to add " + fabric_mod_name + "to drivers/target/Makefile..? [yes,no]: ")
if input == "yes" or input == "y":
tcm_mod_add_kbuild(tcm_dir, fabric_mod_name)
input = raw_input("Would you like to add " + fabric_mod_name + "to drivers/target/Kconfig..? [yes,no]: ")
if input == "yes" or input == "y":
tcm_mod_add_kconfig(tcm_dir, fabric_mod_name)
return
parser = optparse.OptionParser()
parser.add_option('-m', '--modulename', help='Module name', dest='modname',
action='store', nargs=1, type='string')
parser.add_option('-p', '--protoident', help='Protocol Ident', dest='protoident',
action='store', nargs=1, type='string')
(opts, args) = parser.parse_args()
mandatories = ['modname', 'protoident']
for m in mandatories:
if not opts.__dict__[m]:
print "mandatory option is missing\n"
parser.print_help()
exit(-1)
if __name__ == "__main__":
main(str(opts.modname), opts.protoident)
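# Illustrative invocation (module name is an example); the script expects to
# be run from a directory two levels below the kernel tree root, e.g.
# Documentation/target/:
#
#   python tcm_mod_builder.py -m tcm_nab5000 -p iSCSI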
| gpl-2.0 |
tfroehlich82/erpnext | erpnext/patches/v7_0/update_mode_of_payment_type.py | 52 | 1299 | from __future__ import unicode_literals
import frappe
from frappe.utils import flt
def execute():
frappe.reload_doc('accounts', 'doctype', 'mode_of_payment')
frappe.db.sql(""" update `tabMode of Payment` set type = 'Cash' where (type is null or type = '') and name = 'Cash'""")
for data in frappe.db.sql("""select name from `tabSales Invoice` where is_pos=1 and docstatus<2 and
(ifnull(paid_amount, 0) - ifnull(change_amount, 0)) > ifnull(grand_total, 0) and modified > '2016-05-01'""", as_dict=1):
if data.name:
si_doc = frappe.get_doc("Sales Invoice", data.name)
remove_payment = []
mode_of_payment = [d.mode_of_payment for d in si_doc.payments if flt(d.amount) > 0]
			if len(mode_of_payment) != len(set(mode_of_payment)):  # duplicate modes of payment
for payment_data in si_doc.payments:
if payment_data.idx != 1 and payment_data.amount == si_doc.grand_total:
remove_payment.append(payment_data)
frappe.db.sql(""" delete from `tabSales Invoice Payment`
where name = %(name)s""", {'name': payment_data.name})
if len(remove_payment) > 0:
for d in remove_payment:
si_doc.remove(d)
si_doc.set_paid_amount()
si_doc.db_set("paid_amount", si_doc.paid_amount, update_modified = False)
si_doc.db_set("base_paid_amount", si_doc.base_paid_amount, update_modified = False) | gpl-3.0 |
Som-Energia/cchloader | cchloader/parsers/parser.py | 1 | 1134 | # -*- coding: utf-8 -*-
from __future__ import absolute_import
import os
import re
from cchloader import logger
from cchloader.exceptions import ParserNotFoundException
_PARSERS = {
}
def register(cls):
"""Register a parser
:type cls: Parser class
"""
module = cls.__module__
path = '.'.join([module, cls.__name__])
_PARSERS[path] = cls
def get_parser(cch_file):
for path, cls in _PARSERS.items():
if cls.detect(cch_file):
return cls
logger.error("Parser not found for file %s", cch_file)
raise ParserNotFoundException()
class Parser(object):
"""Base parser interface.
"""
encoding = "iso-8859-15"
patterns = []
@classmethod
def detect(cls, cch_file):
for pattern in cls.patterns:
if re.match(pattern, os.path.basename(cch_file)):
return True
return False
def parse_line(self, line):
"""Parse a line of a CCH file.
:param line: line of the file
"""
raise NotImplementedError("Should have implemented this")
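# Illustrative sketch (class name and pattern are hypothetical): a concrete
# parser subclasses Parser, is registered with register(), and then becomes
# discoverable through get_parser() by filename pattern.
#
#   class F1Parser(Parser):
#       patterns = [r'^F1_\d+']
#       def parse_line(self, line):
#           return line.strip().split(';')
#
#   register(F1Parser)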
| gpl-3.0 |
SimonSapin/servo | tests/wpt/web-platform-tests/tools/third_party/atomicwrites/atomicwrites/__init__.py | 52 | 5965 | import contextlib
import os
import sys
import tempfile
try:
import fcntl
except ImportError:
fcntl = None
__version__ = '1.1.5'
PY2 = sys.version_info[0] == 2
text_type = unicode if PY2 else str # noqa
def _path_to_unicode(x):
if not isinstance(x, text_type):
return x.decode(sys.getfilesystemencoding())
return x
_proper_fsync = os.fsync
if sys.platform != 'win32':
if hasattr(fcntl, 'F_FULLFSYNC'):
def _proper_fsync(fd):
# https://lists.apple.com/archives/darwin-dev/2005/Feb/msg00072.html
# https://developer.apple.com/library/mac/documentation/Darwin/Reference/ManPages/man2/fsync.2.html
# https://github.com/untitaker/python-atomicwrites/issues/6
fcntl.fcntl(fd, fcntl.F_FULLFSYNC)
def _sync_directory(directory):
# Ensure that filenames are written to disk
fd = os.open(directory, 0)
try:
_proper_fsync(fd)
finally:
os.close(fd)
def _replace_atomic(src, dst):
os.rename(src, dst)
_sync_directory(os.path.normpath(os.path.dirname(dst)))
def _move_atomic(src, dst):
os.link(src, dst)
os.unlink(src)
src_dir = os.path.normpath(os.path.dirname(src))
dst_dir = os.path.normpath(os.path.dirname(dst))
_sync_directory(dst_dir)
if src_dir != dst_dir:
_sync_directory(src_dir)
else:
from ctypes import windll, WinError
_MOVEFILE_REPLACE_EXISTING = 0x1
_MOVEFILE_WRITE_THROUGH = 0x8
_windows_default_flags = _MOVEFILE_WRITE_THROUGH
def _handle_errors(rv):
if not rv:
raise WinError()
def _replace_atomic(src, dst):
_handle_errors(windll.kernel32.MoveFileExW(
_path_to_unicode(src), _path_to_unicode(dst),
_windows_default_flags | _MOVEFILE_REPLACE_EXISTING
))
def _move_atomic(src, dst):
_handle_errors(windll.kernel32.MoveFileExW(
_path_to_unicode(src), _path_to_unicode(dst),
_windows_default_flags
))
def replace_atomic(src, dst):
'''
Move ``src`` to ``dst``. If ``dst`` exists, it will be silently
overwritten.
Both paths must reside on the same filesystem for the operation to be
atomic.
'''
return _replace_atomic(src, dst)
def move_atomic(src, dst):
'''
    Move ``src`` to ``dst``. There might be a time window where both filesystem
entries exist. If ``dst`` already exists, :py:exc:`FileExistsError` will be
raised.
Both paths must reside on the same filesystem for the operation to be
atomic.
'''
return _move_atomic(src, dst)
class AtomicWriter(object):
'''
A helper class for performing atomic writes. Usage::
with AtomicWriter(path).open() as f:
f.write(...)
:param path: The destination filepath. May or may not exist.
:param mode: The filemode for the temporary file.
:param overwrite: If set to false, an error is raised if ``path`` exists.
Errors are only raised after the file has been written to. Either way,
the operation is atomic.
If you need further control over the exact behavior, you are encouraged to
subclass.
'''
def __init__(self, path, mode='w', overwrite=False):
if 'a' in mode:
raise ValueError(
'Appending to an existing file is not supported, because that '
'would involve an expensive `copy`-operation to a temporary '
'file. Open the file in normal `w`-mode and copy explicitly '
'if that\'s what you\'re after.'
)
if 'x' in mode:
raise ValueError('Use the `overwrite`-parameter instead.')
if 'w' not in mode:
raise ValueError('AtomicWriters can only be written to.')
self._path = path
self._mode = mode
self._overwrite = overwrite
def open(self):
'''
Open the temporary file.
'''
return self._open(self.get_fileobject)
@contextlib.contextmanager
def _open(self, get_fileobject):
f = None # make sure f exists even if get_fileobject() fails
try:
success = False
with get_fileobject() as f:
yield f
self.sync(f)
self.commit(f)
success = True
finally:
if not success:
try:
self.rollback(f)
except Exception:
pass
def get_fileobject(self, dir=None, **kwargs):
'''Return the temporary file to use.'''
if dir is None:
dir = os.path.normpath(os.path.dirname(self._path))
return tempfile.NamedTemporaryFile(mode=self._mode, dir=dir,
delete=False, **kwargs)
def sync(self, f):
'''responsible for clearing as many file caches as possible before
commit'''
f.flush()
_proper_fsync(f.fileno())
def commit(self, f):
'''Move the temporary file to the target location.'''
if self._overwrite:
replace_atomic(f.name, self._path)
else:
move_atomic(f.name, self._path)
def rollback(self, f):
'''Clean up all temporary resources.'''
os.unlink(f.name)
def atomic_write(path, writer_cls=AtomicWriter, **cls_kwargs):
'''
Simple atomic writes. This wraps :py:class:`AtomicWriter`::
with atomic_write(path) as f:
f.write(...)
:param path: The target path to write to.
:param writer_cls: The writer class to use. This parameter is useful if you
subclassed :py:class:`AtomicWriter` to change some behavior and want to
use that new subclass.
Additional keyword arguments are passed to the writer class. See
:py:class:`AtomicWriter`.
'''
return writer_cls(path, **cls_kwargs).open()
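# Illustrative usage (path is an example): the temporary file replaces the
# destination only if the block completes without raising, so readers never
# observe a partially written file.
#
#   with atomic_write('/tmp/config.json', overwrite=True) as f:
#       f.write('{"ok": true}')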
| mpl-2.0 |
google-research/google-research | cnn_quantization/tf_cnn_benchmarks/test_data/create_cifar10_test_data.py | 1 | 2409 | # coding=utf-8
# Copyright 2021 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Creates fake cifar10 test data to be used in tf_cnn_benchmark tests.
Each image is a single color. There are 10 colors total, and each color appears
in the dataset 10 times, for a total of 100 images in the dataset. Each color
has a unique label. The complete dataset of 100 images is written to each of the
seven files data_batch_1 through data_batch_6 and test_batch.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import os
import pickle
import numpy as np
NAME_TO_RGB = {
'black': (0, 0, 0),
'white': (255, 255, 255),
'red': (255, 0, 0),
'green': (0, 255, 0),
'blue': (0, 0, 255),
'yellow': (255, 255, 0),
'cyan': (0, 255, 255),
'magenta': (255, 0, 255),
'gray': (128, 128, 128),
'teal': (0, 128, 128)
}
COLORS = sorted(NAME_TO_RGB.keys())
NUM_COLORS = len(COLORS)
NUM_IMAGES_PER_COLOR = 10
NUM_BATCHES = NUM_COLORS * NUM_IMAGES_PER_COLOR
NUM_PIXELS_PER_IMAGE = 1024
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--output_dir', type=str, required=True)
args = parser.parse_args()
data = np.empty((NUM_BATCHES, NUM_PIXELS_PER_IMAGE * 3), np.uint8)
labels = []
for i in range(NUM_BATCHES):
color = COLORS[i % NUM_COLORS]
red, green, blue = NAME_TO_RGB[color]
data[i, 0:NUM_PIXELS_PER_IMAGE] = red
data[i, NUM_PIXELS_PER_IMAGE:2 * NUM_PIXELS_PER_IMAGE] = green
data[i, 2 * NUM_PIXELS_PER_IMAGE:] = blue
labels.append(i % NUM_COLORS)
d = {b'data': data, b'labels': labels}
filenames = ['data_batch_%d' % i for i in range(1, 7)] + ['test_batch']
for filename in filenames:
with open(os.path.join(args.output_dir, filename), 'wb') as f:
pickle.dump(d, f)
if __name__ == '__main__':
main()
| apache-2.0 |
u0m3/gr-baz | python/message_server.py | 4 | 5683 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# message_server.py
#
# Copyright 2013 Balint Seeber <balint@crawfish>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
#
#
from __future__ import with_statement
import threading, traceback, socket, SocketServer, time
from gnuradio import gr, gru
class ThreadedTCPRequestHandler(SocketServer.StreamRequestHandler): # BaseRequestHandler
# No __init__
def setup(self):
SocketServer.StreamRequestHandler.setup(self)
print "==> Connection from:", self.client_address
self.request.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, True)
with self.server.client_lock:
self.server.clients.append(self)
self.server.connect_event.set()
#self.server.command_queue.insert_tail(gr.message_from_string("", -1))
def handle(self):
buffer = ""
while True:
data = "" # Initialise to nothing so if there's an exception it'll disconnect
try:
data = self.request.recv(1024)
except socket.error, (e, msg):
if e != 104: # Connection reset by peer
print "==>", self.client_address, "-", msg
#data = self.rfile.readline().strip()
if len(data) == 0:
break
#print "==> Received from", self.client_address, ":", data
#cur_thread = threading.currentThread()
#response = "%s: %s" % (cur_thread.getName(), data)
#self.request.send(response)
buffer += data
lines = buffer.splitlines(True)
for line in lines:
if line[-1] != '\n':
buffer = line
break
line = line.strip()
#print "==> Submitting command:", line
#msg = gr.message_from_string(line, -1)
#self.server.command_queue.insert_tail(msg)
else:
buffer = ""
def finish(self):
print "==> Disconnection from:", self.client_address
with self.server.client_lock:
self.server.clients.remove(self)
if len(self.server.clients) == 0:
self.server.connect_event.clear()
try:
SocketServer.StreamRequestHandler.finish(self)
except socket.error, (e, msg):
if (e != 32): # Broken pipe
print "==>", self.client_address, "-", msg
class ThreadedTCPServer(SocketServer.ThreadingMixIn, SocketServer.TCPServer):
pass
class message_server_thread(threading.Thread):
def __init__(self, msgq, port, start=True, **kwds):
threading.Thread.__init__(self, **kwds)
self.setDaemon(True)
self.msgq = msgq
self.keep_running = True
self.stop_event = threading.Event()
HOST, PORT = "", port # "localhost"
print "==> Starting TCP server on port:", port
while True:
try:
self.server = ThreadedTCPServer((HOST, PORT), ThreadedTCPRequestHandler)
self.server.command_queue = msgq
self.server.client_lock = threading.Lock()
self.server.clients = []
self.server.connect_event = threading.Event()
ip, port = self.server.server_address
self.server_thread = threading.Thread(target=self.server.serve_forever)
self.server_thread.setDaemon(True)
self.server_thread.start()
except socket.error, (e, msg):
print " Socket error:", msg
if (e == 98):
print " Waiting, then trying again..."
time.sleep(5)
continue
break
print "==> TCP server running in thread:", self.server_thread.getName()
if start:
self.start()
def start(self):
print "Starting..."
threading.Thread.start(self)
def stop(self):
print "Stopping..."
self.keep_running = False
msg = gr.message() # Empty message to signal end
self.msgq.insert_tail(msg)
self.stop_event.wait()
self.server.shutdown()
print "Stopped"
#def __del__(self):
# print "DTOR"
def run(self):
if self.msgq:
while self.keep_running:
msg = self.msgq.delete_head()
if self.keep_running == False:
break
try:
#msg.type()
msg_str = msg.to_string()
with self.server.client_lock:
for client in self.server.clients:
try:
client.wfile.write(msg_str + "\n")
except socket.error, (e, msg):
if (e != 32): # Broken pipe
print "==>", client.client_address, "-", msg
except Exception, e:
print e
traceback.print_exc()
self.stop_event.set()
class message_server(gr.hier_block2):
def __init__(self, msgq, port, **kwds):
gr.hier_block2.__init__(self, "message_server",
gr.io_signature(0, 0, 0),
gr.io_signature(0, 0, 0))
self.thread = message_server_thread(msgq, port, start=False, **kwds)
self.start()
def start(self):
self.thread.start()
def stop(self):
self.thread.stop()
def __del__(self):
self.stop()
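# Illustrative usage (port and payload are examples): broadcast every message
# pushed onto a gr.msg_queue to all connected TCP clients.
#
#   msgq = gr.msg_queue()
#   server = message_server(msgq, 12345)
#   msgq.insert_tail(gr.message_from_string("hello"))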
def main():
return 0
if __name__ == '__main__':
main()
| gpl-3.0 |
stephanie-wang/ray | python/ray/setup-dev.py | 1 | 2356 | #!/usr/bin/env python
"""This script allows you to develop RLlib without needing to compile Ray."""
import argparse
import click
import os
import subprocess
import ray
def do_link(package, force=False, local_path=""):
package_home = os.path.abspath(
os.path.join(ray.__file__, "../{}".format(package)))
local_home = os.path.abspath(
os.path.join(__file__, local_path + "../{}".format(package)))
if not os.path.isdir(package_home):
print("{} does not exist. Continuing to link.".format(package_home))
assert os.path.isdir(local_home), local_home
if not force and not click.confirm(
"This will replace:\n {}\nwith a symlink to:\n {}".format(
package_home, local_home),
default=True):
return
if os.access(os.path.dirname(package_home), os.W_OK):
subprocess.check_call(["rm", "-rf", package_home])
subprocess.check_call(["ln", "-s", local_home, package_home])
else:
print("You don't have write permission to {}, using sudo:".format(
package_home))
subprocess.check_call(["sudo", "rm", "-rf", package_home])
subprocess.check_call(["sudo", "ln", "-s", local_home, package_home])
if __name__ == "__main__":
parser = argparse.ArgumentParser(
formatter_class=argparse.RawDescriptionHelpFormatter,
description="Setup dev.")
parser.add_argument(
"--yes", action="store_true", help="Don't ask for confirmation.")
args = parser.parse_args()
do_link("rllib", force=args.yes, local_path="../../")
do_link("tune", force=args.yes)
do_link("autoscaler", force=args.yes)
do_link("scripts", force=args.yes)
do_link("internal", force=args.yes)
do_link("tests", force=args.yes)
do_link("experimental", force=args.yes)
print("Created links.\n\nIf you run into issues initializing Ray, please "
"ensure that your local repo and the installed Ray are in sync "
"(pip install -U the latest wheels at "
"https://ray.readthedocs.io/en/latest/installation.html, "
"and ensure you are up-to-date on the master branch on git).\n\n"
"Note that you may need to delete the package symlinks when pip "
"installing new Ray versions to prevent pip from overwriting files "
"in your git repo.")
| apache-2.0 |
newerthcom/savagerebirth | libs/python-2.72/Lib/encodings/euc_jis_2004.py | 816 | 1051 | #
# euc_jis_2004.py: Python Unicode Codec for EUC_JIS_2004
#
# Written by Hye-Shik Chang <[email protected]>
#
import _codecs_jp, codecs
import _multibytecodec as mbc
codec = _codecs_jp.getcodec('euc_jis_2004')
class Codec(codecs.Codec):
encode = codec.encode
decode = codec.decode
class IncrementalEncoder(mbc.MultibyteIncrementalEncoder,
codecs.IncrementalEncoder):
codec = codec
class IncrementalDecoder(mbc.MultibyteIncrementalDecoder,
codecs.IncrementalDecoder):
codec = codec
class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader):
codec = codec
class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter):
codec = codec
def getregentry():
return codecs.CodecInfo(
name='euc_jis_2004',
encode=Codec().encode,
decode=Codec().decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
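# Illustrative usage: once this module is registered by Python's codec
# machinery, the encoding is available by name, e.g.
#
#   u'\u3042'.encode('euc_jis_2004')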
| gpl-2.0 |
felipenaselva/felipe.repository | script.module.urlresolver/lib/urlresolver/plugins/watchers.py | 4 | 1798 | """
OVERALL CREDIT TO:
t0mm0, Eldorado, VOINAGE, BSTRDMKR, tknorris, smokdpi, TheHighway
urlresolver XBMC Addon
Copyright (C) 2011 t0mm0
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from lib import helpers
from urlresolver import common
from urlresolver.resolver import UrlResolver, ResolverError
class WatchersResolver(UrlResolver):
name = "watchers"
domains = ['watchers.to']
pattern = '(?://|\.)(watchers\.to)/(?:embed-)?([a-zA-Z0-9]+)'
def __init__(self):
self.net = common.Net()
def get_media_url(self, host, media_id):
web_url = self.get_url(host, media_id)
headers = {'User-Agent': common.RAND_UA}
html = self.net.http_GET(web_url, headers=headers).content
if html:
packed = helpers.get_packed_data(html)
headers.update({'Referer': web_url})
sources = helpers.parse_sources_list(packed)
if sources: return helpers.pick_source(sources) + helpers.append_headers(headers)
raise ResolverError('File not found')
def get_url(self, host, media_id):
return self._default_get_url(host, media_id)
| gpl-2.0 |
OTWillems/GEO1005 | SpatialDecision/external/networkx/algorithms/tests/test_dominance.py | 49 | 6847 | import networkx as nx
from nose.tools import *
class TestImmediateDominators(object):
def test_exceptions(self):
G = nx.Graph()
G.add_node(0)
assert_raises(nx.NetworkXNotImplemented, nx.immediate_dominators, G, 0)
G = nx.MultiGraph(G)
assert_raises(nx.NetworkXNotImplemented, nx.immediate_dominators, G, 0)
G = nx.DiGraph([[0, 0]])
assert_raises(nx.NetworkXError, nx.immediate_dominators, G, 1)
def test_singleton(self):
G = nx.DiGraph()
G.add_node(0)
assert_equal(nx.immediate_dominators(G, 0), {0: 0})
G.add_edge(0, 0)
assert_equal(nx.immediate_dominators(G, 0), {0: 0})
def test_path(self):
n = 5
G = nx.path_graph(n, create_using=nx.DiGraph())
assert_equal(nx.immediate_dominators(G, 0),
{i: max(i - 1, 0) for i in range(n)})
def test_cycle(self):
n = 5
G = nx.cycle_graph(n, create_using=nx.DiGraph())
assert_equal(nx.immediate_dominators(G, 0),
{i: max(i - 1, 0) for i in range(n)})
def test_unreachable(self):
n = 5
assert_greater(n, 1)
G = nx.path_graph(n, create_using=nx.DiGraph())
assert_equal(nx.immediate_dominators(G, n // 2),
{i: max(i - 1, n // 2) for i in range(n // 2, n)})
def test_irreducible1(self):
# Graph taken from Figure 2 of
# K. D. Cooper, T. J. Harvey, and K. Kennedy.
# A simple, fast dominance algorithm.
# Software Practice & Experience, 4:110, 2001.
edges = [(1, 2), (2, 1), (3, 2), (4, 1), (5, 3), (5, 4)]
G = nx.DiGraph(edges)
assert_equal(nx.immediate_dominators(G, 5),
{i: 5 for i in range(1, 6)})
def test_irreducible2(self):
# Graph taken from Figure 4 of
# K. D. Cooper, T. J. Harvey, and K. Kennedy.
# A simple, fast dominance algorithm.
# Software Practice & Experience, 4:110, 2001.
edges = [(1, 2), (2, 1), (2, 3), (3, 2), (4, 2), (4, 3), (5, 1),
(6, 4), (6, 5)]
G = nx.DiGraph(edges)
assert_equal(nx.immediate_dominators(G, 6),
{i: 6 for i in range(1, 7)})
def test_domrel_png(self):
        # Graph taken from https://commons.wikimedia.org/wiki/File:Domrel.png
edges = [(1, 2), (2, 3), (2, 4), (2, 6), (3, 5), (4, 5), (5, 2)]
G = nx.DiGraph(edges)
assert_equal(nx.immediate_dominators(G, 1),
{1: 1, 2: 1, 3: 2, 4: 2, 5: 2, 6: 2})
# Test postdominance.
with nx.utils.reversed(G):
assert_equal(nx.immediate_dominators(G, 6),
{1: 2, 2: 6, 3: 5, 4: 5, 5: 2, 6: 6})
def test_boost_example(self):
# Graph taken from Figure 1 of
# http://www.boost.org/doc/libs/1_56_0/libs/graph/doc/lengauer_tarjan_dominator.htm
edges = [(0, 1), (1, 2), (1, 3), (2, 7), (3, 4), (4, 5), (4, 6),
(5, 7), (6, 4)]
G = nx.DiGraph(edges)
assert_equal(nx.immediate_dominators(G, 0),
{0: 0, 1: 0, 2: 1, 3: 1, 4: 3, 5: 4, 6: 4, 7: 1})
# Test postdominance.
with nx.utils.reversed(G):
assert_equal(nx.immediate_dominators(G, 7),
{0: 1, 1: 7, 2: 7, 3: 4, 4: 5, 5: 7, 6: 4, 7: 7})
class TestDominanceFrontiers(object):
def test_exceptions(self):
G = nx.Graph()
G.add_node(0)
assert_raises(nx.NetworkXNotImplemented, nx.dominance_frontiers, G, 0)
G = nx.MultiGraph(G)
assert_raises(nx.NetworkXNotImplemented, nx.dominance_frontiers, G, 0)
G = nx.DiGraph([[0, 0]])
assert_raises(nx.NetworkXError, nx.dominance_frontiers, G, 1)
def test_singleton(self):
G = nx.DiGraph()
G.add_node(0)
assert_equal(nx.dominance_frontiers(G, 0), {0: []})
G.add_edge(0, 0)
assert_equal(nx.dominance_frontiers(G, 0), {0: []})
def test_path(self):
n = 5
G = nx.path_graph(n, create_using=nx.DiGraph())
assert_equal(nx.dominance_frontiers(G, 0),
{i: [] for i in range(n)})
def test_cycle(self):
n = 5
G = nx.cycle_graph(n, create_using=nx.DiGraph())
assert_equal(nx.dominance_frontiers(G, 0),
{i: [] for i in range(n)})
def test_unreachable(self):
n = 5
assert_greater(n, 1)
G = nx.path_graph(n, create_using=nx.DiGraph())
assert_equal(nx.dominance_frontiers(G, n // 2),
{i: [] for i in range(n // 2, n)})
def test_irreducible1(self):
# Graph taken from Figure 2 of
# K. D. Cooper, T. J. Harvey, and K. Kennedy.
# A simple, fast dominance algorithm.
# Software Practice & Experience, 4:110, 2001.
edges = [(1, 2), (2, 1), (3, 2), (4, 1), (5, 3), (5, 4)]
G = nx.DiGraph(edges)
assert_equal({u: sorted(df)
for u, df in nx.dominance_frontiers(G, 5).items()},
{1: [2], 2: [1], 3: [2], 4: [1], 5: []})
def test_irreducible2(self):
# Graph taken from Figure 4 of
# K. D. Cooper, T. J. Harvey, and K. Kennedy.
# A simple, fast dominance algorithm.
# Software Practice & Experience, 4:110, 2001.
edges = [(1, 2), (2, 1), (2, 3), (3, 2), (4, 2), (4, 3), (5, 1),
(6, 4), (6, 5)]
G = nx.DiGraph(edges)
assert_equal(nx.dominance_frontiers(G, 6),
{1: [2], 2: [1, 3], 3: [2], 4: [2, 3], 5: [1], 6: []})
def test_domrel_png(self):
        # Graph taken from https://commons.wikimedia.org/wiki/File:Domrel.png
edges = [(1, 2), (2, 3), (2, 4), (2, 6), (3, 5), (4, 5), (5, 2)]
G = nx.DiGraph(edges)
assert_equal(nx.dominance_frontiers(G, 1),
{1: [], 2: [], 3: [5], 4: [5], 5: [2], 6: []})
# Test postdominance.
with nx.utils.reversed(G):
assert_equal(nx.dominance_frontiers(G, 6),
{1: [], 2: [], 3: [2], 4: [2], 5: [2], 6: []})
def test_boost_example(self):
# Graph taken from Figure 1 of
# http://www.boost.org/doc/libs/1_56_0/libs/graph/doc/lengauer_tarjan_dominator.htm
edges = [(0, 1), (1, 2), (1, 3), (2, 7), (3, 4), (4, 5), (4, 6),
(5, 7), (6, 4)]
G = nx.DiGraph(edges)
assert_equal(nx.dominance_frontiers(G, 0),
{0: [], 1: [], 2: [7], 3: [7], 4: [7], 5: [7], 6: [4],
7: []})
# Test postdominance.
with nx.utils.reversed(G):
assert_equal(nx.dominance_frontiers(G, 7),
{0: [], 1: [], 2: [1], 3: [1], 4: [1], 5: [1], 6: [4],
7: []})
| gpl-2.0 |
Richi91/nntools | lasagne/utils.py | 4 | 9388 | import numpy as np
import theano
import theano.tensor as T
def floatX(arr):
"""Converts data to a numpy array of dtype ``theano.config.floatX``.
Parameters
----------
arr : array_like
The data to be converted.
Returns
-------
numpy ndarray
The input array in the ``floatX`` dtype configured for Theano.
If `arr` is an ndarray of correct dtype, it is returned as is.
"""
return np.asarray(arr, dtype=theano.config.floatX)
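# Illustrative example: Python lists and float64 arrays are both coerced to
# the configured Theano dtype (typically 'float32').
#
#   >>> floatX([1, 2, 3]).dtype == np.dtype(theano.config.floatX)
#   True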
def shared_empty(dim=2, dtype=None):
"""Creates empty Theano shared variable.
Shortcut to create an empty Theano shared variable with
the specified number of dimensions.
Parameters
----------
dim : int, optional
The number of dimensions for the empty variable, defaults to 2.
dtype : a numpy data-type, optional
The desired dtype for the variable. Defaults to the Theano
``floatX`` dtype.
Returns
-------
Theano shared variable
An empty Theano shared variable of dtype ``dtype`` with
`dim` dimensions.
"""
if dtype is None:
dtype = theano.config.floatX
shp = tuple([1] * dim)
return theano.shared(np.zeros(shp, dtype=dtype))
def as_theano_expression(input):
"""Wrap as Theano expression.
Wraps the given input as a Theano constant if it is not
a valid Theano expression already. Useful to transparently
handle numpy arrays and Python scalars, for example.
Parameters
----------
input : number, numpy array or Theano expression
Expression to be converted to a Theano constant.
Returns
-------
Theano symbolic constant
Theano constant version of `input`.
"""
if isinstance(input, theano.gof.Variable):
return input
else:
try:
return theano.tensor.constant(input)
except Exception as e:
raise TypeError("Input of type %s is not a Theano expression and "
"cannot be wrapped as a Theano constant (original "
"exception: %s)" % (type(input), e))
def one_hot(x, m=None):
"""One-hot representation of integer vector.
Given a vector of integers from 0 to m-1, returns a matrix
with a one-hot representation, where each row corresponds
to an element of x.
Parameters
----------
x : integer vector
The integer vector to convert to a one-hot representation.
m : int, optional
The number of different columns for the one-hot representation. This
needs to be strictly greater than the maximum value of `x`.
Defaults to ``max(x) + 1``.
Returns
-------
Theano tensor variable
A Theano tensor variable of shape (``n``, `m`), where ``n`` is the
length of `x`, with the one-hot representation of `x`.
Notes
-----
If your integer vector represents target class memberships, and you wish to
compute the cross-entropy between predictions and the target class
memberships, then there is no need to use this function, since the function
:func:`lasagne.objectives.categorical_crossentropy()` can compute the
cross-entropy from the integer vector directly.
"""
if m is None:
m = T.cast(T.max(x) + 1, 'int32')
return T.eye(m)[T.cast(x, 'int32')]
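# Illustrative example (output dtype follows the configured floatX): the
# symbolic result can be evaluated by compiling it into a Theano function.
#
#   >>> f = theano.function([], one_hot(np.array([0, 2]), 3))
#   >>> f()
#   array([[ 1.,  0.,  0.],
#          [ 0.,  0.,  1.]])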
def unique(l):
"""Filters duplicates of iterable.
Create a new list from l with duplicate entries removed,
while preserving the original order.
Parameters
----------
l : iterable
Input iterable to filter of duplicates.
Returns
-------
list
A list of elements of `l` without duplicates and in the same order.
"""
new_list = []
for el in l:
if el not in new_list:
new_list.append(el)
return new_list
def as_tuple(x, N):
"""
Coerce a value to a tuple of length N.
    Parameters
    ----------
x : value or iterable
N : integer
length of the desired tuple
    Returns
    -------
tuple
``tuple(x)`` if `x` is iterable, ``(x,) * N`` otherwise.
"""
try:
X = tuple(x)
except TypeError:
X = (x,) * N
if len(X) != N:
raise ValueError("input must be a single value "
"or an iterable with length {0}".format(N))
return X
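# Illustrative examples: scalars are repeated, iterables are passed through
# (their length must match N).
#
#   as_tuple(3, 2)      # -> (3, 3)
#   as_tuple((5, 4), 2) # -> (5, 4)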
def compute_norms(array, norm_axes=None):
""" Compute incoming weight vector norms.
Parameters
----------
array : ndarray
Weight array.
norm_axes : sequence (list or tuple)
The axes over which to compute the norm. This overrides the
default norm axes defined for the number of dimensions
in `array`. When this is not specified and `array` is a 2D array,
this is set to `(0,)`. If `array` is a 3D, 4D or 5D array, it is
set to a tuple listing all axes but axis 0. The former default is
useful for working with dense layers, the latter is useful for 1D,
2D and 3D convolutional layers.
(Optional)
Returns
-------
norms : 1D array
1D array of incoming weight vector norms.
Examples
--------
>>> array = np.random.randn(100, 200)
>>> norms = compute_norms(array)
>>> norms.shape
(200,)
>>> norms = compute_norms(array, norm_axes=(1,))
>>> norms.shape
(100,)
"""
ndim = array.ndim
if norm_axes is not None:
sum_over = tuple(norm_axes)
elif ndim == 2: # DenseLayer
sum_over = (0,)
elif ndim in [3, 4, 5]: # Conv{1,2,3}DLayer
sum_over = tuple(range(1, ndim))
else:
raise ValueError(
"Unsupported tensor dimensionality {}."
"Must specify `norm_axes`".format(array.ndim)
)
norms = np.sqrt(np.sum(array**2, axis=sum_over))
return norms
def create_param(spec, shape, name=None):
"""
Helper method to create Theano shared variables for layer parameters
and to initialize them.
Parameters
----------
spec : numpy array, Theano shared variable, or callable
Either of the following:
* a numpy array with the initial parameter values
* a Theano shared variable representing the parameters
* a function or callable that takes the desired shape of
the parameter array as its single argument and returns
a numpy array.
shape : iterable of int
a tuple or other iterable of integers representing the desired
shape of the parameter array.
name : string, optional
If a new variable is created, the name to give to the parameter
variable. This is ignored if `spec` is already a Theano shared
variable.
Returns
-------
Theano shared variable
a Theano shared variable representing layer parameters. If a
numpy array was provided, the variable is initialized to
contain this array. If a shared variable was provided, it is
simply returned. If a callable was provided, it is called, and
its output is used to initialize the variable.
Notes
-----
This function is called by :meth:`Layer.add_param()` in the constructor
of most :class:`Layer` subclasses. This enables those layers to
support initialization with numpy arrays, existing Theano shared
variables, and callables for generating initial parameter values.
"""
shape = tuple(shape) # convert to tuple if needed
if any(d <= 0 for d in shape):
raise ValueError((
"Cannot create param with a non-positive shape dimension. "
"Tried to create param with shape=%r, name=%r") % (shape, name))
if isinstance(spec, theano.compile.SharedVariable):
# We cannot check the shape here, the shared variable might not be
# initialized correctly yet. We can check the dimensionality
# though. Note that we cannot assign a name here. We could assign
# to the `name` attribute of the shared variable, but we shouldn't
# because the user may have already named the variable and we don't
# want to override this.
if spec.ndim != len(shape):
raise RuntimeError("shared variable has %d dimensions, "
"should be %d" % (spec.ndim, len(shape)))
return spec
elif isinstance(spec, np.ndarray):
if spec.shape != shape:
raise RuntimeError("parameter array has shape %s, should be "
"%s" % (spec.shape, shape))
return theano.shared(spec, name=name)
elif hasattr(spec, '__call__'):
arr = spec(shape)
try:
arr = floatX(arr)
except Exception:
raise RuntimeError("cannot initialize parameters: the "
"provided callable did not return an "
"array-like value")
if arr.shape != shape:
raise RuntimeError("cannot initialize parameters: the "
"provided callable did not return a value "
"with the correct shape")
return theano.shared(arr, name=name)
else:
raise RuntimeError("cannot initialize parameters: 'spec' is not "
"a numpy array, a Theano shared variable, or a "
"callable")
| mit |
weka-io/boto | boto/cognito/sync/__init__.py | 128 | 1675 | # Copyright (c) 2014 Amazon.com, Inc. or its affiliates.
# All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
from boto.regioninfo import RegionInfo, get_regions
def regions():
"""
Get all available regions for the Amazon Cognito Sync service.
:rtype: list
:return: A list of :class:`boto.regioninfo.RegionInfo`
"""
from boto.cognito.sync.layer1 import CognitoSyncConnection
return get_regions('cognito-sync', connection_cls=CognitoSyncConnection)
def connect_to_region(region_name, **kw_params):
for region in regions():
if region.name == region_name:
return region.connect(**kw_params)
return None
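# Illustrative usage (region name is an example):
#
#   conn = connect_to_region('us-east-1')
#   if conn is None:
#       raise ValueError('unsupported Cognito Sync region')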
| mit |
techtonik/numpy | numpy/f2py/tests/test_return_complex.py | 69 | 4672 | from __future__ import division, absolute_import, print_function
from numpy.testing import *
from numpy import array
from numpy.compat import long
import util
class TestReturnComplex(util.F2PyTest):
def check_function(self, t):
tname = t.__doc__.split()[0]
if tname in ['t0', 't8', 's0', 's8']:
err = 1e-5
else:
err = 0.0
assert_( abs(t(234j)-234.0j)<=err)
assert_( abs(t(234.6)-234.6)<=err)
assert_( abs(t(long(234))-234.0)<=err)
assert_( abs(t(234.6+3j)-(234.6+3j))<=err)
#assert_( abs(t('234')-234.)<=err)
#assert_( abs(t('234.6')-234.6)<=err)
assert_( abs(t(-234)+234.)<=err)
assert_( abs(t([234])-234.)<=err)
assert_( abs(t((234,))-234.)<=err)
assert_( abs(t(array(234))-234.)<=err)
assert_( abs(t(array(23+4j, 'F'))-(23+4j))<=err)
assert_( abs(t(array([234]))-234.)<=err)
assert_( abs(t(array([[234]]))-234.)<=err)
assert_( abs(t(array([234], 'b'))+22.)<=err)
assert_( abs(t(array([234], 'h'))-234.)<=err)
assert_( abs(t(array([234], 'i'))-234.)<=err)
assert_( abs(t(array([234], 'l'))-234.)<=err)
assert_( abs(t(array([234], 'q'))-234.)<=err)
assert_( abs(t(array([234], 'f'))-234.)<=err)
assert_( abs(t(array([234], 'd'))-234.)<=err)
assert_( abs(t(array([234+3j], 'F'))-(234+3j))<=err)
assert_( abs(t(array([234], 'D'))-234.)<=err)
#assert_raises(TypeError, t, array([234], 'a1'))
assert_raises(TypeError, t, 'abc')
assert_raises(IndexError, t, [])
assert_raises(IndexError, t, ())
assert_raises(TypeError, t, t)
assert_raises(TypeError, t, {})
try:
r = t(10**400)
assert_( repr(r) in ['(inf+0j)', '(Infinity+0j)'], repr(r))
except OverflowError:
pass
class TestF77ReturnComplex(TestReturnComplex):
code = """
function t0(value)
complex value
complex t0
t0 = value
end
function t8(value)
complex*8 value
complex*8 t8
t8 = value
end
function t16(value)
complex*16 value
complex*16 t16
t16 = value
end
function td(value)
double complex value
double complex td
td = value
end
subroutine s0(t0,value)
complex value
complex t0
cf2py intent(out) t0
t0 = value
end
subroutine s8(t8,value)
complex*8 value
complex*8 t8
cf2py intent(out) t8
t8 = value
end
subroutine s16(t16,value)
complex*16 value
complex*16 t16
cf2py intent(out) t16
t16 = value
end
subroutine sd(td,value)
double complex value
double complex td
cf2py intent(out) td
td = value
end
"""
@dec.slow
def test_all(self):
for name in "t0,t8,t16,td,s0,s8,s16,sd".split(","):
self.check_function(getattr(self.module, name))
class TestF90ReturnComplex(TestReturnComplex):
suffix = ".f90"
code = """
module f90_return_complex
contains
function t0(value)
complex :: value
complex :: t0
t0 = value
end function t0
function t8(value)
complex(kind=4) :: value
complex(kind=4) :: t8
t8 = value
end function t8
function t16(value)
complex(kind=8) :: value
complex(kind=8) :: t16
t16 = value
end function t16
function td(value)
double complex :: value
double complex :: td
td = value
end function td
subroutine s0(t0,value)
complex :: value
complex :: t0
!f2py intent(out) t0
t0 = value
end subroutine s0
subroutine s8(t8,value)
complex(kind=4) :: value
complex(kind=4) :: t8
!f2py intent(out) t8
t8 = value
end subroutine s8
subroutine s16(t16,value)
complex(kind=8) :: value
complex(kind=8) :: t16
!f2py intent(out) t16
t16 = value
end subroutine s16
subroutine sd(td,value)
double complex :: value
double complex :: td
!f2py intent(out) td
td = value
end subroutine sd
end module f90_return_complex
"""
@dec.slow
def test_all(self):
for name in "t0,t8,t16,td,s0,s8,s16,sd".split(","):
self.check_function(getattr(self.module.f90_return_complex, name))
if __name__ == "__main__":
import nose
nose.runmodule()
| bsd-3-clause |
ltiao/networkx | examples/drawing/atlas.py | 54 | 2609 | #!/usr/bin/env python
"""
Atlas of all graphs of 6 nodes or less.
"""
__author__ = """Aric Hagberg ([email protected])"""
# Copyright (C) 2004 by
# Aric Hagberg <[email protected]>
# Dan Schult <[email protected]>
# Pieter Swart <[email protected]>
# All rights reserved.
# BSD license.
import networkx as nx
from networkx.generators.atlas import *
from networkx.algorithms.isomorphism.isomorph import graph_could_be_isomorphic as isomorphic
import random
def atlas6():
""" Return the atlas of all connected graphs of 6 nodes or less.
Attempt to check for isomorphisms and remove.
"""
Atlas=graph_atlas_g()[0:208] # 208
# remove isolated nodes, only connected graphs are left
U=nx.Graph() # graph for union of all graphs in atlas
for G in Atlas:
zerodegree=[n for n in G if G.degree(n)==0]
for n in zerodegree:
G.remove_node(n)
U=nx.disjoint_union(U,G)
# list of graphs of all connected components
C=nx.connected_component_subgraphs(U)
UU=nx.Graph()
# do quick isomorphic-like check, not a true isomorphism checker
nlist=[] # list of nonisomorphic graphs
for G in C:
# check against all nonisomorphic graphs so far
if not iso(G,nlist):
nlist.append(G)
UU=nx.disjoint_union(UU,G) # union the nonisomorphic graphs
return UU
def iso(G1, glist):
"""Quick and dirty nonisomorphism checker used to check isomorphisms."""
for G2 in glist:
if isomorphic(G1,G2):
return True
return False
if __name__ == '__main__':
import networkx as nx
G=atlas6()
print("graph has %d nodes with %d edges"\
%(nx.number_of_nodes(G),nx.number_of_edges(G)))
print(nx.number_connected_components(G),"connected components")
try:
from networkx import graphviz_layout
except ImportError:
raise ImportError("This example needs Graphviz and either PyGraphviz or Pydot")
import matplotlib.pyplot as plt
plt.figure(1,figsize=(8,8))
# layout graphs with positions using graphviz neato
pos=nx.graphviz_layout(G,prog="neato")
# color nodes the same in each connected subgraph
C=nx.connected_component_subgraphs(G)
for g in C:
c=[random.random()]*nx.number_of_nodes(g) # random color...
nx.draw(g,
pos,
node_size=40,
node_color=c,
vmin=0.0,
vmax=1.0,
with_labels=False
)
plt.savefig("atlas.png",dpi=75)
| bsd-3-clause |
pszemus/grpc | tools/line_count/collect-history.py | 27 | 1442 | #!/usr/bin/env python
# Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import subprocess
import datetime
# this script is only of historical interest: it's the script that was used to
# bootstrap the dataset
def daterange(start, end):
for n in range(int((end - start).days)):
yield start + datetime.timedelta(n)
start_date = datetime.date(2017, 3, 26)
end_date = datetime.date(2017, 3, 29)
for dt in daterange(start_date, end_date):
dmy = dt.strftime('%Y-%m-%d')
sha1 = subprocess.check_output(
['git', 'rev-list', '-n', '1',
'--before=%s' % dmy, 'master']).strip()
subprocess.check_call(['git', 'checkout', sha1])
subprocess.check_call(['git', 'submodule', 'update'])
subprocess.check_call(['git', 'clean', '-f', '-x', '-d'])
subprocess.check_call([
'cloc', '--vcs=git', '--by-file', '--yaml',
'--out=../count/%s.yaml' % dmy, '.'
])
| apache-2.0 |
cancro7/gem5 | util/style/repo.py | 11 | 9560 | #!/usr/bin/env python
#
# Copyright (c) 2016 ARM Limited
# All rights reserved
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Andreas Sandberg
from abc import *
import os
import subprocess
from region import *
from style import modified_regions
class AbstractRepo(object):
__metaclass__ = ABCMeta
def file_path(self, fname):
"""Get the absolute path to a file relative within the repository. The
input file name must be a valid path within the repository.
"""
return os.path.join(self.repo_base(), fname)
def in_repo(self, fname):
"""Check if a path points to something within the repository base. Not
that this does not check for the presence of the object in the
file system as it could exist in the index without being in
the file system.
"""
fname = os.path.abspath(fname)
repo_path = os.path.abspath(self.repo_base())
return os.path.commonprefix([repo_path, fname]) == repo_path
def repo_path(self, fname):
"""Get the path of a file relative to the repository base. The input
file name is assumed to be an absolute path or a path relative
to the current working directory.
"""
return os.path.relpath(fname, self.repo_base())
def get_file(self, name):
"""Get the contents of a file in the file system using a path relative
to the repository root.
"""
with open(self.file_path(name), "r") as f:
return f.read()
@abstractmethod
def repo_base(self):
"""Get the path to the base of the repository"""
pass
@abstractmethod
def staged_files(self):
"""Get a tuple describing the files that have been staged for a
commit: (list of new, list of modified)
"""
pass
@abstractmethod
def staged_regions(self, fname, context=0):
"""Get modified regions that will be committed by the next commit
command
"""
pass
@abstractmethod
def modified_regions(self, fname, context=0):
"""Get modified regions that have been staged for commit or are
present in the file system.
"""
pass
class GitRepo(AbstractRepo):
def __init__(self):
self.git = "git"
self._head_revision = None
self._repo_base = None
def repo_base(self):
if self._repo_base is None:
self._repo_base = subprocess.check_output(
[ self.git, "rev-parse", "--show-toplevel" ]).rstrip("\n")
return self._repo_base
def staged_files(self):
added = []
modified = []
for action, fname in self.status(filter="MA", cached=True):
if action == "M":
modified.append(fname)
elif action == "A":
added.append(fname)
return added, modified
def staged_regions(self, fname, context=0):
if self.file_status(fname, cached=True) in ("", "A", ):
return all_regions
old = self.file_from_head(self.repo_path(fname)).split("\n")
new = self.file_from_index(self.repo_path(fname)).split("\n")
return modified_regions(old, new, context=context)
def modified_regions(self, fname, context=0):
if self.file_status(fname) in ("", "A", ):
return all_regions
old = self.file_from_head(self.repo_path(fname)).split("\n")
new = self.get_file(self.repo_path(fname)).split("\n")
return modified_regions(old, new, context=context)
def head_revision(self):
if self._head_revision is not None:
return self._head_revision
try:
self._head_revision = subprocess.check_output(
[ self.git, "rev-parse", "--verify", "HEAD" ],
stderr=subprocess.PIPE).rstrip("\n")
except subprocess.CalledProcessError:
# Assume that the repo is empty and use the semi-magic
# empty tree revision if git rev-parse returned an error.
self._head_revision = "4b825dc642cb6eb9a060e54bf8d69288fbee4904"
return self._head_revision
def file_status(self, fname, cached=False):
status = self.status(files=[fname], cached=cached)
assert len(status) <= 1
if status:
return status[0][0]
else:
# No information available for the file. This usually
# means that it hasn't been added to the
# repository/commit.
return ""
def status(self, filter=None, files=[], cached=False):
cmd = [ self.git, "diff-index", "--name-status" ]
if cached:
cmd.append("--cached")
if filter:
cmd += [ "--diff-filter=%s" % filter ]
cmd += [ self.head_revision(), "--" ] + files
status = subprocess.check_output(cmd).rstrip("\n")
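# Each output line of "git diff-index --name-status" looks like "M\tpath",
# so splitting on the tab below yields (action, filename) pairs.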
if status:
return [ f.split("\t") for f in status.split("\n") ]
else:
return []
def file_from_index(self, name):
return subprocess.check_output(
[ self.git, "show", ":%s" % (name, ) ])
def file_from_head(self, name):
return subprocess.check_output(
[ self.git, "show", "%s:%s" % (self.head_revision(), name) ])
class MercurialRepo(AbstractRepo):
def __init__(self):
self.hg = "hg"
self._repo_base = None
def repo_base(self):
if self._repo_base is None:
self._repo_base = subprocess.check_output(
[ self.hg, "root" ]).rstrip("\n")
return self._repo_base
def staged_files(self):
added = []
modified = []
for action, fname in self.status():
if action == "M":
modified.append(fname)
elif action == "A":
added.append(fname)
return added, modified
def staged_regions(self, fname, context=0):
return self.modified_regions(fname, context=context)
def modified_regions(self, fname, context=0):
old = self.file_from_tip(fname).split("\n")
new = self.get_file(fname).split("\n")
return modified_regions(old, new, context=context)
def status(self, filter=None):
files = subprocess.check_output([ self.hg, "status" ]).rstrip("\n")
if files:
return [ f.split(" ") for f in files.split("\n") ]
else:
return []
def file_from_tip(self, name):
return subprocess.check_output([ self.hg, "cat", name ])
def detect_repo(path="."):
"""Auto-detect the revision control system used for a source code
directory. The code starts searching for repository meta data
directories in path and then continues towards the root directory
until the root is reached or a metadata directory has been found.
Returns: List of repository helper classes that can interface with
the detected revision control system(s).
"""
_repo_types = (
(".git", GitRepo),
(".hg", MercurialRepo),
)
repo_types = []
for repo_dir, repo_class in _repo_types:
if os.path.exists(os.path.join(path, repo_dir)):
repo_types.append(repo_class)
if repo_types:
return repo_types
else:
parent_dir = os.path.abspath(os.path.join(path, ".."))
if not os.path.samefile(parent_dir, path):
return detect_repo(path=parent_dir)
else:
# We reached the root directory without finding a meta
# data directory.
return []
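# Illustrative usage sketch (not part of the original module): pick the first
# detected backend and inspect what is staged, assuming a repository exists.
#
# repos = detect_repo()
# if repos:
#     repo = repos[0]()
#     added, modified = repo.staged_files()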
| bsd-3-clause |
AntidoteLabs/Antidote-DM | Antidotes DM/youtube_dl/extractor/googlesearch.py | 168 | 1699 | from __future__ import unicode_literals
import itertools
import re
from .common import SearchInfoExtractor
from ..compat import (
compat_urllib_parse,
)
class GoogleSearchIE(SearchInfoExtractor):
IE_DESC = 'Google Video search'
_MAX_RESULTS = 1000
IE_NAME = 'video.google:search'
_SEARCH_KEY = 'gvsearch'
_TEST = {
'url': 'gvsearch15:python language',
'info_dict': {
'id': 'python language',
'title': 'python language',
},
'playlist_count': 15,
}
def _get_n_results(self, query, n):
"""Get a specified number of results for a query"""
entries = []
res = {
'_type': 'playlist',
'id': query,
'title': query,
}
for pagenum in itertools.count():
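# Fetch successive result pages until n entries have been collected or the
# "next page" link (id="pnnext") disappears from the results page.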
result_url = (
'http://www.google.com/search?tbm=vid&q=%s&start=%s&hl=en'
% (compat_urllib_parse.quote_plus(query), pagenum * 10))
webpage = self._download_webpage(
result_url, 'gvsearch:' + query,
note='Downloading result page ' + str(pagenum + 1))
for hit_idx, mobj in enumerate(re.finditer(
r'<h3 class="r"><a href="([^"]+)"', webpage)):
# Skip playlists
if not re.search(r'id="vidthumb%d"' % (hit_idx + 1), webpage):
continue
entries.append({
'_type': 'url',
'url': mobj.group(1)
})
if (len(entries) >= n) or not re.search(r'id="pnnext"', webpage):
res['entries'] = entries[:n]
return res
| gpl-2.0 |
rupran/ansible | lib/ansible/modules/network/nxos/nxos_vxlan_vtep.py | 21 | 13085 | #!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: nxos_vxlan_vtep
extends_documentation_fragment: nxos
version_added: "2.2"
short_description: Manages VXLAN Network Virtualization Endpoint (NVE).
description:
- Manages VXLAN Network Virtualization Endpoint (NVE) overlay interface
that terminates VXLAN tunnels.
author: Gabriele Gerbino (@GGabriele)
notes:
- The module is used to manage NVE properties, not to create NVE
interfaces. Use M(nxos_interface) if you wish to do so.
- C(state=absent) removes the interface.
- Default, where supported, restores params default value.
options:
interface:
description:
- Interface name for the VXLAN Network Virtualization Endpoint.
required: true
description:
description:
- Description of the NVE interface.
required: false
default: null
host_reachability:
description:
- Specify mechanism for host reachability advertisement.
required: false
choices: ['true', 'false']
default: null
shutdown:
description:
- Administratively shutdown the NVE interface.
required: false
choices: ['true','false']
default: false
source_interface:
description:
- Specify the loopback interface whose IP address should be
used for the NVE interface.
required: false
default: null
source_interface_hold_down_time:
description:
- Suppresses advertisement of the NVE loopback address until
the overlay has converged.
required: false
default: null
state:
description:
- Determines whether the config should be present or not
on the device.
required: false
default: present
choices: ['present','absent']
'''
EXAMPLES = '''
- nxos_vxlan_vtep:
interface: nve1
description: default
host_reachability: default
source_interface: Loopback0
source_interface_hold_down_time: 30
shutdown: default
username: "{{ un }}"
password: "{{ pwd }}"
host: "{{ inventory_hostname }}"
'''
RETURN = '''
proposed:
description: k/v pairs of parameters passed into module
returned: verbose mode
type: dict
sample: {"description": "simple description", "host_reachability": true,
"interface": "nve1", "shutdown": true, "source_interface": "loopback0",
"source_interface_hold_down_time": "30"}
existing:
description: k/v pairs of existing VXLAN VTEP configuration
returned: verbose mode
type: dict
sample: {}
end_state:
description: k/v pairs of VXLAN VTEP configuration after module execution
returned: verbose mode
type: dict
sample: {"description": "simple description", "host_reachability": true,
"interface": "nve1", "shutdown": true, "source_interface": "loopback0",
"source_interface_hold_down_time": "30"}
updates:
description: commands sent to the device
returned: always
type: list
sample: ["interface nve1", "source-interface loopback0",
"source-interface hold-down-time 30", "description simple description",
"shutdown", "host-reachability protocol bgp"]
changed:
description: check to see if a change was made on the device
returned: always
type: boolean
sample: true
'''
import re
from ansible.module_utils.nxos import get_config, load_config, run_commands
from ansible.module_utils.nxos import nxos_argument_spec, check_args
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.netcfg import CustomNetworkConfig
BOOL_PARAMS = [
'shutdown',
'host_reachability'
]
PARAM_TO_COMMAND_KEYMAP = {
'description': 'description',
'host_reachability': 'host-reachability protocol bgp',
'interface': 'interface',
'shutdown': 'shutdown',
'source_interface': 'source-interface',
'source_interface_hold_down_time': 'source-interface hold-down-time'
}
PARAM_TO_DEFAULT_KEYMAP = {
'description': False,
'shutdown': True,
}
WARNINGS = []
def invoke(name, *args, **kwargs):
func = globals().get(name)
if func:
return func(*args, **kwargs)
def get_value(arg, config, module):
if arg in BOOL_PARAMS:
REGEX = re.compile(r'\s+{0}\s*$'.format(PARAM_TO_COMMAND_KEYMAP[arg]), re.M)
NO_SHUT_REGEX = re.compile(r'\s+no shutdown\s*$', re.M)
value = False
if arg == 'shutdown':
try:
if NO_SHUT_REGEX.search(config):
value = False
elif REGEX.search(config):
value = True
except TypeError:
value = False
else:
try:
if REGEX.search(config):
value = True
except TypeError:
value = False
else:
REGEX = re.compile(r'(?:{0}\s)(?P<value>.*)$'.format(PARAM_TO_COMMAND_KEYMAP[arg]), re.M)
NO_DESC_REGEX = re.compile(r'\s+{0}\s*$'.format('no description'), re.M)
SOURCE_INTF_REGEX = re.compile(r'(?:{0}\s)(?P<value>\S+)$'.format(PARAM_TO_COMMAND_KEYMAP[arg]), re.M)
value = ''
if arg == 'description':
if NO_DESC_REGEX.search(config):
value = ''
elif PARAM_TO_COMMAND_KEYMAP[arg] in config:
value = REGEX.search(config).group('value').strip()
elif arg == 'source_interface':
for line in config.splitlines():
try:
if PARAM_TO_COMMAND_KEYMAP[arg] in config:
value = SOURCE_INTF_REGEX.search(config).group('value').strip()
break
except AttributeError:
value = ''
else:
if PARAM_TO_COMMAND_KEYMAP[arg] in config:
value = REGEX.search(config).group('value').strip()
return value
def get_existing(module, args):
existing = {}
netcfg = get_config(module)
interface_string = 'interface {0}'.format(module.params['interface'].lower())
parents = [interface_string]
config = netcfg.get_section(parents)
if config:
for arg in args:
existing[arg] = get_value(arg, config, module)
existing['interface'] = module.params['interface'].lower()
else:
if interface_string in str(netcfg):
existing['interface'] = module.params['interface'].lower()
for arg in args:
existing[arg] = ''
return existing
def apply_key_map(key_map, table):
new_dict = {}
for key, value in table.items():
new_key = key_map.get(key)
if new_key:
value = table.get(key)
if value:
new_dict[new_key] = value
else:
new_dict[new_key] = value
return new_dict
def fix_commands(commands, module):
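# Reorder the command list so a plain "source-interface" command runs first and
# a "no source-interface" command runs last; the hold-down-time variants are
# matched first below only so they are not mistaken for the plain commands.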
source_interface_command = ''
no_source_interface_command = ''
for command in commands:
if 'no source-interface hold-down-time' in command:
pass
elif 'source-interface hold-down-time' in command:
pass
elif 'no source-interface' in command:
no_source_interface_command = command
elif 'source-interface' in command:
source_interface_command = command
if source_interface_command:
commands.pop(commands.index(source_interface_command))
commands.insert(0, source_interface_command)
if no_source_interface_command:
commands.pop(commands.index(no_source_interface_command))
commands.append(no_source_interface_command)
return commands
def state_present(module, existing, proposed, candidate):
commands = list()
proposed_commands = apply_key_map(PARAM_TO_COMMAND_KEYMAP, proposed)
existing_commands = apply_key_map(PARAM_TO_COMMAND_KEYMAP, existing)
for key, value in proposed_commands.items():
if value is True:
commands.append(key)
elif value is False:
commands.append('no {0}'.format(key))
elif value == 'default':
if existing_commands.get(key):
existing_value = existing_commands.get(key)
commands.append('no {0} {1}'.format(key, existing_value))
else:
if key.replace(' ', '_').replace('-', '_') in BOOL_PARAMS:
commands.append('no {0}'.format(key.lower()))
module.exit_json(commands=commands)
else:
command = '{0} {1}'.format(key, value.lower())
commands.append(command)
if commands:
commands = fix_commands(commands, module)
parents = ['interface {0}'.format(module.params['interface'].lower())]
candidate.add(commands, parents=parents)
else:
if not existing and module.params['interface']:
commands = ['interface {0}'.format(module.params['interface'].lower())]
candidate.add(commands, parents=[])
def state_absent(module, existing, proposed, candidate):
commands = ['no interface {0}'.format(module.params['interface'].lower())]
candidate.add(commands, parents=[])
def main():
argument_spec = dict(
interface=dict(required=True, type='str'),
description=dict(required=False, type='str'),
host_reachability=dict(required=False, type='bool'),
shutdown=dict(required=False, type='bool'),
source_interface=dict(required=False, type='str'),
source_interface_hold_down_time=dict(required=False, type='str'),
m_facts=dict(required=False, default=False, type='bool'),
state=dict(choices=['present', 'absent'], default='present',
required=False),
include_defaults=dict(default=True),
config=dict(),
save=dict(type='bool', default=False)
)
argument_spec.update(nxos_argument_spec)
module = AnsibleModule(argument_spec=argument_spec,
supports_check_mode=True)
warnings = list()
check_args(module, warnings)
state = module.params['state']
interface = module.params['interface'].lower()
args = [
'interface',
'description',
'host_reachability',
'shutdown',
'source_interface',
'source_interface_hold_down_time'
]
existing = invoke('get_existing', module, args)
end_state = existing
proposed_args = dict((k, v) for k, v in module.params.items()
if v is not None and k in args)
proposed = {}
for key, value in proposed_args.items():
if key != 'interface':
if str(value).lower() == 'true':
value = True
elif str(value).lower() == 'false':
value = False
elif str(value).lower() == 'default':
value = PARAM_TO_DEFAULT_KEYMAP.get(key)
if value is None:
if key in BOOL_PARAMS:
value = False
else:
value = 'default'
if existing.get(key) or (not existing.get(key) and value):
proposed[key] = value
result = {}
if state == 'present' or (state == 'absent' and existing):
if not existing:
WARNINGS.append("The proposed NVE interface did not exist. "
"It's recommended to use nxos_interface to create "
"all logical interfaces.")
candidate = CustomNetworkConfig(indent=3)
invoke('state_%s' % state, module, existing, proposed, candidate)
try:
response = load_config(module, candidate)
result.update(response)
except Exception as exc:
# neither ShellError nor get_exception() is imported in this module, so
# catch broadly and report the failure through fail_json
module.fail_json(msg=str(exc))
else:
result['updates'] = []
if module._verbosity > 0:
end_state = invoke('get_existing', module, args)
result['end_state'] = end_state
result['existing'] = existing
result['proposed'] = proposed_args
if WARNINGS:
result['warnings'] = WARNINGS
module.exit_json(**result)
if __name__ == '__main__':
main()
| gpl-3.0 |
kudlav/dnf | dnf/pycomp.py | 3 | 2953 | # pycomp.py
# Python 2 and Python 3 compatibility module
#
# Copyright (C) 2013 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#
from gettext import NullTranslations
from sys import version_info
import base64
import itertools
import locale
import types
PY3 = version_info.major >= 3
if PY3:
from io import StringIO
import queue
import urllib.parse
# functions renamed in py3
Queue = queue.Queue
basestring = unicode = str
filterfalse = itertools.filterfalse
long = int
NullTranslations.ugettext = NullTranslations.gettext
NullTranslations.ungettext = NullTranslations.ngettext
xrange = range
raw_input = input
base64_decodebytes = base64.decodebytes
urlparse = urllib.parse
urllib_quote = urlparse.quote
# string helpers
def is_py2str_py3bytes(o):
return isinstance(o, bytes)
def is_py3bytes(o):
return isinstance(o, bytes)
# functions that don't take unicode arguments in py2
ModuleType = lambda m: types.ModuleType(m)
format = locale.format
def setlocale(category, loc=None):
locale.setlocale(category, loc)
def write_to_file(f, content):
f.write(content)
else:
# functions renamed in py3
from __builtin__ import unicode, basestring, long, xrange, raw_input
from StringIO import StringIO
import Queue
import urllib
import urlparse
Queue = Queue.Queue
filterfalse = itertools.ifilterfalse
base64_decodebytes = base64.decodestring
urllib_quote = urllib.quote
# string helpers
def is_py2str_py3bytes(o):
return isinstance(o, str)
def is_py3bytes(o):
return False
# functions that don't take unicode arguments in py2
ModuleType = lambda m: types.ModuleType(m.encode('utf-8'))
def format(percent, *args, **kwargs):
return locale.format(percent.encode('utf-8'), *args, **kwargs)
def setlocale(category, loc=None):
locale.setlocale(category, loc.encode('utf-8'))
def write_to_file(f, content):
f.write(content.encode('utf-8'))
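# Illustrative sketch (not part of the original module): callers can use these
# helpers without branching on the interpreter version, e.g.
#
# with open("out.txt", "w") as f:
#     write_to_file(f, u"text")  # encoded to UTF-8 only on Python 2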
| gpl-2.0 |
40423127/2017springcd_hw | plugin/liquid_tags/video.py | 296 | 2360 | """
Video Tag
---------
This implements a Liquid-style video tag for Pelican,
based on the octopress video tag [1]_
Syntax
------
{% video url/to/video [width height] [url/to/poster] %}
Example
-------
{% video http://site.com/video.mp4 720 480 http://site.com/poster-frame.jpg %}
Output
------
<video width='720' height='480' preload='none' controls poster='http://site.com/poster-frame.jpg'>
<source src='http://site.com/video.mp4' type='video/mp4; codecs=\"avc1.42E01E, mp4a.40.2\"'/>
</video>
[1] https://github.com/imathis/octopress/blob/master/plugins/video_tag.rb
"""
import os
import re
from .mdx_liquid_tags import LiquidTags
SYNTAX = "{% video url/to/video [url/to/video] [url/to/video] [width height] [url/to/poster] %}"
VIDEO = re.compile(r'(/\S+|https?:\S+)(\s+(/\S+|https?:\S+))?(\s+(/\S+|https?:\S+))?(\s+(\d+)\s(\d+))?(\s+(/\S+|https?:\S+))?')
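# Regex groups: 1, 3 and 5 capture up to three video URLs; 7 and 8 capture the
# optional width/height pair; 10 captures the optional poster URL (the
# even-numbered groups are the optional whitespace wrappers).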
VID_TYPEDICT = {'.mp4':"type='video/mp4; codecs=\"avc1.42E01E, mp4a.40.2\"'",
'.ogv':"type='video/ogg; codecs=theora, vorbis'",
'.webm':"type='video/webm; codecs=vp8, vorbis'"}
@LiquidTags.register('video')
def video(preprocessor, tag, markup):
videos = []
width = None
height = None
poster = None
match = VIDEO.search(markup)
if match:
groups = match.groups()
videos = [g for g in groups[0:6:2] if g]
width = groups[6]
height = groups[7]
poster = groups[9]
if any(videos):
video_out = """
<div class="videobox">
<video width="{width}" height="{height}" preload="none" controls poster="{poster}">
""".format(width=width, height=height, poster=poster).strip()
for vid in videos:
base, ext = os.path.splitext(vid)
if ext not in VID_TYPEDICT:
raise ValueError("Unrecognized video extension: "
"{0}".format(ext))
video_out += ("<source src='{0}' "
"{1}>".format(vid, VID_TYPEDICT[ext]))
video_out += "</video></div>"
else:
raise ValueError("Error processing input, "
"expected syntax: {0}".format(SYNTAX))
return video_out
#----------------------------------------------------------------------
# This import allows image tag to be a Pelican plugin
from liquid_tags import register
| agpl-3.0 |
ClovisIRex/Snake-django | env/lib/python3.6/site-packages/pylint/test/regrtest_data/huge.py | 9 | 100798 | a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
a,b=2,3
| mit |
abdullah2891/remo | vendor-local/lib/python/unidecode/x060.py | 250 | 4642 | data = (
'Huai ', # 0x00
'Tai ', # 0x01
'Song ', # 0x02
'Wu ', # 0x03
'Ou ', # 0x04
'Chang ', # 0x05
'Chuang ', # 0x06
'Ju ', # 0x07
'Yi ', # 0x08
'Bao ', # 0x09
'Chao ', # 0x0a
'Min ', # 0x0b
'Pei ', # 0x0c
'Zuo ', # 0x0d
'Zen ', # 0x0e
'Yang ', # 0x0f
'Kou ', # 0x10
'Ban ', # 0x11
'Nu ', # 0x12
'Nao ', # 0x13
'Zheng ', # 0x14
'Pa ', # 0x15
'Bu ', # 0x16
'Tie ', # 0x17
'Gu ', # 0x18
'Hu ', # 0x19
'Ju ', # 0x1a
'Da ', # 0x1b
'Lian ', # 0x1c
'Si ', # 0x1d
'Chou ', # 0x1e
'Di ', # 0x1f
'Dai ', # 0x20
'Yi ', # 0x21
'Tu ', # 0x22
'You ', # 0x23
'Fu ', # 0x24
'Ji ', # 0x25
'Peng ', # 0x26
'Xing ', # 0x27
'Yuan ', # 0x28
'Ni ', # 0x29
'Guai ', # 0x2a
'Fu ', # 0x2b
'Xi ', # 0x2c
'Bi ', # 0x2d
'You ', # 0x2e
'Qie ', # 0x2f
'Xuan ', # 0x30
'Cong ', # 0x31
'Bing ', # 0x32
'Huang ', # 0x33
'Xu ', # 0x34
'Chu ', # 0x35
'Pi ', # 0x36
'Xi ', # 0x37
'Xi ', # 0x38
'Tan ', # 0x39
'Koraeru ', # 0x3a
'Zong ', # 0x3b
'Dui ', # 0x3c
'[?] ', # 0x3d
'Ki ', # 0x3e
'Yi ', # 0x3f
'Chi ', # 0x40
'Ren ', # 0x41
'Xun ', # 0x42
'Shi ', # 0x43
'Xi ', # 0x44
'Lao ', # 0x45
'Heng ', # 0x46
'Kuang ', # 0x47
'Mu ', # 0x48
'Zhi ', # 0x49
'Xie ', # 0x4a
'Lian ', # 0x4b
'Tiao ', # 0x4c
'Huang ', # 0x4d
'Die ', # 0x4e
'Hao ', # 0x4f
'Kong ', # 0x50
'Gui ', # 0x51
'Heng ', # 0x52
'Xi ', # 0x53
'Xiao ', # 0x54
'Shu ', # 0x55
'S ', # 0x56
'Kua ', # 0x57
'Qiu ', # 0x58
'Yang ', # 0x59
'Hui ', # 0x5a
'Hui ', # 0x5b
'Chi ', # 0x5c
'Jia ', # 0x5d
'Yi ', # 0x5e
'Xiong ', # 0x5f
'Guai ', # 0x60
'Lin ', # 0x61
'Hui ', # 0x62
'Zi ', # 0x63
'Xu ', # 0x64
'Chi ', # 0x65
'Xiang ', # 0x66
'Nu ', # 0x67
'Hen ', # 0x68
'En ', # 0x69
'Ke ', # 0x6a
'Tong ', # 0x6b
'Tian ', # 0x6c
'Gong ', # 0x6d
'Quan ', # 0x6e
'Xi ', # 0x6f
'Qia ', # 0x70
'Yue ', # 0x71
'Peng ', # 0x72
'Ken ', # 0x73
'De ', # 0x74
'Hui ', # 0x75
'E ', # 0x76
'Kyuu ', # 0x77
'Tong ', # 0x78
'Yan ', # 0x79
'Kai ', # 0x7a
'Ce ', # 0x7b
'Nao ', # 0x7c
'Yun ', # 0x7d
'Mang ', # 0x7e
'Yong ', # 0x7f
'Yong ', # 0x80
'Yuan ', # 0x81
'Pi ', # 0x82
'Kun ', # 0x83
'Qiao ', # 0x84
'Yue ', # 0x85
'Yu ', # 0x86
'Yu ', # 0x87
'Jie ', # 0x88
'Xi ', # 0x89
'Zhe ', # 0x8a
'Lin ', # 0x8b
'Ti ', # 0x8c
'Han ', # 0x8d
'Hao ', # 0x8e
'Qie ', # 0x8f
'Ti ', # 0x90
'Bu ', # 0x91
'Yi ', # 0x92
'Qian ', # 0x93
'Hui ', # 0x94
'Xi ', # 0x95
'Bei ', # 0x96
'Man ', # 0x97
'Yi ', # 0x98
'Heng ', # 0x99
'Song ', # 0x9a
'Quan ', # 0x9b
'Cheng ', # 0x9c
'Hui ', # 0x9d
'Wu ', # 0x9e
'Wu ', # 0x9f
'You ', # 0xa0
'Li ', # 0xa1
'Liang ', # 0xa2
'Huan ', # 0xa3
'Cong ', # 0xa4
'Yi ', # 0xa5
'Yue ', # 0xa6
'Li ', # 0xa7
'Nin ', # 0xa8
'Nao ', # 0xa9
'E ', # 0xaa
'Que ', # 0xab
'Xuan ', # 0xac
'Qian ', # 0xad
'Wu ', # 0xae
'Min ', # 0xaf
'Cong ', # 0xb0
'Fei ', # 0xb1
'Bei ', # 0xb2
'Duo ', # 0xb3
'Cui ', # 0xb4
'Chang ', # 0xb5
'Men ', # 0xb6
'Li ', # 0xb7
'Ji ', # 0xb8
'Guan ', # 0xb9
'Guan ', # 0xba
'Xing ', # 0xbb
'Dao ', # 0xbc
'Qi ', # 0xbd
'Kong ', # 0xbe
'Tian ', # 0xbf
'Lun ', # 0xc0
'Xi ', # 0xc1
'Kan ', # 0xc2
'Kun ', # 0xc3
'Ni ', # 0xc4
'Qing ', # 0xc5
'Chou ', # 0xc6
'Dun ', # 0xc7
'Guo ', # 0xc8
'Chan ', # 0xc9
'Liang ', # 0xca
'Wan ', # 0xcb
'Yuan ', # 0xcc
'Jin ', # 0xcd
'Ji ', # 0xce
'Lin ', # 0xcf
'Yu ', # 0xd0
'Huo ', # 0xd1
'He ', # 0xd2
'Quan ', # 0xd3
'Tan ', # 0xd4
'Ti ', # 0xd5
'Ti ', # 0xd6
'Nie ', # 0xd7
'Wang ', # 0xd8
'Chuo ', # 0xd9
'Bu ', # 0xda
'Hun ', # 0xdb
'Xi ', # 0xdc
'Tang ', # 0xdd
'Xin ', # 0xde
'Wei ', # 0xdf
'Hui ', # 0xe0
'E ', # 0xe1
'Rui ', # 0xe2
'Zong ', # 0xe3
'Jian ', # 0xe4
'Yong ', # 0xe5
'Dian ', # 0xe6
'Ju ', # 0xe7
'Can ', # 0xe8
'Cheng ', # 0xe9
'De ', # 0xea
'Bei ', # 0xeb
'Qie ', # 0xec
'Can ', # 0xed
'Dan ', # 0xee
'Guan ', # 0xef
'Duo ', # 0xf0
'Nao ', # 0xf1
'Yun ', # 0xf2
'Xiang ', # 0xf3
'Zhui ', # 0xf4
'Die ', # 0xf5
'Huang ', # 0xf6
'Chun ', # 0xf7
'Qiong ', # 0xf8
'Re ', # 0xf9
'Xing ', # 0xfa
'Ce ', # 0xfb
'Bian ', # 0xfc
'Hun ', # 0xfd
'Zong ', # 0xfe
'Ti ', # 0xff
)
| bsd-3-clause |
samskeller/zeroclickinfo-fathead | lib/fathead/UNCLEAN/parse_sqlserverfunction.py | 1 | 1576 | from BeautifulSoup import BeautifulSoup
import re
import os
import sys
import string
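# Pre-compiled patterns: openclosetags matches any HTML open or close tag so
# tags can be stripped; spaces matches runs of whitespace.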
openclosetags = re.compile('''<.*?>|</.*?>''',re.DOTALL)
spaces = re.compile('''\s+''',re.DOTALL)
files = []
for file in os.listdir('./docs/sqlserver/functions/'):
if '.html' in file:
files.append('./docs/sqlserver/functions/%s'%(file))
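# Parse each scraped MSDN page: pull out the function name, description,
# syntax snippet and canonical URL, then emit one record per output format.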
for file in files:
filecontents = ''
for line in open(file):
line = ''.join(filter(lambda x:x in string.printable, line))
filecontents = "%s %s"%(filecontents,line.strip())
soup = BeautifulSoup(filecontents)
name = soup.findAll('h1')[0].string.replace('(Transact-SQL)','')
desc = openclosetags.sub('',str(soup.findAll(attrs={"class" : "introduction"})[0].findAll('p')[0]))
synopsis = soup.findAll(attrs={"class":"LW_CodeSnippetContainerCodeCollection"})[0].findAll('pre')[0].string.strip()
url = "http://msdn.microsoft.com/en-us/library/%s"%(file.replace('./docs/sqlserver/functions/library','').replace('.html',''))
url = url.replace('./docs/sqlserver/functions/','')
if len(sys.argv) == 1 or sys.argv[1].lower() == 'tsv':
print "%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s"%(name,'',url,desc,synopsis,'','sqlserverfunction','en')
	if len(sys.argv) > 1 and sys.argv[1].lower() == 'sql':
print '''INSERT INTO functions (`id`, `name`, `namespace`, `url`, `description`, `synopsis`, `detail`, `type`, `lang`) VALUES (NULL, '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s');'''%(name,'',url,desc.replace("'","\\'"),synopsis.replace("'","\\'"),'sql server sqlserver sqlserver2008 2008','sqlserverfunction','en')
| apache-2.0 |
rosmo/ansible | lib/ansible/modules/cloud/azure/azure_rm_postgresqlfirewallrule_facts.py | 17 | 5890 | #!/usr/bin/python
#
# Copyright (c) 2018 Zim Kalinowski, <[email protected]>
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: azure_rm_postgresqlfirewallrule_facts
version_added: "2.8"
short_description: Get Azure PostgreSQL Firewall Rule facts.
description:
- Get facts of Azure PostgreSQL Firewall Rule.
options:
resource_group:
description:
- The name of the resource group.
required: True
server_name:
description:
- The name of the server.
required: True
name:
description:
- The name of the server firewall rule.
extends_documentation_fragment:
- azure
author:
- "Zim Kalinowski (@zikalino)"
'''
EXAMPLES = '''
- name: Get instance of PostgreSQL Firewall Rule
azure_rm_postgresqlfirewallrule_facts:
resource_group: myResourceGroup
server_name: server_name
name: firewall_rule_name
- name: List instances of PostgreSQL Firewall Rule
azure_rm_postgresqlfirewallrule_facts:
resource_group: myResourceGroup
server_name: server_name
'''
RETURN = '''
rules:
description: A list of dictionaries containing facts for PostgreSQL Firewall Rule.
returned: always
type: complex
contains:
id:
description:
- Resource ID
returned: always
type: str
            sample: "/subscriptions/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/resourceGroups/TestGroup/providers/Microsoft.DBforPostgreSQL/servers/testserver/firewallRules/rule1"
server_name:
description:
- The name of the server.
returned: always
type: str
sample: testserver
name:
description:
- Resource name.
returned: always
type: str
sample: rule1
start_ip_address:
description:
- The start IP address of the PostgreSQL firewall rule.
returned: always
type: str
sample: 10.0.0.16
end_ip_address:
description:
- The end IP address of the PostgreSQL firewall rule.
returned: always
type: str
sample: 10.0.0.18
'''
from ansible.module_utils.azure_rm_common import AzureRMModuleBase
try:
from msrestazure.azure_exceptions import CloudError
from azure.mgmt.rdbms.postgresql import PostgreSQLManagementClient
from msrest.serialization import Model
except ImportError:
# This is handled in azure_rm_common
pass
class AzureRMPostgreSQLFirewallRulesFacts(AzureRMModuleBase):
def __init__(self):
# define user inputs into argument
self.module_arg_spec = dict(
resource_group=dict(
type='str',
required=True
),
server_name=dict(
type='str',
required=True
),
name=dict(
type='str'
)
)
# store the results of the module operation
self.results = dict(
changed=False
)
self.mgmt_client = None
self.resource_group = None
self.server_name = None
self.name = None
super(AzureRMPostgreSQLFirewallRulesFacts, self).__init__(self.module_arg_spec, supports_tags=False)
def exec_module(self, **kwargs):
for key in self.module_arg_spec:
setattr(self, key, kwargs[key])
self.mgmt_client = self.get_mgmt_svc_client(PostgreSQLManagementClient,
base_url=self._cloud_environment.endpoints.resource_manager)
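        # A concrete rule name means a single lookup; otherwise enumerate
        # every firewall rule defined on the server.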
if (self.name is not None):
self.results['rules'] = self.get()
else:
self.results['rules'] = self.list_by_server()
return self.results
def get(self):
response = None
results = []
try:
response = self.mgmt_client.firewall_rules.get(resource_group_name=self.resource_group,
server_name=self.server_name,
firewall_rule_name=self.name)
self.log("Response : {0}".format(response))
except CloudError as e:
self.log('Could not get facts for FirewallRules.')
if response is not None:
results.append(self.format_item(response))
return results
def list_by_server(self):
response = None
results = []
try:
response = self.mgmt_client.firewall_rules.list_by_server(resource_group_name=self.resource_group,
server_name=self.server_name)
self.log("Response : {0}".format(response))
except CloudError as e:
self.log('Could not get facts for FirewallRules.')
if response is not None:
for item in response:
results.append(self.format_item(item))
return results
def format_item(self, item):
d = item.as_dict()
d = {
'resource_group': self.resource_group,
'id': d['id'],
'server_name': self.server_name,
'name': d['name'],
'start_ip_address': d['start_ip_address'],
'end_ip_address': d['end_ip_address']
}
return d
def main():
AzureRMPostgreSQLFirewallRulesFacts()
if __name__ == '__main__':
main()
| gpl-3.0 |
asajeffrey/servo | tests/wpt/web-platform-tests/tools/manifest/download.py | 4 | 6428 | from __future__ import absolute_import
import argparse
import bz2
import gzip
import json
import io
import os
from datetime import datetime, timedelta
from six.moves.urllib.request import urlopen
try:
import zstandard
except ImportError:
zstandard = None
from .utils import git
from . import log
MYPY = False
if MYPY:
# MYPY is set to True when run under Mypy.
from typing import Any
from typing import Callable
from typing import List
from typing import Optional
from typing import Text
here = os.path.dirname(__file__)
wpt_root = os.path.abspath(os.path.join(here, os.pardir, os.pardir))
logger = log.get_logger()
def abs_path(path):
# type: (Text) -> Text
return os.path.abspath(os.path.expanduser(path))
def should_download(manifest_path, rebuild_time=timedelta(days=5)):
# type: (Text, timedelta) -> bool
if not os.path.exists(manifest_path):
return True
mtime = datetime.fromtimestamp(os.path.getmtime(manifest_path))
if mtime < datetime.now() - rebuild_time:
return True
logger.info("Skipping manifest download because existing file is recent")
return False
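# merge_pr_* tags mark upstream WPT merge commits; github_url() below uses
# them to find a GitHub release carrying a pregenerated manifest.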
def merge_pr_tags(repo_root, max_count=50):
# type: (Text, int) -> List[Text]
gitfunc = git(repo_root)
tags = [] # type: List[Text]
if gitfunc is None:
return tags
for line in gitfunc("log", "--format=%D", "--max-count=%s" % max_count).split("\n"):
for ref in line.split(", "):
if ref.startswith("tag: merge_pr_"):
tags.append(ref[5:])
return tags
def score_name(name):
# type: (Text) -> Optional[int]
"""Score how much we like each filename, lower wins, None rejects"""
# Accept both ways of naming the manifest asset, even though
# there's no longer a reason to include the commit sha.
if name.startswith("MANIFEST-") or name.startswith("MANIFEST."):
if zstandard and name.endswith("json.zst"):
return 1
if name.endswith(".json.bz2"):
return 2
if name.endswith(".json.gz"):
return 3
return None
def github_url(tags):
# type: (List[Text]) -> Optional[List[Text]]
for tag in tags:
url = "https://api.github.com/repos/web-platform-tests/wpt/releases/tags/%s" % tag
try:
resp = urlopen(url)
except Exception:
logger.warning("Fetching %s failed" % url)
continue
if resp.code != 200:
logger.warning("Fetching %s failed; got HTTP status %d" % (url, resp.code))
continue
try:
release = json.load(resp.fp)
except ValueError:
logger.warning("Response was not valid JSON")
return None
candidates = []
for item in release["assets"]:
score = score_name(item["name"])
if score is not None:
candidates.append((score, item["browser_download_url"]))
return [item[1] for item in sorted(candidates)]
return None
def download_manifest(
manifest_path, # type: Text
tags_func, # type: Callable[[], List[Text]]
url_func, # type: Callable[[List[Text]], Optional[List[Text]]]
force=False # type: bool
):
# type: (...) -> bool
if not force and not should_download(manifest_path):
return False
tags = tags_func()
urls = url_func(tags)
if not urls:
logger.warning("No generated manifest found")
return False
for url in urls:
logger.info("Downloading manifest from %s" % url)
try:
resp = urlopen(url)
except Exception:
logger.warning("Downloading pregenerated manifest failed")
continue
if resp.code != 200:
logger.warning("Downloading pregenerated manifest failed; got HTTP status %d" %
resp.code)
continue
if url.endswith(".zst"):
if not zstandard:
continue
try:
dctx = zstandard.ZstdDecompressor()
decompressed = dctx.decompress(resp.read())
except IOError:
logger.warning("Failed to decompress downloaded file")
continue
elif url.endswith(".bz2"):
try:
decompressed = bz2.decompress(resp.read())
except IOError:
logger.warning("Failed to decompress downloaded file")
continue
elif url.endswith(".gz"):
fileobj = io.BytesIO(resp.read())
try:
with gzip.GzipFile(fileobj=fileobj) as gzf:
data = read_gzf(gzf) # type: ignore
decompressed = data
except IOError:
logger.warning("Failed to decompress downloaded file")
continue
else:
logger.warning("Unknown file extension: %s" % url)
continue
break
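    # for/else: the else branch runs only if no candidate URL succeeded.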
else:
return False
try:
with open(manifest_path, "wb") as f:
f.write(decompressed)
except Exception:
logger.warning("Failed to write manifest")
return False
logger.info("Manifest downloaded")
return True
def read_gzf(gzf): # type: ignore
# This is working around a mypy problem in Python 2:
# "Call to untyped function "read" in typed context"
return gzf.read()
def create_parser():
# type: () -> argparse.ArgumentParser
parser = argparse.ArgumentParser()
parser.add_argument(
"-p", "--path", type=abs_path, help="Path to manifest file.")
parser.add_argument(
"--tests-root", type=abs_path, default=wpt_root, help="Path to root of tests.")
parser.add_argument(
"--force", action="store_true",
help="Always download, even if the existing manifest is recent")
return parser
def download_from_github(path, tests_root, force=False):
# type: (Text, Text, bool) -> bool
return download_manifest(path, lambda: merge_pr_tags(tests_root), github_url,
force=force)
def run(**kwargs):
# type: (**Any) -> int
if kwargs["path"] is None:
path = os.path.join(kwargs["tests_root"], "MANIFEST.json")
else:
path = kwargs["path"]
success = download_from_github(path, kwargs["tests_root"], kwargs["force"])
return 0 if success else 1
| mpl-2.0 |
bbenko/shinkicker | django/utils/importlib.py | 445 | 1229 | # Taken from Python 2.7 with permission from/by the original author.
import sys
def _resolve_name(name, package, level):
"""Return the absolute name of the module to be imported."""
if not hasattr(package, 'rindex'):
raise ValueError("'package' not set to a string")
dot = len(package)
for x in xrange(level, 1, -1):
try:
dot = package.rindex('.', 0, dot)
except ValueError:
raise ValueError("attempted relative import beyond top-level "
"package")
return "%s.%s" % (package[:dot], name)
def import_module(name, package=None):
"""Import a module.
The 'package' argument is required when performing a relative import. It
specifies the package to use as the anchor point from which to resolve the
relative import to an absolute import.
"""
if name.startswith('.'):
if not package:
raise TypeError("relative imports require the 'package' argument")
level = 0
for character in name:
if character != '.':
break
level += 1
name = _resolve_name(name[level:], package, level)
__import__(name)
return sys.modules[name]
| bsd-3-clause |
radicalbit/ambari | ambari-server/src/main/python/ambari_server/resourceFilesKeeper.py | 3 | 10876 | #!/usr/bin/env python
'''
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
import hashlib
import os, sys
import zipfile
import glob
import pprint
class KeeperException(Exception):
pass
class ResourceFilesKeeper():
"""
This class encapsulates all utility methods for resource files maintenance.
"""
HOOKS_DIR="hooks"
PACKAGE_DIR="package"
STACKS_DIR="stacks"
COMMON_SERVICES_DIR="common-services"
CUSTOM_ACTIONS_DIR="custom_actions"
HOST_SCRIPTS_DIR="host_scripts"
DASHBOARDS_DIR="dashboards"
EXTENSIONS_DIR="extensions"
# For these directories archives are created
ARCHIVABLE_DIRS = [HOOKS_DIR, PACKAGE_DIR]
HASH_SUM_FILE=".hash"
ARCHIVE_NAME="archive.zip"
PYC_EXT=".pyc"
METAINFO_XML = "metainfo.xml"
BUFFER = 1024 * 32
# Change that to True to see debug output at stderr
DEBUG=False
def __init__(self, resources_dir, stacks_dir, verbose=False, nozip=False):
"""
nozip = create only hash files and skip creating zip archives
"""
self.resources_dir = resources_dir
self.stacks_root = stacks_dir
self.verbose = verbose
self.nozip = nozip
def perform_housekeeping(self):
"""
Performs housekeeping operations on resource files
"""
self.update_directory_archives()
# probably, later we will need some additional operations
def _iter_update_directory_archive(self, subdirs_list):
for subdir in subdirs_list:
for root, dirs, _ in os.walk(subdir, followlinks=True):
for d in dirs:
if d in self.ARCHIVABLE_DIRS:
full_path = os.path.abspath(os.path.join(root, d))
self.update_directory_archive(full_path)
def _update_resources_subdir_archive(self, subdir):
archive_root = os.path.join(self.resources_dir, subdir)
self.dbg_out("Updating archive for {0} dir at {1}...".format(subdir, archive_root))
# update the directories so that the .hash is generated
self.update_directory_archive(archive_root)
def update_directory_archives(self):
"""
Please see AMBARI-4481 for more details
"""
# archive stacks
self.dbg_out("Updating archives for stack dirs at {0}...".format(self.stacks_root))
valid_stacks = self.list_stacks(self.stacks_root)
self.dbg_out("Stacks: {0}".format(pprint.pformat(valid_stacks)))
# Iterate over stack directories
self._iter_update_directory_archive(valid_stacks)
# archive common services
common_services_root = os.path.join(self.resources_dir, self.COMMON_SERVICES_DIR)
self.dbg_out("Updating archives for common services dirs at {0}...".format(common_services_root))
valid_common_services = self.list_common_services(common_services_root)
self.dbg_out("Common Services: {0}".format(pprint.pformat(valid_common_services)))
# Iterate over common services directories
self._iter_update_directory_archive(valid_common_services)
# archive extensions
extensions_root = os.path.join(self.resources_dir, self.EXTENSIONS_DIR)
self.dbg_out("Updating archives for extensions dirs at {0}...".format(extensions_root))
valid_extensions = self.list_extensions(extensions_root)
self.dbg_out("Extensions: {0}".format(pprint.pformat(valid_extensions)))
# Iterate over extension directories
self._iter_update_directory_archive(valid_extensions)
# custom actions
self._update_resources_subdir_archive(self.CUSTOM_ACTIONS_DIR)
# agent host scripts
self._update_resources_subdir_archive(self.HOST_SCRIPTS_DIR)
# custom service dashboards
self._update_resources_subdir_archive(self.DASHBOARDS_DIR)
def _list_metainfo_dirs(self, root_dir):
    valid_items = []  # directories that contain a metainfo.xml
glob_pattern = "{0}/*/*".format(root_dir)
dirs = glob.glob(glob_pattern)
for directory in dirs:
metainfo_file = os.path.join(directory, self.METAINFO_XML)
if os.path.exists(metainfo_file):
valid_items.append(directory)
return valid_items
def list_stacks(self, root_dir):
"""
Builds a list of stack directories
"""
try:
return self._list_metainfo_dirs(root_dir)
except Exception, err:
raise KeeperException("Can not list stacks: {0}".format(str(err)))
def list_common_services(self, root_dir):
"""
Builds a list of common services directories
"""
try:
return self._list_metainfo_dirs(root_dir)
except Exception, err:
raise KeeperException("Can not list common services: {0}".format(str(err)))
def list_extensions(self, root_dir):
"""
Builds a list of extension directories
"""
try:
return self._list_metainfo_dirs(root_dir)
except Exception, err:
raise KeeperException("Can not list extensions: {0}".format(str(err)))
def update_directory_archive(self, directory):
"""
If hash sum for directory is not present or differs from saved value,
recalculates hash sum and creates directory archive. The archive is
also created if the existing archive does not exist, even if the
saved and current hash sums are matching.
"""
skip_empty_directory = True
cur_hash = self.count_hash_sum(directory)
saved_hash = self.read_hash_sum(directory)
directory_archive_name = os.path.join(directory, self.ARCHIVE_NAME)
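    # Re-archive when the content hash changed, or when the hash still
    # matches but the archive file itself is missing.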
if cur_hash != saved_hash:
if not self.nozip:
self.zip_directory(directory, skip_empty_directory)
      # Skip generation of .hash file if directory is empty
if (skip_empty_directory and (not os.path.exists(directory) or not os.listdir(directory))):
self.dbg_out("Empty directory. Skipping generation of hash file for {0}".format(directory))
else:
self.write_hash_sum(directory, cur_hash)
pass
elif not os.path.isfile(directory_archive_name):
self.zip_directory(directory, skip_empty_directory)
def count_hash_sum(self, directory):
"""
Recursively counts hash sum of all files in directory and subdirectories.
Files and directories are processed in alphabetical order.
Ignores previously created directory archives and files containing
previously calculated hashes. Compiled pyc files are also ignored
"""
try:
sha1 = hashlib.sha1()
file_list = []
for root, dirs, files in os.walk(directory):
for f in files:
if not self.is_ignored(f):
full_path = os.path.abspath(os.path.join(root, f))
file_list.append(full_path)
file_list.sort()
for path in file_list:
self.dbg_out("Counting hash of {0}".format(path))
with open(path, 'rb') as fh:
while True:
data = fh.read(self.BUFFER)
if not data:
break
sha1.update(data)
return sha1.hexdigest()
except Exception, err:
raise KeeperException("Can not calculate directory "
"hash: {0}".format(str(err)))
def read_hash_sum(self, directory):
"""
Tries to read a hash sum from previously generated file. Returns string
containing hash or None
"""
hash_file = os.path.join(directory, self.HASH_SUM_FILE)
if os.path.isfile(hash_file):
try:
with open(hash_file) as fh:
return fh.readline().strip()
except Exception, err:
raise KeeperException("Can not read file {0} : {1}".format(hash_file,
str(err)))
else:
return None
def write_hash_sum(self, directory, new_hash):
"""
    Writes the given hash sum into the .hash file inside the directory
"""
hash_file = os.path.join(directory, self.HASH_SUM_FILE)
try:
with open(hash_file, "w") as fh:
fh.write(new_hash)
os.chmod(hash_file, 0o755)
except Exception, err:
raise KeeperException("Can not write to file {0} : {1}".format(hash_file,
str(err)))
def zip_directory(self, directory, skip_if_empty = False):
"""
    Packs entire directory into zip file. The hash file, earlier archives and
    compiled pyc files are excluded from the archive
"""
self.dbg_out("creating archive for directory {0}".format(directory))
try:
if skip_if_empty:
if not os.path.exists(directory) or not os.listdir(directory):
self.dbg_out("Empty directory. Skipping archive creation for {0}".format(directory))
return
zip_file_path = os.path.join(directory, self.ARCHIVE_NAME)
zf = zipfile.ZipFile(zip_file_path, "w")
abs_src = os.path.abspath(directory)
for root, dirs, files in os.walk(directory):
for filename in files:
# Avoid zipping previous archive and hash file and binary pyc files
if not self.is_ignored(filename):
absname = os.path.abspath(os.path.join(root, filename))
arcname = absname[len(abs_src) + 1:]
self.dbg_out('zipping %s as %s' % (os.path.join(root, filename),
arcname))
zf.write(absname, arcname)
zf.close()
os.chmod(zip_file_path, 0o755)
except Exception, err:
raise KeeperException("Can not create zip archive of "
"directory {0} : {1}".format(directory, str(err)))
def is_ignored(self, filename):
"""
returns True if filename is ignored when calculating hashing or archiving
"""
return filename in [self.HASH_SUM_FILE, self.ARCHIVE_NAME] or \
filename.endswith(self.PYC_EXT)
def dbg_out(self, text):
if self.DEBUG:
sys.stderr.write("{0}\n".format(text))
if not self.DEBUG and self.verbose:
print text
def main(argv=None):
"""
This method is called by maven during rpm creation.
Params:
1: Path to resources root directory
"""
res_path = argv[1]
if len(argv) >= 3:
stacks_path = argv[2]
else:
stacks_path = os.path.join(res_path, ResourceFilesKeeper.STACKS_DIR)
resource_files_keeper = ResourceFilesKeeper(res_path, stacks_path, nozip=True)
resource_files_keeper.perform_housekeeping()
if __name__ == '__main__':
main(sys.argv)
| apache-2.0 |
oarriaga/single_shot_multibox_detector | src/models/SSD300.py | 1 | 4616 | from keras.layers import Conv2D
from keras.layers import Input
from keras.layers import MaxPooling2D
from keras.layers import ZeroPadding2D
from keras.layers import GlobalAveragePooling2D
from keras.layers import Activation
from keras.models import Model
from .layers import Conv2DNormalization
from .ssd_utils import add_ssd_modules
def SSD300(input_shape=(300, 300, 3), num_classes=21,
num_priors=[4, 6, 6, 6, 4, 4], weights_path=None,
return_base=False):
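    # VGG16-style backbone (blocks 1-5), fc6 as a dilated convolution and fc7
    # as a 1x1 convolution, then SSD extra blocks 6-9; the branch_1..branch_6
    # tensors are the multi-scale features fed to the SSD heads.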
image = Input(shape=input_shape)
# Block 1 -----------------------------------------------------------------
conv1_1 = Conv2D(64, (3, 3), padding='same', activation='relu')(image)
conv1_2 = Conv2D(64, (3, 3), padding='same', activation='relu')(conv1_1)
pool1 = MaxPooling2D(pool_size=(2, 2), strides=(2, 2),
                         padding='same')(conv1_2)
# Block 2 -----------------------------------------------------------------
conv2_1 = Conv2D(128, (3, 3), padding='same', activation='relu')(pool1)
conv2_2 = Conv2D(128, (3, 3), padding='same', activation='relu')(conv2_1)
pool2 = MaxPooling2D(pool_size=(2, 2), strides=(2, 2),
padding='same')(conv2_2)
# Block 3 -----------------------------------------------------------------
conv3_1 = Conv2D(256, (3, 3), padding='same', activation='relu')(pool2)
conv3_2 = Conv2D(256, (3, 3), padding='same', activation='relu')(conv3_1)
conv3_3 = Conv2D(256, (3, 3), padding='same', activation='relu')(conv3_2)
pool3 = MaxPooling2D(pool_size=(2, 2), strides=(2, 2),
padding='same')(conv3_3)
# Block 4 -----------------------------------------------------------------
conv4_1 = Conv2D(512, (3, 3), padding='same', activation='relu')(pool3)
conv4_2 = Conv2D(512, (3, 3), padding='same', activation='relu')(conv4_1)
conv4_3 = Conv2D(512, (3, 3), padding='same', activation='relu')(conv4_2)
conv4_3_norm = Conv2DNormalization(20, name='branch_1')(conv4_3)
pool4 = MaxPooling2D(pool_size=(2, 2), strides=(2, 2),
padding='same')(conv4_3)
# Block 5 -----------------------------------------------------------------
conv5_1 = Conv2D(512, (3, 3), padding='same', activation='relu')(pool4)
conv5_2 = Conv2D(512, (3, 3), padding='same', activation='relu')(conv5_1)
conv5_3 = Conv2D(512, (3, 3), padding='same', activation='relu')(conv5_2)
pool5 = MaxPooling2D(pool_size=(3, 3), strides=(1, 1),
padding='same')(conv5_3)
# Dense 6/7 ------------------------------------------
pool5z = ZeroPadding2D(padding=(6, 6))(pool5)
fc6 = Conv2D(1024, (3, 3), dilation_rate=(6, 6), padding='valid',
activation='relu')(pool5z)
fc7 = Conv2D(1024, (1, 1), padding='same', activation='relu',
name='branch_2')(fc6)
# EXTRA layers in SSD -----------------------------------------------------
# Block 6 -----------------------------------------------------------------
conv6_1 = Conv2D(256, (1, 1), padding='same', activation='relu')(fc7)
conv6_1z = ZeroPadding2D()(conv6_1)
conv6_2 = Conv2D(512, (3, 3), strides=(2, 2), padding='valid',
activation='relu', name='branch_3')(conv6_1z)
# Block 7 -----------------------------------------------------------------
conv7_1 = Conv2D(128, (1, 1), padding='same', activation='relu')(conv6_2)
conv7_1z = ZeroPadding2D()(conv7_1)
conv7_2 = Conv2D(256, (3, 3), padding='valid', strides=(2, 2),
activation='relu', name='branch_4')(conv7_1z)
# Block 8 -----------------------------------------------------------------
conv8_1 = Conv2D(128, (1, 1), padding='same', activation='relu')(conv7_2)
conv8_2 = Conv2D(256, (3, 3), padding='valid', strides=(1, 1),
activation='relu', name='branch_5')(conv8_1)
# Block 9 -----------------------------------------------------------------
conv9_1 = Conv2D(128, (1, 1), padding='same', activation='relu')(conv8_2)
conv9_2 = Conv2D(256, (3, 3), padding='valid', strides=(1, 1),
activation='relu', name='branch_6')(conv9_1)
if return_base:
output_tensor = fc7
else:
ssd_tenors = [conv4_3_norm, fc7, conv6_2, conv7_2, conv8_2, conv9_2]
output_tensor = add_ssd_modules(
ssd_tenors, num_classes, num_priors, with_batch_norm=False)
model = Model(inputs=image, outputs=output_tensor)
if weights_path is not None:
model.load_weights(weights_path, by_name=True)
return model
| mit |
frankyao47/open-hackathon | open-hackathon-server/src/hackathon/azureformation/resourceBase.py | 5 | 1611 | # -*- coding: utf-8 -*-
"""
Copyright (c) Microsoft Open Technologies (Shanghai) Co. Ltd. All rights reserved.
The MIT License (MIT)
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
__author__ = 'Yifu Huang'
import sys
sys.path.append("..")
from hackathon.azureformation.service import (
Service,
)
from hackathon.azureformation.subscription import (
Subscription,
)
from hackathon import Component
class ResourceBase(Component):
def __init__(self, azure_key_id):
self.azure_key_id = azure_key_id
self.service = Service(self.azure_key_id)
self.subscription = Subscription(self.service) | mit |
diagramsoftware/odoo | addons/crm/res_partner.py | 24 | 5161 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields,osv
class res_partner(osv.osv):
""" Inherits partner and adds CRM information in the partner form """
_inherit = 'res.partner'
def _opportunity_meeting_phonecall_count(self, cr, uid, ids, field_name, arg, context=None):
res = dict(map(lambda x: (x,{'opportunity_count': 0, 'meeting_count': 0}), ids))
# the user may not have access rights for opportunities or meetings
try:
for partner in self.browse(cr, uid, ids, context):
if partner.is_company:
operator = 'child_of'
else:
operator = '='
opp_ids = self.pool['crm.lead'].search(cr, uid, [('partner_id', operator, partner.id), ('type', '=', 'opportunity'), ('probability', '<', '100')], context=context)
res[partner.id] = {
'opportunity_count': len(opp_ids),
'meeting_count': len(partner.meeting_ids),
}
except:
pass
return res
def _phonecall_count(self, cr, uid, ids, field_name, arg, context=None):
res = {}
for partner in self.browse(cr, uid, ids, context):
res[partner.id] = len(partner.phonecall_ids)
return res
_columns = {
'section_id': fields.many2one('crm.case.section', 'Sales Team'),
'opportunity_ids': fields.one2many('crm.lead', 'partner_id',\
'Leads and Opportunities', domain=[('probability', 'not in', ['0', '100'])]),
'meeting_ids': fields.many2many('calendar.event', 'calendar_event_res_partner_rel','res_partner_id', 'calendar_event_id',
'Meetings', copy=False),
'phonecall_ids': fields.one2many('crm.phonecall', 'partner_id',\
'Phonecalls'),
'opportunity_count': fields.function(_opportunity_meeting_phonecall_count, string="Opportunity", type='integer', multi='opp_meet'),
'meeting_count': fields.function(_opportunity_meeting_phonecall_count, string="# Meetings", type='integer', multi='opp_meet'),
'phonecall_count': fields.function(_phonecall_count, string="Phonecalls", type="integer"),
}
def redirect_partner_form(self, cr, uid, partner_id, context=None):
search_view = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'base', 'view_res_partner_filter')
value = {
'domain': "[]",
'view_type': 'form',
'view_mode': 'form,tree',
'res_model': 'res.partner',
'res_id': int(partner_id),
'view_id': False,
'context': context,
'type': 'ir.actions.act_window',
'search_view_id': search_view and search_view[1] or False
}
return value
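    # Create an opportunity-type crm.lead for the given partners and return a
    # mapping of partner id to the new opportunity id.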
def make_opportunity(self, cr, uid, ids, opportunity_summary, planned_revenue=0.0, probability=0.0, partner_id=None, context=None):
categ_obj = self.pool.get('crm.case.categ')
categ_ids = categ_obj.search(cr, uid, [('object_id.model','=','crm.lead')])
lead_obj = self.pool.get('crm.lead')
opportunity_ids = {}
for partner in self.browse(cr, uid, ids, context=context):
if not partner_id:
partner_id = partner.id
opportunity_id = lead_obj.create(cr, uid, {
'name' : opportunity_summary,
'planned_revenue' : planned_revenue,
'probability' : probability,
'partner_id' : partner_id,
'categ_ids' : categ_ids and categ_ids[0:1] or [],
'type': 'opportunity'
}, context=context)
opportunity_ids[partner_id] = opportunity_id
return opportunity_ids
def schedule_meeting(self, cr, uid, ids, context=None):
partner_ids = list(ids)
partner_ids.append(self.pool.get('res.users').browse(cr, uid, uid).partner_id.id)
res = self.pool.get('ir.actions.act_window').for_xml_id(cr, uid, 'calendar', 'action_calendar_event', context)
res['context'] = {
'search_default_partner_ids': list(ids),
'default_partner_ids': partner_ids,
}
return res
| agpl-3.0 |
markYoungH/chromium.src | tools/site_compare/site_compare.py | 179 | 6504 | #!/usr/bin/env python
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""SiteCompare component to handle bulk scrapes.
Invokes a list of browsers and sends them to a list of URLs,
saving the rendered results to a specified directory, then
performs comparison operations on the resulting bitmaps and
saves the results
"""
# This line is necessary to work around a QEMU bug
import _imaging
import os # Functions for walking the directory tree
import sys # Needed for the sys.exit call at the bottom of this script
import types # Runtime type-checking
import command_line # command-line parsing
import drivers # Functions for driving keyboard/mouse/windows, OS-specific
import operators # Functions that, given two bitmaps as input, produce
# output depending on the performance of an operation
import scrapers # Functions that know how to capture a render from
# particular browsers
import commands.compare2 # compare one page in two versions of same browser
import commands.maskmaker # generate a mask based on repeated scrapes
import commands.measure # measure length of time a page takes to load
import commands.scrape # scrape a URL or series of URLs to a bitmap
# The timeload command is obsolete (too flaky); it may be reinstated
# later but for now it's been superceded by "measure"
# import commands.timeload # measure length of time a page takes to load
def Scrape(browsers, urls, window_size=(1024, 768),
window_pos=(0, 0), timeout=20, save_path=None, **kwargs):
"""Invoke one or more browsers over one or more URLs, scraping renders.
Args:
browsers: browsers to invoke with optional version strings
urls: URLs to visit
window_size: size of the browser window to display
window_pos: location of browser window
timeout: time (in seconds) to wait for page to load
save_path: root of save path, automatically appended with browser and
version
kwargs: miscellaneous keyword args, passed to scraper
Returns:
None
@TODO(jhaas): more parameters, or perhaps an indefinite dictionary
parameter, for things like length of time to wait for timeout, speed
of mouse clicks, etc. Possibly on a per-browser, per-URL, or
per-browser-per-URL basis
"""
if type(browsers) in types.StringTypes: browsers = [browsers]
if save_path is None:
# default save path is "scrapes" off the current root
save_path = os.path.join(os.path.split(__file__)[0], "Scrapes")
for browser in browsers:
# Browsers should be tuples of (browser, version)
if type(browser) in types.StringTypes: browser = (browser, None)
scraper = scrapers.GetScraper(browser)
full_path = os.path.join(save_path, browser[0], scraper.version)
drivers.windowing.PreparePath(full_path)
scraper.Scrape(urls, full_path, window_size, window_pos, timeout, kwargs)
def Compare(base, compare, ops, root_path=None, out_path=None):
"""Compares a series of scrapes using a series of operators.
Args:
base: (browser, version) tuple of version to consider the baseline
compare: (browser, version) tuple of version to compare to
ops: list of operators plus operator arguments
root_path: root of the scrapes
out_path: place to put any output from the operators
Returns:
None
@TODO(jhaas): this method will likely change, to provide a robust and
well-defined way of chaining operators, applying operators conditionally,
and full-featured scripting of the operator chain. There also needs
to be better definition of the output; right now it's to stdout and
a log.txt file, with operator-dependent images saved for error output
"""
if root_path is None:
# default save path is "scrapes" off the current root
root_path = os.path.join(os.path.split(__file__)[0], "Scrapes")
if out_path is None:
out_path = os.path.join(os.path.split(__file__)[0], "Compares")
if type(base) in types.StringTypes: base = (base, None)
if type(compare) in types.StringTypes: compare = (compare, None)
if type(ops) in types.StringTypes: ops = [ops]
base_dir = os.path.join(root_path, base[0])
compare_dir = os.path.join(root_path, compare[0])
if base[1] is None:
# base defaults to earliest capture
    base = (base[0], min(os.listdir(base_dir)))
if compare[1] is None:
# compare defaults to latest capture
    compare = (compare[0], max(os.listdir(compare_dir)))
out_path = os.path.join(out_path, base[0], base[1], compare[0], compare[1])
drivers.windowing.PreparePath(out_path)
# TODO(jhaas): right now we're just dumping output to a log file
# (and the console), which works as far as it goes but isn't nearly
# robust enough. Change this after deciding exactly what we want to
# change it to.
out_file = open(os.path.join(out_path, "log.txt"), "w")
description_string = ("Comparing %s %s to %s %s" %
(base[0], base[1], compare[0], compare[1]))
out_file.write(description_string)
print description_string
base_dir = os.path.join(base_dir, base[1])
compare_dir = os.path.join(compare_dir, compare[1])
for filename in os.listdir(base_dir):
out_file.write("%s: " % filename)
if not os.path.isfile(os.path.join(compare_dir, filename)):
out_file.write("Does not exist in target directory\n")
print "File %s does not exist in target directory" % filename
continue
base_filename = os.path.join(base_dir, filename)
compare_filename = os.path.join(compare_dir, filename)
for op in ops:
if type(op) in types.StringTypes: op = (op, None)
module = operators.GetOperator(op[0])
ret = module.Compare(base_filename, compare_filename)
if ret is None:
print "%s: OK" % (filename,)
out_file.write("OK\n")
else:
print "%s: %s" % (filename, ret[0])
out_file.write("%s\n" % (ret[0]))
ret[1].save(os.path.join(out_path, filename))
out_file.close()
def main():
"""Main executable. Parse the command line and invoke the command."""
cmdline = command_line.CommandLine()
# The below two commands are currently unstable so have been disabled
# commands.compare2.CreateCommand(cmdline)
# commands.maskmaker.CreateCommand(cmdline)
commands.measure.CreateCommand(cmdline)
commands.scrape.CreateCommand(cmdline)
cmdline.ParseCommandLine()
return 0
if __name__ == "__main__":
sys.exit(main())
| bsd-3-clause |