repo_name (stringlengths 5-100) | path (stringlengths 4-375) | copies (stringclasses, 991 values) | size (stringlengths 4-7) | content (stringlengths 666-1M) | license (stringclasses, 15 values) |
---|---|---|---|---|---|
SNET-Entrance/Entrance-KEX | src/bootstrap/oauth.py | 1 | 3710 | import json
from datetime import datetime, timedelta
from flask import request, render_template, make_response, current_app
from flask_login import current_user
from flask_oauthlib.provider import OAuth2Provider
from flask_user import login_required, passwords
from werkzeug.security import gen_salt
from bootstrap import app, db, login_manager
from models import Client, Grant, Token, User
oauth = OAuth2Provider(app)
@login_manager.request_loader
def load_user_from_request(request):
if 'access_token' not in request.args:
return None
token = load_token(request.args['access_token'])
if token:
return token.user
return None
@oauth.clientgetter
def load_client(client_id):
return Client.query.filter_by(client_id=client_id).first()
@oauth.grantgetter
def load_grant(client_id, code):
return Grant.query.filter_by(client_id=client_id, code=code).first()
@oauth.grantsetter
@login_required
def save_grant(client_id, code, request, *args, **kwargs):
expires = datetime.utcnow() + timedelta(seconds=100)
grant = Grant(
client_id=client_id,
code=code['code'],
redirect_uri=request.redirect_uri,
_scopes=' '.join(request.scopes),
user=current_user,
expires=expires
)
db.session.add(grant)
db.session.commit()
return grant
@oauth.tokengetter
def load_token(access_token=None, refresh_token=None):
if access_token:
return Token.query.filter_by(access_token=access_token).first()
elif refresh_token:
return Token.query.filter_by(refresh_token=refresh_token).first()
@oauth.tokensetter
def save_token(token, request, *args, **kwargs):
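# clear any existing tokens for this client/user pair so only the new one is kept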
toks = Token.query.filter_by(client_id=request.client.client_id, user_id=request.user.id)
for t in toks:
db.session.delete(t)
expires_in = token.get('expires_in')
expires = datetime.utcnow() + timedelta(seconds=expires_in)
tok = Token(
access_token=token['access_token'],
refresh_token=token['refresh_token'],
token_type=token['token_type'],
_scopes=token['scope'],
expires=expires,
client_id=request.client.client_id,
user_id=request.user.id,
)
db.session.add(tok)
db.session.commit()
return tok
@oauth.usergetter
def get_user(username, password, *args, **kwargs):
user = User.query.filter_by(username=username).first()
if not user:
return None
if passwords.verify_password(current_app.user_manager, password, user.password):
return user
return None
@app.route('/oauth/authorize', methods=['GET', 'POST'])
@login_required
@oauth.authorize_handler
def authorize(*args, **kwargs):
if request.method == 'GET':
client_id = kwargs.get('client_id')
client = Client.query.filter_by(client_id=client_id).first()
kwargs['client'] = client
return render_template('authorize.html', **kwargs)
confirm = request.form.get('confirm', 'no')
return confirm == 'yes'
@app.route('/oauth/token', methods=['GET', 'POST'])
@oauth.token_handler
def access_token():
return None
@app.route('/oauth/revoke', methods=['POST'])
@oauth.revoke_handler
def revoke_token():
pass
@app.route('/oauth/test')
@oauth.require_oauth()
def me():
user = request.oauth.user
return user.username
@app.route('/client')
def client():
item = Client(
client_id=gen_salt(40),
client_secret=gen_salt(50),
_redirect_uris='/',
is_confidential=True
)
db.session.add(item)
db.session.commit()
return make_response(json.dumps(item.dict()), 200)
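# --- Illustrative only: a client-side sketch of the flow above (not part of
# this module). The localhost URL, redirect_uri, and credential placeholders
# are assumptions; real values come from the /client route and from the
# /oauth/authorize redirect.
def _example_token_exchange(client_id, client_secret, code):
    import requests  # third-party HTTP client, assumed available
    # exchange the authorization code for an access token (served by access_token())
    resp = requests.post('http://localhost:5000/oauth/token', data={
        'grant_type': 'authorization_code',
        'code': code,
        'client_id': client_id,
        'client_secret': client_secret,
        'redirect_uri': '/',
    })
    token = resp.json()['access_token']
    # load_user_from_request() picks the token up from the query string
    return requests.get('http://localhost:5000/oauth/test',
                        params={'access_token': token})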
| apache-2.0 |
xiandiancloud/ji | lms/djangoapps/courseware/features/certificates.py | 20 | 9780 | # pylint: disable=C0111
# pylint: disable=W0621
from lettuce import world, step
from lettuce.django import django_url
from course_modes.models import CourseMode
from nose.tools import assert_equal
UPSELL_LINK_CSS = '.message-upsell a.action-upgrade[href*="edx/999/Certificates"]'
def create_cert_course():
world.clear_courses()
org = 'edx'
number = '999'
name = 'Certificates'
course_id = '{org}/{number}/{name}'.format(
org=org, number=number, name=name)
world.scenario_dict['course_id'] = course_id
world.scenario_dict['COURSE'] = world.CourseFactory.create(
org=org, number=number, display_name=name)
audit_mode = world.CourseModeFactory.create(
course_id=course_id,
mode_slug='audit',
mode_display_name='audit course',
min_price=0,
)
assert isinstance(audit_mode, CourseMode)
verified_mode = world.CourseModeFactory.create(
course_id=course_id,
mode_slug='verified',
mode_display_name='verified cert course',
min_price=16,
suggested_prices='32,64,128',
currency='usd',
)
assert isinstance(verified_mode, CourseMode)
def register():
url = 'courses/{org}/{number}/{name}/about'.format(
org='edx', number='999', name='Certificates')
world.browser.visit(django_url(url))
world.css_click('section.intro a.register')
assert world.is_css_present('section.wrapper h3.title')
@step(u'the course has an honor mode')
def the_course_has_an_honor_mode(step):
create_cert_course()
honor_mode = world.CourseModeFactory.create(
course_id=world.scenario_dict['course_id'],
mode_slug='honor',
mode_display_name='honor mode',
min_price=0,
)
assert isinstance(honor_mode, CourseMode)
@step(u'I select the audit track$')
def select_the_audit_track(step):
create_cert_course()
register()
btn_css = 'input[value="Select Audit"]'
world.wait(1) # TODO remove this after troubleshooting JZ
world.css_find(btn_css)
world.css_click(btn_css)
def select_contribution(amount=32):
radio_css = 'input[value="{}"]'.format(amount)
world.css_click(radio_css)
assert world.css_find(radio_css).selected
def click_verified_track_button():
world.wait_for_ajax_complete()
btn_css = 'input[value="Select Certificate"]'
world.css_click(btn_css)
@step(u'I select the verified track for upgrade')
def select_verified_track_upgrade(step):
select_contribution(32)
world.wait_for_ajax_complete()
btn_css = 'input[value="Upgrade Your Registration"]'
world.css_click(btn_css)
# TODO: might want to change this depending on the changes for upgrade
assert world.is_css_present('section.progress')
@step(u'I select the verified track$')
def select_the_verified_track(step):
create_cert_course()
register()
select_contribution(32)
click_verified_track_button()
assert world.is_css_present('section.progress')
@step(u'I should see the course on my dashboard$')
def should_see_the_course_on_my_dashboard(step):
course_css = 'li.course-item'
assert world.is_css_present(course_css)
@step(u'I go to step "([^"]*)"$')
def goto_next_step(step, step_num):
btn_css = {
'1': '#face_next_button',
'2': '#face_next_link',
'3': '#photo_id_next_link',
'4': '#pay_button',
}
next_css = {
'1': 'div#wrapper-facephoto.carousel-active',
'2': 'div#wrapper-idphoto.carousel-active',
'3': 'div#wrapper-review.carousel-active',
'4': 'div#wrapper-review.carousel-active',
}
world.css_click(btn_css[step_num])
# Pressing the button will advance the carousel to the next item
# and give the wrapper div the "carousel-active" class
assert world.css_find(next_css[step_num])
@step(u'I capture my "([^"]*)" photo$')
def capture_my_photo(step, name):
# Hard coded red dot image
image_data = 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAUAAAAFCAYAAACNbyblAAAAHElEQVQI12P4//8/w38GIAXDIBKE0DHxgljNBAAO9TXL0Y4OHwAAAABJRU5ErkJggg=='
snapshot_script = "$('#{}_image')[0].src = '{}';".format(name, image_data)
# Mirror the javascript of the photo_verification.html page
world.browser.execute_script(snapshot_script)
world.browser.execute_script("$('#{}_capture_button').hide();".format(name))
world.browser.execute_script("$('#{}_reset_button').show();".format(name))
world.browser.execute_script("$('#{}_approve_button').show();".format(name))
assert world.css_find('#{}_approve_button'.format(name))
@step(u'I approve my "([^"]*)" photo$')
def approve_my_photo(step, name):
button_css = {
'face': 'div#wrapper-facephoto li.control-approve',
'photo_id': 'div#wrapper-idphoto li.control-approve',
}
wrapper_css = {
'face': 'div#wrapper-facephoto',
'photo_id': 'div#wrapper-idphoto',
}
# Make sure that the carousel is in the right place
assert world.css_has_class(wrapper_css[name], 'carousel-active')
assert world.css_find(button_css[name])
# HACK: for now don't bother clicking the approve button for
# id_photo, because it is sending you back to Step 1.
# Come back and figure it out later. JZ Aug 29 2013
if name == 'face':
world.css_click(button_css[name])
# Make sure you didn't advance the carousel
assert world.css_has_class(wrapper_css[name], 'carousel-active')
@step(u'I select a contribution amount$')
def select_contribution_amount(step):
select_contribution(32)
@step(u'I confirm that the details match$')
def confirm_details_match(step):
# First you need to scroll down on the page
# to make the element visible?
# Currently chrome is failing with ElementNotVisibleException
world.browser.execute_script("window.scrollTo(0,1024)")
cb_css = 'input#confirm_pics_good'
world.css_click(cb_css)
assert world.css_find(cb_css).checked
@step(u'I am at the payment page')
def at_the_payment_page(step):
world.wait_for_present('input[name=transactionSignature]')
@step(u'I submit valid payment information$')
def submit_payment(step):
# First make sure that the page is done if it still executing
# an ajax query.
world.wait_for_ajax_complete()
button_css = 'input[value=Submit]'
world.css_click(button_css)
@step(u'I have submitted face and ID photos$')
def submitted_face_and_id_photos(step):
step.given('I am logged in')
step.given('I select the verified track')
step.given('I go to step "1"')
step.given('I capture my "face" photo')
step.given('I approve my "face" photo')
step.given('I go to step "2"')
step.given('I capture my "photo_id" photo')
step.given('I approve my "photo_id" photo')
step.given('I go to step "3"')
@step(u'I have submitted photos to verify my identity')
def submitted_photos_to_verify_my_identity(step):
step.given('I have submitted face and ID photos')
step.given('I select a contribution amount')
step.given('I confirm that the details match')
step.given('I go to step "4"')
@step(u'I submit my photos and confirm')
def submit_photos_and_confirm(step):
step.given('I go to step "1"')
step.given('I capture my "face" photo')
step.given('I approve my "face" photo')
step.given('I go to step "2"')
step.given('I capture my "photo_id" photo')
step.given('I approve my "photo_id" photo')
step.given('I go to step "3"')
step.given('I select a contribution amount')
step.given('I confirm that the details match')
step.given('I go to step "4"')
@step(u'I see that my payment was successful')
def see_that_my_payment_was_successful(step):
title = world.css_find('div.wrapper-content-main h3.title')
assert_equal(title.text, u'Congratulations! You are now verified on edX.')
@step(u'I navigate to my dashboard')
def navigate_to_my_dashboard(step):
world.css_click('span.avatar')
assert world.css_find('section.my-courses')
@step(u'I see the course on my dashboard')
def see_the_course_on_my_dashboard(step):
course_link_css = 'section.my-courses a[href*="edx/999/Certificates"]'
assert world.is_css_present(course_link_css)
@step(u'I see the upsell link on my dashboard')
def see_upsell_link_on_my_dashboard(step):
course_link_css = UPSELL_LINK_CSS
assert world.is_css_present(course_link_css)
@step(u'I do not see the upsell link on my dashboard')
def do_not_see_upsell_link_on_my_dashboard(step):
course_link_css = UPSELL_LINK_CSS
assert world.is_css_not_present(course_link_css)
@step(u'I select the upsell link on my dashboard')
def select_upsell_link_on_my_dashboard(step):
# expand the upsell section
world.css_click('.message-upsell')
course_link_css = UPSELL_LINK_CSS
# click the actual link
world.css_click(course_link_css)
@step(u'I see that I am on the verified track')
def see_that_i_am_on_the_verified_track(step):
id_verified_css = 'li.course-item article.course.verified'
assert world.is_css_present(id_verified_css)
@step(u'I leave the flow and return$')
def leave_the_flow_and_return(step):
world.visit('verify_student/verified/edx/999/Certificates/')
@step(u'I am at the verified page$')
def see_the_payment_page(step):
assert world.css_find('button#pay_button')
@step(u'I edit my name$')
def edit_my_name(step):
btn_css = 'a.retake-photos'
world.css_click(btn_css)
@step(u'I select the honor code option$')
def give_a_reason_why_i_cannot_pay(step):
register()
link_css = 'h5 i.expandable-icon'
world.css_click(link_css)
cb_css = 'input#honor-code'
world.css_click(cb_css)
btn_css = 'input[value="Select Certificate"]'
world.css_click(btn_css)
| agpl-3.0 |
danwent/libvirt-ovs | examples/domain-events/events-python/event-test.py | 2 | 19937 | #!/usr/bin/python -u
#
#
#
#################################################################################
# Start off by implementing a general purpose event loop for anyone's use
#################################################################################
import sys
import getopt
import os
import libvirt
import select
import errno
import time
import threading
# For the sake of demonstration, this example program includes
# an implementation of a pure python event loop. Most applications
# would be better off just using the default libvirt event loop
# APIs, instead of implementing this in python. The exception is
# where an application wants to integrate with an existing 3rd
# party event loop impl
#
# Change this to 'False' to make the demo use the native
# libvirt event loop impl
use_pure_python_event_loop = True
do_debug = False
def debug(msg):
global do_debug
if do_debug:
print msg
#
# This general purpose event loop will support waiting for file handle
# I/O and error events, as well as scheduling repeatable timers with
# a fixed interval.
#
# It is a pure python implementation based around the poll() API
#
class virEventLoopPure:
# This class contains the data we need to track for a
# single file handle
class virEventLoopPureHandle:
def __init__(self, handle, fd, events, cb, opaque):
self.handle = handle
self.fd = fd
self.events = events
self.cb = cb
self.opaque = opaque
def get_id(self):
return self.handle
def get_fd(self):
return self.fd
def get_events(self):
return self.events
def set_events(self, events):
self.events = events
def dispatch(self, events):
self.cb(self.handle,
self.fd,
events,
self.opaque)
# This class contains the data we need to track for a
# single periodic timer
class virEventLoopPureTimer:
def __init__(self, timer, interval, cb, opaque):
self.timer = timer
self.interval = interval
self.cb = cb
self.opaque = opaque
self.lastfired = 0
def get_id(self):
return self.timer
def get_interval(self):
return self.interval
def set_interval(self, interval):
self.interval = interval
def get_last_fired(self):
return self.lastfired
def set_last_fired(self, now):
self.lastfired = now
def dispatch(self):
self.cb(self.timer,
self.opaque)
def __init__(self):
self.poll = select.poll()
self.pipetrick = os.pipe()
self.pendingWakeup = False
self.runningPoll = False
self.nextHandleID = 1
self.nextTimerID = 1
self.handles = []
self.timers = []
self.quit = False
# The event loop can be used from multiple threads at once.
# Specifically while the main thread is sleeping in poll()
# waiting for events to occur, another thread may come along
# and add/update/remove a file handle, or timer. When this
# happens we need to interrupt the poll() sleep in the other
# thread, so that it'll see the file handle / timer changes.
#
# Using OS level signals for this is very unreliable and
# hard to implement correctly. Thus we use the real classic
# "self pipe" trick. A anonymous pipe, with one end registered
# with the event loop for input events. When we need to force
# the main thread out of a poll() sleep, we simply write a
# single byte of data to the other end of the pipe.
debug("Self pipe watch %d write %d" %(self.pipetrick[0], self.pipetrick[1]))
self.poll.register(self.pipetrick[0], select.POLLIN)
# Calculate when the next timeout is due to occur, returning
# the absolute timestamp for the next timeout, or 0 if there is
# no timeout due
def next_timeout(self):
next = 0
for t in self.timers:
last = t.get_last_fired()
interval = t.get_interval()
if interval < 0:
continue
if next == 0 or (last + interval) < next:
next = last + interval
return next
# Lookup a virEventLoopPureHandle object based on file descriptor
def get_handle_by_fd(self, fd):
for h in self.handles:
if h.get_fd() == fd:
return h
return None
# Lookup a virEventLoopPureHandle object based on its event loop ID
def get_handle_by_id(self, handleID):
for h in self.handles:
if h.get_id() == handleID:
return h
return None
# This is the heart of the event loop, performing one single
# iteration. It asks when the next timeout is due, and then
# calculates the maximum amount of time it is able to sleep
# for in poll() pending file handle events.
#
# It then goes into the poll() sleep.
#
# When poll() returns, there will be zero or more file handle
# events which need to be dispatched to registered callbacks
# It may also be time to fire some periodic timers.
#
# Due to the coarse granularity of scheduler timeslices, if
# we ask for a sleep of 500ms in order to satisfy a timer, we
# may return up to 1 scheduler timeslice early. So even though
# our sleep timeout was reached, the registered timer may not
# technically be at its expiry point. This leads to us going
# back around the loop with a crazy 5ms sleep. So when checking
# if timeouts are due, we allow a margin of 20ms, to avoid
# these pointless repeated tiny sleeps.
def run_once(self):
sleep = -1
self.runningPoll = True
next = self.next_timeout()
debug("Next timeout due at %d" % next)
if next > 0:
now = int(time.time() * 1000)
if now >= next:
sleep = 0
else:
sleep = (next - now) / 1000.0
debug("Poll with a sleep of %d" % sleep)
events = self.poll.poll(sleep)
# Dispatch any file handle events that occurred
for (fd, revents) in events:
# See if the event was from the self-pipe
# telling us to wake up. If so, discard
# the data and just continue
if fd == self.pipetrick[0]:
self.pendingWakeup = False
data = os.read(fd, 1)
continue
h = self.get_handle_by_fd(fd)
if h:
debug("Dispatch fd %d handle %d events %d" % (fd, h.get_id(), revents))
h.dispatch(self.events_from_poll(revents))
now = int(time.time() * 1000)
for t in self.timers:
interval = t.get_interval()
if interval < 0:
continue
want = t.get_last_fired() + interval
# Deduct 20ms, since scheduler timeslice
# means we could be ever so slightly early
if now >= (want-20):
debug("Dispatch timer %d now %s want %s" % (t.get_id(), str(now), str(want)))
t.set_last_fired(now)
t.dispatch()
self.runningPoll = False
# Actually run the event loop forever
def run_loop(self):
self.quit = False
while not self.quit:
self.run_once()
def interrupt(self):
if self.runningPoll and not self.pendingWakeup:
self.pendingWakeup = True
os.write(self.pipetrick[1], 'c')
# Registers a new file handle 'fd', monitoring for 'events' (libvirt
# event constants), firing the callback cb() when an event occurs.
# Returns a unique integer identifier for this handle, that should be
# used to later update/remove it
def add_handle(self, fd, events, cb, opaque):
handleID = self.nextHandleID + 1
self.nextHandleID = self.nextHandleID + 1
h = self.virEventLoopPureHandle(handleID, fd, events, cb, opaque)
self.handles.append(h)
self.poll.register(fd, self.events_to_poll(events))
self.interrupt()
debug("Add handle %d fd %d events %d" % (handleID, fd, events))
return handleID
# Registers a new timer with periodic expiry at 'interval' ms,
# firing cb() each time the timer expires. If 'interval' is -1,
# then the timer is registered, but not enabled
# Returns a unique integer identifier for this handle, that should be
# used to later update/remove it
def add_timer(self, interval, cb, opaque):
timerID = self.nextTimerID + 1
self.nextTimerID = self.nextTimerID + 1
h = self.virEventLoopPureTimer(timerID, interval, cb, opaque)
self.timers.append(h)
self.interrupt()
debug("Add timer %d interval %d" % (timerID, interval))
return timerID
# Change the set of events to be monitored on the file handle
def update_handle(self, handleID, events):
h = self.get_handle_by_id(handleID)
if h:
h.set_events(events)
self.poll.unregister(h.get_fd())
self.poll.register(h.get_fd(), self.events_to_poll(events))
self.interrupt()
debug("Update handle %d fd %d events %d" % (handleID, h.get_fd(), events))
# Change the periodic frequency of the timer
def update_timer(self, timerID, interval):
for h in self.timers:
if h.get_id() == timerID:
h.set_interval(interval)
self.interrupt()
debug("Update timer %d interval %d" % (timerID, interval))
break
# Stop monitoring for events on the file handle
def remove_handle(self, handleID):
handles = []
for h in self.handles:
if h.get_id() == handleID:
self.poll.unregister(h.get_fd())
debug("Remove handle %d fd %d" % (handleID, h.get_fd()))
else:
handles.append(h)
self.handles = handles
self.interrupt()
# Stop firing the periodic timer
def remove_timer(self, timerID):
timers = []
for h in self.timers:
if h.get_id() != timerID:
timers.append(h)
debug("Remove timer %d" % timerID)
self.timers = timers
self.interrupt()
# Convert from libvirt event constants, to poll() events constants
def events_to_poll(self, events):
ret = 0
if events & libvirt.VIR_EVENT_HANDLE_READABLE:
ret |= select.POLLIN
if events & libvirt.VIR_EVENT_HANDLE_WRITABLE:
ret |= select.POLLOUT
if events & libvirt.VIR_EVENT_HANDLE_ERROR:
ret |= select.POLLERR
if events & libvirt.VIR_EVENT_HANDLE_HANGUP:
ret |= select.POLLHUP
return ret
# Convert from poll() event constants, to libvirt events constants
def events_from_poll(self, events):
ret = 0
if events & select.POLLIN:
ret |= libvirt.VIR_EVENT_HANDLE_READABLE
if events & select.POLLOUT:
ret |= libvirt.VIR_EVENT_HANDLE_WRITABLE
if events & select.POLLNVAL:
ret |= libvirt.VIR_EVENT_HANDLE_ERROR
if events & select.POLLERR:
ret |= libvirt.VIR_EVENT_HANDLE_ERROR
if events & select.POLLHUP:
ret |= libvirt.VIR_EVENT_HANDLE_HANGUP
return ret
###########################################################################
# Now glue an instance of the general event loop into libvirt's event loop
###########################################################################
# This single global instance of the event loop will be used for
# monitoring libvirt events
eventLoop = virEventLoopPure()
# This keeps track of what thread is running the event loop,
# (if it is run in a background thread)
eventLoopThread = None
# These next set of 6 methods are the glue between the official
# libvirt events API, and our particular impl of the event loop
#
# There is no reason why the 'virEventLoopPure' has to be used.
# An application could easily make these 6 glue methods hook into
# another event loop such as GLib's, or something like the python
# Twisted event framework.
def virEventAddHandleImpl(fd, events, cb, opaque):
global eventLoop
return eventLoop.add_handle(fd, events, cb, opaque)
def virEventUpdateHandleImpl(handleID, events):
global eventLoop
return eventLoop.update_handle(handleID, events)
def virEventRemoveHandleImpl(handleID):
global eventLoop
return eventLoop.remove_handle(handleID)
def virEventAddTimerImpl(interval, cb, opaque):
global eventLoop
return eventLoop.add_timer(interval, cb, opaque)
def virEventUpdateTimerImpl(timerID, interval):
global eventLoop
return eventLoop.update_timer(timerID, interval)
def virEventRemoveTimerImpl(timerID):
global eventLoop
return eventLoop.remove_timer(timerID)
# This tells libvirt what event loop implementation it
# should use
def virEventLoopPureRegister():
libvirt.virEventRegisterImpl(virEventAddHandleImpl,
virEventUpdateHandleImpl,
virEventRemoveHandleImpl,
virEventAddTimerImpl,
virEventUpdateTimerImpl,
virEventRemoveTimerImpl)
# Directly run the event loop in the current thread
def virEventLoopPureRun():
global eventLoop
eventLoop.run_loop()
def virEventLoopNativeRun():
while True:
libvirt.virEventRunDefaultImpl()
# Spawn a background thread to run the event loop
def virEventLoopPureStart():
global eventLoopThread
virEventLoopPureRegister()
eventLoopThread = threading.Thread(target=virEventLoopPureRun, name="libvirtEventLoop")
eventLoopThread.setDaemon(True)
eventLoopThread.start()
def virEventLoopNativeStart():
global eventLoopThread
libvirt.virEventRegisterDefaultImpl()
eventLoopThread = threading.Thread(target=virEventLoopNativeRun, name="libvirtEventLoop")
eventLoopThread.setDaemon(True)
eventLoopThread.start()
##########################################################################
# Everything that now follows is a simple demo of domain lifecycle events
##########################################################################
def eventToString(event):
eventStrings = ( "Defined",
"Undefined",
"Started",
"Suspended",
"Resumed",
"Stopped",
"Shutdown" );
return eventStrings[event];
def detailToString(event, detail):
eventStrings = (
( "Added", "Updated" ),
( "Removed" ),
( "Booted", "Migrated", "Restored", "Snapshot" ),
( "Paused", "Migrated", "IOError", "Watchdog" ),
( "Unpaused", "Migrated"),
( "Shutdown", "Destroyed", "Crashed", "Migrated", "Saved", "Failed", "Snapshot"),
( "Finished" )
)
return eventStrings[event][detail]
def myDomainEventCallback1 (conn, dom, event, detail, opaque):
print "myDomainEventCallback1 EVENT: Domain %s(%s) %s %s" % (dom.name(), dom.ID(),
eventToString(event),
detailToString(event, detail))
def myDomainEventCallback2 (conn, dom, event, detail, opaque):
print "myDomainEventCallback2 EVENT: Domain %s(%s) %s %s" % (dom.name(), dom.ID(),
eventToString(event),
detailToString(event, detail))
def myDomainEventRebootCallback(conn, dom, opaque):
print "myDomainEventRebootCallback: Domain %s(%s)" % (dom.name(), dom.ID())
def myDomainEventRTCChangeCallback(conn, dom, utcoffset, opaque):
print "myDomainEventRTCChangeCallback: Domain %s(%s) %d" % (dom.name(), dom.ID(), utcoffset)
def myDomainEventWatchdogCallback(conn, dom, action, opaque):
print "myDomainEventWatchdogCallback: Domain %s(%s) %d" % (dom.name(), dom.ID(), action)
def myDomainEventIOErrorCallback(conn, dom, srcpath, devalias, action, opaque):
print "myDomainEventIOErrorCallback: Domain %s(%s) %s %s %d" % (dom.name(), dom.ID(), srcpath, devalias, action)
def myDomainEventGraphicsCallback(conn, dom, phase, localAddr, remoteAddr, authScheme, subject, opaque):
print "myDomainEventGraphicsCallback: Domain %s(%s) %d %s" % (dom.name(), dom.ID(), phase, authScheme)
def myDomainEventDiskChangeCallback(conn, dom, oldSrcPath, newSrcPath, devAlias, reason, opaque):
print "myDomainEventDiskChangeCallback: Domain %s(%s) disk change oldSrcPath: %s newSrcPath: %s devAlias: %s reason: %s" % (
dom.name(), dom.ID(), oldSrcPath, newSrcPath, devAlias, reason)
def usage(out=sys.stderr):
print >>out, "usage: "+os.path.basename(sys.argv[0])+" [-hdl] [uri]"
print >>out, " uri will default to qemu:///system"
print >>out, " --help, -h Print this help message"
print >>out, " --debug, -d Print debug output"
print >>out, " --loop, -l Toggle event-loop-implementation"
def main():
try:
opts, args = getopt.getopt(sys.argv[1:], "hdl", ["help", "debug", "loop"])
except getopt.GetoptError, err:
# print help information and exit:
print str(err) # will print something like "option -a not recognized"
usage()
sys.exit(2)
for o, a in opts:
if o in ("-h", "--help"):
usage(sys.stdout)
sys.exit()
if o in ("-d", "--debug"):
global do_debug
do_debug = True
if o in ("-l", "--loop"):
global use_pure_python_event_loop
use_pure_python_event_loop ^= True
if len(args) >= 1:
uri = args[0]
else:
uri = "qemu:///system"
print "Using uri:" + uri
# Run a background thread with the event loop
if use_pure_python_event_loop:
virEventLoopPureStart()
else:
virEventLoopNativeStart()
vc = libvirt.openReadOnly(uri)
# Close connection on exit (to test cleanup paths)
old_exitfunc = getattr(sys, 'exitfunc', None)
def exit():
print "Closing " + str(vc)
vc.close()
if (old_exitfunc): old_exitfunc()
sys.exitfunc = exit
#Add 2 callbacks to prove this works with more than just one
vc.domainEventRegister(myDomainEventCallback1,None)
vc.domainEventRegisterAny(None, libvirt.VIR_DOMAIN_EVENT_ID_LIFECYCLE, myDomainEventCallback2, None)
vc.domainEventRegisterAny(None, libvirt.VIR_DOMAIN_EVENT_ID_REBOOT, myDomainEventRebootCallback, None)
vc.domainEventRegisterAny(None, libvirt.VIR_DOMAIN_EVENT_ID_RTC_CHANGE, myDomainEventRTCChangeCallback, None)
vc.domainEventRegisterAny(None, libvirt.VIR_DOMAIN_EVENT_ID_IO_ERROR, myDomainEventIOErrorCallback, None)
vc.domainEventRegisterAny(None, libvirt.VIR_DOMAIN_EVENT_ID_WATCHDOG, myDomainEventWatchdogCallback, None)
vc.domainEventRegisterAny(None, libvirt.VIR_DOMAIN_EVENT_ID_GRAPHICS, myDomainEventGraphicsCallback, None)
vc.domainEventRegisterAny(None, libvirt.VIR_DOMAIN_EVENT_ID_DISK_CHANGE, myDomainEventDiskChangeCallback, None)
vc.setKeepAlive(5, 3)
# The rest of your app would go here normally, but for sake
# of demo we'll just go to sleep. The other option is to
# run the event loop in your main thread if your app is
# totally event based.
while vc.isAlive() == 1:
time.sleep(1)
if __name__ == "__main__":
main()
| lgpl-2.1 |
dparlevliet/zelenka-report-storage | server-local/twisted/web/twcgi.py | 17 | 10780 | # -*- test-case-name: twisted.web.test.test_cgi -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
I hold resource classes and helper classes that deal with CGI scripts.
"""
# System Imports
import string
import os
import urllib
# Twisted Imports
from twisted.web import http
from twisted.internet import reactor, protocol
from twisted.spread import pb
from twisted.python import log, filepath
from twisted.web import resource, server, static
class CGIDirectory(resource.Resource, filepath.FilePath):
def __init__(self, pathname):
resource.Resource.__init__(self)
filepath.FilePath.__init__(self, pathname)
def getChild(self, path, request):
fnp = self.child(path)
if not fnp.exists():
return static.File.childNotFound
elif fnp.isdir():
return CGIDirectory(fnp.path)
else:
return CGIScript(fnp.path)
return resource.NoResource()
def render(self, request):
notFound = resource.NoResource(
"CGI directories do not support directory listing.")
return notFound.render(request)
class CGIScript(resource.Resource):
"""
L{CGIScript} is a resource which runs child processes according to the CGI
specification.
The implementation is complex due to the fact that it requires asynchronous
IPC with an external process with an unpleasant protocol.
"""
isLeaf = 1
def __init__(self, filename, registry=None):
"""
Initialize, with the name of a CGI script file.
"""
self.filename = filename
def render(self, request):
"""
Do various things to conform to the CGI specification.
I will set up the usual slew of environment variables, then spin off a
process.
@type request: L{twisted.web.http.Request}
@param request: An HTTP request.
"""
script_name = "/"+string.join(request.prepath, '/')
serverName = string.split(request.getRequestHostname(), ':')[0]
env = {"SERVER_SOFTWARE": server.version,
"SERVER_NAME": serverName,
"GATEWAY_INTERFACE": "CGI/1.1",
"SERVER_PROTOCOL": request.clientproto,
"SERVER_PORT": str(request.getHost().port),
"REQUEST_METHOD": request.method,
"SCRIPT_NAME": script_name, # XXX
"SCRIPT_FILENAME": self.filename,
"REQUEST_URI": request.uri,
}
client = request.getClient()
if client is not None:
env['REMOTE_HOST'] = client
ip = request.getClientIP()
if ip is not None:
env['REMOTE_ADDR'] = ip
pp = request.postpath
if pp:
env["PATH_INFO"] = "/"+string.join(pp, '/')
if hasattr(request, "content"):
# request.content is either a StringIO or a TemporaryFile, and
# the file pointer is sitting at the beginning (seek(0,0))
request.content.seek(0,2)
length = request.content.tell()
request.content.seek(0,0)
env['CONTENT_LENGTH'] = str(length)
qindex = string.find(request.uri, '?')
if qindex != -1:
qs = env['QUERY_STRING'] = request.uri[qindex+1:]
if '=' in qs:
qargs = []
else:
qargs = [urllib.unquote(x) for x in qs.split('+')]
else:
env['QUERY_STRING'] = ''
qargs = []
# Propagate HTTP headers
for title, header in request.getAllHeaders().items():
envname = string.upper(string.replace(title, '-', '_'))
if title not in ('content-type', 'content-length'):
envname = "HTTP_" + envname
env[envname] = header
# Propagate our environment
for key, value in os.environ.items():
if key not in env:
env[key] = value
# And they're off!
self.runProcess(env, request, qargs)
return server.NOT_DONE_YET
def runProcess(self, env, request, qargs=[]):
"""
Run the cgi script.
@type env: A C{dict} of C{str}, or C{None}
@param env: The environment variables to pass to the process that will
get spawned. See
L{twisted.internet.interfaces.IReactorProcess.spawnProcess} for more
information about environments and process creation.
@type request: L{twisted.web.http.Request}
@param request: An HTTP request.
@type qargs: A C{list} of C{str}
@param qargs: The command line arguments to pass to the process that
will get spawned.
"""
p = CGIProcessProtocol(request)
reactor.spawnProcess(p, self.filename, [self.filename] + qargs, env,
os.path.dirname(self.filename))
class FilteredScript(CGIScript):
"""
I am a special version of a CGI script, that uses a specific executable.
This is useful for interfacing with other scripting languages that adhere to
the CGI standard. My C{filter} attribute specifies what executable to run,
and my C{filename} init parameter describes which script to pass to the
first argument of that script.
To customize me for a particular location of a CGI interpreter, override
C{filter}.
@type filter: C{str}
@ivar filter: The absolute path to the executable.
"""
filter = '/usr/bin/cat'
def runProcess(self, env, request, qargs=[]):
"""
Run a script through the C{filter} executable.
@type env: A C{dict} of C{str}, or C{None}
@param env: The environment variables to pass to the process that will
get spawned. See
L{twisted.internet.interfaces.IReactorProcess.spawnProcess} for more
information about environments and process creation.
@type request: L{twisted.web.http.Request}
@param request: An HTTP request.
@type qargs: A C{list} of C{str}
@param qargs: The command line arguments to pass to the process that
will get spawned.
"""
p = CGIProcessProtocol(request)
reactor.spawnProcess(p, self.filter,
[self.filter, self.filename] + qargs, env,
os.path.dirname(self.filename))
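# --- Illustrative only: a FilteredScript subclass. The interpreter path is an
# assumption for this sketch; point `filter` at whatever CGI-capable
# interpreter is installed on the target system.
class ExamplePHPScript(FilteredScript):
    filter = '/usr/bin/php-cgi'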
class CGIProcessProtocol(protocol.ProcessProtocol, pb.Viewable):
handling_headers = 1
headers_written = 0
headertext = ''
errortext = ''
# Remotely relay producer interface.
def view_resumeProducing(self, issuer):
self.resumeProducing()
def view_pauseProducing(self, issuer):
self.pauseProducing()
def view_stopProducing(self, issuer):
self.stopProducing()
def resumeProducing(self):
self.transport.resumeProducing()
def pauseProducing(self):
self.transport.pauseProducing()
def stopProducing(self):
self.transport.loseConnection()
def __init__(self, request):
self.request = request
def connectionMade(self):
self.request.registerProducer(self, 1)
self.request.content.seek(0, 0)
content = self.request.content.read()
if content:
self.transport.write(content)
self.transport.closeStdin()
def errReceived(self, error):
self.errortext = self.errortext + error
def outReceived(self, output):
"""
Handle a chunk of input
"""
# First, make sure that the headers from the script are sorted
# out (we'll want to do some parsing on these later.)
if self.handling_headers:
text = self.headertext + output
headerEnds = []
for delimiter in '\n\n','\r\n\r\n','\r\r', '\n\r\n':
headerend = text.find(delimiter)
if headerend != -1:
headerEnds.append((headerend, delimiter))
if headerEnds:
# The script is entirely in control of response headers; disable the
# default Content-Type value normally provided by
# twisted.web.server.Request.
self.request.defaultContentType = None
headerEnds.sort()
headerend, delimiter = headerEnds[0]
self.headertext = text[:headerend]
# This is a final version of the header text.
linebreak = delimiter[:len(delimiter)//2]
headers = self.headertext.split(linebreak)
for header in headers:
br = header.find(': ')
if br == -1:
log.msg( 'ignoring malformed CGI header: %s' % header )
else:
headerName = header[:br].lower()
headerText = header[br+2:]
if headerName == 'location':
self.request.setResponseCode(http.FOUND)
if headerName == 'status':
try:
statusNum = int(headerText[:3]) #"XXX <description>" sometimes happens.
except ValueError:
log.msg( "malformed status header" )
else:
self.request.setResponseCode(statusNum)
else:
# Don't allow the application to control these required headers.
if headerName.lower() not in ('server', 'date'):
self.request.responseHeaders.addRawHeader(headerName, headerText)
output = text[headerend+len(delimiter):]
self.handling_headers = 0
if self.handling_headers:
self.headertext = text
if not self.handling_headers:
self.request.write(output)
def processEnded(self, reason):
if reason.value.exitCode != 0:
log.msg("CGI %s exited with exit code %s" %
(self.request.uri, reason.value.exitCode))
if self.errortext:
log.msg("Errors from CGI %s: %s" % (self.request.uri, self.errortext))
if self.handling_headers:
log.msg("Premature end of headers in %s: %s" % (self.request.uri, self.headertext))
self.request.write(
resource.ErrorPage(http.INTERNAL_SERVER_ERROR,
"CGI Script Error",
"Premature end of script headers.").render(self.request))
self.request.unregisterProducer()
self.request.finish()
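# --- Illustrative only: a minimal sketch of serving a CGI directory with the
# classes above. The port number and the /var/www/cgi-bin path are assumptions.
def _example_serve_cgi():
    site = server.Site(CGIDirectory('/var/www/cgi-bin'))
    reactor.listenTCP(8080, site)
    reactor.run()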
| lgpl-3.0 |
rmatsuda/invesalius3 | invesalius/data/bases.py | 5 | 8288 | import numpy as np
import invesalius.data.coordinates as dco
import invesalius.data.transformations as tr
import invesalius.data.coregistration as dcr
def angle_calculation(ap_axis, coil_axis):
"""
Calculate angle between two given axis (in degrees)
:param ap_axis: anterior posterior axis represented
:param coil_axis: tms coil axis
:return: angle between the two given axes
"""
ap_axis = np.array([ap_axis[0], ap_axis[1]])
coil_axis = np.array([float(coil_axis[0]), float(coil_axis[1])])
angle = np.rad2deg(np.arccos((np.dot(ap_axis, coil_axis))/(
np.linalg.norm(ap_axis)*np.linalg.norm(coil_axis))))
return float(angle)
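# --- Illustrative only: a quick sanity check of angle_calculation with
# made-up axes; perpendicular vectors should give 90 degrees.
def _example_angle_check():
    assert np.isclose(angle_calculation([1, 0], [0, 1]), 90.0)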
def base_creation_old(fiducials):
"""
Calculate the origin and matrix for coordinate system transformation.
q: origin of coordinate system
g1, g2, g3: orthogonal vectors of coordinate system
:param fiducials: array of 3 rows (p1, p2, p3) and 3 columns (x, y, z) with fiducials coordinates
:return: matrix and origin for base transformation
"""
p1 = fiducials[0, :]
p2 = fiducials[1, :]
p3 = fiducials[2, :]
sub1 = p2 - p1
sub2 = p3 - p1
lamb = (sub1[0]*sub2[0]+sub1[1]*sub2[1]+sub1[2]*sub2[2])/np.dot(sub1, sub1)
q = p1 + lamb*sub1
g1 = p1 - q
g2 = p3 - q
if not g1.any():
g1 = p2 - q
g3 = np.cross(g2, g1)
g1 = g1/np.sqrt(np.dot(g1, g1))
g2 = g2/np.sqrt(np.dot(g2, g2))
g3 = g3/np.sqrt(np.dot(g3, g3))
m = np.matrix([[g1[0], g1[1], g1[2]],
[g2[0], g2[1], g2[2]],
[g3[0], g3[1], g3[2]]])
m_inv = m.I
return m, q, m_inv
def base_creation(fiducials):
"""
Calculate the origin and matrix for coordinate system
transformation.
q: origin of coordinate system
g1, g2, g3: orthogonal vectors of coordinate system
:param fiducials: array of 3 rows (p1, p2, p3) and 3 columns (x, y, z) with fiducials coordinates
:return: matrix and origin for base transformation
"""
p1 = fiducials[0, :]
p2 = fiducials[1, :]
p3 = fiducials[2, :]
sub1 = p2 - p1
sub2 = p3 - p1
lamb = np.dot(sub1, sub2)/np.dot(sub1, sub1)
q = p1 + lamb*sub1
g1 = p3 - q
g2 = p1 - q
if not g1.any():
g1 = p2 - q
g3 = np.cross(g1, g2)
g1 = g1/np.sqrt(np.dot(g1, g1))
g2 = g2/np.sqrt(np.dot(g2, g2))
g3 = g3/np.sqrt(np.dot(g3, g3))
m = np.zeros([3, 3])
m[:, 0] = g1/np.sqrt(np.dot(g1, g1))
m[:, 1] = g2/np.sqrt(np.dot(g2, g2))
m[:, 2] = g3/np.sqrt(np.dot(g3, g3))
return m, q
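# --- Illustrative only: a self-check of base_creation with made-up fiducials.
# Any three non-collinear points work; the resulting m must be orthonormal and
# q is the foot of the perpendicular dropped from p3 onto the p1-p2 line.
def _example_base_creation_check():
    fids = np.array([[0., 0., 0.],
                     [10., 0., 0.],
                     [5., 8., 0.]])
    m, q = base_creation(fids)
    assert np.allclose(m @ m.T, np.identity(3))
    assert np.allclose(q, [5., 0., 0.])
    return m, q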
def calculate_fre(fiducials_raw, fiducials, ref_mode_id, m_change, m_icp=None):
"""
Calculate the Fiducial Registration Error for neuronavigation.
:param fiducials_raw: array of 6 rows (tracker probe and reference) and 3 columns (x, y, z) with coordinates
:type fiducials_raw: numpy.ndarray
:param fiducials: array of 6 rows (image and tracker fiducials) and 3 columns (x, y, z) with coordinates
:type fiducials: numpy.ndarray
:param ref_mode_id: Reference mode ID
:type ref_mode_id: int
:param m_change: 3x3 array representing change of basis from head in tracking system to vtk head system
:type m_change: numpy.ndarray
:param m_icp: list with icp flag and 3x3 affine array
:type m_icp: list[int, numpy.ndarray]
:return: float number of fiducial registration error
"""
if m_icp is not None:
icp = [True, m_icp]
else:
icp = [False, None]
dist = np.zeros([3, 1])
for i in range(0, 6, 2):
p_m, _ = dcr.corregistrate_dynamic((m_change, 0), fiducials_raw[i:i+2], ref_mode_id, icp)
dist[int(i/2)] = np.sqrt(np.sum(np.power((p_m[:3] - fiducials[int(i/2), :]), 2)))
return float(np.sqrt(np.sum(dist ** 2) / 3))
# The function flip_x_m is deprecated and was replaced by a simple minus multiplication of the Y coordinate as follows:
# coord_flip = list(coord)
# coord_flip[1] = -coord_flip[1]
# def flip_x_m(point):
# """
# Rotate coordinates of a vector by pi around X axis in static reference frame.
#
# InVesalius also require to multiply the z coordinate by (-1). Possibly
# because the origin of coordinate system of imagedata is
# located in superior left corner and the origin of VTK scene coordinate
# system (polygonal surface) is in the interior left corner. Second
# possibility is the order of slice stacking
#
# :param point: list of coordinates x, y and z
# :return: rotated coordinates
# """
#
# point_4 = np.hstack((point, 1.)).reshape(4, 1)
# point_4[2, 0] = -point_4[2, 0]
#
# m_rot = tr.euler_matrix(np.pi, 0, 0)
#
# point_rot = m_rot @ point_4
#
# return point_rot
def transform_icp(m_img, m_icp):
coord_img = [m_img[0, -1], -m_img[1, -1], m_img[2, -1], 1]
m_img[0, -1], m_img[1, -1], m_img[2, -1], _ = m_icp @ coord_img
m_img[1, -1] = -m_img[1, -1]
return m_img
def object_registration(fiducials, orients, coord_raw, m_change):
"""
:param fiducials: 3x3 array of fiducials translations
:param orients: 3x3 array of fiducials orientations in degrees
:param coord_raw: nx6 array of coordinates from tracking device where n = 1 is the reference attached to the head
:param m_change: 3x3 array representing change of basis from head in tracking system to vtk head system
:return:
"""
coords_aux = np.hstack((fiducials, orients))
mask = np.ones(len(coords_aux), dtype=bool)
mask[[3]] = False
coords = coords_aux[mask]
fids_dyn = np.zeros([4, 6])
fids_img = np.zeros([4, 6])
fids_raw = np.zeros([3, 3])
# compute fiducials of object with reference to the fixed probe in source frame
for ic in range(0, 3):
fids_raw[ic, :] = dco.dynamic_reference_m2(coords[ic, :], coords[3, :])[:3]
# compute initial alignment of probe fixed in the object in source frame
t_s0_raw = tr.translation_matrix(coords[3, :3])
r_s0_raw = tr.euler_matrix(np.radians(coords[3, 3]), np.radians(coords[3, 4]),
np.radians(coords[3, 5]), 'rzyx')
s0_raw = tr.concatenate_matrices(t_s0_raw, r_s0_raw)
# compute change of basis for object fiducials in source frame
base_obj_raw, q_obj_raw = base_creation(fids_raw[:3, :3])
r_obj_raw = np.identity(4)
r_obj_raw[:3, :3] = base_obj_raw[:3, :3]
t_obj_raw = tr.translation_matrix(q_obj_raw)
m_obj_raw = tr.concatenate_matrices(t_obj_raw, r_obj_raw)
for ic in range(0, 4):
if coord_raw.any():
# compute object fiducials in reference frame
fids_dyn[ic, :] = dco.dynamic_reference_m2(coords[ic, :], coord_raw[1, :])
else:
# compute object fiducials in source frame
fids_dyn[ic, :] = coords[ic, :]
fids_dyn[ic, 2] = -fids_dyn[ic, 2]
# compute object fiducials in vtk head frame
a, b, g = np.radians(fids_dyn[ic, 3:])
T_p = tr.translation_matrix(fids_dyn[ic, :3])
R_p = tr.euler_matrix(a, b, g, 'rzyx')
M_p = tr.concatenate_matrices(T_p, R_p)
M_img = m_change @ M_p
angles_img = np.degrees(np.asarray(tr.euler_from_matrix(M_img, 'rzyx')))
coord_img = list(M_img[:3, -1])
coord_img[1] = -coord_img[1]
fids_img[ic, :] = np.hstack((coord_img, angles_img))
# compute object base change in vtk head frame
base_obj_img, _ = base_creation(fids_img[:3, :3])
r_obj_img = np.identity(4)
r_obj_img[:3, :3] = base_obj_img[:3, :3]
# compute initial alignment of probe fixed in the object in reference (or static) frame
s0_trans_dyn = tr.translation_matrix(fids_dyn[3, :3])
s0_rot_dyn = tr.euler_matrix(np.radians(fids_dyn[3, 3]), np.radians(fids_dyn[3, 4]),
np.radians(fids_dyn[3, 5]), 'rzyx')
s0_dyn = tr.concatenate_matrices(s0_trans_dyn, s0_rot_dyn)
return t_obj_raw, s0_raw, r_s0_raw, s0_dyn, m_obj_raw, r_obj_img
| gpl-2.0 |
rcbops/opencenter-agent | tests/plugins/output/test_broken.py | 1 | 1755 | #!/usr/bin/env python
# OpenCenter(TM) is Copyright 2013 by Rackspace US, Inc.
##############################################################################
#
# OpenCenter is licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License. This
# version of OpenCenter includes Rackspace trademarks and logos, and in
# accordance with Section 6 of the License, the provision of commercial
# support services in conjunction with a version of OpenCenter which includes
# Rackspace trademarks and logos is prohibited. OpenCenter source code and
# details are available at: # https://github.com/rcbops/opencenter or upon
# written request.
#
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0 and a copy, including this
# notice, is available in the LICENSE file accompanying this software.
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the # specific language governing permissions and limitations
# under the License.
#
##############################################################################
name = 'output'
class State():
def __init__(self):
self.output_setup_called = False
self.output_teardown_called = False
self.output_handler_called = False
state = State()
def setup(config={}):
state.output_setup_called = True
# TODO: This isn't currently called
def teardown():
state.output_teardown_called = True
# This won't get called
def handler(input_data):
state.output_handler_called = True
| apache-2.0 |
COCS4950G7/COSC4950 | Source/Network/GUI_WXPythonExamples/GUI_KeyEvent.py | 1 | 1124 | __author__ = 'Chris HAmm'
#GUI_KeyEvent
#when we press a key on our keyboard, wx.KeyEvent is generated. This event is sent to the widget that currently has focus
#three different key handlers
#wx.EVT_KEY_DOWN
#wx.EVT_KEY_UP
#wx.EVT_CHAR
#common request is to close the application when the ESC key is pressed
import wx
class example(wx.Frame):
def __init__(self, *args, **kw):
super(example, self).__init__(*args, **kw)
self.InitUI()
def InitUI(self):
pnl = wx.Panel(self)
pnl.Bind(wx.EVT_KEY_DOWN, self.OnKeyDown)
pnl.SetFocus()
self.SetSize((250, 180))
self.SetTitle('Key Event')
self.Centre()
self.Show(True)
def OnKeyDown(self, e):
key = e.GetKeyCode()  # get the keycode for the pressed key
if key == wx.WXK_ESCAPE:  # if it is the ESCAPE key....
ret = wx.MessageBox('Are you sure you want to quit?', 'Question', wx.YES_NO | wx.NO_DEFAULT, self)
if ret == wx.YES:
self.Close()
def main():
ex = wx.App()
example(None)
ex.MainLoop()
if __name__ == '__main__':
main()
| gpl-3.0 |
JasonKessler/scattertext | scattertext/test/test_PriorFactory.py | 1 | 4207 | from unittest import TestCase
import numpy as np
import pandas as pd
from scattertext import LogOddsRatioInformativeDirichletPrior
from scattertext.PriorFactory import PriorFactory
from scattertext.test.test_semioticSquare import get_test_corpus
class TestPriorFactory(TestCase):
def test_all_categories(self):
corpus = get_test_corpus()
priors, my_corpus = (PriorFactory(corpus, starting_count=0, category='hamlet')
.use_all_categories()
.build())
tdf = corpus.get_term_freq_df()
self.assertEqual(len(priors), len(tdf))
np.testing.assert_equal(priors.values,
corpus.get_term_freq_df().sum(axis=1).values)
def test_neutral_categories(self):
corpus = get_test_corpus()
priors = (PriorFactory(corpus, 'hamlet', starting_count=0.001,
not_categories=['swift'])
.use_neutral_categories()
.get_priors())
self.assertEqual(priors.min(), 0.001)
self.assertEqual(priors.shape[0], corpus._X.shape[1])
corpus = get_test_corpus()
priors = (PriorFactory(corpus, 'hamlet', starting_count=0.001,
not_categories=['swift'])
.use_neutral_categories()
.drop_zero_priors()
.get_priors())
jzcnts = corpus.get_term_freq_df()['jay-z/r. kelly freq'].where(lambda x: x > 0).dropna()
np.testing.assert_equal(priors.values,
jzcnts.values + 0.001)
def test_get_general_term_frequencies(self):
corpus = get_test_corpus()
fact = (PriorFactory(corpus,
category='hamlet',
not_categories=['swift'],
starting_count=0)
.use_general_term_frequencies()
.use_all_categories()
)
priors, clean_corpus = fact.build()
expected_prior = pd.merge(corpus.get_term_doc_count_df(),
corpus.get_term_and_background_counts()[['background']],
left_index=True,
right_index=True,
how='left').fillna(0.).sum(axis=1)
np.testing.assert_allclose(priors.values, expected_prior.values)
def test_align_to_target(self):
full_corpus = get_test_corpus()
corpus = full_corpus.remove_categories(['swift'])
priors = PriorFactory(full_corpus).use_all_categories().get_priors()
with self.assertRaises(ValueError):
(LogOddsRatioInformativeDirichletPrior(priors)
.get_scores(*corpus.get_term_freq_df().values.T))
priors = (PriorFactory(full_corpus)
.use_all_categories()
.align_to_target(corpus)
.get_priors())
(LogOddsRatioInformativeDirichletPrior(priors)
.get_scores(*corpus.get_term_freq_df().values.T))
def test_use_categories(self):
full_corpus = get_test_corpus()
priors = PriorFactory(full_corpus).use_categories(['swift']).get_priors()
corpus = full_corpus.remove_categories(['swift'])
with self.assertRaises(ValueError):
(LogOddsRatioInformativeDirichletPrior(priors)
.get_scores(*corpus.get_term_freq_df().values.T))
priors = (PriorFactory(full_corpus)
.use_all_categories()
.align_to_target(corpus)
.get_priors())
(LogOddsRatioInformativeDirichletPrior(priors)
.get_scores(*corpus.get_term_freq_df().values.T))
def test_get_custom_term_frequencies(self):
corpus = get_test_corpus()
fact = (PriorFactory(corpus, starting_count=0.04)
.use_custom_term_frequencies(pd.Series({'halt': 3, 'i': 8}))
.drop_zero_priors()
)
priors, clean_corpus = fact.build()
self.assertEqual(set(clean_corpus.get_terms()), {'i', 'halt'})
np.testing.assert_equal(priors.sort_values().values, [3.04, 8.04])
| apache-2.0 |
robbwagoner/ansible-modules-core | files/assemble.py | 11 | 6786 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2012, Stephen Fromm <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
import os
import os.path
import shutil
import tempfile
import re
DOCUMENTATION = '''
---
module: assemble
short_description: Assembles a configuration file from fragments
description:
- Assembles a configuration file from fragments. Often a particular
program will take a single configuration file and does not support a
C(conf.d) style structure where it is easy to build up the configuration
from multiple sources. M(assemble) will take a directory of files that can be
local or have already been transferred to the system, and concatenate them
together to produce a destination file. Files are assembled in string sorting order.
Puppet calls this idea I(fragments).
version_added: "0.5"
options:
src:
description:
- An already existing directory full of source files.
required: true
default: null
aliases: []
dest:
description:
- A file to create using the concatenation of all of the source files.
required: true
default: null
backup:
description:
- Create a backup file (if C(yes)), including the timestamp information so
you can get the original file back if you somehow clobbered it
incorrectly.
required: false
choices: [ "yes", "no" ]
default: "no"
delimiter:
description:
- A delimiter to separate the file contents.
version_added: "1.4"
required: false
default: null
remote_src:
description:
- If False, it will search for src at originating/master machine, if True it will
go to the remote/target machine for the src. Default is True.
choices: [ "True", "False" ]
required: false
default: "True"
version_added: "1.4"
regexp:
description:
- Assemble files only if C(regexp) matches the filename. If not set,
all files are assembled. All "\\" (backslash) must be escaped as
"\\\\" to comply yaml syntax. Uses Python regular expressions; see
U(http://docs.python.org/2/library/re.html).
required: false
default: null
author: "Stephen Fromm (@sfromm)"
extends_documentation_fragment: files
'''
EXAMPLES = '''
# Example from Ansible Playbooks
- assemble: src=/etc/someapp/fragments dest=/etc/someapp/someapp.conf
# When a delimiter is specified, it will be inserted in between each fragment
- assemble: src=/etc/someapp/fragments dest=/etc/someapp/someapp.conf delimiter='### START FRAGMENT ###'
'''
# ===========================================
# Support method
def assemble_from_fragments(src_path, delimiter=None, compiled_regexp=None):
''' assemble a file from a directory of fragments '''
tmpfd, temp_path = tempfile.mkstemp()
tmp = os.fdopen(tmpfd,'w')
delimit_me = False
add_newline = False
for f in sorted(os.listdir(src_path)):
if compiled_regexp and not compiled_regexp.search(f):
continue
fragment = "%s/%s" % (src_path, f)
if not os.path.isfile(fragment):
continue
fragment_content = open(fragment).read()
# always put a newline between fragments if the previous fragment didn't end with a newline.
if add_newline:
tmp.write('\n')
# delimiters should only appear between fragments
if delimit_me:
if delimiter:
# un-escape anything like newlines
delimiter = delimiter.decode('unicode-escape')
tmp.write(delimiter)
# always make sure there's a newline after the
# delimiter, so lines don't run together
if delimiter[-1] != '\n':
tmp.write('\n')
tmp.write(fragment_content)
delimit_me = True
if fragment_content.endswith('\n'):
add_newline = False
else:
add_newline = True
tmp.close()
return temp_path
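# --- Illustrative only: exercising assemble_from_fragments directly. The
# fragment names and delimiter are made up; in normal use main() drives this
# from the Ansible 'src'/'dest'/'delimiter' parameters.
def _example_assemble():
    frag_dir = tempfile.mkdtemp()
    for name, text in (('10-first', 'alpha\n'), ('20-second', 'beta\n')):
        with open(os.path.join(frag_dir, name), 'w') as f:
            f.write(text)
    # returns the path of a temp file containing "alpha\n# ---\nbeta\n"
    return assemble_from_fragments(frag_dir, delimiter='# ---')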
# ==============================================================
# main
def main():
module = AnsibleModule(
# not checking because of daisy chain to file module
argument_spec = dict(
src = dict(required=True),
delimiter = dict(required=False),
dest = dict(required=True),
backup=dict(default=False, type='bool'),
remote_src=dict(default=False, type='bool'),
regexp = dict(required=False),
),
add_file_common_args=True
)
changed = False
path_md5 = None # Deprecated
path_hash = None
dest_hash = None
src = os.path.expanduser(module.params['src'])
dest = os.path.expanduser(module.params['dest'])
backup = module.params['backup']
delimiter = module.params['delimiter']
regexp = module.params['regexp']
compiled_regexp = None
if not os.path.exists(src):
module.fail_json(msg="Source (%s) does not exist" % src)
if not os.path.isdir(src):
module.fail_json(msg="Source (%s) is not a directory" % src)
if regexp != None:
try:
compiled_regexp = re.compile(regexp)
except re.error as e:
module.fail_json(msg="Invalid Regexp (%s) in \"%s\"" % (e, regexp))
path = assemble_from_fragments(src, delimiter, compiled_regexp)
path_hash = module.sha1(path)
if os.path.exists(dest):
dest_hash = module.sha1(dest)
if path_hash != dest_hash:
if backup and dest_hash is not None:
module.backup_local(dest)
shutil.copy(path, dest)
changed = True
# Backwards compat. This won't return data if FIPS mode is active
try:
pathmd5 = module.md5(path)
except ValueError:
pathmd5 = None
os.remove(path)
file_args = module.load_file_common_arguments(module.params)
changed = module.set_fs_attributes_if_different(file_args, changed)
# Mission complete
module.exit_json(src=src, dest=dest, md5sum=pathmd5, checksum=path_hash, changed=changed, msg="OK")
# import module snippets
from ansible.module_utils.basic import *
main()
| gpl-3.0 |
jostep/tensorflow | tensorflow/contrib/distributions/python/kernel_tests/mixture_test.py | 9 | 35750 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for Mixture distribution."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import contextlib
import numpy as np
from scipy import stats
from tensorflow.contrib import distributions
from tensorflow.core.protobuf import config_pb2
from tensorflow.python.client import session
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import random_seed
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import linalg_ops
from tensorflow.python.ops import nn_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
from tensorflow.python.platform import tf_logging as logging
distributions_py = distributions
def _swap_first_last_axes(array):
rank = len(array.shape)
transpose = [rank - 1] + list(range(0, rank - 1))
return array.transpose(transpose)
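# Example (illustrative): an input of shape (2, 3, 4) comes back with shape
# (4, 2, 3), i.e. the trailing components axis is moved to the front.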
def _mixture_stddev_np(pi_vector, mu_vector, sigma_vector):
"""Computes the standard deviation of a univariate mixture distribution.
Acts upon `np.array`s (not `tf.Tensor`s).
Args:
pi_vector: A `np.array` of mixture weights. Shape `[batch, components]`.
mu_vector: A `np.array` of means. Shape `[batch, components]`
sigma_vector: A `np.array` of stddevs. Shape `[batch, components]`.
Returns:
A `np.array` containing the batch of standard deviations.
"""
pi_vector = np.expand_dims(pi_vector, axis=1)
mean_wa = np.matmul(pi_vector, np.expand_dims(mu_vector, axis=2))
var_wa = np.matmul(pi_vector, np.expand_dims(sigma_vector**2, axis=2))
mid_term = np.matmul(pi_vector, np.expand_dims(mu_vector**2, axis=2))
mixture_variance = (
np.squeeze(var_wa) + np.squeeze(mid_term) - np.squeeze(mean_wa**2))
return np.sqrt(mixture_variance)
@contextlib.contextmanager
def _test_capture_mvndiag_sample_outputs():
"""Use monkey-patching to capture the output of an MVNDiag _sample_n."""
data_container = []
true_mvndiag_sample_n = distributions_py.MultivariateNormalDiag._sample_n
def _capturing_mvndiag_sample_n(self, n, seed=None):
samples = true_mvndiag_sample_n(self, n=n, seed=seed)
data_container.append(samples)
return samples
distributions_py.MultivariateNormalDiag._sample_n = (
_capturing_mvndiag_sample_n)
yield data_container
distributions_py.MultivariateNormalDiag._sample_n = true_mvndiag_sample_n
@contextlib.contextmanager
def _test_capture_normal_sample_outputs():
"""Use monkey-patching to capture the output of an Normal _sample_n."""
data_container = []
true_normal_sample_n = distributions_py.Normal._sample_n
def _capturing_normal_sample_n(self, n, seed=None):
samples = true_normal_sample_n(self, n=n, seed=seed)
data_container.append(samples)
return samples
distributions_py.Normal._sample_n = _capturing_normal_sample_n
yield data_container
distributions_py.Normal._sample_n = true_normal_sample_n
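# Usage sketch (illustrative, not part of the test suite): while either
# context manager is active, each _sample_n call appends its output to the
# yielded list, e.g.
#
#   with _test_capture_normal_sample_outputs() as captured:
#     samples = dist.sample(4, seed=123)
#   # captured[i] holds the raw samples drawn from component i.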
def make_univariate_mixture(batch_shape, num_components):
batch_shape = ops.convert_to_tensor(batch_shape, dtypes.int32)
logits = random_ops.random_uniform(
array_ops.concat((batch_shape, [num_components]), axis=0),
-1, 1, dtype=dtypes.float32) - 50.
components = [
distributions_py.Normal(
loc=random_ops.random_normal(batch_shape),
scale=10 * random_ops.random_uniform(batch_shape))
for _ in range(num_components)
]
cat = distributions_py.Categorical(logits, dtype=dtypes.int32)
return distributions_py.Mixture(cat, components)
def make_multivariate_mixture(batch_shape, num_components, event_shape,
batch_shape_tensor=None):
if batch_shape_tensor is None:
batch_shape_tensor = batch_shape
batch_shape_tensor = ops.convert_to_tensor(batch_shape_tensor, dtypes.int32)
logits = random_ops.random_uniform(
array_ops.concat((batch_shape_tensor, [num_components]), 0),
-1, 1, dtype=dtypes.float32) - 50.
logits.set_shape(
tensor_shape.TensorShape(batch_shape).concatenate(num_components))
static_batch_and_event_shape = (
tensor_shape.TensorShape(batch_shape).concatenate(event_shape))
event_shape = ops.convert_to_tensor(event_shape, dtypes.int32)
batch_and_event_shape = array_ops.concat((batch_shape_tensor, event_shape), 0)
def create_component():
loc = random_ops.random_normal(batch_and_event_shape)
scale_diag = 10 * random_ops.random_uniform(batch_and_event_shape)
loc.set_shape(static_batch_and_event_shape)
scale_diag.set_shape(static_batch_and_event_shape)
return distributions_py.MultivariateNormalDiag(
loc=loc, scale_diag=scale_diag)
components = [create_component() for _ in range(num_components)]
cat = distributions_py.Categorical(logits, dtype=dtypes.int32)
return distributions_py.Mixture(cat, components)
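# Shape bookkeeping example (illustrative): batch_shape=[2, 3],
# num_components=2, event_shape=[4] yields cat logits of shape [2, 3, 2] and
# two MultivariateNormalDiag components, each with batch shape [2, 3] and
# event shape [4].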
class MixtureTest(test.TestCase):
def testShapes(self):
with self.test_session():
for batch_shape in ([], [1], [2, 3, 4]):
dist = make_univariate_mixture(batch_shape, num_components=10)
self.assertAllEqual(batch_shape, dist.batch_shape)
self.assertAllEqual(batch_shape, dist.batch_shape_tensor().eval())
self.assertAllEqual([], dist.event_shape)
self.assertAllEqual([], dist.event_shape_tensor().eval())
for event_shape in ([1], [2]):
dist = make_multivariate_mixture(
batch_shape, num_components=10, event_shape=event_shape)
self.assertAllEqual(batch_shape, dist.batch_shape)
self.assertAllEqual(batch_shape, dist.batch_shape_tensor().eval())
self.assertAllEqual(event_shape, dist.event_shape)
self.assertAllEqual(event_shape, dist.event_shape_tensor().eval())
def testBrokenShapesStatic(self):
with self.assertRaisesWithPredicateMatch(ValueError,
r"cat.num_classes != len"):
distributions_py.Mixture(
distributions_py.Categorical([0.1, 0.5]), # 2 classes
[distributions_py.Normal(loc=1.0, scale=2.0)])
with self.assertRaisesWithPredicateMatch(
ValueError, r"\(\) and \(2,\) are not compatible"):
# The value error is raised because the batch shapes of the
# Normals are not equal. One is a scalar, the other is a
# vector of size (2,).
distributions_py.Mixture(
distributions_py.Categorical([-0.5, 0.5]), # scalar batch
[
distributions_py.Normal(
loc=1.0, scale=2.0), # scalar dist
distributions_py.Normal(
loc=[1.0, 1.0], scale=[2.0, 2.0])
])
with self.assertRaisesWithPredicateMatch(ValueError, r"Could not infer"):
cat_logits = array_ops.placeholder(shape=[1, None], dtype=dtypes.float32)
distributions_py.Mixture(
distributions_py.Categorical(cat_logits),
[distributions_py.Normal(
loc=[1.0], scale=[2.0])])
def testBrokenShapesDynamic(self):
with self.test_session():
d0_param = array_ops.placeholder(dtype=dtypes.float32)
d1_param = array_ops.placeholder(dtype=dtypes.float32)
d = distributions_py.Mixture(
distributions_py.Categorical([0.1, 0.2]), [
distributions_py.Normal(
loc=d0_param, scale=d0_param), distributions_py.Normal(
loc=d1_param, scale=d1_param)
],
validate_args=True)
with self.assertRaisesOpError(r"batch shape must match"):
d.sample().eval(feed_dict={d0_param: [2.0, 3.0], d1_param: [1.0]})
with self.assertRaisesOpError(r"batch shape must match"):
d.sample().eval(feed_dict={d0_param: [2.0, 3.0], d1_param: 1.0})
def testBrokenTypes(self):
with self.assertRaisesWithPredicateMatch(TypeError, "Categorical"):
distributions_py.Mixture(None, [])
cat = distributions_py.Categorical([0.3, 0.2])
# components must be a list of distributions
with self.assertRaisesWithPredicateMatch(
TypeError, "all .* must be Distribution instances"):
distributions_py.Mixture(cat, [None])
with self.assertRaisesWithPredicateMatch(TypeError, "same dtype"):
distributions_py.Mixture(
cat, [
distributions_py.Normal(loc=[1.0], scale=[2.0]),
distributions_py.Normal(loc=[np.float16(1.0)],
scale=[np.float16(2.0)]),
])
with self.assertRaisesWithPredicateMatch(ValueError, "non-empty list"):
distributions_py.Mixture(distributions_py.Categorical([0.3, 0.2]), None)
# TODO(ebrevdo): once distribution Domains have been added, add a
# test to ensure that the domains of the distributions in a
# mixture are checked for equivalence.
def testMeanUnivariate(self):
with self.test_session() as sess:
for batch_shape in ((), (2,), (2, 3)):
dist = make_univariate_mixture(
batch_shape=batch_shape, num_components=2)
mean = dist.mean()
self.assertEqual(batch_shape, mean.get_shape())
cat_probs = nn_ops.softmax(dist.cat.logits)
dist_means = [d.mean() for d in dist.components]
mean_value, cat_probs_value, dist_means_value = sess.run(
[mean, cat_probs, dist_means])
self.assertEqual(batch_shape, mean_value.shape)
cat_probs_value = _swap_first_last_axes(cat_probs_value)
true_mean = sum(
[c_p * m for (c_p, m) in zip(cat_probs_value, dist_means_value)])
self.assertAllClose(true_mean, mean_value)
def testMeanMultivariate(self):
with self.test_session() as sess:
for batch_shape in ((), (2,), (2, 3)):
dist = make_multivariate_mixture(
batch_shape=batch_shape, num_components=2, event_shape=(4,))
mean = dist.mean()
self.assertEqual(batch_shape + (4,), mean.get_shape())
cat_probs = nn_ops.softmax(dist.cat.logits)
dist_means = [d.mean() for d in dist.components]
mean_value, cat_probs_value, dist_means_value = sess.run(
[mean, cat_probs, dist_means])
self.assertEqual(batch_shape + (4,), mean_value.shape)
cat_probs_value = _swap_first_last_axes(cat_probs_value)
# Add a new innermost dimension for broadcasting to mvn vector shape
cat_probs_value = [np.expand_dims(c_p, -1) for c_p in cat_probs_value]
true_mean = sum(
[c_p * m for (c_p, m) in zip(cat_probs_value, dist_means_value)])
self.assertAllClose(true_mean, mean_value)
def testStddevShapeUnivariate(self):
num_components = 2
    # This is the same shape test as the one done in 'testMeanUnivariate'.
with self.test_session() as sess:
for batch_shape in ((), (2,), (2, 3)):
dist = make_univariate_mixture(
batch_shape=batch_shape, num_components=num_components)
dev = dist.stddev()
self.assertEqual(batch_shape, dev.get_shape())
cat_probs = nn_ops.softmax(dist.cat.logits)
dist_devs = [d.stddev() for d in dist.components]
dist_means = [d.mean() for d in dist.components]
res = sess.run([dev, cat_probs, dist_devs, dist_means])
dev_value, cat_probs_values, dist_devs_values, dist_means_values = res
# Manual computation of stddev.
batch_shape_res = cat_probs_values.shape[:-1]
event_shape_res = dist_devs_values[0].shape[len(batch_shape_res):]
stacked_mean_res = np.stack(dist_means_values, -1)
stacked_dev_res = np.stack(dist_devs_values, -1)
# Broadcast cat probs over event dimensions.
for _ in range(len(event_shape_res)):
cat_probs_values = np.expand_dims(cat_probs_values, len(batch_shape))
cat_probs_values = cat_probs_values + np.zeros_like(stacked_dev_res) # pylint: disable=g-no-augmented-assignment
# Perform stddev computation on a flattened batch.
flat_batch_manual_dev = _mixture_stddev_np(
np.reshape(cat_probs_values, [-1, num_components]),
np.reshape(stacked_mean_res, [-1, num_components]),
np.reshape(stacked_dev_res, [-1, num_components]))
# Reshape to full shape.
full_shape_res = list(batch_shape_res) + list(event_shape_res)
manual_dev = np.reshape(flat_batch_manual_dev, full_shape_res)
self.assertEqual(batch_shape, dev_value.shape)
self.assertAllClose(manual_dev, dev_value)
def testStddevShapeMultivariate(self):
num_components = 2
    # This is the same shape test as the one done in 'testMeanMultivariate'.
with self.test_session() as sess:
for batch_shape in ((), (2,), (2, 3)):
dist = make_multivariate_mixture(
batch_shape=batch_shape,
num_components=num_components,
event_shape=(4,))
dev = dist.stddev()
self.assertEqual(batch_shape + (4,), dev.get_shape())
cat_probs = nn_ops.softmax(dist.cat.logits)
dist_devs = [d.stddev() for d in dist.components]
dist_means = [d.mean() for d in dist.components]
res = sess.run([dev, cat_probs, dist_devs, dist_means])
dev_value, cat_probs_values, dist_devs_values, dist_means_values = res
# Manual computation of stddev.
batch_shape_res = cat_probs_values.shape[:-1]
event_shape_res = dist_devs_values[0].shape[len(batch_shape_res):]
stacked_mean_res = np.stack(dist_means_values, -1)
stacked_dev_res = np.stack(dist_devs_values, -1)
# Broadcast cat probs over event dimensions.
for _ in range(len(event_shape_res)):
cat_probs_values = np.expand_dims(cat_probs_values, len(batch_shape))
cat_probs_values = cat_probs_values + np.zeros_like(stacked_dev_res) # pylint: disable=g-no-augmented-assignment
# Perform stddev computation on a flattened batch.
flat_batch_manual_dev = _mixture_stddev_np(
np.reshape(cat_probs_values, [-1, num_components]),
np.reshape(stacked_mean_res, [-1, num_components]),
np.reshape(stacked_dev_res, [-1, num_components]))
# Reshape to full shape.
full_shape_res = list(batch_shape_res) + list(event_shape_res)
manual_dev = np.reshape(flat_batch_manual_dev, full_shape_res)
self.assertEqual(tuple(full_shape_res), dev_value.shape)
self.assertAllClose(manual_dev, dev_value)
def testSpecificStddevValue(self):
cat_probs = np.array([0.5, 0.5])
component_means = np.array([-10, 0.1])
component_devs = np.array([0.05, 2.33])
ground_truth_stddev = 5.3120805
mixture_dist = distributions_py.Mixture(
cat=distributions_py.Categorical(probs=cat_probs),
components=[
distributions_py.Normal(loc=component_means[0],
scale=component_devs[0]),
distributions_py.Normal(loc=component_means[1],
scale=component_devs[1]),
])
mix_dev = mixture_dist.stddev()
with self.test_session() as sess:
actual_stddev = sess.run(mix_dev)
self.assertAllClose(actual_stddev, ground_truth_stddev)
def testProbScalarUnivariate(self):
with self.test_session() as sess:
dist = make_univariate_mixture(batch_shape=[], num_components=2)
for x in [
np.array(
[1.0, 2.0], dtype=np.float32), np.array(
1.0, dtype=np.float32),
np.random.randn(3, 4).astype(np.float32)
]:
p_x = dist.prob(x)
self.assertEqual(x.shape, p_x.get_shape())
cat_probs = nn_ops.softmax([dist.cat.logits])[0]
dist_probs = [d.prob(x) for d in dist.components]
p_x_value, cat_probs_value, dist_probs_value = sess.run(
[p_x, cat_probs, dist_probs])
self.assertEqual(x.shape, p_x_value.shape)
total_prob = sum(c_p_value * d_p_value
for (c_p_value, d_p_value
) in zip(cat_probs_value, dist_probs_value))
self.assertAllClose(total_prob, p_x_value)
def testProbScalarMultivariate(self):
with self.test_session() as sess:
dist = make_multivariate_mixture(
batch_shape=[], num_components=2, event_shape=[3])
for x in [
np.array(
[[-1.0, 0.0, 1.0], [0.5, 1.0, -0.3]], dtype=np.float32), np.array(
[-1.0, 0.0, 1.0], dtype=np.float32),
np.random.randn(2, 2, 3).astype(np.float32)
]:
p_x = dist.prob(x)
self.assertEqual(x.shape[:-1], p_x.get_shape())
cat_probs = nn_ops.softmax([dist.cat.logits])[0]
dist_probs = [d.prob(x) for d in dist.components]
p_x_value, cat_probs_value, dist_probs_value = sess.run(
[p_x, cat_probs, dist_probs])
self.assertEqual(x.shape[:-1], p_x_value.shape)
total_prob = sum(c_p_value * d_p_value
for (c_p_value, d_p_value
) in zip(cat_probs_value, dist_probs_value))
self.assertAllClose(total_prob, p_x_value)
def testProbBatchUnivariate(self):
with self.test_session() as sess:
dist = make_univariate_mixture(batch_shape=[2, 3], num_components=2)
for x in [
np.random.randn(2, 3).astype(np.float32),
np.random.randn(4, 2, 3).astype(np.float32)
]:
p_x = dist.prob(x)
self.assertEqual(x.shape, p_x.get_shape())
cat_probs = nn_ops.softmax(dist.cat.logits)
dist_probs = [d.prob(x) for d in dist.components]
p_x_value, cat_probs_value, dist_probs_value = sess.run(
[p_x, cat_probs, dist_probs])
self.assertEqual(x.shape, p_x_value.shape)
cat_probs_value = _swap_first_last_axes(cat_probs_value)
total_prob = sum(c_p_value * d_p_value
for (c_p_value, d_p_value
) in zip(cat_probs_value, dist_probs_value))
self.assertAllClose(total_prob, p_x_value)
def testProbBatchMultivariate(self):
with self.test_session() as sess:
dist = make_multivariate_mixture(
batch_shape=[2, 3], num_components=2, event_shape=[4])
for x in [
np.random.randn(2, 3, 4).astype(np.float32),
np.random.randn(4, 2, 3, 4).astype(np.float32)
]:
p_x = dist.prob(x)
self.assertEqual(x.shape[:-1], p_x.get_shape())
cat_probs = nn_ops.softmax(dist.cat.logits)
dist_probs = [d.prob(x) for d in dist.components]
p_x_value, cat_probs_value, dist_probs_value = sess.run(
[p_x, cat_probs, dist_probs])
self.assertEqual(x.shape[:-1], p_x_value.shape)
cat_probs_value = _swap_first_last_axes(cat_probs_value)
total_prob = sum(c_p_value * d_p_value
for (c_p_value, d_p_value
) in zip(cat_probs_value, dist_probs_value))
self.assertAllClose(total_prob, p_x_value)
def testSampleScalarBatchUnivariate(self):
with self.test_session() as sess:
num_components = 3
batch_shape = []
dist = make_univariate_mixture(
batch_shape=batch_shape, num_components=num_components)
n = 4
with _test_capture_normal_sample_outputs() as component_samples:
samples = dist.sample(n, seed=123)
self.assertEqual(samples.dtype, dtypes.float32)
self.assertEqual((4,), samples.get_shape())
cat_samples = dist.cat.sample(n, seed=123)
sample_values, cat_sample_values, dist_sample_values = sess.run(
[samples, cat_samples, component_samples])
self.assertEqual((4,), sample_values.shape)
for c in range(num_components):
which_c = np.where(cat_sample_values == c)[0]
size_c = which_c.size
# Scalar Batch univariate case: batch_size == 1, rank 1
which_dist_samples = dist_sample_values[c][:size_c]
self.assertAllClose(which_dist_samples, sample_values[which_c])
# Test that sampling with the same seed twice gives the same results.
def testSampleMultipleTimes(self):
# 5 component mixture.
logits = [-10.0, -5.0, 0.0, 5.0, 10.0]
mus = [-5.0, 0.0, 5.0, 4.0, 20.0]
sigmas = [0.1, 5.0, 3.0, 0.2, 4.0]
with self.test_session():
n = 100
random_seed.set_random_seed(654321)
components = [
distributions_py.Normal(
loc=mu, scale=sigma) for mu, sigma in zip(mus, sigmas)
]
cat = distributions_py.Categorical(
logits, dtype=dtypes.int32, name="cat1")
dist1 = distributions_py.Mixture(cat, components, name="mixture1")
samples1 = dist1.sample(n, seed=123456).eval()
random_seed.set_random_seed(654321)
components2 = [
distributions_py.Normal(
loc=mu, scale=sigma) for mu, sigma in zip(mus, sigmas)
]
cat2 = distributions_py.Categorical(
logits, dtype=dtypes.int32, name="cat2")
dist2 = distributions_py.Mixture(cat2, components2, name="mixture2")
samples2 = dist2.sample(n, seed=123456).eval()
self.assertAllClose(samples1, samples2)
def testSampleScalarBatchMultivariate(self):
with self.test_session() as sess:
num_components = 3
dist = make_multivariate_mixture(
batch_shape=[], num_components=num_components, event_shape=[2])
n = 4
with _test_capture_mvndiag_sample_outputs() as component_samples:
samples = dist.sample(n, seed=123)
self.assertEqual(samples.dtype, dtypes.float32)
self.assertEqual((4, 2), samples.get_shape())
cat_samples = dist.cat.sample(n, seed=123)
sample_values, cat_sample_values, dist_sample_values = sess.run(
[samples, cat_samples, component_samples])
self.assertEqual((4, 2), sample_values.shape)
for c in range(num_components):
which_c = np.where(cat_sample_values == c)[0]
size_c = which_c.size
# Scalar Batch multivariate case: batch_size == 1, rank 2
which_dist_samples = dist_sample_values[c][:size_c, :]
self.assertAllClose(which_dist_samples, sample_values[which_c, :])
def testSampleBatchUnivariate(self):
with self.test_session() as sess:
num_components = 3
dist = make_univariate_mixture(
batch_shape=[2, 3], num_components=num_components)
n = 4
with _test_capture_normal_sample_outputs() as component_samples:
samples = dist.sample(n, seed=123)
self.assertEqual(samples.dtype, dtypes.float32)
self.assertEqual((4, 2, 3), samples.get_shape())
cat_samples = dist.cat.sample(n, seed=123)
sample_values, cat_sample_values, dist_sample_values = sess.run(
[samples, cat_samples, component_samples])
self.assertEqual((4, 2, 3), sample_values.shape)
for c in range(num_components):
which_c_s, which_c_b0, which_c_b1 = np.where(cat_sample_values == c)
size_c = which_c_s.size
# Batch univariate case: batch_size == [2, 3], rank 3
which_dist_samples = dist_sample_values[c][range(size_c), which_c_b0,
which_c_b1]
self.assertAllClose(which_dist_samples,
sample_values[which_c_s, which_c_b0, which_c_b1])
def _testSampleBatchMultivariate(self, fully_known_batch_shape):
with self.test_session() as sess:
num_components = 3
if fully_known_batch_shape:
batch_shape = [2, 3]
batch_shape_tensor = [2, 3]
else:
batch_shape = [None, 3]
batch_shape_tensor = array_ops.placeholder(dtype=dtypes.int32)
dist = make_multivariate_mixture(
batch_shape=batch_shape,
num_components=num_components, event_shape=[4],
batch_shape_tensor=batch_shape_tensor)
n = 5
with _test_capture_mvndiag_sample_outputs() as component_samples:
samples = dist.sample(n, seed=123)
self.assertEqual(samples.dtype, dtypes.float32)
if fully_known_batch_shape:
self.assertEqual((5, 2, 3, 4), samples.get_shape())
else:
self.assertEqual([5, None, 3, 4], samples.get_shape().as_list())
cat_samples = dist.cat.sample(n, seed=123)
if fully_known_batch_shape:
feed_dict = {}
else:
feed_dict = {batch_shape_tensor: [2, 3]}
sample_values, cat_sample_values, dist_sample_values = sess.run(
[samples, cat_samples, component_samples],
feed_dict=feed_dict)
self.assertEqual((5, 2, 3, 4), sample_values.shape)
for c in range(num_components):
which_c_s, which_c_b0, which_c_b1 = np.where(cat_sample_values == c)
size_c = which_c_s.size
        # Batch multivariate case: batch_size == [2, 3], rank 4
which_dist_samples = dist_sample_values[c][range(size_c), which_c_b0,
which_c_b1, :]
self.assertAllClose(which_dist_samples,
sample_values[which_c_s, which_c_b0, which_c_b1, :])
def testSampleBatchMultivariateFullyKnownBatchShape(self):
self._testSampleBatchMultivariate(fully_known_batch_shape=True)
def testSampleBatchMultivariateNotFullyKnownBatchShape(self):
self._testSampleBatchMultivariate(fully_known_batch_shape=False)
def testEntropyLowerBoundMultivariate(self):
with self.test_session() as sess:
for batch_shape in ((), (2,), (2, 3)):
dist = make_multivariate_mixture(
batch_shape=batch_shape, num_components=2, event_shape=(4,))
entropy_lower_bound = dist.entropy_lower_bound()
self.assertEqual(batch_shape, entropy_lower_bound.get_shape())
cat_probs = nn_ops.softmax(dist.cat.logits)
dist_entropy = [d.entropy() for d in dist.components]
entropy_lower_bound_value, cat_probs_value, dist_entropy_value = (
sess.run([entropy_lower_bound, cat_probs, dist_entropy]))
self.assertEqual(batch_shape, entropy_lower_bound_value.shape)
cat_probs_value = _swap_first_last_axes(cat_probs_value)
# entropy_lower_bound = sum_i pi_i entropy_i
# for i in num_components, batchwise.
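        # It is a lower bound because H(X) >= H(X | Z) = sum_i pi_i H_i,
        # where Z is the latent component indicator.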
true_entropy_lower_bound = sum(
[c_p * m for (c_p, m) in zip(cat_probs_value, dist_entropy_value)])
self.assertAllClose(true_entropy_lower_bound, entropy_lower_bound_value)
def testCdfScalarUnivariate(self):
"""Tests CDF against scipy for a mixture of seven gaussians."""
# Construct a mixture of gaussians with seven components.
n_components = 7
# pre-softmax mixture probabilities.
mixture_weight_logits = np.random.uniform(
low=-1, high=1, size=(n_components,)).astype(np.float32)
def _scalar_univariate_softmax(x):
e_x = np.exp(x - np.max(x))
return e_x / e_x.sum()
# Construct the distributions_py.Mixture object.
mixture_weights = _scalar_univariate_softmax(mixture_weight_logits)
means = [np.random.uniform(low=-10, high=10, size=()).astype(np.float32)
for _ in range(n_components)]
sigmas = [np.ones(shape=(), dtype=np.float32) for _ in range(n_components)]
cat_tf = distributions_py.Categorical(probs=mixture_weights)
components_tf = [distributions_py.Normal(loc=mu, scale=sigma)
for (mu, sigma) in zip(means, sigmas)]
mixture_tf = distributions_py.Mixture(cat=cat_tf, components=components_tf)
x_tensor = array_ops.placeholder(shape=(), dtype=dtypes.float32)
# These are two test cases to verify.
xs_to_check = [
np.array(1.0, dtype=np.float32),
np.array(np.random.randn()).astype(np.float32)
]
# Carry out the test for both d.cdf and exp(d.log_cdf).
x_cdf_tf = mixture_tf.cdf(x_tensor)
x_log_cdf_tf = mixture_tf.log_cdf(x_tensor)
with self.test_session() as sess:
for x_feed in xs_to_check:
x_cdf_tf_result, x_log_cdf_tf_result = sess.run(
[x_cdf_tf, x_log_cdf_tf], feed_dict={x_tensor: x_feed})
# Compute the cdf with scipy.
scipy_component_cdfs = [stats.norm.cdf(x=x_feed, loc=mu, scale=sigma)
for (mu, sigma) in zip(means, sigmas)]
scipy_cdf_result = np.dot(mixture_weights,
np.array(scipy_component_cdfs))
self.assertAllClose(x_cdf_tf_result, scipy_cdf_result)
self.assertAllClose(np.exp(x_log_cdf_tf_result), scipy_cdf_result)
def testCdfBatchUnivariate(self):
"""Tests against scipy for a (batch of) mixture(s) of seven gaussians."""
n_components = 7
batch_size = 5
mixture_weight_logits = np.random.uniform(
low=-1, high=1, size=(batch_size, n_components)).astype(np.float32)
def _batch_univariate_softmax(x):
e_x = np.exp(x)
e_x_sum = np.expand_dims(np.sum(e_x, axis=1), axis=1)
return e_x / np.tile(e_x_sum, reps=[1, x.shape[1]])
psize = (batch_size,)
mixture_weights = _batch_univariate_softmax(mixture_weight_logits)
means = [np.random.uniform(low=-10, high=10, size=psize).astype(np.float32)
for _ in range(n_components)]
sigmas = [np.ones(shape=psize, dtype=np.float32)
for _ in range(n_components)]
cat_tf = distributions_py.Categorical(probs=mixture_weights)
components_tf = [distributions_py.Normal(loc=mu, scale=sigma)
for (mu, sigma) in zip(means, sigmas)]
mixture_tf = distributions_py.Mixture(cat=cat_tf, components=components_tf)
x_tensor = array_ops.placeholder(shape=psize, dtype=dtypes.float32)
xs_to_check = [
np.array([1.0, 5.9, -3, 0.0, 0.0], dtype=np.float32),
np.random.randn(batch_size).astype(np.float32)
]
x_cdf_tf = mixture_tf.cdf(x_tensor)
x_log_cdf_tf = mixture_tf.log_cdf(x_tensor)
with self.test_session() as sess:
for x_feed in xs_to_check:
x_cdf_tf_result, x_log_cdf_tf_result = sess.run(
[x_cdf_tf, x_log_cdf_tf],
feed_dict={x_tensor: x_feed})
# Compute the cdf with scipy.
scipy_component_cdfs = [stats.norm.cdf(x=x_feed, loc=mu, scale=sigma)
for (mu, sigma) in zip(means, sigmas)]
weights_and_cdfs = zip(np.transpose(mixture_weights, axes=[1, 0]),
scipy_component_cdfs)
final_cdf_probs_per_component = [
np.multiply(c_p_value, d_cdf_value)
for (c_p_value, d_cdf_value) in weights_and_cdfs]
scipy_cdf_result = np.sum(final_cdf_probs_per_component, axis=0)
self.assertAllClose(x_cdf_tf_result, scipy_cdf_result)
self.assertAllClose(np.exp(x_log_cdf_tf_result), scipy_cdf_result)
class MixtureBenchmark(test.Benchmark):
def _runSamplingBenchmark(self, name, create_distribution, use_gpu,
num_components, batch_size, num_features,
sample_size):
config = config_pb2.ConfigProto()
config.allow_soft_placement = True
np.random.seed(127)
with session.Session(config=config, graph=ops.Graph()) as sess:
random_seed.set_random_seed(0)
with ops.device("/device:GPU:0" if use_gpu else "/cpu:0"):
mixture = create_distribution(
num_components=num_components,
batch_size=batch_size,
num_features=num_features)
sample_op = mixture.sample(sample_size).op
sess.run(variables.global_variables_initializer())
reported = self.run_op_benchmark(
sess,
sample_op,
min_iters=10,
name=("%s_%s_components_%d_batch_%d_features_%d_sample_%d" %
(name, use_gpu, num_components, batch_size, num_features,
sample_size)))
logging.vlog(2, "\t".join(["%s", "%d", "%d", "%d", "%d", "%g"]) % (
use_gpu, num_components, batch_size, num_features, sample_size,
reported["wall_time"]))
def benchmarkSamplingMVNDiag(self):
logging.vlog(
2, "mvn_diag\tuse_gpu\tcomponents\tbatch\tfeatures\tsample\twall_time")
def create_distribution(batch_size, num_components, num_features):
cat = distributions_py.Categorical(
logits=np.random.randn(batch_size, num_components))
mus = [
variables.Variable(np.random.randn(batch_size, num_features))
for _ in range(num_components)
]
sigmas = [
variables.Variable(np.random.rand(batch_size, num_features))
for _ in range(num_components)
]
components = list(
distributions_py.MultivariateNormalDiag(
loc=mu, scale_diag=sigma) for (mu, sigma) in zip(mus, sigmas))
return distributions_py.Mixture(cat, components)
for use_gpu in False, True:
if use_gpu and not test.is_gpu_available():
continue
for num_components in 1, 8, 16:
for batch_size in 1, 32:
for num_features in 1, 64, 512:
for sample_size in 1, 32, 128:
self._runSamplingBenchmark(
"mvn_diag",
create_distribution=create_distribution,
use_gpu=use_gpu,
num_components=num_components,
batch_size=batch_size,
num_features=num_features,
sample_size=sample_size)
def benchmarkSamplingMVNFull(self):
logging.vlog(
2, "mvn_full\tuse_gpu\tcomponents\tbatch\tfeatures\tsample\twall_time")
def psd(x):
"""Construct batch-wise PSD matrices."""
return np.stack([np.dot(np.transpose(z), z) for z in x])
def create_distribution(batch_size, num_components, num_features):
cat = distributions_py.Categorical(
logits=np.random.randn(batch_size, num_components))
mus = [
variables.Variable(np.random.randn(batch_size, num_features))
for _ in range(num_components)
]
sigmas = [
variables.Variable(
psd(np.random.rand(batch_size, num_features, num_features)))
for _ in range(num_components)
]
components = list(
distributions_py.MultivariateNormalTriL(
loc=mu, scale_tril=linalg_ops.cholesky(sigma))
for (mu, sigma) in zip(mus, sigmas))
return distributions_py.Mixture(cat, components)
for use_gpu in False, True:
if use_gpu and not test.is_gpu_available():
continue
for num_components in 1, 8, 16:
for batch_size in 1, 32:
for num_features in 1, 64, 512:
for sample_size in 1, 32, 128:
self._runSamplingBenchmark(
"mvn_full",
create_distribution=create_distribution,
use_gpu=use_gpu,
num_components=num_components,
batch_size=batch_size,
num_features=num_features,
sample_size=sample_size)
if __name__ == "__main__":
test.main()
| apache-2.0 |
Orange-OpenSource/cf-php-build-pack | vendor/node-semver/semver/tests/test_increment_version.py | 5 | 2371 | # -*- coding:utf-8 -*-
import pytest
# node-semver/test/index.js
# import logging
# logging.basicConfig(level=logging.DEBUG, format="%(message)s")
cands = [
['1.2.3', 'major', '2.0.0', False],
['1.2.3', 'minor', '1.3.0', False],
['1.2.3', 'patch', '1.2.4', False],
['1.2.3tag', 'major', '2.0.0', True],
['1.2.3-tag', 'major', '2.0.0', False],
['1.2.3', 'fake', None, False],
['1.2.0-0', 'patch', '1.2.0', False],
['fake', 'major', None, False],
['1.2.3-4', 'major', '2.0.0', False],
['1.2.3-4', 'minor', '1.3.0', False],
['1.2.3-4', 'patch', '1.2.3', False],
['1.2.3-alpha.0.beta', 'major', '2.0.0', False],
['1.2.3-alpha.0.beta', 'minor', '1.3.0', False],
['1.2.3-alpha.0.beta', 'patch', '1.2.3', False],
['1.2.4', 'prerelease', '1.2.5-0', False],
['1.2.3-0', 'prerelease', '1.2.3-1', False],
['1.2.3-alpha.0', 'prerelease', '1.2.3-alpha.1', False],
['1.2.3-alpha.1', 'prerelease', '1.2.3-alpha.2', False],
['1.2.3-alpha.2', 'prerelease', '1.2.3-alpha.3', False],
['1.2.3-alpha.0.beta', 'prerelease', '1.2.3-alpha.1.beta', False],
['1.2.3-alpha.1.beta', 'prerelease', '1.2.3-alpha.2.beta', False],
['1.2.3-alpha.2.beta', 'prerelease', '1.2.3-alpha.3.beta', False],
['1.2.3-alpha.10.0.beta', 'prerelease', '1.2.3-alpha.10.1.beta', False],
['1.2.3-alpha.10.1.beta', 'prerelease', '1.2.3-alpha.10.2.beta', False],
['1.2.3-alpha.10.2.beta', 'prerelease', '1.2.3-alpha.10.3.beta', False],
['1.2.3-alpha.10.beta.0', 'prerelease', '1.2.3-alpha.10.beta.1', False],
['1.2.3-alpha.10.beta.1', 'prerelease', '1.2.3-alpha.10.beta.2', False],
['1.2.3-alpha.10.beta.2', 'prerelease', '1.2.3-alpha.10.beta.3', False],
['1.2.3-alpha.9.beta', 'prerelease', '1.2.3-alpha.10.beta', False],
['1.2.3-alpha.10.beta', 'prerelease', '1.2.3-alpha.11.beta', False],
['1.2.3-alpha.11.beta', 'prerelease', '1.2.3-alpha.12.beta', False],
['1.2.0', 'preminor', '1.3.0-0', False],
['1.2.0', 'premajor', '2.0.0-0', False],
['1.2.0', 'preminor', '1.3.0-0', False],
['1.2.0', 'premajor', '2.0.0-0', False]
]
# cands = [
# ['1.2.3-alpha.0', 'prerelease', '1.2.3-alpha.1', False],
# ]
@pytest.mark.parametrize("pre, what, wanted, loose", cands)
def test_it(pre, what, wanted, loose):
from semver import inc
assert inc(pre, what, loose) == wanted
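# Illustrative spot-checks drawn from the table above:
#   inc('1.2.3', 'minor') == '1.3.0'
#   inc('1.2.3-alpha.0', 'prerelease') == '1.2.3-alpha.1'
#   inc('1.2.0', 'premajor') == '2.0.0-0'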
| apache-2.0 |
henrytao-me/openerp.positionq | openerp/addons/base/ir/workflow/workflow.py | 41 | 8979 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Business Applications
# Copyright (C) 2004-2012 OpenERP S.A. (<http://openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
from openerp.tools.translate import _
from openerp import netsvc
class workflow(osv.osv):
_name = "workflow"
_table = "wkf"
_order = "name"
_columns = {
'name': fields.char('Name', size=64, required=True),
'osv': fields.char('Resource Object', size=64, required=True,select=True),
'on_create': fields.boolean('On Create', select=True),
'activities': fields.one2many('workflow.activity', 'wkf_id', 'Activities'),
}
_defaults = {
'on_create': lambda *a: True
}
def write(self, cr, user, ids, vals, context=None):
if not context:
context={}
wf_service = netsvc.LocalService("workflow")
wf_service.clear_cache(cr, user)
return super(workflow, self).write(cr, user, ids, vals, context=context)
def get_active_workitems(self, cr, uid, res, res_id, context=None):
cr.execute('select * from wkf where osv=%s limit 1',(res,))
wkfinfo = cr.dictfetchone()
workitems = []
if wkfinfo:
cr.execute('SELECT id FROM wkf_instance \
WHERE res_id=%s AND wkf_id=%s \
ORDER BY state LIMIT 1',
(res_id, wkfinfo['id']))
inst_id = cr.fetchone()
cr.execute('select act_id,count(*) from wkf_workitem where inst_id=%s group by act_id', (inst_id,))
workitems = dict(cr.fetchall())
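        # Illustrative result shape (assuming one matching workflow and
        # instance): {'wkf': {'id': ..., 'osv': ..., ...},
        #             'workitems': {act_id: count, ...}}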
return {'wkf': wkfinfo, 'workitems': workitems}
def create(self, cr, user, vals, context=None):
if not context:
context={}
wf_service = netsvc.LocalService("workflow")
wf_service.clear_cache(cr, user)
return super(workflow, self).create(cr, user, vals, context=context)
workflow()
class wkf_activity(osv.osv):
_name = "workflow.activity"
_table = "wkf_activity"
_order = "name"
_columns = {
'name': fields.char('Name', size=64, required=True),
'wkf_id': fields.many2one('workflow', 'Workflow', required=True, select=True, ondelete='cascade'),
'split_mode': fields.selection([('XOR', 'Xor'), ('OR','Or'), ('AND','And')], 'Split Mode', size=3, required=True),
'join_mode': fields.selection([('XOR', 'Xor'), ('AND', 'And')], 'Join Mode', size=3, required=True),
'kind': fields.selection([('dummy', 'Dummy'), ('function', 'Function'), ('subflow', 'Subflow'), ('stopall', 'Stop All')], 'Kind', size=64, required=True),
'action': fields.text('Python Action'),
'action_id': fields.many2one('ir.actions.server', 'Server Action', ondelete='set null'),
'flow_start': fields.boolean('Flow Start'),
'flow_stop': fields.boolean('Flow Stop'),
'subflow_id': fields.many2one('workflow', 'Subflow'),
'signal_send': fields.char('Signal (subflow.*)', size=32),
'out_transitions': fields.one2many('workflow.transition', 'act_from', 'Outgoing Transitions'),
'in_transitions': fields.one2many('workflow.transition', 'act_to', 'Incoming Transitions'),
}
_defaults = {
'kind': lambda *a: 'dummy',
'join_mode': lambda *a: 'XOR',
'split_mode': lambda *a: 'XOR',
}
def unlink(self, cr, uid, ids, context=None):
if context is None: context = {}
if not context.get('_force_unlink') and self.pool.get('workflow.workitem').search(cr, uid, [('act_id', 'in', ids)]):
raise osv.except_osv(_('Operation Forbidden'),
_('Please make sure no workitems refer to an activity before deleting it!'))
super(wkf_activity, self).unlink(cr, uid, ids, context=context)
wkf_activity()
class wkf_transition(osv.osv):
_table = "wkf_transition"
_name = "workflow.transition"
_rec_name = 'signal'
_columns = {
'trigger_model': fields.char('Trigger Object', size=128),
'trigger_expr_id': fields.char('Trigger Expression', size=128),
'signal': fields.char('Signal (Button Name)', size=64,
help="When the operation of transition comes from a button pressed in the client form, "\
"signal tests the name of the pressed button. If signal is NULL, no button is necessary to validate this transition."),
'group_id': fields.many2one('res.groups', 'Group Required',
help="The group that a user must have to be authorized to validate this transition."),
'condition': fields.char('Condition', required=True, size=128,
help="Expression to be satisfied if we want the transition done."),
'act_from': fields.many2one('workflow.activity', 'Source Activity', required=True, select=True, ondelete='cascade',
help="Source activity. When this activity is over, the condition is tested to determine if we can start the ACT_TO activity."),
'act_to': fields.many2one('workflow.activity', 'Destination Activity', required=True, select=True, ondelete='cascade',
help="The destination activity."),
'wkf_id': fields.related('act_from','wkf_id', type='many2one', relation='workflow', string='Workflow', select=True),
}
_defaults = {
'condition': lambda *a: 'True',
}
wkf_transition()
class wkf_instance(osv.osv):
_table = "wkf_instance"
_name = "workflow.instance"
_rec_name = 'res_type'
_log_access = False
_columns = {
'wkf_id': fields.many2one('workflow', 'Workflow', ondelete='cascade', select=True),
'res_id': fields.integer('Resource ID'),
'res_type': fields.char('Resource Object', size=64),
'state': fields.char('Status', size=32),
}
def _auto_init(self, cr, context=None):
super(wkf_instance, self)._auto_init(cr, context)
cr.execute('SELECT indexname FROM pg_indexes WHERE indexname = \'wkf_instance_res_type_res_id_state_index\'')
if not cr.fetchone():
cr.execute('CREATE INDEX wkf_instance_res_type_res_id_state_index ON wkf_instance (res_type, res_id, state)')
cr.execute('SELECT indexname FROM pg_indexes WHERE indexname = \'wkf_instance_res_id_wkf_id_index\'')
if not cr.fetchone():
cr.execute('CREATE INDEX wkf_instance_res_id_wkf_id_index ON wkf_instance (res_id, wkf_id)')
wkf_instance()
class wkf_workitem(osv.osv):
_table = "wkf_workitem"
_name = "workflow.workitem"
_log_access = False
_rec_name = 'state'
_columns = {
'act_id': fields.many2one('workflow.activity', 'Activity', required=True, ondelete="cascade", select=True),
'wkf_id': fields.related('act_id','wkf_id', type='many2one', relation='workflow', string='Workflow'),
'subflow_id': fields.many2one('workflow.instance', 'Subflow', ondelete="cascade", select=True),
'inst_id': fields.many2one('workflow.instance', 'Instance', required=True, ondelete="cascade", select=True),
'state': fields.char('Status', size=64, select=True),
}
wkf_workitem()
class wkf_triggers(osv.osv):
_table = "wkf_triggers"
_name = "workflow.triggers"
_log_access = False
_columns = {
'res_id': fields.integer('Resource ID', size=128),
'model': fields.char('Object', size=128),
'instance_id': fields.many2one('workflow.instance', 'Destination Instance', ondelete="cascade"),
'workitem_id': fields.many2one('workflow.workitem', 'Workitem', required=True, ondelete="cascade"),
}
def _auto_init(self, cr, context=None):
super(wkf_triggers, self)._auto_init(cr, context)
cr.execute('SELECT indexname FROM pg_indexes WHERE indexname = \'wkf_triggers_res_id_model_index\'')
if not cr.fetchone():
cr.execute('CREATE INDEX wkf_triggers_res_id_model_index ON wkf_triggers (res_id, model)')
wkf_triggers()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
nitroglycerine33/kernel_asus_grouper | Documentation/target/tcm_mod_builder.py | 3119 | 42754 | #!/usr/bin/python
# The TCM v4 multi-protocol fabric module generation script for drivers/target/$NEW_MOD
#
# Copyright (c) 2010 Rising Tide Systems
# Copyright (c) 2010 Linux-iSCSI.org
#
# Author: [email protected]
#
import os, sys
import subprocess as sub
import string
import re
import optparse
tcm_dir = ""
fabric_ops = []
fabric_mod_dir = ""
fabric_mod_port = ""
fabric_mod_init_port = ""
def tcm_mod_err(msg):
print msg
sys.exit(1)
def tcm_mod_create_module_subdir(fabric_mod_dir_var):
if os.path.isdir(fabric_mod_dir_var) == True:
return 1
print "Creating fabric_mod_dir: " + fabric_mod_dir_var
ret = os.mkdir(fabric_mod_dir_var)
if ret:
tcm_mod_err("Unable to mkdir " + fabric_mod_dir_var)
return
def tcm_mod_build_FC_include(fabric_mod_dir_var, fabric_mod_name):
global fabric_mod_port
global fabric_mod_init_port
buf = ""
f = fabric_mod_dir_var + "/" + fabric_mod_name + "_base.h"
print "Writing file: " + f
p = open(f, 'w');
if not p:
tcm_mod_err("Unable to open file: " + f)
buf = "#define " + fabric_mod_name.upper() + "_VERSION \"v0.1\"\n"
buf += "#define " + fabric_mod_name.upper() + "_NAMELEN 32\n"
buf += "\n"
buf += "struct " + fabric_mod_name + "_nacl {\n"
buf += " /* Binary World Wide unique Port Name for FC Initiator Nport */\n"
buf += " u64 nport_wwpn;\n"
buf += " /* ASCII formatted WWPN for FC Initiator Nport */\n"
buf += " char nport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
buf += " /* Returned by " + fabric_mod_name + "_make_nodeacl() */\n"
buf += " struct se_node_acl se_node_acl;\n"
buf += "};\n"
buf += "\n"
buf += "struct " + fabric_mod_name + "_tpg {\n"
buf += " /* FC lport target portal group tag for TCM */\n"
buf += " u16 lport_tpgt;\n"
buf += " /* Pointer back to " + fabric_mod_name + "_lport */\n"
buf += " struct " + fabric_mod_name + "_lport *lport;\n"
buf += " /* Returned by " + fabric_mod_name + "_make_tpg() */\n"
buf += " struct se_portal_group se_tpg;\n"
buf += "};\n"
buf += "\n"
buf += "struct " + fabric_mod_name + "_lport {\n"
buf += " /* SCSI protocol the lport is providing */\n"
buf += " u8 lport_proto_id;\n"
buf += " /* Binary World Wide unique Port Name for FC Target Lport */\n"
buf += " u64 lport_wwpn;\n"
buf += " /* ASCII formatted WWPN for FC Target Lport */\n"
buf += " char lport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
buf += " /* Returned by " + fabric_mod_name + "_make_lport() */\n"
buf += " struct se_wwn lport_wwn;\n"
buf += "};\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
fabric_mod_port = "lport"
fabric_mod_init_port = "nport"
return
def tcm_mod_build_SAS_include(fabric_mod_dir_var, fabric_mod_name):
global fabric_mod_port
global fabric_mod_init_port
buf = ""
f = fabric_mod_dir_var + "/" + fabric_mod_name + "_base.h"
print "Writing file: " + f
p = open(f, 'w');
if not p:
tcm_mod_err("Unable to open file: " + f)
buf = "#define " + fabric_mod_name.upper() + "_VERSION \"v0.1\"\n"
buf += "#define " + fabric_mod_name.upper() + "_NAMELEN 32\n"
buf += "\n"
buf += "struct " + fabric_mod_name + "_nacl {\n"
buf += " /* Binary World Wide unique Port Name for SAS Initiator port */\n"
buf += " u64 iport_wwpn;\n"
buf += " /* ASCII formatted WWPN for Sas Initiator port */\n"
buf += " char iport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
buf += " /* Returned by " + fabric_mod_name + "_make_nodeacl() */\n"
buf += " struct se_node_acl se_node_acl;\n"
buf += "};\n\n"
buf += "struct " + fabric_mod_name + "_tpg {\n"
buf += " /* SAS port target portal group tag for TCM */\n"
buf += " u16 tport_tpgt;\n"
buf += " /* Pointer back to " + fabric_mod_name + "_tport */\n"
buf += " struct " + fabric_mod_name + "_tport *tport;\n"
buf += " /* Returned by " + fabric_mod_name + "_make_tpg() */\n"
buf += " struct se_portal_group se_tpg;\n"
buf += "};\n\n"
buf += "struct " + fabric_mod_name + "_tport {\n"
buf += " /* SCSI protocol the tport is providing */\n"
buf += " u8 tport_proto_id;\n"
buf += " /* Binary World Wide unique Port Name for SAS Target port */\n"
buf += " u64 tport_wwpn;\n"
buf += " /* ASCII formatted WWPN for SAS Target port */\n"
buf += " char tport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
buf += " /* Returned by " + fabric_mod_name + "_make_tport() */\n"
buf += " struct se_wwn tport_wwn;\n"
buf += "};\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
fabric_mod_port = "tport"
fabric_mod_init_port = "iport"
return
def tcm_mod_build_iSCSI_include(fabric_mod_dir_var, fabric_mod_name):
global fabric_mod_port
global fabric_mod_init_port
buf = ""
f = fabric_mod_dir_var + "/" + fabric_mod_name + "_base.h"
print "Writing file: " + f
p = open(f, 'w');
if not p:
tcm_mod_err("Unable to open file: " + f)
buf = "#define " + fabric_mod_name.upper() + "_VERSION \"v0.1\"\n"
buf += "#define " + fabric_mod_name.upper() + "_NAMELEN 32\n"
buf += "\n"
buf += "struct " + fabric_mod_name + "_nacl {\n"
buf += " /* ASCII formatted InitiatorName */\n"
buf += " char iport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
buf += " /* Returned by " + fabric_mod_name + "_make_nodeacl() */\n"
buf += " struct se_node_acl se_node_acl;\n"
buf += "};\n\n"
buf += "struct " + fabric_mod_name + "_tpg {\n"
buf += " /* iSCSI target portal group tag for TCM */\n"
buf += " u16 tport_tpgt;\n"
buf += " /* Pointer back to " + fabric_mod_name + "_tport */\n"
buf += " struct " + fabric_mod_name + "_tport *tport;\n"
buf += " /* Returned by " + fabric_mod_name + "_make_tpg() */\n"
buf += " struct se_portal_group se_tpg;\n"
buf += "};\n\n"
buf += "struct " + fabric_mod_name + "_tport {\n"
buf += " /* SCSI protocol the tport is providing */\n"
buf += " u8 tport_proto_id;\n"
buf += " /* ASCII formatted TargetName for IQN */\n"
buf += " char tport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
buf += " /* Returned by " + fabric_mod_name + "_make_tport() */\n"
buf += " struct se_wwn tport_wwn;\n"
buf += "};\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
fabric_mod_port = "tport"
fabric_mod_init_port = "iport"
return
def tcm_mod_build_base_includes(proto_ident, fabric_mod_dir_val, fabric_mod_name):
if proto_ident == "FC":
tcm_mod_build_FC_include(fabric_mod_dir_val, fabric_mod_name)
elif proto_ident == "SAS":
tcm_mod_build_SAS_include(fabric_mod_dir_val, fabric_mod_name)
elif proto_ident == "iSCSI":
tcm_mod_build_iSCSI_include(fabric_mod_dir_val, fabric_mod_name)
else:
print "Unsupported proto_ident: " + proto_ident
sys.exit(1)
return
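# Usage sketch (illustrative; "tcm_nab5000" is a hypothetical module name):
#
#   tcm_mod_build_base_includes("FC", "drivers/target/tcm_nab5000",
#                               "tcm_nab5000")
#
# writes drivers/target/tcm_nab5000/tcm_nab5000_base.h and points the
# fabric_mod_port / fabric_mod_init_port globals at "lport" / "nport".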
def tcm_mod_build_configfs(proto_ident, fabric_mod_dir_var, fabric_mod_name):
buf = ""
f = fabric_mod_dir_var + "/" + fabric_mod_name + "_configfs.c"
print "Writing file: " + f
p = open(f, 'w');
if not p:
tcm_mod_err("Unable to open file: " + f)
buf = "#include <linux/module.h>\n"
buf += "#include <linux/moduleparam.h>\n"
buf += "#include <linux/version.h>\n"
buf += "#include <generated/utsrelease.h>\n"
buf += "#include <linux/utsname.h>\n"
buf += "#include <linux/init.h>\n"
buf += "#include <linux/slab.h>\n"
buf += "#include <linux/kthread.h>\n"
buf += "#include <linux/types.h>\n"
buf += "#include <linux/string.h>\n"
buf += "#include <linux/configfs.h>\n"
buf += "#include <linux/ctype.h>\n"
buf += "#include <asm/unaligned.h>\n\n"
buf += "#include <target/target_core_base.h>\n"
buf += "#include <target/target_core_transport.h>\n"
buf += "#include <target/target_core_fabric_ops.h>\n"
buf += "#include <target/target_core_fabric_configfs.h>\n"
buf += "#include <target/target_core_fabric_lib.h>\n"
buf += "#include <target/target_core_device.h>\n"
buf += "#include <target/target_core_tpg.h>\n"
buf += "#include <target/target_core_configfs.h>\n"
buf += "#include <target/target_core_base.h>\n"
buf += "#include <target/configfs_macros.h>\n\n"
buf += "#include \"" + fabric_mod_name + "_base.h\"\n"
buf += "#include \"" + fabric_mod_name + "_fabric.h\"\n\n"
buf += "/* Local pointer to allocated TCM configfs fabric module */\n"
buf += "struct target_fabric_configfs *" + fabric_mod_name + "_fabric_configfs;\n\n"
buf += "static struct se_node_acl *" + fabric_mod_name + "_make_nodeacl(\n"
buf += " struct se_portal_group *se_tpg,\n"
buf += " struct config_group *group,\n"
buf += " const char *name)\n"
buf += "{\n"
buf += " struct se_node_acl *se_nacl, *se_nacl_new;\n"
buf += " struct " + fabric_mod_name + "_nacl *nacl;\n"
if proto_ident == "FC" or proto_ident == "SAS":
buf += " u64 wwpn = 0;\n"
buf += " u32 nexus_depth;\n\n"
buf += " /* " + fabric_mod_name + "_parse_wwn(name, &wwpn, 1) < 0)\n"
buf += " return ERR_PTR(-EINVAL); */\n"
buf += " se_nacl_new = " + fabric_mod_name + "_alloc_fabric_acl(se_tpg);\n"
buf += " if (!(se_nacl_new))\n"
buf += " return ERR_PTR(-ENOMEM);\n"
buf += "//#warning FIXME: Hardcoded nexus depth in " + fabric_mod_name + "_make_nodeacl()\n"
buf += " nexus_depth = 1;\n"
buf += " /*\n"
buf += " * se_nacl_new may be released by core_tpg_add_initiator_node_acl()\n"
buf += " * when converting a NodeACL from demo mode -> explict\n"
buf += " */\n"
buf += " se_nacl = core_tpg_add_initiator_node_acl(se_tpg, se_nacl_new,\n"
buf += " name, nexus_depth);\n"
buf += " if (IS_ERR(se_nacl)) {\n"
buf += " " + fabric_mod_name + "_release_fabric_acl(se_tpg, se_nacl_new);\n"
buf += " return se_nacl;\n"
buf += " }\n"
buf += " /*\n"
buf += " * Locate our struct " + fabric_mod_name + "_nacl and set the FC Nport WWPN\n"
buf += " */\n"
buf += " nacl = container_of(se_nacl, struct " + fabric_mod_name + "_nacl, se_node_acl);\n"
if proto_ident == "FC" or proto_ident == "SAS":
buf += " nacl->" + fabric_mod_init_port + "_wwpn = wwpn;\n"
buf += " /* " + fabric_mod_name + "_format_wwn(&nacl->" + fabric_mod_init_port + "_name[0], " + fabric_mod_name.upper() + "_NAMELEN, wwpn); */\n\n"
buf += " return se_nacl;\n"
buf += "}\n\n"
buf += "static void " + fabric_mod_name + "_drop_nodeacl(struct se_node_acl *se_acl)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_nacl *nacl = container_of(se_acl,\n"
buf += " struct " + fabric_mod_name + "_nacl, se_node_acl);\n"
buf += " core_tpg_del_initiator_node_acl(se_acl->se_tpg, se_acl, 1);\n"
buf += " kfree(nacl);\n"
buf += "}\n\n"
buf += "static struct se_portal_group *" + fabric_mod_name + "_make_tpg(\n"
buf += " struct se_wwn *wwn,\n"
buf += " struct config_group *group,\n"
buf += " const char *name)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + "*" + fabric_mod_port + " = container_of(wwn,\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + ", " + fabric_mod_port + "_wwn);\n\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg;\n"
buf += " unsigned long tpgt;\n"
buf += " int ret;\n\n"
buf += " if (strstr(name, \"tpgt_\") != name)\n"
buf += " return ERR_PTR(-EINVAL);\n"
buf += " if (strict_strtoul(name + 5, 10, &tpgt) || tpgt > UINT_MAX)\n"
buf += " return ERR_PTR(-EINVAL);\n\n"
buf += " tpg = kzalloc(sizeof(struct " + fabric_mod_name + "_tpg), GFP_KERNEL);\n"
buf += " if (!(tpg)) {\n"
buf += " printk(KERN_ERR \"Unable to allocate struct " + fabric_mod_name + "_tpg\");\n"
buf += " return ERR_PTR(-ENOMEM);\n"
buf += " }\n"
buf += " tpg->" + fabric_mod_port + " = " + fabric_mod_port + ";\n"
buf += " tpg->" + fabric_mod_port + "_tpgt = tpgt;\n\n"
buf += " ret = core_tpg_register(&" + fabric_mod_name + "_fabric_configfs->tf_ops, wwn,\n"
buf += " &tpg->se_tpg, (void *)tpg,\n"
buf += " TRANSPORT_TPG_TYPE_NORMAL);\n"
buf += " if (ret < 0) {\n"
buf += " kfree(tpg);\n"
buf += " return NULL;\n"
buf += " }\n"
buf += " return &tpg->se_tpg;\n"
buf += "}\n\n"
buf += "static void " + fabric_mod_name + "_drop_tpg(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n\n"
buf += " core_tpg_deregister(se_tpg);\n"
buf += " kfree(tpg);\n"
buf += "}\n\n"
buf += "static struct se_wwn *" + fabric_mod_name + "_make_" + fabric_mod_port + "(\n"
buf += " struct target_fabric_configfs *tf,\n"
buf += " struct config_group *group,\n"
buf += " const char *name)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + ";\n"
if proto_ident == "FC" or proto_ident == "SAS":
buf += " u64 wwpn = 0;\n\n"
buf += " /* if (" + fabric_mod_name + "_parse_wwn(name, &wwpn, 1) < 0)\n"
buf += " return ERR_PTR(-EINVAL); */\n\n"
buf += " " + fabric_mod_port + " = kzalloc(sizeof(struct " + fabric_mod_name + "_" + fabric_mod_port + "), GFP_KERNEL);\n"
buf += " if (!(" + fabric_mod_port + ")) {\n"
buf += " printk(KERN_ERR \"Unable to allocate struct " + fabric_mod_name + "_" + fabric_mod_port + "\");\n"
buf += " return ERR_PTR(-ENOMEM);\n"
buf += " }\n"
if proto_ident == "FC" or proto_ident == "SAS":
buf += " " + fabric_mod_port + "->" + fabric_mod_port + "_wwpn = wwpn;\n"
buf += " /* " + fabric_mod_name + "_format_wwn(&" + fabric_mod_port + "->" + fabric_mod_port + "_name[0], " + fabric_mod_name.upper() + "__NAMELEN, wwpn); */\n\n"
buf += " return &" + fabric_mod_port + "->" + fabric_mod_port + "_wwn;\n"
buf += "}\n\n"
buf += "static void " + fabric_mod_name + "_drop_" + fabric_mod_port + "(struct se_wwn *wwn)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = container_of(wwn,\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + ", " + fabric_mod_port + "_wwn);\n"
buf += " kfree(" + fabric_mod_port + ");\n"
buf += "}\n\n"
buf += "static ssize_t " + fabric_mod_name + "_wwn_show_attr_version(\n"
buf += " struct target_fabric_configfs *tf,\n"
buf += " char *page)\n"
buf += "{\n"
buf += " return sprintf(page, \"" + fabric_mod_name.upper() + " fabric module %s on %s/%s\"\n"
buf += " \"on \"UTS_RELEASE\"\\n\", " + fabric_mod_name.upper() + "_VERSION, utsname()->sysname,\n"
buf += " utsname()->machine);\n"
buf += "}\n\n"
buf += "TF_WWN_ATTR_RO(" + fabric_mod_name + ", version);\n\n"
buf += "static struct configfs_attribute *" + fabric_mod_name + "_wwn_attrs[] = {\n"
buf += " &" + fabric_mod_name + "_wwn_version.attr,\n"
buf += " NULL,\n"
buf += "};\n\n"
buf += "static struct target_core_fabric_ops " + fabric_mod_name + "_ops = {\n"
buf += " .get_fabric_name = " + fabric_mod_name + "_get_fabric_name,\n"
buf += " .get_fabric_proto_ident = " + fabric_mod_name + "_get_fabric_proto_ident,\n"
buf += " .tpg_get_wwn = " + fabric_mod_name + "_get_fabric_wwn,\n"
buf += " .tpg_get_tag = " + fabric_mod_name + "_get_tag,\n"
buf += " .tpg_get_default_depth = " + fabric_mod_name + "_get_default_depth,\n"
buf += " .tpg_get_pr_transport_id = " + fabric_mod_name + "_get_pr_transport_id,\n"
buf += " .tpg_get_pr_transport_id_len = " + fabric_mod_name + "_get_pr_transport_id_len,\n"
buf += " .tpg_parse_pr_out_transport_id = " + fabric_mod_name + "_parse_pr_out_transport_id,\n"
buf += " .tpg_check_demo_mode = " + fabric_mod_name + "_check_false,\n"
buf += " .tpg_check_demo_mode_cache = " + fabric_mod_name + "_check_true,\n"
buf += " .tpg_check_demo_mode_write_protect = " + fabric_mod_name + "_check_true,\n"
buf += " .tpg_check_prod_mode_write_protect = " + fabric_mod_name + "_check_false,\n"
buf += " .tpg_alloc_fabric_acl = " + fabric_mod_name + "_alloc_fabric_acl,\n"
buf += " .tpg_release_fabric_acl = " + fabric_mod_name + "_release_fabric_acl,\n"
buf += " .tpg_get_inst_index = " + fabric_mod_name + "_tpg_get_inst_index,\n"
buf += " .release_cmd_to_pool = " + fabric_mod_name + "_release_cmd,\n"
buf += " .release_cmd_direct = " + fabric_mod_name + "_release_cmd,\n"
buf += " .shutdown_session = " + fabric_mod_name + "_shutdown_session,\n"
buf += " .close_session = " + fabric_mod_name + "_close_session,\n"
buf += " .stop_session = " + fabric_mod_name + "_stop_session,\n"
buf += " .fall_back_to_erl0 = " + fabric_mod_name + "_reset_nexus,\n"
buf += " .sess_logged_in = " + fabric_mod_name + "_sess_logged_in,\n"
buf += " .sess_get_index = " + fabric_mod_name + "_sess_get_index,\n"
buf += " .sess_get_initiator_sid = NULL,\n"
buf += " .write_pending = " + fabric_mod_name + "_write_pending,\n"
buf += " .write_pending_status = " + fabric_mod_name + "_write_pending_status,\n"
buf += " .set_default_node_attributes = " + fabric_mod_name + "_set_default_node_attrs,\n"
buf += " .get_task_tag = " + fabric_mod_name + "_get_task_tag,\n"
buf += " .get_cmd_state = " + fabric_mod_name + "_get_cmd_state,\n"
buf += " .new_cmd_failure = " + fabric_mod_name + "_new_cmd_failure,\n"
buf += " .queue_data_in = " + fabric_mod_name + "_queue_data_in,\n"
buf += " .queue_status = " + fabric_mod_name + "_queue_status,\n"
buf += " .queue_tm_rsp = " + fabric_mod_name + "_queue_tm_rsp,\n"
buf += " .get_fabric_sense_len = " + fabric_mod_name + "_get_fabric_sense_len,\n"
buf += " .set_fabric_sense_len = " + fabric_mod_name + "_set_fabric_sense_len,\n"
buf += " .is_state_remove = " + fabric_mod_name + "_is_state_remove,\n"
buf += " .pack_lun = " + fabric_mod_name + "_pack_lun,\n"
buf += " /*\n"
buf += " * Setup function pointers for generic logic in target_core_fabric_configfs.c\n"
buf += " */\n"
buf += " .fabric_make_wwn = " + fabric_mod_name + "_make_" + fabric_mod_port + ",\n"
buf += " .fabric_drop_wwn = " + fabric_mod_name + "_drop_" + fabric_mod_port + ",\n"
buf += " .fabric_make_tpg = " + fabric_mod_name + "_make_tpg,\n"
buf += " .fabric_drop_tpg = " + fabric_mod_name + "_drop_tpg,\n"
buf += " .fabric_post_link = NULL,\n"
buf += " .fabric_pre_unlink = NULL,\n"
buf += " .fabric_make_np = NULL,\n"
buf += " .fabric_drop_np = NULL,\n"
buf += " .fabric_make_nodeacl = " + fabric_mod_name + "_make_nodeacl,\n"
buf += " .fabric_drop_nodeacl = " + fabric_mod_name + "_drop_nodeacl,\n"
buf += "};\n\n"
buf += "static int " + fabric_mod_name + "_register_configfs(void)\n"
buf += "{\n"
buf += " struct target_fabric_configfs *fabric;\n"
buf += " int ret;\n\n"
buf += " printk(KERN_INFO \"" + fabric_mod_name.upper() + " fabric module %s on %s/%s\"\n"
buf += " \" on \"UTS_RELEASE\"\\n\"," + fabric_mod_name.upper() + "_VERSION, utsname()->sysname,\n"
buf += " utsname()->machine);\n"
buf += " /*\n"
buf += " * Register the top level struct config_item_type with TCM core\n"
buf += " */\n"
buf += " fabric = target_fabric_configfs_init(THIS_MODULE, \"" + fabric_mod_name[4:] + "\");\n"
buf += " if (!(fabric)) {\n"
buf += " printk(KERN_ERR \"target_fabric_configfs_init() failed\\n\");\n"
buf += " return -ENOMEM;\n"
buf += " }\n"
buf += " /*\n"
buf += " * Setup fabric->tf_ops from our local " + fabric_mod_name + "_ops\n"
buf += " */\n"
buf += " fabric->tf_ops = " + fabric_mod_name + "_ops;\n"
buf += " /*\n"
buf += " * Setup default attribute lists for various fabric->tf_cit_tmpl\n"
buf += " */\n"
buf += " TF_CIT_TMPL(fabric)->tfc_wwn_cit.ct_attrs = " + fabric_mod_name + "_wwn_attrs;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_base_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_attrib_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_param_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_np_base_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_base_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_attrib_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_auth_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_param_cit.ct_attrs = NULL;\n"
buf += " /*\n"
buf += " * Register the fabric for use within TCM\n"
buf += " */\n"
buf += " ret = target_fabric_configfs_register(fabric);\n"
buf += " if (ret < 0) {\n"
buf += " printk(KERN_ERR \"target_fabric_configfs_register() failed\"\n"
buf += " \" for " + fabric_mod_name.upper() + "\\n\");\n"
buf += " return ret;\n"
buf += " }\n"
buf += " /*\n"
buf += " * Setup our local pointer to *fabric\n"
buf += " */\n"
buf += " " + fabric_mod_name + "_fabric_configfs = fabric;\n"
buf += " printk(KERN_INFO \"" + fabric_mod_name.upper() + "[0] - Set fabric -> " + fabric_mod_name + "_fabric_configfs\\n\");\n"
buf += " return 0;\n"
buf += "};\n\n"
buf += "static void " + fabric_mod_name + "_deregister_configfs(void)\n"
buf += "{\n"
buf += " if (!(" + fabric_mod_name + "_fabric_configfs))\n"
buf += " return;\n\n"
buf += " target_fabric_configfs_deregister(" + fabric_mod_name + "_fabric_configfs);\n"
buf += " " + fabric_mod_name + "_fabric_configfs = NULL;\n"
buf += " printk(KERN_INFO \"" + fabric_mod_name.upper() + "[0] - Cleared " + fabric_mod_name + "_fabric_configfs\\n\");\n"
buf += "};\n\n"
buf += "static int __init " + fabric_mod_name + "_init(void)\n"
buf += "{\n"
buf += " int ret;\n\n"
buf += " ret = " + fabric_mod_name + "_register_configfs();\n"
buf += " if (ret < 0)\n"
buf += " return ret;\n\n"
buf += " return 0;\n"
buf += "};\n\n"
buf += "static void " + fabric_mod_name + "_exit(void)\n"
buf += "{\n"
buf += " " + fabric_mod_name + "_deregister_configfs();\n"
buf += "};\n\n"
buf += "#ifdef MODULE\n"
buf += "MODULE_DESCRIPTION(\"" + fabric_mod_name.upper() + " series fabric driver\");\n"
buf += "MODULE_LICENSE(\"GPL\");\n"
buf += "module_init(" + fabric_mod_name + "_init);\n"
buf += "module_exit(" + fabric_mod_name + "_exit);\n"
buf += "#endif\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
return
def tcm_mod_scan_fabric_ops(tcm_dir):
fabric_ops_api = tcm_dir + "include/target/target_core_fabric_ops.h"
print "Using tcm_mod_scan_fabric_ops: " + fabric_ops_api
process_fo = 0
p = open(fabric_ops_api, 'r')
line = p.readline()
while line:
if process_fo == 0 and re.search('struct target_core_fabric_ops {', line):
line = p.readline()
continue
if process_fo == 0:
process_fo = 1
line = p.readline()
# Search for function pointer
if not re.search('\(\*', line):
continue
fabric_ops.append(line.rstrip())
continue
line = p.readline()
# Search for function pointer
if not re.search('\(\*', line):
continue
fabric_ops.append(line.rstrip())
p.close()
return
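# The scanner above keeps only the function-pointer declarations found inside
# struct target_core_fabric_ops. Illustration (hypothetical header line):
#
#   re.search('\(\*', 'int (*write_pending)(struct se_cmd *);')  # -> match
#
# Members without a "(*" never reach fabric_ops, so the list ends up holding
# one raw declaration line per callback that needs a stub.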
def tcm_mod_dump_fabric_ops(proto_ident, fabric_mod_dir_var, fabric_mod_name):
buf = ""
bufi = ""
f = fabric_mod_dir_var + "/" + fabric_mod_name + "_fabric.c"
print "Writing file: " + f
p = open(f, 'w')
if not p:
tcm_mod_err("Unable to open file: " + f)
fi = fabric_mod_dir_var + "/" + fabric_mod_name + "_fabric.h"
print "Writing file: " + fi
pi = open(fi, 'w')
if not pi:
tcm_mod_err("Unable to open file: " + fi)
buf = "#include <linux/slab.h>\n"
buf += "#include <linux/kthread.h>\n"
buf += "#include <linux/types.h>\n"
buf += "#include <linux/list.h>\n"
buf += "#include <linux/types.h>\n"
buf += "#include <linux/string.h>\n"
buf += "#include <linux/ctype.h>\n"
buf += "#include <asm/unaligned.h>\n"
buf += "#include <scsi/scsi.h>\n"
buf += "#include <scsi/scsi_host.h>\n"
buf += "#include <scsi/scsi_device.h>\n"
buf += "#include <scsi/scsi_cmnd.h>\n"
buf += "#include <scsi/libfc.h>\n\n"
buf += "#include <target/target_core_base.h>\n"
buf += "#include <target/target_core_transport.h>\n"
buf += "#include <target/target_core_fabric_ops.h>\n"
buf += "#include <target/target_core_fabric_lib.h>\n"
buf += "#include <target/target_core_device.h>\n"
buf += "#include <target/target_core_tpg.h>\n"
buf += "#include <target/target_core_configfs.h>\n\n"
buf += "#include \"" + fabric_mod_name + "_base.h\"\n"
buf += "#include \"" + fabric_mod_name + "_fabric.h\"\n\n"
buf += "int " + fabric_mod_name + "_check_true(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " return 1;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_check_true(struct se_portal_group *);\n"
buf += "int " + fabric_mod_name + "_check_false(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_check_false(struct se_portal_group *);\n"
total_fabric_ops = len(fabric_ops)
i = 0
while i < total_fabric_ops:
fo = fabric_ops[i]
i += 1
# print "fabric_ops: " + fo
if re.search('get_fabric_name', fo):
buf += "char *" + fabric_mod_name + "_get_fabric_name(void)\n"
buf += "{\n"
buf += " return \"" + fabric_mod_name[4:] + "\";\n"
buf += "}\n\n"
bufi += "char *" + fabric_mod_name + "_get_fabric_name(void);\n"
continue
if re.search('get_fabric_proto_ident', fo):
buf += "u8 " + fabric_mod_name + "_get_fabric_proto_ident(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
buf += " u8 proto_id;\n\n"
buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
if proto_ident == "FC":
buf += " case SCSI_PROTOCOL_FCP:\n"
buf += " default:\n"
buf += " proto_id = fc_get_fabric_proto_ident(se_tpg);\n"
buf += " break;\n"
elif proto_ident == "SAS":
buf += " case SCSI_PROTOCOL_SAS:\n"
buf += " default:\n"
buf += " proto_id = sas_get_fabric_proto_ident(se_tpg);\n"
buf += " break;\n"
elif proto_ident == "iSCSI":
buf += " case SCSI_PROTOCOL_ISCSI:\n"
buf += " default:\n"
buf += " proto_id = iscsi_get_fabric_proto_ident(se_tpg);\n"
buf += " break;\n"
buf += " }\n\n"
buf += " return proto_id;\n"
buf += "}\n\n"
bufi += "u8 " + fabric_mod_name + "_get_fabric_proto_ident(struct se_portal_group *);\n"
if re.search('get_wwn', fo):
buf += "char *" + fabric_mod_name + "_get_fabric_wwn(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n\n"
buf += " return &" + fabric_mod_port + "->" + fabric_mod_port + "_name[0];\n"
buf += "}\n\n"
bufi += "char *" + fabric_mod_name + "_get_fabric_wwn(struct se_portal_group *);\n"
if re.search('get_tag', fo):
buf += "u16 " + fabric_mod_name + "_get_tag(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " return tpg->" + fabric_mod_port + "_tpgt;\n"
buf += "}\n\n"
bufi += "u16 " + fabric_mod_name + "_get_tag(struct se_portal_group *);\n"
if re.search('get_default_depth', fo):
buf += "u32 " + fabric_mod_name + "_get_default_depth(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " return 1;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_get_default_depth(struct se_portal_group *);\n"
if re.search('get_pr_transport_id\)\(', fo):
buf += "u32 " + fabric_mod_name + "_get_pr_transport_id(\n"
buf += " struct se_portal_group *se_tpg,\n"
buf += " struct se_node_acl *se_nacl,\n"
buf += " struct t10_pr_registration *pr_reg,\n"
buf += " int *format_code,\n"
buf += " unsigned char *buf)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
buf += " int ret = 0;\n\n"
buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
if proto_ident == "FC":
buf += " case SCSI_PROTOCOL_FCP:\n"
buf += " default:\n"
buf += " ret = fc_get_pr_transport_id(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code, buf);\n"
buf += " break;\n"
elif proto_ident == "SAS":
buf += " case SCSI_PROTOCOL_SAS:\n"
buf += " default:\n"
buf += " ret = sas_get_pr_transport_id(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code, buf);\n"
buf += " break;\n"
elif proto_ident == "iSCSI":
buf += " case SCSI_PROTOCOL_ISCSI:\n"
buf += " default:\n"
buf += " ret = iscsi_get_pr_transport_id(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code, buf);\n"
buf += " break;\n"
buf += " }\n\n"
buf += " return ret;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_get_pr_transport_id(struct se_portal_group *,\n"
bufi += " struct se_node_acl *, struct t10_pr_registration *,\n"
bufi += " int *, unsigned char *);\n"
if re.search('get_pr_transport_id_len\)\(', fo):
buf += "u32 " + fabric_mod_name + "_get_pr_transport_id_len(\n"
buf += " struct se_portal_group *se_tpg,\n"
buf += " struct se_node_acl *se_nacl,\n"
buf += " struct t10_pr_registration *pr_reg,\n"
buf += " int *format_code)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
buf += " int ret = 0;\n\n"
buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
if proto_ident == "FC":
buf += " case SCSI_PROTOCOL_FCP:\n"
buf += " default:\n"
buf += " ret = fc_get_pr_transport_id_len(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code);\n"
buf += " break;\n"
elif proto_ident == "SAS":
buf += " case SCSI_PROTOCOL_SAS:\n"
buf += " default:\n"
buf += " ret = sas_get_pr_transport_id_len(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code);\n"
buf += " break;\n"
elif proto_ident == "iSCSI":
buf += " case SCSI_PROTOCOL_ISCSI:\n"
buf += " default:\n"
buf += " ret = iscsi_get_pr_transport_id_len(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code);\n"
buf += " break;\n"
buf += " }\n\n"
buf += " return ret;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_get_pr_transport_id_len(struct se_portal_group *,\n"
bufi += " struct se_node_acl *, struct t10_pr_registration *,\n"
bufi += " int *);\n"
if re.search('parse_pr_out_transport_id\)\(', fo):
buf += "char *" + fabric_mod_name + "_parse_pr_out_transport_id(\n"
buf += " struct se_portal_group *se_tpg,\n"
buf += " const char *buf,\n"
buf += " u32 *out_tid_len,\n"
buf += " char **port_nexus_ptr)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
buf += " char *tid = NULL;\n\n"
buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
if proto_ident == "FC":
buf += " case SCSI_PROTOCOL_FCP:\n"
buf += " default:\n"
buf += " tid = fc_parse_pr_out_transport_id(se_tpg, buf, out_tid_len,\n"
buf += " port_nexus_ptr);\n"
elif proto_ident == "SAS":
buf += " case SCSI_PROTOCOL_SAS:\n"
buf += " default:\n"
buf += " tid = sas_parse_pr_out_transport_id(se_tpg, buf, out_tid_len,\n"
buf += " port_nexus_ptr);\n"
elif proto_ident == "iSCSI":
buf += " case SCSI_PROTOCOL_ISCSI:\n"
buf += " default:\n"
buf += " tid = iscsi_parse_pr_out_transport_id(se_tpg, buf, out_tid_len,\n"
buf += " port_nexus_ptr);\n"
buf += " }\n\n"
buf += " return tid;\n"
buf += "}\n\n"
bufi += "char *" + fabric_mod_name + "_parse_pr_out_transport_id(struct se_portal_group *,\n"
bufi += " const char *, u32 *, char **);\n"
if re.search('alloc_fabric_acl\)\(', fo):
buf += "struct se_node_acl *" + fabric_mod_name + "_alloc_fabric_acl(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_nacl *nacl;\n\n"
buf += " nacl = kzalloc(sizeof(struct " + fabric_mod_name + "_nacl), GFP_KERNEL);\n"
buf += " if (!(nacl)) {\n"
buf += " printk(KERN_ERR \"Unable to alocate struct " + fabric_mod_name + "_nacl\\n\");\n"
buf += " return NULL;\n"
buf += " }\n\n"
buf += " return &nacl->se_node_acl;\n"
buf += "}\n\n"
bufi += "struct se_node_acl *" + fabric_mod_name + "_alloc_fabric_acl(struct se_portal_group *);\n"
if re.search('release_fabric_acl\)\(', fo):
buf += "void " + fabric_mod_name + "_release_fabric_acl(\n"
buf += " struct se_portal_group *se_tpg,\n"
buf += " struct se_node_acl *se_nacl)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_nacl *nacl = container_of(se_nacl,\n"
buf += " struct " + fabric_mod_name + "_nacl, se_node_acl);\n"
buf += " kfree(nacl);\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_release_fabric_acl(struct se_portal_group *,\n"
bufi += " struct se_node_acl *);\n"
if re.search('tpg_get_inst_index\)\(', fo):
buf += "u32 " + fabric_mod_name + "_tpg_get_inst_index(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " return 1;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_tpg_get_inst_index(struct se_portal_group *);\n"
if re.search('release_cmd_to_pool', fo):
buf += "void " + fabric_mod_name + "_release_cmd(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return;\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_release_cmd(struct se_cmd *);\n"
if re.search('shutdown_session\)\(', fo):
buf += "int " + fabric_mod_name + "_shutdown_session(struct se_session *se_sess)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_shutdown_session(struct se_session *);\n"
if re.search('close_session\)\(', fo):
buf += "void " + fabric_mod_name + "_close_session(struct se_session *se_sess)\n"
buf += "{\n"
buf += " return;\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_close_session(struct se_session *);\n"
if re.search('stop_session\)\(', fo):
buf += "void " + fabric_mod_name + "_stop_session(struct se_session *se_sess, int sess_sleep , int conn_sleep)\n"
buf += "{\n"
buf += " return;\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_stop_session(struct se_session *, int, int);\n"
if re.search('fall_back_to_erl0\)\(', fo):
buf += "void " + fabric_mod_name + "_reset_nexus(struct se_session *se_sess)\n"
buf += "{\n"
buf += " return;\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_reset_nexus(struct se_session *);\n"
if re.search('sess_logged_in\)\(', fo):
buf += "int " + fabric_mod_name + "_sess_logged_in(struct se_session *se_sess)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_sess_logged_in(struct se_session *);\n"
if re.search('sess_get_index\)\(', fo):
buf += "u32 " + fabric_mod_name + "_sess_get_index(struct se_session *se_sess)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_sess_get_index(struct se_session *);\n"
if re.search('write_pending\)\(', fo):
buf += "int " + fabric_mod_name + "_write_pending(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_write_pending(struct se_cmd *);\n"
if re.search('write_pending_status\)\(', fo):
buf += "int " + fabric_mod_name + "_write_pending_status(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_write_pending_status(struct se_cmd *);\n"
if re.search('set_default_node_attributes\)\(', fo):
buf += "void " + fabric_mod_name + "_set_default_node_attrs(struct se_node_acl *nacl)\n"
buf += "{\n"
buf += " return;\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_set_default_node_attrs(struct se_node_acl *);\n"
if re.search('get_task_tag\)\(', fo):
buf += "u32 " + fabric_mod_name + "_get_task_tag(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_get_task_tag(struct se_cmd *);\n"
if re.search('get_cmd_state\)\(', fo):
buf += "int " + fabric_mod_name + "_get_cmd_state(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_get_cmd_state(struct se_cmd *);\n"
if re.search('new_cmd_failure\)\(', fo):
buf += "void " + fabric_mod_name + "_new_cmd_failure(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return;\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_new_cmd_failure(struct se_cmd *);\n"
if re.search('queue_data_in\)\(', fo):
buf += "int " + fabric_mod_name + "_queue_data_in(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_queue_data_in(struct se_cmd *);\n"
if re.search('queue_status\)\(', fo):
buf += "int " + fabric_mod_name + "_queue_status(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_queue_status(struct se_cmd *);\n"
if re.search('queue_tm_rsp\)\(', fo):
buf += "int " + fabric_mod_name + "_queue_tm_rsp(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_queue_tm_rsp(struct se_cmd *);\n"
if re.search('get_fabric_sense_len\)\(', fo):
buf += "u16 " + fabric_mod_name + "_get_fabric_sense_len(void)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "u16 " + fabric_mod_name + "_get_fabric_sense_len(void);\n"
if re.search('set_fabric_sense_len\)\(', fo):
buf += "u16 " + fabric_mod_name + "_set_fabric_sense_len(struct se_cmd *se_cmd, u32 sense_length)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "u16 " + fabric_mod_name + "_set_fabric_sense_len(struct se_cmd *, u32);\n"
if re.search('is_state_remove\)\(', fo):
buf += "int " + fabric_mod_name + "_is_state_remove(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_is_state_remove(struct se_cmd *);\n"
if re.search('pack_lun\)\(', fo):
buf += "u64 " + fabric_mod_name + "_pack_lun(unsigned int lun)\n"
buf += "{\n"
buf += " WARN_ON(lun >= 256);\n"
buf += " /* Caller wants this byte-swapped */\n"
buf += " return cpu_to_le64((lun & 0xff) << 8);\n"
buf += "}\n\n"
bufi += "u64 " + fabric_mod_name + "_pack_lun(unsigned int);\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
ret = pi.write(bufi)
if ret:
tcm_mod_err("Unable to write fi: " + fi)
pi.close()
return
def tcm_mod_build_kbuild(fabric_mod_dir_var, fabric_mod_name):
buf = ""
f = fabric_mod_dir_var + "/Makefile"
print "Writing file: " + f
p = open(f, 'w')
if not p:
tcm_mod_err("Unable to open file: " + f)
buf += fabric_mod_name + "-objs := " + fabric_mod_name + "_fabric.o \\\n"
buf += " " + fabric_mod_name + "_configfs.o\n"
buf += "obj-$(CONFIG_" + fabric_mod_name.upper() + ") += " + fabric_mod_name + ".o\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
return
def tcm_mod_build_kconfig(fabric_mod_dir_var, fabric_mod_name):
buf = ""
f = fabric_mod_dir_var + "/Kconfig"
print "Writing file: " + f
p = open(f, 'w')
if not p:
tcm_mod_err("Unable to open file: " + f)
buf = "config " + fabric_mod_name.upper() + "\n"
buf += " tristate \"" + fabric_mod_name.upper() + " fabric module\"\n"
buf += " depends on TARGET_CORE && CONFIGFS_FS\n"
buf += " default n\n"
buf += " ---help---\n"
buf += " Say Y here to enable the " + fabric_mod_name.upper() + " fabric module\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
return
def tcm_mod_add_kbuild(tcm_dir, fabric_mod_name):
buf = "obj-$(CONFIG_" + fabric_mod_name.upper() + ") += " + fabric_mod_name.lower() + "/\n"
kbuild = tcm_dir + "/drivers/target/Makefile"
f = open(kbuild, 'a')
f.write(buf)
f.close()
return
def tcm_mod_add_kconfig(tcm_dir, fabric_mod_name):
buf = "source \"drivers/target/" + fabric_mod_name.lower() + "/Kconfig\"\n"
kconfig = tcm_dir + "/drivers/target/Kconfig"
f = open(kconfig, 'a')
f.write(buf)
f.close()
return
def main(modname, proto_ident):
# proto_ident = "FC"
# proto_ident = "SAS"
# proto_ident = "iSCSI"
tcm_dir = os.getcwd()
tcm_dir += "/../../"
print "tcm_dir: " + tcm_dir
fabric_mod_name = modname
fabric_mod_dir = tcm_dir + "drivers/target/" + fabric_mod_name
print "Set fabric_mod_name: " + fabric_mod_name
print "Set fabric_mod_dir: " + fabric_mod_dir
print "Using proto_ident: " + proto_ident
if proto_ident != "FC" and proto_ident != "SAS" and proto_ident != "iSCSI":
print "Unsupported proto_ident: " + proto_ident
sys.exit(1)
ret = tcm_mod_create_module_subdir(fabric_mod_dir)
if ret:
print "tcm_mod_create_module_subdir() failed because module already exists!"
sys.exit(1)
tcm_mod_build_base_includes(proto_ident, fabric_mod_dir, fabric_mod_name)
tcm_mod_scan_fabric_ops(tcm_dir)
tcm_mod_dump_fabric_ops(proto_ident, fabric_mod_dir, fabric_mod_name)
tcm_mod_build_configfs(proto_ident, fabric_mod_dir, fabric_mod_name)
tcm_mod_build_kbuild(fabric_mod_dir, fabric_mod_name)
tcm_mod_build_kconfig(fabric_mod_dir, fabric_mod_name)
input = raw_input("Would you like to add " + fabric_mod_name + "to drivers/target/Makefile..? [yes,no]: ")
if input == "yes" or input == "y":
tcm_mod_add_kbuild(tcm_dir, fabric_mod_name)
input = raw_input("Would you like to add " + fabric_mod_name + "to drivers/target/Kconfig..? [yes,no]: ")
if input == "yes" or input == "y":
tcm_mod_add_kconfig(tcm_dir, fabric_mod_name)
return
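# Typical invocation (the module name here is only an example):
#
#   ./tcm_mod_builder.py -m tcm_myfabric -p FC
#
# which generates drivers/target/tcm_myfabric/ with the base, fabric and
# configfs sources plus the Makefile/Kconfig glue written by the helpers above.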
parser = optparse.OptionParser()
parser.add_option('-m', '--modulename', help='Module name', dest='modname',
action='store', nargs=1, type='string')
parser.add_option('-p', '--protoident', help='Protocol Ident', dest='protoident',
action='store', nargs=1, type='string')
(opts, args) = parser.parse_args()
mandatories = ['modname', 'protoident']
for m in mandatories:
if not opts.__dict__[m]:
print "mandatory option is missing\n"
parser.print_help()
exit(-1)
if __name__ == "__main__":
main(str(opts.modname), opts.protoident)
| gpl-2.0 |
cespare/pastedown | vendor/pygments/pygments/console.py | 24 | 1850 | # -*- coding: utf-8 -*-
"""
pygments.console
~~~~~~~~~~~~~~~~
Format colored console output.
:copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
esc = "\x1b["
codes = {}
codes[""] = ""
codes["reset"] = esc + "39;49;00m"
codes["bold"] = esc + "01m"
codes["faint"] = esc + "02m"
codes["standout"] = esc + "03m"
codes["underline"] = esc + "04m"
codes["blink"] = esc + "05m"
codes["overline"] = esc + "06m"
dark_colors = ["black", "darkred", "darkgreen", "brown", "darkblue",
"purple", "teal", "lightgray"]
light_colors = ["darkgray", "red", "green", "yellow", "blue",
"fuchsia", "turquoise", "white"]
x = 30
for d, l in zip(dark_colors, light_colors):
codes[d] = esc + "%im" % x
codes[l] = esc + "%i;01m" % x
x += 1
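# The loop above walks ANSI SGR codes 30-37: each dark color gets the plain
# code, and its light counterpart gets the same code with the ";01" (bold)
# modifier appended.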
del d, l, x
codes["darkteal"] = codes["turquoise"]
codes["darkyellow"] = codes["brown"]
codes["fuscia"] = codes["fuchsia"]
codes["white"] = codes["bold"]
def reset_color():
return codes["reset"]
def colorize(color_key, text):
return codes[color_key] + text + codes["reset"]
def ansiformat(attr, text):
"""
Format ``text`` with a color and/or some attributes::
        color       normal color
        *color*     bold color
        _color_     underlined color
        +color+     blinking color
"""
result = []
if attr[:1] == attr[-1:] == '+':
result.append(codes['blink'])
attr = attr[1:-1]
if attr[:1] == attr[-1:] == '*':
result.append(codes['bold'])
attr = attr[1:-1]
if attr[:1] == attr[-1:] == '_':
result.append(codes['underline'])
attr = attr[1:-1]
result.append(codes[attr])
result.append(text)
result.append(codes['reset'])
return ''.join(result)
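# For example (not part of the original module):
#   colorize('green', 'ok')       -> green 'ok' followed by a reset
#   ansiformat('*red*', 'fail')   -> bold red 'fail' followed by a reset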
| mit |
sgibbes/carbon-budget | gain/cumulative_gain_mangrove.py | 1 | 2558 | ### This script calculates the cumulative above and belowground carbon gain in mangrove forest pixels from 2001-2015.
### It multiplies the annual biomass gain rate by the number of years of gain and by the biomass-to-carbon conversion factor.
import utilities
import datetime
import subprocess
import sys
sys.path.append('../')
import constants_and_names as cn
import universal_util as uu
# Calculates cumulative aboveground carbon gain in mangroves
def cumulative_gain_AGC(tile_id):
print "Calculating cumulative aboveground carbon gain:", tile_id
# Start time
start = datetime.datetime.now()
# Names of the annual gain rate and gain year count tiles
gain_rate_AGB = '{0}_{1}.tif'.format(tile_id, cn.pattern_annual_gain_AGB_mangrove)
gain_year_count = '{0}_{1}.tif'.format(tile_id, cn.pattern_gain_year_count_mangrove)
# Carbon gain uses special mangrove biomass:carbon ratio
accum_calc = '--calc=A*B*{}'.format(cn.biomass_to_c_mangrove)
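# i.e. cumulative gain per pixel = annual AGB gain rate (raster A) * number of
# gain years (raster B) * the mangrove biomass-to-carbon ratio, evaluated by
# gdal_calc.py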
AGC_accum_outfilename = '{0}_{1}.tif'.format(tile_id, cn.pattern_cumul_gain_AGC_mangrove)
AGC_accum_outfilearg = '--outfile={}'.format(AGC_accum_outfilename)
cmd = ['gdal_calc.py', '-A', gain_rate_AGB, '-B', gain_year_count, accum_calc, AGC_accum_outfilearg, '--NoDataValue=0', '--overwrite', '--co', 'COMPRESS=LZW']
subprocess.check_call(cmd)
# Prints information about the tile that was just processed
uu.end_of_fx_summary(start, tile_id, cn.pattern_cumul_gain_AGC_mangrove)
# Calculates cumulative belowground carbon gain in mangroves
def cumulative_gain_BGC(tile_id):
print "Calculating cumulative belowground carbon gain:", tile_id
# Start time
start = datetime.datetime.now()
# Names of the annual gain rate and gain year count tiles
gain_rate_BGB = '{0}_{1}.tif'.format(tile_id, cn.pattern_annual_gain_BGB_mangrove)
gain_year_count = '{0}_{1}.tif'.format(tile_id, cn.pattern_gain_year_count_mangrove)
# Carbon gain uses special mangrove biomass:carbon ratio
accum_calc = '--calc=A*B*{}'.format(cn.biomass_to_c_mangrove)
BGC_accum_outfilename = '{0}_{1}.tif'.format(tile_id, cn.pattern_cumul_gain_BGC_mangrove)
BGC_accum_outfilearg = '--outfile={}'.format(BGC_accum_outfilename)
cmd = ['gdal_calc.py', '-A', gain_rate_BGB, '-B', gain_year_count, accum_calc, BGC_accum_outfilearg, '--NoDataValue=0', '--overwrite', '--co', 'COMPRESS=LZW']
subprocess.check_call(cmd)
# Prints information about the tile that was just processed
uu.end_of_fx_summary(start, tile_id, cn.pattern_cumul_gain_BGC_mangrove) | apache-2.0 |
egenerat/gae-django | django/contrib/localflavor/ca/ca_provinces.py | 13 | 1455 | """
An alphabetical list of provinces and territories for use as `choices`
in a formfield, and a mapping of province misspellings/abbreviations to
normalized abbreviations
Source: http://www.canada.gc.ca/othergov/prov_e.html
This exists in this standalone file so that it's only imported into memory
when explicitly needed.
"""
PROVINCE_CHOICES = (
('AB', 'Alberta'),
('BC', 'British Columbia'),
('MB', 'Manitoba'),
('NB', 'New Brunswick'),
('NF', 'Newfoundland and Labrador'),
('NT', 'Northwest Territories'),
('NS', 'Nova Scotia'),
('NU', 'Nunavut'),
('ON', 'Ontario'),
('PE', 'Prince Edward Island'),
('QC', 'Quebec'),
('SK', 'Saskatchewan'),
('YK', 'Yukon')
)
PROVINCES_NORMALIZED = {
'ab': 'AB',
'alberta': 'AB',
'bc': 'BC',
'b.c.': 'BC',
'british columbia': 'BC',
'mb': 'MB',
'manitoba': 'MB',
'nb': 'NB',
'new brunswick': 'NB',
'nf': 'NF',
'newfoundland': 'NF',
'newfoundland and labrador': 'NF',
'nt': 'NT',
'northwest territories': 'NT',
'ns': 'NS',
'nova scotia': 'NS',
'nu': 'NU',
'nunavut': 'NU',
'on': 'ON',
'ontario': 'ON',
'pe': 'PE',
'pei': 'PE',
'p.e.i.': 'PE',
'prince edward island': 'PE',
'qc': 'QC',
'quebec': 'QC',
'sk': 'SK',
'saskatchewan': 'SK',
'yk': 'YK',
'yukon': 'YK',
} | mit |
atcemgil/notes | DrawNN.py | 1 | 2429 | #Code from https://gist.github.com/craffel/2d727968c3aaebd10359
import matplotlib.pyplot as plt
def draw_neural_net(ax, left, right, bottom, top, layer_sizes, bias=0, draw_edges=False):
'''
Draw a neural network cartoon using matplotlib.
:usage:
>>> fig = plt.figure(figsize=(12, 12))
>>> draw_neural_net(fig.gca(), .1, .9, .1, .9, [4, 7, 2])
:parameters:
- ax : matplotlib.axes.AxesSubplot
The axes on which to plot the cartoon (get e.g. by plt.gca())
- left : float
The center of the leftmost node(s) will be placed here
- right : float
The center of the rightmost node(s) will be placed here
- bottom : float
The center of the bottommost node(s) will be placed here
- top : float
The center of the topmost node(s) will be placed here
- layer_sizes : list of int
List of layer sizes, including input and output dimensionality
- bias : Boolean
Draw an extra bias node at each layer
- draw_edges : Boolean
If false, omit edge connections
'''
n_layers = len(layer_sizes)
v_spacing = (top - bottom)/float(max(layer_sizes)+bias)
h_spacing = (right - left)/float(len(layer_sizes) - 1)
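# v_spacing slots the tallest layer (plus the optional bias node) into the
# vertical extent; h_spacing spreads the layers evenly across the horizontal
# extent, so every node center lands on a regular grid.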
# Nodes
for n, layer_size in enumerate(layer_sizes):
layer_top = v_spacing*(layer_size - 1)/2. + (top + bottom)/2.
bias_node = (bias if n<len(layer_sizes)-1 else 0)
for m in range(layer_size+bias_node ):
node_color = 'w' if m<layer_size else 'b'
circle = plt.Circle((n*h_spacing + left, layer_top - m*v_spacing), v_spacing/8.,
color=node_color, ec='k', zorder=4)
ax.add_artist(circle)
# Edges
if draw_edges:
for n, (layer_size_a, layer_size_b) in enumerate(zip(layer_sizes[:-1], layer_sizes[1:])):
layer_top_a = v_spacing*(layer_size_a - 1)/2. + (top + bottom)/2.
layer_top_b = v_spacing*(layer_size_b - 1)/2. + (top + bottom)/2.
for m in range(layer_size_a+bias):
for o in range(layer_size_b):
line = plt.Line2D([n*h_spacing + left, (n + 1)*h_spacing + left],
[layer_top_a - m*v_spacing, layer_top_b - o*v_spacing],
c='k')
ax.add_artist(line) | mit |
zfil/ansible-modules-core | cloud/rackspace/rax_network.py | 157 | 4110 | #!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# This is a DOCUMENTATION stub specific to this module, it extends
# a documentation fragment located in ansible.utils.module_docs_fragments
DOCUMENTATION = '''
---
module: rax_network
short_description: create / delete an isolated network in Rackspace Public Cloud
description:
- creates / deletes a Rackspace Public Cloud isolated network.
version_added: "1.4"
options:
state:
description:
- Indicate desired state of the resource
choices:
- present
- absent
default: present
label:
description:
- Label (name) to give the network
default: null
cidr:
description:
- cidr of the network being created
default: null
author:
- "Christopher H. Laco (@claco)"
- "Jesse Keating (@j2sol)"
extends_documentation_fragment: rackspace.openstack
'''
EXAMPLES = '''
- name: Build an Isolated Network
gather_facts: False
tasks:
- name: Network create request
local_action:
module: rax_network
credentials: ~/.raxpub
label: my-net
cidr: 192.168.3.0/24
state: present
'''
try:
import pyrax
HAS_PYRAX = True
except ImportError:
HAS_PYRAX = False
def cloud_network(module, state, label, cidr):
changed = False
network = None
networks = []
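# Idempotency: for state=present the network is created only when the label
# lookup raises NetworkNotFound; for state=absent a NetworkNotFound simply
# means it is already gone, so no change is reported.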
if not pyrax.cloud_networks:
module.fail_json(msg='Failed to instantiate client. This '
'typically indicates an invalid region or an '
'incorrectly capitalized region name.')
if state == 'present':
if not cidr:
module.fail_json(msg='missing required arguments: cidr')
try:
network = pyrax.cloud_networks.find_network_by_label(label)
except pyrax.exceptions.NetworkNotFound:
try:
network = pyrax.cloud_networks.create(label, cidr=cidr)
changed = True
except Exception, e:
module.fail_json(msg='%s' % e.message)
except Exception, e:
module.fail_json(msg='%s' % e.message)
elif state == 'absent':
try:
network = pyrax.cloud_networks.find_network_by_label(label)
network.delete()
changed = True
except pyrax.exceptions.NetworkNotFound:
pass
except Exception, e:
module.fail_json(msg='%s' % e.message)
if network:
instance = dict(id=network.id,
label=network.label,
cidr=network.cidr)
networks.append(instance)
module.exit_json(changed=changed, networks=networks)
def main():
argument_spec = rax_argument_spec()
argument_spec.update(
dict(
state=dict(default='present',
choices=['present', 'absent']),
label=dict(required=True),
cidr=dict()
)
)
module = AnsibleModule(
argument_spec=argument_spec,
required_together=rax_required_together(),
)
if not HAS_PYRAX:
module.fail_json(msg='pyrax is required for this module')
state = module.params.get('state')
label = module.params.get('label')
cidr = module.params.get('cidr')
setup_rax_module(module, pyrax)
cloud_network(module, state, label, cidr)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.rax import *
### invoke the module
main()
| gpl-3.0 |
matthijsvk/multimodalSR | code/Experiments/neon-master/examples/whale_calls/make_submission.py | 1 | 2183 | #!/usr/bin/env python
# ----------------------------------------------------------------------------
# Copyright 2016 Nervana Systems Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ----------------------------------------------------------------------------
import os
import numpy as np
from neon import logger as neon_logger
from neon.util.argparser import NeonArgparser
from neon.optimizers import Adadelta
from neon.callbacks.callbacks import Callbacks
from network import create_network
from data import make_train_loader, make_test_loader
subm_config = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'whale_subm.cfg')
config_files = [subm_config] if os.path.exists(subm_config) else []
parser = NeonArgparser(__doc__, default_config_files=config_files)
parser.add_argument('--submission_file', help='where to write prediction output')
args = parser.parse_args()
model, cost_obj = create_network()
assert 'all' in args.manifest, "Missing train manifest"
assert 'test' in args.manifest, "Missing test manifest"
assert args.submission_file is not None, "Must supply a submission file to output scores to"
neon_logger.display('Performing train and test in submission mode')
train = make_train_loader(args.manifest['all'], args.manifest_root, model.be,
noise_file=args.manifest.get('noise'))
test = make_test_loader(args.manifest['test'], args.manifest_root, model.be)
model.fit(dataset=train,
cost=cost_obj,
optimizer=Adadelta(),
num_epochs=args.epochs,
callbacks=Callbacks(model, **args.callback_args))
preds = model.get_outputs(test)
np.savetxt(args.submission_file, preds[:, 1], fmt='%.5f')
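# preds holds one softmax row per test clip; column 1 (presumably the
# whale-call class) is written out as one probability per line for the
# submission file.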
| mit |
Timurdov/bionic | bionic/Lib/site-packages/django/core/management/templates.py | 61 | 13198 | import cgi
import errno
import mimetypes
import os
import posixpath
import re
import shutil
import stat
import sys
import tempfile
from optparse import make_option
from os import path
import django
from django.template import Template, Context
from django.utils import archive
from django.utils.six.moves.urllib.request import urlretrieve
from django.utils._os import rmtree_errorhandler
from django.core.management.base import BaseCommand, CommandError
from django.core.management.utils import handle_extensions
_drive_re = re.compile('^([a-z]):', re.I)
_url_drive_re = re.compile('^([a-z])[:|]', re.I)
class TemplateCommand(BaseCommand):
"""
Copies either a Django application layout template or a Django project
layout template into the specified directory.
:param style: A color style object (see django.core.management.color).
:param app_or_project: The string 'app' or 'project'.
:param name: The name of the application or project.
:param directory: The directory to which the template should be copied.
:param options: The additional variables passed to project or app templates
"""
args = "[name] [optional destination directory]"
option_list = BaseCommand.option_list + (
make_option('--template',
action='store', dest='template',
help='The path or URL to load the template from.'),
make_option('--extension', '-e', dest='extensions',
action='append', default=['py'],
help='The file extension(s) to render (default: "py"). '
'Separate multiple extensions with commas, or use '
'-e multiple times.'),
make_option('--name', '-n', dest='files',
action='append', default=[],
help='The file name(s) to render. '
'Separate multiple file names with commas, or use '
'-n multiple times.')
)
requires_system_checks = False
# Can't import settings during this command, because they haven't
# necessarily been created.
can_import_settings = False
# The supported URL schemes
url_schemes = ['http', 'https', 'ftp']
# Can't perform any active locale changes during this command, because
# setting might not be available at all.
leave_locale_alone = True
def handle(self, app_or_project, name, target=None, **options):
self.app_or_project = app_or_project
self.paths_to_remove = []
self.verbosity = int(options.get('verbosity'))
self.validate_name(name, app_or_project)
# if some directory is given, make sure it's nicely expanded
if target is None:
top_dir = path.join(os.getcwd(), name)
try:
os.makedirs(top_dir)
except OSError as e:
if e.errno == errno.EEXIST:
message = "'%s' already exists" % top_dir
else:
message = e
raise CommandError(message)
else:
top_dir = os.path.abspath(path.expanduser(target))
if not os.path.exists(top_dir):
raise CommandError("Destination directory '%s' does not "
"exist, please create it first." % top_dir)
extensions = tuple(
handle_extensions(options.get('extensions'), ignored=()))
extra_files = []
for file in options.get('files'):
extra_files.extend(map(lambda x: x.strip(), file.split(',')))
if self.verbosity >= 2:
self.stdout.write("Rendering %s template files with "
"extensions: %s\n" %
(app_or_project, ', '.join(extensions)))
self.stdout.write("Rendering %s template files with "
"filenames: %s\n" %
(app_or_project, ', '.join(extra_files)))
base_name = '%s_name' % app_or_project
base_subdir = '%s_template' % app_or_project
base_directory = '%s_directory' % app_or_project
if django.VERSION[-2] != 'final':
docs_version = 'dev'
else:
docs_version = '%d.%d' % django.VERSION[:2]
context = Context(dict(options, **{
base_name: name,
base_directory: top_dir,
'docs_version': docs_version,
}), autoescape=False)
# Setup a stub settings environment for template rendering
from django.conf import settings
if not settings.configured:
settings.configure()
template_dir = self.handle_template(options.get('template'),
base_subdir)
prefix_length = len(template_dir) + 1
for root, dirs, files in os.walk(template_dir):
path_rest = root[prefix_length:]
relative_dir = path_rest.replace(base_name, name)
if relative_dir:
target_dir = path.join(top_dir, relative_dir)
if not path.exists(target_dir):
os.mkdir(target_dir)
for dirname in dirs[:]:
if dirname.startswith('.') or dirname == '__pycache__':
dirs.remove(dirname)
for filename in files:
if filename.endswith(('.pyo', '.pyc', '.py.class')):
# Ignore some files as they cause various breakages.
continue
old_path = path.join(root, filename)
new_path = path.join(top_dir, relative_dir,
filename.replace(base_name, name))
if path.exists(new_path):
raise CommandError("%s already exists, overlaying a "
"project or app into an existing "
"directory won't replace conflicting "
"files" % new_path)
# Only render the Python files, as we don't want to
# accidentally render Django template files
with open(old_path, 'rb') as template_file:
content = template_file.read()
if filename.endswith(extensions) or filename in extra_files:
content = content.decode('utf-8')
template = Template(content)
content = template.render(context)
content = content.encode('utf-8')
with open(new_path, 'wb') as new_file:
new_file.write(content)
if self.verbosity >= 2:
self.stdout.write("Creating %s\n" % new_path)
try:
shutil.copymode(old_path, new_path)
self.make_writeable(new_path)
except OSError:
self.stderr.write(
"Notice: Couldn't set permission bits on %s. You're "
"probably using an uncommon filesystem setup. No "
"problem." % new_path, self.style.NOTICE)
if self.paths_to_remove:
if self.verbosity >= 2:
self.stdout.write("Cleaning up temporary files.\n")
for path_to_remove in self.paths_to_remove:
if path.isfile(path_to_remove):
os.remove(path_to_remove)
else:
shutil.rmtree(path_to_remove,
onerror=rmtree_errorhandler)
def handle_template(self, template, subdir):
"""
Determines where the app or project templates are.
Use django.__path__[0] as the default because we don't
know into which directory Django has been installed.
"""
if template is None:
return path.join(django.__path__[0], 'conf', subdir)
else:
if template.startswith('file://'):
template = template[7:]
expanded_template = path.expanduser(template)
expanded_template = path.normpath(expanded_template)
if path.isdir(expanded_template):
return expanded_template
if self.is_url(template):
# downloads the file and returns the path
absolute_path = self.download(template)
else:
absolute_path = path.abspath(expanded_template)
if path.exists(absolute_path):
return self.extract(absolute_path)
raise CommandError("couldn't handle %s template %s." %
(self.app_or_project, template))
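# Resolution order used above: no --template falls back to the template
# bundled under django/conf/; a file:// prefix or an existing local directory
# is used in place; a recognized URL is downloaded and the result extracted;
# any other existing path is treated as a local archive and extracted.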
def validate_name(self, name, app_or_project):
if name is None:
raise CommandError("you must provide %s %s name" % (
"an" if app_or_project == "app" else "a", app_or_project))
# If it's not a valid directory name.
if not re.search(r'^[_a-zA-Z]\w*$', name):
# Provide a smart error message, depending on the error.
if not re.search(r'^[_a-zA-Z]', name):
message = 'make sure the name begins with a letter or underscore'
else:
message = 'use only numbers, letters and underscores'
raise CommandError("%r is not a valid %s name. Please %s." %
(name, app_or_project, message))
def download(self, url):
"""
Downloads the given URL and returns the file name.
"""
def cleanup_url(url):
tmp = url.rstrip('/')
filename = tmp.split('/')[-1]
if url.endswith('/'):
display_url = tmp + '/'
else:
display_url = url
return filename, display_url
prefix = 'django_%s_template_' % self.app_or_project
tempdir = tempfile.mkdtemp(prefix=prefix, suffix='_download')
self.paths_to_remove.append(tempdir)
filename, display_url = cleanup_url(url)
if self.verbosity >= 2:
self.stdout.write("Downloading %s\n" % display_url)
try:
the_path, info = urlretrieve(url, path.join(tempdir, filename))
except IOError as e:
raise CommandError("couldn't download URL %s to %s: %s" %
(url, filename, e))
used_name = the_path.split('/')[-1]
# Trying to get better name from response headers
content_disposition = info.get('content-disposition')
if content_disposition:
_, params = cgi.parse_header(content_disposition)
guessed_filename = params.get('filename') or used_name
else:
guessed_filename = used_name
# Falling back to content type guessing
ext = self.splitext(guessed_filename)[1]
content_type = info.get('content-type')
if not ext and content_type:
ext = mimetypes.guess_extension(content_type)
if ext:
guessed_filename += ext
# Move the temporary file to a filename that has better
# chances of being recognized by the archive utils
if used_name != guessed_filename:
guessed_path = path.join(tempdir, guessed_filename)
shutil.move(the_path, guessed_path)
return guessed_path
# Giving up
return the_path
def splitext(self, the_path):
"""
Like os.path.splitext, but takes off .tar, too
"""
base, ext = posixpath.splitext(the_path)
if base.lower().endswith('.tar'):
ext = base[-4:] + ext
base = base[:-4]
return base, ext
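# e.g. splitext('project_template.tar.gz') -> ('project_template', '.tar.gz'),
# where plain os.path.splitext would give ('project_template.tar', '.gz').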
def extract(self, filename):
"""
Extracts the given file to a temporary directory and returns
the path of the directory with the extracted content.
"""
prefix = 'django_%s_template_' % self.app_or_project
tempdir = tempfile.mkdtemp(prefix=prefix, suffix='_extract')
self.paths_to_remove.append(tempdir)
if self.verbosity >= 2:
self.stdout.write("Extracting %s\n" % filename)
try:
archive.extract(filename, tempdir)
return tempdir
except (archive.ArchiveException, IOError) as e:
raise CommandError("couldn't extract file %s to %s: %s" %
(filename, tempdir, e))
def is_url(self, template):
"""
Returns True if the name looks like a URL
"""
if ':' not in template:
return False
scheme = template.split(':', 1)[0].lower()
return scheme in self.url_schemes
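# e.g. 'https://example.com/template.tgz' -> True, while a Windows path like
# 'C:\\templates\\app' yields scheme 'c', which is not in url_schemes, so it
# is treated as a local path.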
def make_writeable(self, filename):
"""
Make sure that the file is writeable.
Useful if our source is read-only.
"""
if sys.platform.startswith('java'):
# On Jython there is no os.access()
return
if not os.access(filename, os.W_OK):
st = os.stat(filename)
new_permissions = stat.S_IMODE(st.st_mode) | stat.S_IWUSR
os.chmod(filename, new_permissions)
| apache-2.0 |
alvin319/CarnotKE | jyhton/lib-python/2.7/lib-tk/Tkinter.py | 43 | 157840 | """Wrapper functions for Tcl/Tk.
Tkinter provides classes which allow the display, positioning and
control of widgets. Toplevel widgets are Tk and Toplevel. Other
widgets are Frame, Label, Entry, Text, Canvas, Button, Radiobutton,
Checkbutton, Scale, Listbox, Scrollbar, OptionMenu, Spinbox
LabelFrame and PanedWindow.
Properties of the widgets are specified with keyword arguments.
Keyword arguments have the same name as the corresponding resource
under Tk.
Widgets are positioned with one of the geometry managers Place, Pack
or Grid. These managers can be called with methods place, pack, grid
available in every Widget.
Actions are bound to events by resources (e.g. keyword argument
command) or with the method bind.
Example (Hello, World):
import Tkinter
from Tkconstants import *
tk = Tkinter.Tk()
frame = Tkinter.Frame(tk, relief=RIDGE, borderwidth=2)
frame.pack(fill=BOTH,expand=1)
label = Tkinter.Label(frame, text="Hello, World")
label.pack(fill=X, expand=1)
button = Tkinter.Button(frame,text="Exit",command=tk.destroy)
button.pack(side=BOTTOM)
tk.mainloop()
"""
__version__ = "$Revision: 81008 $"
import sys
if sys.platform == "win32":
# Attempt to configure Tcl/Tk without requiring PATH
import FixTk
import _tkinter # If this fails your Python may not be configured for Tk
tkinter = _tkinter # b/w compat for export
TclError = _tkinter.TclError
from types import *
from Tkconstants import *
import re
wantobjects = 1
TkVersion = float(_tkinter.TK_VERSION)
TclVersion = float(_tkinter.TCL_VERSION)
READABLE = _tkinter.READABLE
WRITABLE = _tkinter.WRITABLE
EXCEPTION = _tkinter.EXCEPTION
# These are not always defined, e.g. not on Win32 with Tk 8.0 :-(
try: _tkinter.createfilehandler
except AttributeError: _tkinter.createfilehandler = None
try: _tkinter.deletefilehandler
except AttributeError: _tkinter.deletefilehandler = None
_magic_re = re.compile(r'([\\{}])')
_space_re = re.compile(r'([\s])')
def _join(value):
"""Internal function."""
return ' '.join(map(_stringify, value))
def _stringify(value):
"""Internal function."""
if isinstance(value, (list, tuple)):
if len(value) == 1:
value = _stringify(value[0])
if value[0] == '{':
value = '{%s}' % value
else:
value = '{%s}' % _join(value)
else:
if isinstance(value, basestring):
value = unicode(value)
else:
value = str(value)
if not value:
value = '{}'
elif _magic_re.search(value):
# add '\' before special characters and spaces
value = _magic_re.sub(r'\\\1', value)
value = _space_re.sub(r'\\\1', value)
elif value[0] == '"' or _space_re.search(value):
value = '{%s}' % value
return value
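# Example (not in the original source): _stringify(('a', 'b c')) returns
# '{a {b c}}' -- the inner string is braced because it contains a space, and
# the pair itself is braced so Tcl sees a well-formed two-element list.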
def _flatten(tuple):
"""Internal function."""
res = ()
for item in tuple:
if type(item) in (TupleType, ListType):
res = res + _flatten(item)
elif item is not None:
res = res + (item,)
return res
try: _flatten = _tkinter._flatten
except AttributeError: pass
def _cnfmerge(cnfs):
"""Internal function."""
if type(cnfs) is DictionaryType:
return cnfs
elif type(cnfs) in (NoneType, StringType):
return cnfs
else:
cnf = {}
for c in _flatten(cnfs):
try:
cnf.update(c)
except (AttributeError, TypeError), msg:
print "_cnfmerge: fallback due to:", msg
for k, v in c.items():
cnf[k] = v
return cnf
try: _cnfmerge = _tkinter._cnfmerge
except AttributeError: pass
class Event:
"""Container for the properties of an event.
Instances of this type are generated if one of the following events occurs:
KeyPress, KeyRelease - for keyboard events
ButtonPress, ButtonRelease, Motion, Enter, Leave, MouseWheel - for mouse events
Visibility, Unmap, Map, Expose, FocusIn, FocusOut, Circulate,
Colormap, Gravity, Reparent, Property, Destroy, Activate,
Deactivate - for window events.
If a callback function for one of these events is registered
using bind, bind_all, bind_class, or tag_bind, the callback is
called with an Event as first argument. It will have the
following attributes (in braces are the event types for which
the attribute is valid):
serial - serial number of event
num - mouse button pressed (ButtonPress, ButtonRelease)
focus - whether the window has the focus (Enter, Leave)
height - height of the exposed window (Configure, Expose)
width - width of the exposed window (Configure, Expose)
keycode - keycode of the pressed key (KeyPress, KeyRelease)
state - state of the event as a number (ButtonPress, ButtonRelease,
Enter, KeyPress, KeyRelease,
Leave, Motion)
state - state as a string (Visibility)
time - when the event occurred
x - x-position of the mouse
y - y-position of the mouse
x_root - x-position of the mouse on the screen
(ButtonPress, ButtonRelease, KeyPress, KeyRelease, Motion)
y_root - y-position of the mouse on the screen
(ButtonPress, ButtonRelease, KeyPress, KeyRelease, Motion)
char - pressed character (KeyPress, KeyRelease)
send_event - see X/Windows documentation
keysym - keysym of the event as a string (KeyPress, KeyRelease)
keysym_num - keysym of the event as a number (KeyPress, KeyRelease)
type - type of the event as a number
widget - widget in which the event occurred
delta - delta of wheel movement (MouseWheel)
"""
pass
_support_default_root = 1
_default_root = None
def NoDefaultRoot():
"""Inhibit setting of default root window.
Call this function to inhibit that the first instance of
Tk is used for windows without an explicit parent window.
"""
global _support_default_root
_support_default_root = 0
global _default_root
_default_root = None
del _default_root
def _tkerror(err):
"""Internal function."""
pass
def _exit(code=0):
"""Internal function. Calling it will raise the exception SystemExit."""
try:
code = int(code)
except ValueError:
pass
raise SystemExit, code
_varnum = 0
class Variable:
"""Class to define value holders for e.g. buttons.
Subclasses StringVar, IntVar, DoubleVar, BooleanVar are specializations
that constrain the type of the value returned from get()."""
_default = ""
def __init__(self, master=None, value=None, name=None):
"""Construct a variable
MASTER can be given as master widget.
VALUE is an optional value (defaults to "")
NAME is an optional Tcl name (defaults to PY_VARnum).
If NAME matches an existing variable and VALUE is omitted
then the existing value is retained.
"""
global _varnum
if not master:
master = _default_root
self._master = master
self._tk = master.tk
if name:
self._name = name
else:
self._name = 'PY_VAR' + repr(_varnum)
_varnum += 1
if value is not None:
self.set(value)
elif not self._tk.call("info", "exists", self._name):
self.set(self._default)
def __del__(self):
"""Unset the variable in Tcl."""
self._tk.globalunsetvar(self._name)
def __str__(self):
"""Return the name of the variable in Tcl."""
return self._name
def set(self, value):
"""Set the variable to VALUE."""
return self._tk.globalsetvar(self._name, value)
def get(self):
"""Return value of variable."""
return self._tk.globalgetvar(self._name)
def trace_variable(self, mode, callback):
"""Define a trace callback for the variable.
MODE is one of "r", "w", "u" for read, write, undefine.
CALLBACK must be a function which is called when
the variable is read, written or undefined.
Return the name of the callback.
"""
cbname = self._master._register(callback)
self._tk.call("trace", "variable", self._name, mode, cbname)
return cbname
trace = trace_variable
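# Usage sketch (standard Tkinter idiom, not part of this file):
#   v = StringVar()
#   cbname = v.trace('w', lambda *args: on_write())  # fire on every write
#   ...
#   v.trace_vdelete('w', cbname)                     # detach when done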
def trace_vdelete(self, mode, cbname):
"""Delete the trace callback for a variable.
MODE is one of "r", "w", "u" for read, write, undefine.
CBNAME is the name of the callback returned from trace_variable or trace.
"""
self._tk.call("trace", "vdelete", self._name, mode, cbname)
self._master.deletecommand(cbname)
def trace_vinfo(self):
"""Return all trace callback information."""
return map(self._tk.split, self._tk.splitlist(
self._tk.call("trace", "vinfo", self._name)))
def __eq__(self, other):
"""Comparison for equality (==).
Note: if the Variable's master matters to behavior
also compare self._master == other._master
"""
return self.__class__.__name__ == other.__class__.__name__ \
and self._name == other._name
class StringVar(Variable):
"""Value holder for strings variables."""
_default = ""
def __init__(self, master=None, value=None, name=None):
"""Construct a string variable.
MASTER can be given as master widget.
VALUE is an optional value (defaults to "")
NAME is an optional Tcl name (defaults to PY_VARnum).
If NAME matches an existing variable and VALUE is omitted
then the existing value is retained.
"""
Variable.__init__(self, master, value, name)
def get(self):
"""Return value of variable as string."""
value = self._tk.globalgetvar(self._name)
if isinstance(value, basestring):
return value
return str(value)
class IntVar(Variable):
"""Value holder for integer variables."""
_default = 0
def __init__(self, master=None, value=None, name=None):
"""Construct an integer variable.
MASTER can be given as master widget.
VALUE is an optional value (defaults to 0)
NAME is an optional Tcl name (defaults to PY_VARnum).
If NAME matches an existing variable and VALUE is omitted
then the existing value is retained.
"""
Variable.__init__(self, master, value, name)
def set(self, value):
"""Set the variable to value, converting booleans to integers."""
if isinstance(value, bool):
value = int(value)
return Variable.set(self, value)
def get(self):
"""Return the value of the variable as an integer."""
return getint(self._tk.globalgetvar(self._name))
class DoubleVar(Variable):
"""Value holder for float variables."""
_default = 0.0
def __init__(self, master=None, value=None, name=None):
"""Construct a float variable.
MASTER can be given as master widget.
VALUE is an optional value (defaults to 0.0)
NAME is an optional Tcl name (defaults to PY_VARnum).
If NAME matches an existing variable and VALUE is omitted
then the existing value is retained.
"""
Variable.__init__(self, master, value, name)
def get(self):
"""Return the value of the variable as a float."""
return getdouble(self._tk.globalgetvar(self._name))
class BooleanVar(Variable):
"""Value holder for boolean variables."""
_default = False
def __init__(self, master=None, value=None, name=None):
"""Construct a boolean variable.
MASTER can be given as master widget.
VALUE is an optional value (defaults to False)
NAME is an optional Tcl name (defaults to PY_VARnum).
If NAME matches an existing variable and VALUE is omitted
then the existing value is retained.
"""
Variable.__init__(self, master, value, name)
def get(self):
"""Return the value of the variable as a bool."""
return self._tk.getboolean(self._tk.globalgetvar(self._name))
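# Illustrative sketch (not part of the library source): the typed Variable
# subclasses coerce the Tcl value on get(). Assumes an existing Tk root.
#
#   root = Tk()
#   i = IntVar(master=root, value=3)
#   d = DoubleVar(master=root, value=1.5)
#   b = BooleanVar(master=root, value=True)
#   print i.get() + 1   # 4, an int
#   print d.get() * 2   # 3.0, a float
#   print b.get()       # a boolean; IntVar.set(True) would store 1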
def mainloop(n=0):
"""Run the main loop of Tcl."""
_default_root.tk.mainloop(n)
getint = int
getdouble = float
def getboolean(s):
"""Convert true and false to integer values 1 and 0."""
return _default_root.tk.getboolean(s)
# Methods defined on both toplevel and interior widgets
class Misc:
"""Internal class.
Base class which defines methods common for interior widgets."""
# XXX font command?
_tclCommands = None
def destroy(self):
"""Internal function.
Delete all Tcl commands created for
this widget in the Tcl interpreter."""
if self._tclCommands is not None:
for name in self._tclCommands:
#print '- Tkinter: deleted command', name
self.tk.deletecommand(name)
self._tclCommands = None
def deletecommand(self, name):
"""Internal function.
Delete the Tcl command provided in NAME."""
#print '- Tkinter: deleted command', name
self.tk.deletecommand(name)
try:
self._tclCommands.remove(name)
except ValueError:
pass
def tk_strictMotif(self, boolean=None):
"""Set Tcl internal variable, whether the look and feel
should adhere to Motif.
A parameter of 1 means adhere to Motif (e.g. no color
change if mouse passes over slider).
Returns the set value."""
return self.tk.getboolean(self.tk.call(
'set', 'tk_strictMotif', boolean))
def tk_bisque(self):
"""Change the color scheme to light brown as used in Tk 3.6 and before."""
self.tk.call('tk_bisque')
def tk_setPalette(self, *args, **kw):
"""Set a new color scheme for all widget elements.
A single color as argument will cause all colors of Tk
widget elements to be derived from this.
Alternatively several keyword parameters and its associated
colors can be given. The following keywords are valid:
activeBackground, foreground, selectColor,
activeForeground, highlightBackground, selectBackground,
background, highlightColor, selectForeground,
disabledForeground, insertBackground, troughColor."""
self.tk.call(('tk_setPalette',)
+ _flatten(args) + _flatten(kw.items()))
def tk_menuBar(self, *args):
"""Do not use. Needed in Tk 3.6 and earlier."""
pass # obsolete since Tk 4.0
def wait_variable(self, name='PY_VAR'):
"""Wait until the variable is modified.
A parameter of type IntVar, StringVar, DoubleVar or
BooleanVar must be given."""
self.tk.call('tkwait', 'variable', name)
waitvar = wait_variable # XXX b/w compat
def wait_window(self, window=None):
"""Wait until a WIDGET is destroyed.
If no parameter is given self is used."""
if window is None:
window = self
self.tk.call('tkwait', 'window', window._w)
def wait_visibility(self, window=None):
"""Wait until the visibility of a WIDGET changes
(e.g. it appears).
If no parameter is given self is used."""
if window is None:
window = self
self.tk.call('tkwait', 'visibility', window._w)
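# Illustrative sketch (not part of the library source): the usual modal-dialog
# pattern built from transient, wait_visibility, grab_set and wait_window.
# The names root and dialog are hypothetical.
#
#   dialog = Toplevel(root)
#   dialog.transient(root)
#   root.wait_visibility(dialog)   # the grab needs a mapped window
#   dialog.grab_set()              # route all events to the dialog
#   root.wait_window(dialog)       # blocks until the dialog is destroyed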
def setvar(self, name='PY_VAR', value='1'):
"""Set Tcl variable NAME to VALUE."""
self.tk.setvar(name, value)
def getvar(self, name='PY_VAR'):
"""Return value of Tcl variable NAME."""
return self.tk.getvar(name)
getint = int
getdouble = float
def getboolean(self, s):
"""Return a boolean value for Tcl boolean values true and false given as parameter."""
return self.tk.getboolean(s)
def focus_set(self):
"""Direct input focus to this widget.
If the application currently does not have the focus
this widget will get the focus if the application gets
the focus through the window manager."""
self.tk.call('focus', self._w)
focus = focus_set # XXX b/w compat?
def focus_force(self):
"""Direct input focus to this widget even if the
application does not have the focus. Use with
caution!"""
self.tk.call('focus', '-force', self._w)
def focus_get(self):
"""Return the widget which has currently the focus in the
application.
Use focus_displayof to allow working with several
displays. Return None if application does not have
the focus."""
name = self.tk.call('focus')
if name == 'none' or not name: return None
return self._nametowidget(name)
def focus_displayof(self):
"""Return the widget which has currently the focus on the
display where this widget is located.
Return None if the application does not have the focus."""
name = self.tk.call('focus', '-displayof', self._w)
if name == 'none' or not name: return None
return self._nametowidget(name)
def focus_lastfor(self):
"""Return the widget which would have the focus if top level
for this widget gets the focus from the window manager."""
name = self.tk.call('focus', '-lastfor', self._w)
if name == 'none' or not name: return None
return self._nametowidget(name)
def tk_focusFollowsMouse(self):
"""The widget under mouse will get automatically focus. Can not
be disabled easily."""
self.tk.call('tk_focusFollowsMouse')
def tk_focusNext(self):
"""Return the next widget in the focus order which follows
widget which has currently the focus.
The focus order first goes to the next child, then to
the children of the child recursively and then to the
next sibling which is higher in the stacking order. A
widget is omitted if it has the takefocus resource set
to 0."""
name = self.tk.call('tk_focusNext', self._w)
if not name: return None
return self._nametowidget(name)
def tk_focusPrev(self):
"""Return previous widget in the focus order. See tk_focusNext for details."""
name = self.tk.call('tk_focusPrev', self._w)
if not name: return None
return self._nametowidget(name)
def after(self, ms, func=None, *args):
"""Call function once after given time.
MS specifies the time in milliseconds. FUNC gives the
function which shall be called. Additional parameters
are given as parameters to the function call. Return
identifier to cancel scheduling with after_cancel."""
if not func:
# I'd rather use time.sleep(ms*0.001)
self.tk.call('after', ms)
else:
def callit():
try:
func(*args)
finally:
try:
self.deletecommand(name)
except TclError:
pass
name = self._register(callit)
return self.tk.call('after', ms, name)
def after_idle(self, func, *args):
"""Call FUNC once if the Tcl main loop has no event to
process.
Return an identifier to cancel the scheduling with
after_cancel."""
return self.after('idle', func, *args)
def after_cancel(self, id):
"""Cancel scheduling of function identified with ID.
Identifier returned by after or after_idle must be
given as first parameter."""
try:
data = self.tk.call('after', 'info', id)
# In Tk 8.3, splitlist returns: (script, type)
# In Tk 8.4, splitlist may return (script, type) or (script,)
script = self.tk.splitlist(data)[0]
self.deletecommand(script)
except TclError:
pass
self.tk.call('after', 'cancel', id)
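# Illustrative sketch (not part of the library source): periodic polling with
# after/after_cancel. The poll function and the 200 ms interval are arbitrary.
#
#   def poll():
#       check_queue()                      # check_queue is hypothetical
#       poll.id = root.after(200, poll)    # reschedule itself
#   poll.id = root.after(200, poll)
#   ...
#   root.after_cancel(poll.id)             # stop the polling loop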
def bell(self, displayof=0):
"""Ring a display's bell."""
self.tk.call(('bell',) + self._displayof(displayof))
# Clipboard handling:
def clipboard_get(self, **kw):
"""Retrieve data from the clipboard on window's display.
The window keyword defaults to the root window of the Tkinter
application.
The type keyword specifies the form in which the data is
to be returned and should be an atom name such as STRING
or FILE_NAME. Type defaults to STRING, except on X11, where the default
is to try UTF8_STRING and fall back to STRING.
This command is equivalent to:
selection_get(CLIPBOARD)
"""
if 'type' not in kw and self._windowingsystem == 'x11':
try:
kw['type'] = 'UTF8_STRING'
return self.tk.call(('clipboard', 'get') + self._options(kw))
except TclError:
del kw['type']
return self.tk.call(('clipboard', 'get') + self._options(kw))
def clipboard_clear(self, **kw):
"""Clear the data in the Tk clipboard.
A widget specified for the optional displayof keyword
argument specifies the target display."""
if 'displayof' not in kw: kw['displayof'] = self._w
self.tk.call(('clipboard', 'clear') + self._options(kw))
def clipboard_append(self, string, **kw):
"""Append STRING to the Tk clipboard.
A widget specified at the optional displayof keyword
argument specifies the target display. The clipboard
can be retrieved with selection_get."""
if 'displayof' not in kw: kw['displayof'] = self._w
self.tk.call(('clipboard', 'append') + self._options(kw)
+ ('--', string))
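# Illustrative sketch (not part of the library source): replacing the
# clipboard contents and reading them back. Assumes an existing Tk root.
#
#   root.clipboard_clear()
#   root.clipboard_append('copied text')
#   print root.clipboard_get()             # 'copied text'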
# XXX grab current w/o window argument
def grab_current(self):
"""Return widget which has currently the grab in this application
or None."""
name = self.tk.call('grab', 'current', self._w)
if not name: return None
return self._nametowidget(name)
def grab_release(self):
"""Release grab for this widget if currently set."""
self.tk.call('grab', 'release', self._w)
def grab_set(self):
"""Set grab for this widget.
A grab directs all events to this and descendant
widgets in the application."""
self.tk.call('grab', 'set', self._w)
def grab_set_global(self):
"""Set global grab for this widget.
A global grab directs all events to this and
descendant widgets on the display. Use with caution -
other applications do not get events anymore."""
self.tk.call('grab', 'set', '-global', self._w)
def grab_status(self):
"""Return None, "local" or "global" if this widget has
no, a local or a global grab."""
status = self.tk.call('grab', 'status', self._w)
if status == 'none': status = None
return status
def option_add(self, pattern, value, priority = None):
"""Set a VALUE (second parameter) for an option
PATTERN (first parameter).
An optional third parameter gives the numeric priority
(defaults to 80)."""
self.tk.call('option', 'add', pattern, value, priority)
def option_clear(self):
"""Clear the option database.
It will be reloaded if option_add is called."""
self.tk.call('option', 'clear')
def option_get(self, name, className):
"""Return the value for an option NAME for this widget
with CLASSNAME.
Values with higher priority override lower values."""
return self.tk.call('option', 'get', self._w, name, className)
def option_readfile(self, fileName, priority = None):
"""Read file FILENAME into the option database.
An optional second parameter gives the numeric
priority."""
self.tk.call('option', 'readfile', fileName, priority)
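# Illustrative sketch (not part of the library source): seeding the option
# database so later widgets pick up defaults. Pattern and priority are
# arbitrary choices.
#
#   root.option_add('*Button.background', 'grey80', 60)
#   b = Button(root, text='uses the option database default')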
def selection_clear(self, **kw):
"""Clear the current X selection."""
if 'displayof' not in kw: kw['displayof'] = self._w
self.tk.call(('selection', 'clear') + self._options(kw))
def selection_get(self, **kw):
"""Return the contents of the current X selection.
A keyword parameter selection specifies the name of
the selection and defaults to PRIMARY. A keyword
parameter displayof specifies a widget on the display
to use. A keyword parameter type specifies the form of data to be
fetched, defaulting to STRING except on X11, where UTF8_STRING is tried
before STRING."""
if 'displayof' not in kw: kw['displayof'] = self._w
if 'type' not in kw and self._windowingsystem == 'x11':
try:
kw['type'] = 'UTF8_STRING'
return self.tk.call(('selection', 'get') + self._options(kw))
except TclError:
del kw['type']
return self.tk.call(('selection', 'get') + self._options(kw))
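# Illustrative sketch (not part of the library source): reading the PRIMARY
# selection, with a fallback when nothing is selected.
#
#   try:
#       text = root.selection_get()        # PRIMARY selection by default
#   except TclError:
#       text = ''                          # no current selection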
def selection_handle(self, command, **kw):
"""Specify a function COMMAND to call if the X
selection owned by this widget is queried by another
application.
This function must return the contents of the
selection. The function will be called with the
arguments OFFSET and LENGTH which allows the chunking
of very long selections. The following keyword
parameters can be provided:
selection - name of the selection (default PRIMARY),
type - type of the selection (e.g. STRING, FILE_NAME)."""
name = self._register(command)
self.tk.call(('selection', 'handle') + self._options(kw)
+ (self._w, name))
def selection_own(self, **kw):
"""Become owner of X selection.
A keyword parameter selection specifies the name of
the selection (default PRIMARY)."""
self.tk.call(('selection', 'own') +
self._options(kw) + (self._w,))
def selection_own_get(self, **kw):
"""Return owner of X selection.
The following keyword parameter can
be provided:
selection - name of the selection (default PRIMARY),
type - type of the selection (e.g. STRING, FILE_NAME)."""
if 'displayof' not in kw: kw['displayof'] = self._w
name = self.tk.call(('selection', 'own') + self._options(kw))
if not name: return None
return self._nametowidget(name)
def send(self, interp, cmd, *args):
"""Send Tcl command CMD to different interpreter INTERP to be executed."""
return self.tk.call(('send', interp, cmd) + args)
def lower(self, belowThis=None):
"""Lower this widget in the stacking order."""
self.tk.call('lower', self._w, belowThis)
def tkraise(self, aboveThis=None):
"""Raise this widget in the stacking order."""
self.tk.call('raise', self._w, aboveThis)
lift = tkraise
def colormodel(self, value=None):
"""Useless. Not implemented in Tk."""
return self.tk.call('tk', 'colormodel', self._w, value)
def winfo_atom(self, name, displayof=0):
"""Return integer which represents atom NAME."""
args = ('winfo', 'atom') + self._displayof(displayof) + (name,)
return getint(self.tk.call(args))
def winfo_atomname(self, id, displayof=0):
"""Return name of atom with identifier ID."""
args = ('winfo', 'atomname') \
+ self._displayof(displayof) + (id,)
return self.tk.call(args)
def winfo_cells(self):
"""Return number of cells in the colormap for this widget."""
return getint(
self.tk.call('winfo', 'cells', self._w))
def winfo_children(self):
"""Return a list of all widgets which are children of this widget."""
result = []
for child in self.tk.splitlist(
self.tk.call('winfo', 'children', self._w)):
try:
# Tcl sometimes returns extra windows, e.g. for
# menus; those need to be skipped
result.append(self._nametowidget(child))
except KeyError:
pass
return result
def winfo_class(self):
"""Return window class name of this widget."""
return self.tk.call('winfo', 'class', self._w)
def winfo_colormapfull(self):
"""Return true if at the last color request the colormap was full."""
return self.tk.getboolean(
self.tk.call('winfo', 'colormapfull', self._w))
def winfo_containing(self, rootX, rootY, displayof=0):
"""Return the widget which is at the root coordinates ROOTX, ROOTY."""
args = ('winfo', 'containing') \
+ self._displayof(displayof) + (rootX, rootY)
name = self.tk.call(args)
if not name: return None
return self._nametowidget(name)
def winfo_depth(self):
"""Return the number of bits per pixel."""
return getint(self.tk.call('winfo', 'depth', self._w))
def winfo_exists(self):
"""Return true if this widget exists."""
return getint(
self.tk.call('winfo', 'exists', self._w))
def winfo_fpixels(self, number):
"""Return the number of pixels for the given distance NUMBER
(e.g. "3c") as float."""
return getdouble(self.tk.call(
'winfo', 'fpixels', self._w, number))
def winfo_geometry(self):
"""Return geometry string for this widget in the form "widthxheight+X+Y"."""
return self.tk.call('winfo', 'geometry', self._w)
def winfo_height(self):
"""Return height of this widget."""
return getint(
self.tk.call('winfo', 'height', self._w))
def winfo_id(self):
"""Return identifier ID for this widget."""
return self.tk.getint(
self.tk.call('winfo', 'id', self._w))
def winfo_interps(self, displayof=0):
"""Return the name of all Tcl interpreters for this display."""
args = ('winfo', 'interps') + self._displayof(displayof)
return self.tk.splitlist(self.tk.call(args))
def winfo_ismapped(self):
"""Return true if this widget is mapped."""
return getint(
self.tk.call('winfo', 'ismapped', self._w))
def winfo_manager(self):
"""Return the window mananger name for this widget."""
return self.tk.call('winfo', 'manager', self._w)
def winfo_name(self):
"""Return the name of this widget."""
return self.tk.call('winfo', 'name', self._w)
def winfo_parent(self):
"""Return the name of the parent of this widget."""
return self.tk.call('winfo', 'parent', self._w)
def winfo_pathname(self, id, displayof=0):
"""Return the pathname of the widget given by ID."""
args = ('winfo', 'pathname') \
+ self._displayof(displayof) + (id,)
return self.tk.call(args)
def winfo_pixels(self, number):
"""Rounded integer value of winfo_fpixels."""
return getint(
self.tk.call('winfo', 'pixels', self._w, number))
def winfo_pointerx(self):
"""Return the x coordinate of the pointer on the root window."""
return getint(
self.tk.call('winfo', 'pointerx', self._w))
def winfo_pointerxy(self):
"""Return a tuple of x and y coordinates of the pointer on the root window."""
return self._getints(
self.tk.call('winfo', 'pointerxy', self._w))
def winfo_pointery(self):
"""Return the y coordinate of the pointer on the root window."""
return getint(
self.tk.call('winfo', 'pointery', self._w))
def winfo_reqheight(self):
"""Return requested height of this widget."""
return getint(
self.tk.call('winfo', 'reqheight', self._w))
def winfo_reqwidth(self):
"""Return requested width of this widget."""
return getint(
self.tk.call('winfo', 'reqwidth', self._w))
def winfo_rgb(self, color):
"""Return tuple of decimal values for red, green, blue for
COLOR in this widget."""
return self._getints(
self.tk.call('winfo', 'rgb', self._w, color))
def winfo_rootx(self):
"""Return x coordinate of upper left corner of this widget on the
root window."""
return getint(
self.tk.call('winfo', 'rootx', self._w))
def winfo_rooty(self):
"""Return y coordinate of upper left corner of this widget on the
root window."""
return getint(
self.tk.call('winfo', 'rooty', self._w))
def winfo_screen(self):
"""Return the screen name of this widget."""
return self.tk.call('winfo', 'screen', self._w)
def winfo_screencells(self):
"""Return the number of the cells in the colormap of the screen
of this widget."""
return getint(
self.tk.call('winfo', 'screencells', self._w))
def winfo_screendepth(self):
"""Return the number of bits per pixel of the root window of the
screen of this widget."""
return getint(
self.tk.call('winfo', 'screendepth', self._w))
def winfo_screenheight(self):
"""Return the number of pixels of the height of the screen of this widget
in pixel."""
return getint(
self.tk.call('winfo', 'screenheight', self._w))
def winfo_screenmmheight(self):
"""Return the number of pixels of the height of the screen of
this widget in mm."""
return getint(
self.tk.call('winfo', 'screenmmheight', self._w))
def winfo_screenmmwidth(self):
"""Return the number of pixels of the width of the screen of
this widget in mm."""
return getint(
self.tk.call('winfo', 'screenmmwidth', self._w))
def winfo_screenvisual(self):
"""Return one of the strings directcolor, grayscale, pseudocolor,
staticcolor, staticgray, or truecolor for the default
colormodel of this screen."""
return self.tk.call('winfo', 'screenvisual', self._w)
def winfo_screenwidth(self):
"""Return the number of pixels of the width of the screen of
this widget in pixel."""
return getint(
self.tk.call('winfo', 'screenwidth', self._w))
def winfo_server(self):
"""Return information of the X-Server of the screen of this widget in
the form "XmajorRminor vendor vendorVersion"."""
return self.tk.call('winfo', 'server', self._w)
def winfo_toplevel(self):
"""Return the toplevel widget of this widget."""
return self._nametowidget(self.tk.call(
'winfo', 'toplevel', self._w))
def winfo_viewable(self):
"""Return true if the widget and all its higher ancestors are mapped."""
return getint(
self.tk.call('winfo', 'viewable', self._w))
def winfo_visual(self):
"""Return one of the strings directcolor, grayscale, pseudocolor,
staticcolor, staticgray, or truecolor for the
colormodel of this widget."""
return self.tk.call('winfo', 'visual', self._w)
def winfo_visualid(self):
"""Return the X identifier for the visual for this widget."""
return self.tk.call('winfo', 'visualid', self._w)
def winfo_visualsavailable(self, includeids=0):
"""Return a list of all visuals available for the screen
of this widget.
Each item in the list consists of a visual name (see winfo_visual), a
depth and if INCLUDEIDS=1 is given also the X identifier."""
data = self.tk.split(
self.tk.call('winfo', 'visualsavailable', self._w,
includeids and 'includeids' or None))
if type(data) is StringType:
data = [self.tk.split(data)]
return map(self.__winfo_parseitem, data)
def __winfo_parseitem(self, t):
"""Internal function."""
return t[:1] + tuple(map(self.__winfo_getint, t[1:]))
def __winfo_getint(self, x):
"""Internal function."""
return int(x, 0)
def winfo_vrootheight(self):
"""Return the height of the virtual root window associated with this
widget in pixels. If there is no virtual root window return the
height of the screen."""
return getint(
self.tk.call('winfo', 'vrootheight', self._w))
def winfo_vrootwidth(self):
"""Return the width of the virtual root window associated with this
widget in pixel. If there is no virtual root window return the
width of the screen."""
return getint(
self.tk.call('winfo', 'vrootwidth', self._w))
def winfo_vrootx(self):
"""Return the x offset of the virtual root relative to the root
window of the screen of this widget."""
return getint(
self.tk.call('winfo', 'vrootx', self._w))
def winfo_vrooty(self):
"""Return the y offset of the virtual root relative to the root
window of the screen of this widget."""
return getint(
self.tk.call('winfo', 'vrooty', self._w))
def winfo_width(self):
"""Return the width of this widget."""
return getint(
self.tk.call('winfo', 'width', self._w))
def winfo_x(self):
"""Return the x coordinate of the upper left corner of this widget
in the parent."""
return getint(
self.tk.call('winfo', 'x', self._w))
def winfo_y(self):
"""Return the y coordinate of the upper left corner of this widget
in the parent."""
return getint(
self.tk.call('winfo', 'y', self._w))
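# Illustrative sketch (not part of the library source): the winfo_* queries
# only report real sizes once the geometry manager has run, so flush pending
# layout work first.
#
#   root.update_idletasks()
#   print root.winfo_width(), root.winfo_height()
#   print root.winfo_geometry()            # e.g. '200x200+100+100'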
def update(self):
"""Enter event loop until all pending events have been processed by Tcl."""
self.tk.call('update')
def update_idletasks(self):
"""Enter event loop until all idle callbacks have been called. This
will update the display of windows but not process events caused by
the user."""
self.tk.call('update', 'idletasks')
def bindtags(self, tagList=None):
"""Set or get the list of bindtags for this widget.
With no argument return the list of all bindtags associated with
this widget. With a list of strings as argument the bindtags are
set to this list. The bindtags determine in which order events are
processed (see bind)."""
if tagList is None:
return self.tk.splitlist(
self.tk.call('bindtags', self._w))
else:
self.tk.call('bindtags', self._w, tagList)
def _bind(self, what, sequence, func, add, needcleanup=1):
"""Internal function."""
if type(func) is StringType:
self.tk.call(what + (sequence, func))
elif func:
funcid = self._register(func, self._substitute,
needcleanup)
cmd = ('%sif {"[%s %s]" == "break"} break\n'
%
(add and '+' or '',
funcid, self._subst_format_str))
self.tk.call(what + (sequence, cmd))
return funcid
elif sequence:
return self.tk.call(what + (sequence,))
else:
return self.tk.splitlist(self.tk.call(what))
def bind(self, sequence=None, func=None, add=None):
"""Bind to this widget at event SEQUENCE a call to function FUNC.
SEQUENCE is a string of concatenated event
patterns. An event pattern is of the form
<MODIFIER-MODIFIER-TYPE-DETAIL> where MODIFIER is one
of Control, Mod2, M2, Shift, Mod3, M3, Lock, Mod4, M4,
Button1, B1, Mod5, M5, Button2, B2, Meta, M, Button3,
B3, Alt, Button4, B4, Double, Button5, B5, Triple,
Mod1, M1. TYPE is one of Activate, Enter, Map,
ButtonPress, Button, Expose, Motion, ButtonRelease,
FocusIn, MouseWheel, Circulate, FocusOut, Property,
Colormap, Gravity, Reparent, Configure, KeyPress, Key,
Unmap, Deactivate, KeyRelease, Visibility, Destroy,
Leave and DETAIL is the button number for ButtonPress,
ButtonRelease and DETAIL is the Keysym for KeyPress and
KeyRelease. Examples are
<Control-Button-1> for pressing Control and mouse button 1 or
<Alt-A> for pressing A and the Alt key (KeyPress can be omitted).
An event pattern can also be a virtual event of the form
<<AString>> where AString can be arbitrary. This
event can be generated by event_generate.
If events are concatenated they must appear shortly
after each other.
FUNC will be called if the event sequence occurs with an
instance of Event as argument. If the return value of FUNC is
"break" no further bound function is invoked.
An additional boolean parameter ADD specifies whether FUNC will
be called additionally to the other bound function or whether
it will replace the previous function.
Bind will return an identifier to allow deletion of the bound function with
unbind without memory leak.
If FUNC or SEQUENCE is omitted the bound function or list
of bound events are returned."""
return self._bind(('bind', self._w), sequence, func, add)
def unbind(self, sequence, funcid=None):
"""Unbind for this widget for event SEQUENCE the
function identified with FUNCID."""
self.tk.call('bind', self._w, sequence, '')
if funcid:
self.deletecommand(funcid)
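# Illustrative sketch (not part of the library source): binding a handler and
# later removing it. The widget and handler names are hypothetical.
#
#   def on_click(event):
#       print 'clicked at', event.x, event.y
#   funcid = widget.bind('<Button-1>', on_click)
#   ...
#   widget.unbind('<Button-1>', funcid)    # also deletes the Tcl command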
def bind_all(self, sequence=None, func=None, add=None):
"""Bind to all widgets at an event SEQUENCE a call to function FUNC.
An additional boolean parameter ADD specifies whether FUNC will
be called additionally to the other bound function or whether
it will replace the previous function. See bind for the return value."""
return self._bind(('bind', 'all'), sequence, func, add, 0)
def unbind_all(self, sequence):
"""Unbind for all widgets for event SEQUENCE all functions."""
self.tk.call('bind', 'all' , sequence, '')
def bind_class(self, className, sequence=None, func=None, add=None):
"""Bind to widgets with bindtag CLASSNAME at event
SEQUENCE a call of function FUNC. An additional
boolean parameter ADD specifies whether FUNC will be
called additionally to the other bound function or
whether it will replace the previous function. See bind for
the return value."""
return self._bind(('bind', className), sequence, func, add, 0)
def unbind_class(self, className, sequence):
"""Unbind for a all widgets with bindtag CLASSNAME for event SEQUENCE
all functions."""
self.tk.call('bind', className , sequence, '')
def mainloop(self, n=0):
"""Call the mainloop of Tk."""
self.tk.mainloop(n)
def quit(self):
"""Quit the Tcl interpreter. All widgets will be destroyed."""
self.tk.quit()
def _getints(self, string):
"""Internal function."""
if string:
return tuple(map(getint, self.tk.splitlist(string)))
def _getdoubles(self, string):
"""Internal function."""
if string:
return tuple(map(getdouble, self.tk.splitlist(string)))
def _getboolean(self, string):
"""Internal function."""
if string:
return self.tk.getboolean(string)
def _displayof(self, displayof):
"""Internal function."""
if displayof:
return ('-displayof', displayof)
if displayof is None:
return ('-displayof', self._w)
return ()
@property
def _windowingsystem(self):
"""Internal function."""
try:
return self._root()._windowingsystem_cached
except AttributeError:
ws = self._root()._windowingsystem_cached = \
self.tk.call('tk', 'windowingsystem')
return ws
def _options(self, cnf, kw = None):
"""Internal function."""
if kw:
cnf = _cnfmerge((cnf, kw))
else:
cnf = _cnfmerge(cnf)
res = ()
for k, v in cnf.items():
if v is not None:
if k[-1] == '_': k = k[:-1]
if hasattr(v, '__call__'):
v = self._register(v)
elif isinstance(v, (tuple, list)):
nv = []
for item in v:
if not isinstance(item, (basestring, int)):
break
elif isinstance(item, int):
nv.append('%d' % item)
else:
# format it to proper Tcl code if it contains space
nv.append(_stringify(item))
else:
v = ' '.join(nv)
res = res + ('-'+k, v)
return res
def nametowidget(self, name):
"""Return the Tkinter instance of a widget identified by
its Tcl name NAME."""
name = str(name).split('.')
w = self
if not name[0]:
w = w._root()
name = name[1:]
for n in name:
if not n:
break
w = w.children[n]
return w
_nametowidget = nametowidget
def _register(self, func, subst=None, needcleanup=1):
"""Return a newly created Tcl function. If this
function is called, the Python function FUNC will
be executed. An optional function SUBST can
be given which will be executed before FUNC."""
f = CallWrapper(func, subst, self).__call__
name = repr(id(f))
try:
func = func.im_func
except AttributeError:
pass
try:
name = name + func.__name__
except AttributeError:
pass
self.tk.createcommand(name, f)
if needcleanup:
if self._tclCommands is None:
self._tclCommands = []
self._tclCommands.append(name)
return name
register = _register
def _root(self):
"""Internal function."""
w = self
while w.master: w = w.master
return w
_subst_format = ('%#', '%b', '%f', '%h', '%k',
'%s', '%t', '%w', '%x', '%y',
'%A', '%E', '%K', '%N', '%W', '%T', '%X', '%Y', '%D')
_subst_format_str = " ".join(_subst_format)
def _substitute(self, *args):
"""Internal function."""
if len(args) != len(self._subst_format): return args
getboolean = self.tk.getboolean
getint = int
def getint_event(s):
"""Tk changed behavior in 8.4.2, returning "??" rather more often."""
try:
return int(s)
except ValueError:
return s
nsign, b, f, h, k, s, t, w, x, y, A, E, K, N, W, T, X, Y, D = args
# Missing: (a, c, d, m, o, v, B, R)
e = Event()
# serial field: valid for all events
# number of button: ButtonPress and ButtonRelease events only
# height field: Configure, ConfigureRequest, Create,
# ResizeRequest, and Expose events only
# keycode field: KeyPress and KeyRelease events only
# time field: "valid for events that contain a time field"
# width field: Configure, ConfigureRequest, Create, ResizeRequest,
# and Expose events only
# x field: "valid for events that contain a x field"
# y field: "valid for events that contain a y field"
# keysym as decimal: KeyPress and KeyRelease events only
# x_root, y_root fields: ButtonPress, ButtonRelease, KeyPress,
# KeyRelease,and Motion events
e.serial = getint(nsign)
e.num = getint_event(b)
try: e.focus = getboolean(f)
except TclError: pass
e.height = getint_event(h)
e.keycode = getint_event(k)
e.state = getint_event(s)
e.time = getint_event(t)
e.width = getint_event(w)
e.x = getint_event(x)
e.y = getint_event(y)
e.char = A
try: e.send_event = getboolean(E)
except TclError: pass
e.keysym = K
e.keysym_num = getint_event(N)
e.type = T
try:
e.widget = self._nametowidget(W)
except KeyError:
e.widget = W
e.x_root = getint_event(X)
e.y_root = getint_event(Y)
try:
e.delta = getint(D)
except ValueError:
e.delta = 0
return (e,)
def _report_exception(self):
"""Internal function."""
import sys
exc, val, tb = sys.exc_type, sys.exc_value, sys.exc_traceback
root = self._root()
root.report_callback_exception(exc, val, tb)
def _configure(self, cmd, cnf, kw):
"""Internal function."""
if kw:
cnf = _cnfmerge((cnf, kw))
elif cnf:
cnf = _cnfmerge(cnf)
if cnf is None:
cnf = {}
for x in self.tk.split(
self.tk.call(_flatten((self._w, cmd)))):
cnf[x[0][1:]] = (x[0][1:],) + x[1:]
return cnf
if type(cnf) is StringType:
x = self.tk.split(
self.tk.call(_flatten((self._w, cmd, '-'+cnf))))
return (x[0][1:],) + x[1:]
self.tk.call(_flatten((self._w, cmd)) + self._options(cnf))
# These used to be defined in Widget:
def configure(self, cnf=None, **kw):
"""Configure resources of a widget.
The values for resources are specified as keyword
arguments. To get an overview about
the allowed keyword arguments call the method keys.
"""
return self._configure('configure', cnf, kw)
config = configure
def cget(self, key):
"""Return the resource value for a KEY given as string."""
return self.tk.call(self._w, 'cget', '-' + key)
__getitem__ = cget
def __setitem__(self, key, value):
self.configure({key: value})
def __contains__(self, key):
raise TypeError("Tkinter objects don't support 'in' tests.")
def keys(self):
"""Return a list of all resource names of this widget."""
return map(lambda x: x[0][1:],
self.tk.split(self.tk.call(self._w, 'configure')))
def __str__(self):
"""Return the window path name of this widget."""
return self._w
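# Illustrative sketch (not part of the library source): equivalent ways to
# read and write widget resources. The button name is hypothetical.
#
#   b = Button(root, text='hi')
#   b.configure(text='bye')     # keyword form
#   b['text'] = 'hi again'      # dictionary form, via __setitem__
#   print b.cget('text')        # read back; b['text'] works as well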
# Pack methods that apply to the master
_noarg_ = ['_noarg_']
def pack_propagate(self, flag=_noarg_):
"""Set or get the status for propagation of geometry information.
A boolean argument specifies whether the geometry information
of the slaves will determine the size of this widget. If no argument
is given the current setting will be returned.
"""
if flag is Misc._noarg_:
return self._getboolean(self.tk.call(
'pack', 'propagate', self._w))
else:
self.tk.call('pack', 'propagate', self._w, flag)
propagate = pack_propagate
def pack_slaves(self):
"""Return a list of all slaves of this widget
in its packing order."""
return map(self._nametowidget,
self.tk.splitlist(
self.tk.call('pack', 'slaves', self._w)))
slaves = pack_slaves
# Place method that applies to the master
def place_slaves(self):
"""Return a list of all slaves of this widget
in its packing order."""
return map(self._nametowidget,
self.tk.splitlist(
self.tk.call(
'place', 'slaves', self._w)))
# Grid methods that apply to the master
def grid_bbox(self, column=None, row=None, col2=None, row2=None):
"""Return a tuple of integer coordinates for the bounding
box of this widget controlled by the geometry manager grid.
If COLUMN, ROW is given the bounding box applies from
the cell with row and column 0 to the specified
cell. If COL2 and ROW2 are given the bounding box
covers the region from cell COLUMN, ROW to cell COL2, ROW2.
The returned integers specify the offset of the upper left
corner in the master widget and the width and height.
"""
args = ('grid', 'bbox', self._w)
if column is not None and row is not None:
args = args + (column, row)
if col2 is not None and row2 is not None:
args = args + (col2, row2)
return self._getints(self.tk.call(*args)) or None
bbox = grid_bbox
def _grid_configure(self, command, index, cnf, kw):
"""Internal function."""
if type(cnf) is StringType and not kw:
if cnf[-1:] == '_':
cnf = cnf[:-1]
if cnf[:1] != '-':
cnf = '-'+cnf
options = (cnf,)
else:
options = self._options(cnf, kw)
if not options:
res = self.tk.call('grid',
command, self._w, index)
words = self.tk.splitlist(res)
dict = {}
for i in range(0, len(words), 2):
key = words[i][1:]
value = words[i+1]
if not value:
value = None
elif '.' in value:
value = getdouble(value)
else:
value = getint(value)
dict[key] = value
return dict
res = self.tk.call(
('grid', command, self._w, index)
+ options)
if len(options) == 1:
if not res: return None
# In Tk 7.5, -width can be a float
if '.' in res: return getdouble(res)
return getint(res)
def grid_columnconfigure(self, index, cnf={}, **kw):
"""Configure column INDEX of a grid.
Valid resources are minsize (minimum size of the column),
weight (how much additional space propagates to this column)
and pad (how much extra space to add)."""
return self._grid_configure('columnconfigure', index, cnf, kw)
columnconfigure = grid_columnconfigure
def grid_location(self, x, y):
"""Return a tuple of column and row which identify the cell
at which the pixel at position X and Y inside the master
widget is located."""
return self._getints(
self.tk.call(
'grid', 'location', self._w, x, y)) or None
def grid_propagate(self, flag=_noarg_):
"""Set or get the status for propagation of geometry information.
A boolean argument specifies whether the geometry information
of the slaves will determine the size of this widget. If no argument
is given, the current setting will be returned.
"""
if flag is Misc._noarg_:
return self._getboolean(self.tk.call(
'grid', 'propagate', self._w))
else:
self.tk.call('grid', 'propagate', self._w, flag)
def grid_rowconfigure(self, index, cnf={}, **kw):
"""Configure row INDEX of a grid.
Valid resources are minsize (minimum size of the row),
weight (how much additional space propagates to this row)
and pad (how much extra space to add)."""
return self._grid_configure('rowconfigure', index, cnf, kw)
rowconfigure = grid_rowconfigure
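# Illustrative sketch (not part of the library source): letting a row and a
# column absorb extra space when the master is resized. Indices and the
# minsize value are arbitrary.
#
#   root.grid_rowconfigure(0, weight=1)
#   root.grid_columnconfigure(1, weight=1, minsize=100)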
def grid_size(self):
"""Return a tuple of the number of column and rows in the grid."""
return self._getints(
self.tk.call('grid', 'size', self._w)) or None
size = grid_size
def grid_slaves(self, row=None, column=None):
"""Return a list of all slaves of this widget
in its packing order."""
args = ()
if row is not None:
args = args + ('-row', row)
if column is not None:
args = args + ('-column', column)
return map(self._nametowidget,
self.tk.splitlist(self.tk.call(
('grid', 'slaves', self._w) + args)))
# Support for the "event" command, new in Tk 4.2.
# By Case Roole.
def event_add(self, virtual, *sequences):
"""Bind a virtual event VIRTUAL (of the form <<Name>>)
to an event SEQUENCE such that the virtual event is triggered
whenever SEQUENCE occurs."""
args = ('event', 'add', virtual) + sequences
self.tk.call(args)
def event_delete(self, virtual, *sequences):
"""Unbind a virtual event VIRTUAL from SEQUENCE."""
args = ('event', 'delete', virtual) + sequences
self.tk.call(args)
def event_generate(self, sequence, **kw):
"""Generate an event SEQUENCE. Additional
keyword arguments specify parameter of the event
(e.g. x, y, rootx, rooty)."""
args = ('event', 'generate', self._w, sequence)
for k, v in kw.items():
args = args + ('-%s' % k, str(v))
self.tk.call(args)
def event_info(self, virtual=None):
"""Return a list of all virtual events or the information
about the SEQUENCE bound to the virtual event VIRTUAL."""
return self.tk.splitlist(
self.tk.call('event', 'info', virtual))
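# Illustrative sketch (not part of the library source): defining and firing a
# virtual event. The event name <<Refresh>> and the handler are hypothetical.
#
#   widget.event_add('<<Refresh>>', '<Control-r>')
#   widget.bind('<<Refresh>>', on_refresh)
#   widget.event_generate('<<Refresh>>')   # fire it programmatically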
# Image related commands
def image_names(self):
"""Return a list of all existing image names."""
return self.tk.call('image', 'names')
def image_types(self):
"""Return a list of all available image types (e.g. phote bitmap)."""
return self.tk.call('image', 'types')
class CallWrapper:
"""Internal class. Stores function to call when some user
defined Tcl function is called e.g. after an event occurred."""
def __init__(self, func, subst, widget):
"""Store FUNC, SUBST and WIDGET as members."""
self.func = func
self.subst = subst
self.widget = widget
def __call__(self, *args):
"""Apply first function SUBST to arguments, than FUNC."""
try:
if self.subst:
args = self.subst(*args)
return self.func(*args)
except SystemExit, msg:
raise SystemExit, msg
except:
self.widget._report_exception()
class XView:
"""Mix-in class for querying and changing the horizontal position
of a widget's window."""
def xview(self, *args):
"""Query and change the horizontal position of the view."""
res = self.tk.call(self._w, 'xview', *args)
if not args:
return self._getdoubles(res)
def xview_moveto(self, fraction):
"""Adjusts the view in the window so that FRACTION of the
total width of the canvas is off-screen to the left."""
self.tk.call(self._w, 'xview', 'moveto', fraction)
def xview_scroll(self, number, what):
"""Shift the x-view according to NUMBER which is measured in "units"
or "pages" (WHAT)."""
self.tk.call(self._w, 'xview', 'scroll', number, what)
class YView:
"""Mix-in class for querying and changing the vertical position
of a widget's window."""
def yview(self, *args):
"""Query and change the vertical position of the view."""
res = self.tk.call(self._w, 'yview', *args)
if not args:
return self._getdoubles(res)
def yview_moveto(self, fraction):
"""Adjusts the view in the window so that FRACTION of the
total height of the canvas is off-screen to the top."""
self.tk.call(self._w, 'yview', 'moveto', fraction)
def yview_scroll(self, number, what):
"""Shift the y-view according to NUMBER which is measured in
"units" or "pages" (WHAT)."""
self.tk.call(self._w, 'yview', 'scroll', number, what)
class Wm:
"""Provides functions for the communication with the window manager."""
def wm_aspect(self,
minNumer=None, minDenom=None,
maxNumer=None, maxDenom=None):
"""Instruct the window manager to set the aspect ratio (width/height)
of this widget to be between MINNUMER/MINDENOM and MAXNUMER/MAXDENOM. Return a tuple
of the actual values if no argument is given."""
return self._getints(
self.tk.call('wm', 'aspect', self._w,
minNumer, minDenom,
maxNumer, maxDenom))
aspect = wm_aspect
def wm_attributes(self, *args):
"""This subcommand returns or sets platform specific attributes
The first form returns a list of the platform specific flags and
their values. The second form returns the value for the specific
option. The third form sets one or more of the values. The values
are as follows:
On Windows, -disabled gets or sets whether the window is in a
disabled state. -toolwindow gets or sets the style of the window
to toolwindow (as defined in the MSDN). -topmost gets or sets
whether this is a topmost window (displays above all other
windows).
On Macintosh, XXXXX
On Unix, there are currently no special attribute values.
"""
args = ('wm', 'attributes', self._w) + args
return self.tk.call(args)
attributes=wm_attributes
def wm_client(self, name=None):
"""Store NAME in WM_CLIENT_MACHINE property of this widget. Return
current value."""
return self.tk.call('wm', 'client', self._w, name)
client = wm_client
def wm_colormapwindows(self, *wlist):
"""Store list of window names (WLIST) into WM_COLORMAPWINDOWS property
of this widget. This list contains windows whose colormaps differ from their
parents. Return current list of widgets if WLIST is empty."""
if len(wlist) > 1:
wlist = (wlist,) # Tk needs a list of windows here
args = ('wm', 'colormapwindows', self._w) + wlist
return map(self._nametowidget, self.tk.call(args))
colormapwindows = wm_colormapwindows
def wm_command(self, value=None):
"""Store VALUE in WM_COMMAND property. It is the command
which shall be used to invoke the application. Return current
command if VALUE is None."""
return self.tk.call('wm', 'command', self._w, value)
command = wm_command
def wm_deiconify(self):
"""Deiconify this widget. If it was never mapped it will not be mapped.
On Windows it will raise this widget and give it the focus."""
return self.tk.call('wm', 'deiconify', self._w)
deiconify = wm_deiconify
def wm_focusmodel(self, model=None):
"""Set focus model to MODEL. "active" means that this widget will claim
the focus itself, "passive" means that the window manager shall give
the focus. Return current focus model if MODEL is None."""
return self.tk.call('wm', 'focusmodel', self._w, model)
focusmodel = wm_focusmodel
def wm_frame(self):
"""Return identifier for decorative frame of this widget if present."""
return self.tk.call('wm', 'frame', self._w)
frame = wm_frame
def wm_geometry(self, newGeometry=None):
"""Set geometry to NEWGEOMETRY of the form =widthxheight+x+y. Return
current value if None is given."""
return self.tk.call('wm', 'geometry', self._w, newGeometry)
geometry = wm_geometry
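# Illustrative sketch (not part of the library source): geometry strings have
# the form widthxheight+x+y; the values below are arbitrary.
#
#   root.geometry('400x300+100+50')  # 400x300 pixels at screen position (100, 50)
#   print root.geometry()            # query the current geometry string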
def wm_grid(self,
baseWidth=None, baseHeight=None,
widthInc=None, heightInc=None):
"""Instruct the window manager that this widget shall only be
resized on grid boundaries. WIDTHINC and HEIGHTINC are the width and
height of a grid unit in pixels. BASEWIDTH and BASEHEIGHT are the
number of grid units requested in Tk_GeometryRequest."""
return self._getints(self.tk.call(
'wm', 'grid', self._w,
baseWidth, baseHeight, widthInc, heightInc))
grid = wm_grid
def wm_group(self, pathName=None):
"""Set the group leader widgets for related widgets to PATHNAME. Return
the group leader of this widget if None is given."""
return self.tk.call('wm', 'group', self._w, pathName)
group = wm_group
def wm_iconbitmap(self, bitmap=None, default=None):
"""Set bitmap for the iconified widget to BITMAP. Return
the bitmap if None is given.
Under Windows, the DEFAULT parameter can be used to set the icon
for the widget and any descendants that don't have an icon set
explicitly. DEFAULT can be the relative path to a .ico file
(example: root.iconbitmap(default='myicon.ico') ). See Tk
documentation for more information."""
if default:
return self.tk.call('wm', 'iconbitmap', self._w, '-default', default)
else:
return self.tk.call('wm', 'iconbitmap', self._w, bitmap)
iconbitmap = wm_iconbitmap
def wm_iconify(self):
"""Display widget as icon."""
return self.tk.call('wm', 'iconify', self._w)
iconify = wm_iconify
def wm_iconmask(self, bitmap=None):
"""Set mask for the icon bitmap of this widget. Return the
mask if None is given."""
return self.tk.call('wm', 'iconmask', self._w, bitmap)
iconmask = wm_iconmask
def wm_iconname(self, newName=None):
"""Set the name of the icon for this widget. Return the name if
None is given."""
return self.tk.call('wm', 'iconname', self._w, newName)
iconname = wm_iconname
def wm_iconposition(self, x=None, y=None):
"""Set the position of the icon of this widget to X and Y. Return
a tuple of the current values of X and Y if None is given."""
return self._getints(self.tk.call(
'wm', 'iconposition', self._w, x, y))
iconposition = wm_iconposition
def wm_iconwindow(self, pathName=None):
"""Set widget PATHNAME to be displayed instead of icon. Return the current
value if None is given."""
return self.tk.call('wm', 'iconwindow', self._w, pathName)
iconwindow = wm_iconwindow
def wm_maxsize(self, width=None, height=None):
"""Set max WIDTH and HEIGHT for this widget. If the window is gridded
the values are given in grid units. Return the current values if None
is given."""
return self._getints(self.tk.call(
'wm', 'maxsize', self._w, width, height))
maxsize = wm_maxsize
def wm_minsize(self, width=None, height=None):
"""Set min WIDTH and HEIGHT for this widget. If the window is gridded
the values are given in grid units. Return the current values if None
is given."""
return self._getints(self.tk.call(
'wm', 'minsize', self._w, width, height))
minsize = wm_minsize
def wm_overrideredirect(self, boolean=None):
"""Instruct the window manager to ignore this widget
if BOOLEAN is given with 1. Return the current value if None
is given."""
return self._getboolean(self.tk.call(
'wm', 'overrideredirect', self._w, boolean))
overrideredirect = wm_overrideredirect
def wm_positionfrom(self, who=None):
"""Instruct the window manager that the position of this widget shall
be defined by the user if WHO is "user", and by its own policy if WHO is
"program"."""
return self.tk.call('wm', 'positionfrom', self._w, who)
positionfrom = wm_positionfrom
def wm_protocol(self, name=None, func=None):
"""Bind function FUNC to command NAME for this widget.
Return the function bound to NAME if None is given. NAME could be
e.g. "WM_SAVE_YOURSELF" or "WM_DELETE_WINDOW"."""
if hasattr(func, '__call__'):
command = self._register(func)
else:
command = func
return self.tk.call(
'wm', 'protocol', self._w, name, command)
protocol = wm_protocol
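# Illustrative sketch (not part of the library source): intercepting the
# window manager's close button. The handler and ok_to_quit are hypothetical.
#
#   def on_close():
#       if ok_to_quit():
#           root.destroy()
#   root.protocol('WM_DELETE_WINDOW', on_close)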
def wm_resizable(self, width=None, height=None):
"""Instruct the window manager whether this width can be resized
in WIDTH or HEIGHT. Both values are boolean values."""
return self.tk.call('wm', 'resizable', self._w, width, height)
resizable = wm_resizable
def wm_sizefrom(self, who=None):
"""Instruct the window manager that the size of this widget shall
be defined by the user if WHO is "user", and by its own policy if WHO is
"program"."""
return self.tk.call('wm', 'sizefrom', self._w, who)
sizefrom = wm_sizefrom
def wm_state(self, newstate=None):
"""Query or set the state of this widget as one of normal, icon,
iconic (see wm_iconwindow), withdrawn, or zoomed (Windows only)."""
return self.tk.call('wm', 'state', self._w, newstate)
state = wm_state
def wm_title(self, string=None):
"""Set the title of this widget."""
return self.tk.call('wm', 'title', self._w, string)
title = wm_title
def wm_transient(self, master=None):
"""Instruct the window manager that this widget is transient
with regard to widget MASTER."""
return self.tk.call('wm', 'transient', self._w, master)
transient = wm_transient
def wm_withdraw(self):
"""Withdraw this widget from the screen such that it is unmapped
and forgotten by the window manager. Re-draw it with wm_deiconify."""
return self.tk.call('wm', 'withdraw', self._w)
withdraw = wm_withdraw
class Tk(Misc, Wm):
"""Toplevel widget of Tk which represents mostly the main window
of an application. It has an associated Tcl interpreter."""
_w = '.'
def __init__(self, screenName=None, baseName=None, className='Tk',
useTk=1, sync=0, use=None):
"""Return a new Toplevel widget on screen SCREENNAME. A new Tcl interpreter will
be created. BASENAME will be used for the identification of the profile file (see
readprofile).
It is constructed from sys.argv[0] without extensions if None is given. CLASSNAME
is the name of the widget class."""
self.master = None
self.children = {}
self._tkloaded = 0
# to avoid recursions in the getattr code in case of failure, we
# ensure that self.tk is always _something_.
self.tk = None
if baseName is None:
import sys, os
baseName = os.path.basename(sys.argv[0])
baseName, ext = os.path.splitext(baseName)
if ext not in ('.py', '.pyc', '.pyo'):
baseName = baseName + ext
interactive = 0
self.tk = _tkinter.create(screenName, baseName, className, interactive, wantobjects, useTk, sync, use)
if useTk:
self._loadtk()
if not sys.flags.ignore_environment:
# Issue #16248: Honor the -E flag to avoid code injection.
self.readprofile(baseName, className)
def loadtk(self):
if not self._tkloaded:
self.tk.loadtk()
self._loadtk()
def _loadtk(self):
self._tkloaded = 1
global _default_root
# Version sanity checks
tk_version = self.tk.getvar('tk_version')
if tk_version != _tkinter.TK_VERSION:
raise RuntimeError, \
"tk.h version (%s) doesn't match libtk.a version (%s)" \
% (_tkinter.TK_VERSION, tk_version)
# Under unknown circumstances, tcl_version gets coerced to float
tcl_version = str(self.tk.getvar('tcl_version'))
if tcl_version != _tkinter.TCL_VERSION:
raise RuntimeError, \
"tcl.h version (%s) doesn't match libtcl.a version (%s)" \
% (_tkinter.TCL_VERSION, tcl_version)
if TkVersion < 4.0:
raise RuntimeError, \
"Tk 4.0 or higher is required; found Tk %s" \
% str(TkVersion)
# Create and register the tkerror and exit commands
# We need to inline parts of _register here; _register
# would register differently-named commands.
if self._tclCommands is None:
self._tclCommands = []
self.tk.createcommand('tkerror', _tkerror)
self.tk.createcommand('exit', _exit)
self._tclCommands.append('tkerror')
self._tclCommands.append('exit')
if _support_default_root and not _default_root:
_default_root = self
self.protocol("WM_DELETE_WINDOW", self.destroy)
def destroy(self):
"""Destroy this and all descendants widgets. This will
end the application of this Tcl interpreter."""
for c in self.children.values(): c.destroy()
self.tk.call('destroy', self._w)
Misc.destroy(self)
global _default_root
if _support_default_root and _default_root is self:
_default_root = None
def readprofile(self, baseName, className):
"""Internal function. It reads BASENAME.tcl and CLASSNAME.tcl into
the Tcl Interpreter and calls execfile on BASENAME.py and CLASSNAME.py if
such a file exists in the home directory."""
import os
if 'HOME' in os.environ: home = os.environ['HOME']
else: home = os.curdir
class_tcl = os.path.join(home, '.%s.tcl' % className)
class_py = os.path.join(home, '.%s.py' % className)
base_tcl = os.path.join(home, '.%s.tcl' % baseName)
base_py = os.path.join(home, '.%s.py' % baseName)
dir = {'self': self}
exec 'from Tkinter import *' in dir
if os.path.isfile(class_tcl):
self.tk.call('source', class_tcl)
if os.path.isfile(class_py):
execfile(class_py, dir)
if os.path.isfile(base_tcl):
self.tk.call('source', base_tcl)
if os.path.isfile(base_py):
execfile(base_py, dir)
def report_callback_exception(self, exc, val, tb):
"""Internal function. It reports exception on sys.stderr."""
import traceback, sys
sys.stderr.write("Exception in Tkinter callback\n")
sys.last_type = exc
sys.last_value = val
sys.last_traceback = tb
traceback.print_exception(exc, val, tb)
def __getattr__(self, attr):
"Delegate attribute access to the interpreter object"
return getattr(self.tk, attr)
# Ideally, the classes Pack, Place and Grid disappear, the
# pack/place/grid methods are defined on the Widget class, and
# everybody uses w.pack_whatever(...) instead of Pack.whatever(w,
# ...), with pack(), place() and grid() being short for
# pack_configure(), place_configure() and grid_configure(), and
# forget() being short for pack_forget(). As a practical matter, I'm
# afraid that there is too much code out there that may be using the
# Pack, Place or Grid class, so I leave them intact -- but only as
# backwards compatibility features. Also note that those methods that
# take a master as argument (e.g. pack_propagate) have been moved to
# the Misc class (which now incorporates all methods common between
# toplevel and interior widgets). Again, for compatibility, these are
# copied into the Pack, Place or Grid class.
def Tcl(screenName=None, baseName=None, className='Tk', useTk=0):
return Tk(screenName, baseName, className, useTk)
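# Illustrative sketch (not part of the library source): Tcl() creates a Tcl
# interpreter without loading Tk, which is handy for headless scripting.
#
#   interp = Tcl()
#   print interp.tk.eval('expr {6 * 7}')   # '42'; eval is delegated via __getattr__
#   interp.loadtk()                        # only now is a Tk window created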
class Pack:
"""Geometry manager Pack.
Base class to use the methods pack_* in every widget."""
def pack_configure(self, cnf={}, **kw):
"""Pack a widget in the parent widget. Use as options:
after=widget - pack it after you have packed widget
anchor=NSEW (or subset) - position widget according to
given direction
before=widget - pack it before you will pack widget
expand=bool - expand widget if parent size grows
fill=NONE or X or Y or BOTH - fill widget if widget grows
in=master - use master to contain this widget
in_=master - see 'in' option description
ipadx=amount - add internal padding in x direction
ipady=amount - add internal padding in y direction
padx=amount - add padding in x direction
pady=amount - add padding in y direction
side=TOP or BOTTOM or LEFT or RIGHT - where to add this widget.
"""
self.tk.call(
('pack', 'configure', self._w)
+ self._options(cnf, kw))
pack = configure = config = pack_configure
def pack_forget(self):
"""Unmap this widget and do not use it for the packing order."""
self.tk.call('pack', 'forget', self._w)
forget = pack_forget
def pack_info(self):
"""Return information about the packing options
for this widget."""
words = self.tk.splitlist(
self.tk.call('pack', 'info', self._w))
dict = {}
for i in range(0, len(words), 2):
key = words[i][1:]
value = words[i+1]
if value[:1] == '.':
value = self._nametowidget(value)
dict[key] = value
return dict
info = pack_info
propagate = pack_propagate = Misc.pack_propagate
slaves = pack_slaves = Misc.pack_slaves
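# Illustrative sketch (not part of the library source): typical pack usage.
# Widget names are hypothetical.
#
#   status = Label(root, text='ready')
#   status.pack(side=BOTTOM, fill=X)           # strip along the bottom edge
#   body = Frame(root)
#   body.pack(side=TOP, fill=BOTH, expand=1)   # body takes the remaining space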
class Place:
"""Geometry manager Place.
Base class to use the methods place_* in every widget."""
def place_configure(self, cnf={}, **kw):
"""Place a widget in the parent widget. Use as options:
in=master - master relative to which the widget is placed
in_=master - see 'in' option description
x=amount - locate anchor of this widget at position x of master
y=amount - locate anchor of this widget at position y of master
relx=amount - locate anchor of this widget between 0.0 and 1.0
relative to width of master (1.0 is right edge)
rely=amount - locate anchor of this widget between 0.0 and 1.0
relative to height of master (1.0 is bottom edge)
anchor=NSEW (or subset) - position anchor according to given direction
width=amount - width of this widget in pixel
height=amount - height of this widget in pixel
relwidth=amount - width of this widget between 0.0 and 1.0
relative to width of master (1.0 is the same width
as the master)
relheight=amount - height of this widget between 0.0 and 1.0
relative to height of master (1.0 is the same
height as the master)
bordermode="inside" or "outside" - whether to take border width of
master widget into account
"""
self.tk.call(
('place', 'configure', self._w)
+ self._options(cnf, kw))
place = configure = config = place_configure
def place_forget(self):
"""Unmap this widget."""
self.tk.call('place', 'forget', self._w)
forget = place_forget
def place_info(self):
"""Return information about the placing options
for this widget."""
words = self.tk.splitlist(
self.tk.call('place', 'info', self._w))
dict = {}
for i in range(0, len(words), 2):
key = words[i][1:]
value = words[i+1]
if value[:1] == '.':
value = self._nametowidget(value)
dict[key] = value
return dict
info = place_info
slaves = place_slaves = Misc.place_slaves
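# Illustrative sketch (not part of the library source): place positions a
# widget by absolute coordinates or fractions of its master.
#
#   badge = Label(root, text='!')
#   badge.place(relx=1.0, rely=0.0, anchor=NE)  # pinned to the top-right corner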
class Grid:
"""Geometry manager Grid.
Base class to use the methods grid_* in every widget."""
# Thanks to Masazumi Yoshikawa ([email protected])
def grid_configure(self, cnf={}, **kw):
"""Position a widget in the parent widget in a grid. Use as options:
column=number - use cell identified with given column (starting with 0)
columnspan=number - this widget will span several columns
in=master - use master to contain this widget
in_=master - see 'in' option description
ipadx=amount - add internal padding in x direction
ipady=amount - add internal padding in y direction
padx=amount - add padding in x direction
pady=amount - add padding in y direction
row=number - use cell identified with given row (starting with 0)
rowspan=number - this widget will span several rows
sticky=NSEW - if the cell is larger, on which sides this
widget should stick to the cell boundary
"""
self.tk.call(
('grid', 'configure', self._w)
+ self._options(cnf, kw))
grid = configure = config = grid_configure
bbox = grid_bbox = Misc.grid_bbox
columnconfigure = grid_columnconfigure = Misc.grid_columnconfigure
def grid_forget(self):
"""Unmap this widget."""
self.tk.call('grid', 'forget', self._w)
forget = grid_forget
def grid_remove(self):
"""Unmap this widget but remember the grid options."""
self.tk.call('grid', 'remove', self._w)
def grid_info(self):
"""Return information about the options
for positioning this widget in a grid."""
words = self.tk.splitlist(
self.tk.call('grid', 'info', self._w))
dict = {}
for i in range(0, len(words), 2):
key = words[i][1:]
value = words[i+1]
if value[:1] == '.':
value = self._nametowidget(value)
dict[key] = value
return dict
info = grid_info
location = grid_location = Misc.grid_location
propagate = grid_propagate = Misc.grid_propagate
rowconfigure = grid_rowconfigure = Misc.grid_rowconfigure
size = grid_size = Misc.grid_size
slaves = grid_slaves = Misc.grid_slaves
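# A minimal, commented sketch of the grid geometry manager (illustrative
# comments only; sticky and columnconfigure control stretching):
#
#     root = Tk()
#     Label(root, text='Name:').grid(row=0, column=0, sticky=E)
#     Entry(root).grid(row=0, column=1, sticky=EW)
#     root.grid_columnconfigure(1, weight=1)   # column 1 absorbs extra width
#     root.mainloop()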
class BaseWidget(Misc):
"""Internal class."""
def _setup(self, master, cnf):
"""Internal function. Sets up information about children."""
if _support_default_root:
global _default_root
if not master:
if not _default_root:
_default_root = Tk()
master = _default_root
self.master = master
self.tk = master.tk
name = None
if 'name' in cnf:
name = cnf['name']
del cnf['name']
if not name:
name = repr(id(self))
self._name = name
if master._w=='.':
self._w = '.' + name
else:
self._w = master._w + '.' + name
self.children = {}
if self._name in self.master.children:
self.master.children[self._name].destroy()
self.master.children[self._name] = self
def __init__(self, master, widgetName, cnf={}, kw={}, extra=()):
"""Construct a widget with the parent widget MASTER, a name WIDGETNAME
and appropriate options."""
if kw:
cnf = _cnfmerge((cnf, kw))
self.widgetName = widgetName
BaseWidget._setup(self, master, cnf)
if self._tclCommands is None:
self._tclCommands = []
classes = []
for k in cnf.keys():
if type(k) is ClassType:
classes.append((k, cnf[k]))
del cnf[k]
self.tk.call(
(widgetName, self._w) + extra + self._options(cnf))
for k, v in classes:
k.configure(self, v)
def destroy(self):
"""Destroy this and all descendants widgets."""
for c in self.children.values(): c.destroy()
self.tk.call('destroy', self._w)
if self._name in self.master.children:
del self.master.children[self._name]
Misc.destroy(self)
def _do(self, name, args=()):
# XXX Obsolete -- better use self.tk.call directly!
return self.tk.call((self._w, name) + args)
class Widget(BaseWidget, Pack, Place, Grid):
"""Internal class.
Base class for a widget which can be positioned with the geometry managers
Pack, Place or Grid."""
pass
class Toplevel(BaseWidget, Wm):
"""Toplevel widget, e.g. for dialogs."""
def __init__(self, master=None, cnf={}, **kw):
"""Construct a toplevel widget with the parent MASTER.
Valid resource names: background, bd, bg, borderwidth, class,
colormap, container, cursor, height, highlightbackground,
highlightcolor, highlightthickness, menu, relief, screen, takefocus,
use, visual, width."""
if kw:
cnf = _cnfmerge((cnf, kw))
extra = ()
for wmkey in ['screen', 'class_', 'class', 'visual',
'colormap']:
if wmkey in cnf:
val = cnf[wmkey]
# TBD: a hack needed because some keys
# are not valid as keyword arguments
if wmkey[-1] == '_': opt = '-'+wmkey[:-1]
else: opt = '-'+wmkey
extra = extra + (opt, val)
del cnf[wmkey]
BaseWidget.__init__(self, master, 'toplevel', cnf, {}, extra)
root = self._root()
self.iconname(root.iconname())
self.title(root.title())
self.protocol("WM_DELETE_WINDOW", self.destroy)
class Button(Widget):
"""Button widget."""
def __init__(self, master=None, cnf={}, **kw):
"""Construct a button widget with the parent MASTER.
STANDARD OPTIONS
activebackground, activeforeground, anchor,
background, bitmap, borderwidth, cursor,
disabledforeground, font, foreground
highlightbackground, highlightcolor,
highlightthickness, image, justify,
padx, pady, relief, repeatdelay,
repeatinterval, takefocus, text,
textvariable, underline, wraplength
WIDGET-SPECIFIC OPTIONS
command, compound, default, height,
overrelief, state, width
"""
Widget.__init__(self, master, 'button', cnf, kw)
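    # The tkButton* methods below invoke the corresponding internal Tk
    # binding procedures (enter/leave/press/release handling); they are
    # rarely useful in application code.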
def tkButtonEnter(self, *dummy):
self.tk.call('tkButtonEnter', self._w)
def tkButtonLeave(self, *dummy):
self.tk.call('tkButtonLeave', self._w)
def tkButtonDown(self, *dummy):
self.tk.call('tkButtonDown', self._w)
def tkButtonUp(self, *dummy):
self.tk.call('tkButtonUp', self._w)
def tkButtonInvoke(self, *dummy):
self.tk.call('tkButtonInvoke', self._w)
def flash(self):
"""Flash the button.
This is accomplished by redisplaying
the button several times, alternating between active and
normal colors. At the end of the flash the button is left
in the same normal/active state as when the command was
invoked. This command is ignored if the button's state is
disabled.
"""
self.tk.call(self._w, 'flash')
def invoke(self):
"""Invoke the command associated with the button.
The return value is the return value from the command,
or an empty string if there is no command associated with
the button. This command is ignored if the button's state
is disabled.
"""
return self.tk.call(self._w, 'invoke')
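# A minimal, commented Button sketch (illustrative comments only; invoke()
# runs the command callback programmatically):
#
#     root = Tk()
#     def say_hi():
#         print 'hi'
#     b = Button(root, text='Hello', command=say_hi)
#     b.pack()
#     b.invoke()          # calls say_hi and returns its result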
# Indices:
# XXX I don't like these -- take them away
def AtEnd():
return 'end'
def AtInsert(*args):
s = 'insert'
for a in args:
if a: s = s + (' ' + a)
return s
def AtSelFirst():
return 'sel.first'
def AtSelLast():
return 'sel.last'
def At(x, y=None):
if y is None:
return '@%r' % (x,)
else:
return '@%r,%r' % (x, y)
class Canvas(Widget, XView, YView):
"""Canvas widget to display graphical elements like lines or text."""
def __init__(self, master=None, cnf={}, **kw):
"""Construct a canvas widget with the parent MASTER.
Valid resource names: background, bd, bg, borderwidth, closeenough,
confine, cursor, height, highlightbackground, highlightcolor,
highlightthickness, insertbackground, insertborderwidth,
insertofftime, insertontime, insertwidth, offset, relief,
scrollregion, selectbackground, selectborderwidth, selectforeground,
state, takefocus, width, xscrollcommand, xscrollincrement,
yscrollcommand, yscrollincrement."""
Widget.__init__(self, master, 'canvas', cnf, kw)
def addtag(self, *args):
"""Internal function."""
self.tk.call((self._w, 'addtag') + args)
def addtag_above(self, newtag, tagOrId):
"""Add tag NEWTAG to all items above TAGORID."""
self.addtag(newtag, 'above', tagOrId)
def addtag_all(self, newtag):
"""Add tag NEWTAG to all items."""
self.addtag(newtag, 'all')
def addtag_below(self, newtag, tagOrId):
"""Add tag NEWTAG to all items below TAGORID."""
self.addtag(newtag, 'below', tagOrId)
def addtag_closest(self, newtag, x, y, halo=None, start=None):
"""Add tag NEWTAG to item which is closest to pixel at X, Y.
If several match take the top-most.
All items closer than HALO are considered overlapping (all are
        closest). If START is specified the next below this tag is taken."""
self.addtag(newtag, 'closest', x, y, halo, start)
def addtag_enclosed(self, newtag, x1, y1, x2, y2):
"""Add tag NEWTAG to all items in the rectangle defined
by X1,Y1,X2,Y2."""
self.addtag(newtag, 'enclosed', x1, y1, x2, y2)
def addtag_overlapping(self, newtag, x1, y1, x2, y2):
"""Add tag NEWTAG to all items which overlap the rectangle
defined by X1,Y1,X2,Y2."""
self.addtag(newtag, 'overlapping', x1, y1, x2, y2)
def addtag_withtag(self, newtag, tagOrId):
"""Add tag NEWTAG to all items with TAGORID."""
self.addtag(newtag, 'withtag', tagOrId)
def bbox(self, *args):
"""Return a tuple of X1,Y1,X2,Y2 coordinates for a rectangle
which encloses all items with tags specified as arguments."""
return self._getints(
self.tk.call((self._w, 'bbox') + args)) or None
def tag_unbind(self, tagOrId, sequence, funcid=None):
"""Unbind for all items with TAGORID for event SEQUENCE the
function identified with FUNCID."""
self.tk.call(self._w, 'bind', tagOrId, sequence, '')
if funcid:
self.deletecommand(funcid)
def tag_bind(self, tagOrId, sequence=None, func=None, add=None):
"""Bind to all items with TAGORID at event SEQUENCE a call to function FUNC.
An additional boolean parameter ADD specifies whether FUNC will be
called additionally to the other bound function or whether it will
replace the previous function. See bind for the return value."""
return self._bind((self._w, 'bind', tagOrId),
sequence, func, add)
def canvasx(self, screenx, gridspacing=None):
"""Return the canvas x coordinate of pixel position SCREENX rounded
to nearest multiple of GRIDSPACING units."""
return getdouble(self.tk.call(
self._w, 'canvasx', screenx, gridspacing))
def canvasy(self, screeny, gridspacing=None):
"""Return the canvas y coordinate of pixel position SCREENY rounded
to nearest multiple of GRIDSPACING units."""
return getdouble(self.tk.call(
self._w, 'canvasy', screeny, gridspacing))
def coords(self, *args):
"""Return a list of coordinates for the item given in ARGS."""
# XXX Should use _flatten on args
return map(getdouble,
self.tk.splitlist(
self.tk.call((self._w, 'coords') + args)))
def _create(self, itemType, args, kw): # Args: (val, val, ..., cnf={})
"""Internal function."""
args = _flatten(args)
cnf = args[-1]
if type(cnf) in (DictionaryType, TupleType):
args = args[:-1]
else:
cnf = {}
return getint(self.tk.call(
self._w, 'create', itemType,
*(args + self._options(cnf, kw))))
def create_arc(self, *args, **kw):
"""Create arc shaped region with coordinates x1,y1,x2,y2."""
return self._create('arc', args, kw)
def create_bitmap(self, *args, **kw):
"""Create bitmap with coordinates x1,y1."""
return self._create('bitmap', args, kw)
def create_image(self, *args, **kw):
"""Create image item with coordinates x1,y1."""
return self._create('image', args, kw)
def create_line(self, *args, **kw):
"""Create line with coordinates x1,y1,...,xn,yn."""
return self._create('line', args, kw)
def create_oval(self, *args, **kw):
"""Create oval with coordinates x1,y1,x2,y2."""
return self._create('oval', args, kw)
def create_polygon(self, *args, **kw):
"""Create polygon with coordinates x1,y1,...,xn,yn."""
return self._create('polygon', args, kw)
def create_rectangle(self, *args, **kw):
"""Create rectangle with coordinates x1,y1,x2,y2."""
return self._create('rectangle', args, kw)
def create_text(self, *args, **kw):
"""Create text with coordinates x1,y1."""
return self._create('text', args, kw)
def create_window(self, *args, **kw):
"""Create window with coordinates x1,y1,x2,y2."""
return self._create('window', args, kw)
def dchars(self, *args):
"""Delete characters of text items identified by tag or id in ARGS (possibly
several times) from FIRST to LAST character (including)."""
self.tk.call((self._w, 'dchars') + args)
def delete(self, *args):
"""Delete items identified by all tag or ids contained in ARGS."""
self.tk.call((self._w, 'delete') + args)
def dtag(self, *args):
"""Delete tag or id given as last arguments in ARGS from items
identified by first argument in ARGS."""
self.tk.call((self._w, 'dtag') + args)
def find(self, *args):
"""Internal function."""
return self._getints(
self.tk.call((self._w, 'find') + args)) or ()
def find_above(self, tagOrId):
"""Return items above TAGORID."""
return self.find('above', tagOrId)
def find_all(self):
"""Return all items."""
return self.find('all')
def find_below(self, tagOrId):
"""Return all items below TAGORID."""
return self.find('below', tagOrId)
def find_closest(self, x, y, halo=None, start=None):
"""Return item which is closest to pixel at X, Y.
If several match take the top-most.
All items closer than HALO are considered overlapping (all are
        closest). If START is specified the next below this tag is taken."""
return self.find('closest', x, y, halo, start)
def find_enclosed(self, x1, y1, x2, y2):
"""Return all items in rectangle defined
by X1,Y1,X2,Y2."""
return self.find('enclosed', x1, y1, x2, y2)
def find_overlapping(self, x1, y1, x2, y2):
"""Return all items which overlap the rectangle
defined by X1,Y1,X2,Y2."""
return self.find('overlapping', x1, y1, x2, y2)
def find_withtag(self, tagOrId):
"""Return all items with TAGORID."""
return self.find('withtag', tagOrId)
def focus(self, *args):
"""Set focus to the first item specified in ARGS."""
return self.tk.call((self._w, 'focus') + args)
def gettags(self, *args):
"""Return tags associated with the first item specified in ARGS."""
return self.tk.splitlist(
self.tk.call((self._w, 'gettags') + args))
def icursor(self, *args):
"""Set cursor at position POS in the item identified by TAGORID.
In ARGS TAGORID must be first."""
self.tk.call((self._w, 'icursor') + args)
def index(self, *args):
"""Return position of cursor as integer in item specified in ARGS."""
return getint(self.tk.call((self._w, 'index') + args))
def insert(self, *args):
"""Insert TEXT in item TAGORID at position POS. ARGS must
be TAGORID POS TEXT."""
self.tk.call((self._w, 'insert') + args)
def itemcget(self, tagOrId, option):
"""Return the resource value for an OPTION for item TAGORID."""
return self.tk.call(
(self._w, 'itemcget') + (tagOrId, '-'+option))
def itemconfigure(self, tagOrId, cnf=None, **kw):
"""Configure resources of an item TAGORID.
The values for resources are specified as keyword
arguments. To get an overview about
the allowed keyword arguments call the method without arguments.
"""
return self._configure(('itemconfigure', tagOrId), cnf, kw)
itemconfig = itemconfigure
# lower, tkraise/lift hide Misc.lower, Misc.tkraise/lift,
# so the preferred name for them is tag_lower, tag_raise
# (similar to tag_bind, and similar to the Text widget);
# unfortunately can't delete the old ones yet (maybe in 1.6)
def tag_lower(self, *args):
"""Lower an item TAGORID given in ARGS
(optional below another item)."""
self.tk.call((self._w, 'lower') + args)
lower = tag_lower
def move(self, *args):
"""Move an item TAGORID given in ARGS."""
self.tk.call((self._w, 'move') + args)
def postscript(self, cnf={}, **kw):
"""Print the contents of the canvas to a postscript
file. Valid options: colormap, colormode, file, fontmap,
height, pageanchor, pageheight, pagewidth, pagex, pagey,
        rotate, width, x, y."""
return self.tk.call((self._w, 'postscript') +
self._options(cnf, kw))
def tag_raise(self, *args):
"""Raise an item TAGORID given in ARGS
(optional above another item)."""
self.tk.call((self._w, 'raise') + args)
lift = tkraise = tag_raise
def scale(self, *args):
"""Scale item TAGORID with XORIGIN, YORIGIN, XSCALE, YSCALE."""
self.tk.call((self._w, 'scale') + args)
def scan_mark(self, x, y):
"""Remember the current X, Y coordinates."""
self.tk.call(self._w, 'scan', 'mark', x, y)
def scan_dragto(self, x, y, gain=10):
"""Adjust the view of the canvas to GAIN times the
difference between X and Y and the coordinates given in
scan_mark."""
self.tk.call(self._w, 'scan', 'dragto', x, y, gain)
def select_adjust(self, tagOrId, index):
"""Adjust the end of the selection near the cursor of an item TAGORID to index."""
self.tk.call(self._w, 'select', 'adjust', tagOrId, index)
def select_clear(self):
"""Clear the selection if it is in this widget."""
self.tk.call(self._w, 'select', 'clear')
def select_from(self, tagOrId, index):
"""Set the fixed end of a selection in item TAGORID to INDEX."""
self.tk.call(self._w, 'select', 'from', tagOrId, index)
def select_item(self):
"""Return the item which has the selection."""
return self.tk.call(self._w, 'select', 'item') or None
def select_to(self, tagOrId, index):
"""Set the variable end of a selection in item TAGORID to INDEX."""
self.tk.call(self._w, 'select', 'to', tagOrId, index)
def type(self, tagOrId):
"""Return the type of the item TAGORID."""
return self.tk.call(self._w, 'type', tagOrId) or None
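# A minimal, commented Canvas sketch (illustrative comments only; items are
# addressed by the returned ids or by tags):
#
#     root = Tk()
#     c = Canvas(root, width=200, height=100, bg='white')
#     c.pack()
#     line = c.create_line(0, 0, 200, 100, fill='red', tags='shape')
#     c.create_rectangle(20, 20, 80, 60, tags='shape')
#     c.move('shape', 10, 0)        # shift every item tagged 'shape'
#     print c.coords(line)          # current endpoints of the line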
class Checkbutton(Widget):
"""Checkbutton widget which is either in on- or off-state."""
def __init__(self, master=None, cnf={}, **kw):
"""Construct a checkbutton widget with the parent MASTER.
Valid resource names: activebackground, activeforeground, anchor,
background, bd, bg, bitmap, borderwidth, command, cursor,
disabledforeground, fg, font, foreground, height,
highlightbackground, highlightcolor, highlightthickness, image,
indicatoron, justify, offvalue, onvalue, padx, pady, relief,
selectcolor, selectimage, state, takefocus, text, textvariable,
underline, variable, width, wraplength."""
Widget.__init__(self, master, 'checkbutton', cnf, kw)
def deselect(self):
"""Put the button in off-state."""
self.tk.call(self._w, 'deselect')
def flash(self):
"""Flash the button."""
self.tk.call(self._w, 'flash')
def invoke(self):
"""Toggle the button and invoke a command if given as resource."""
return self.tk.call(self._w, 'invoke')
def select(self):
"""Put the button in on-state."""
self.tk.call(self._w, 'select')
def toggle(self):
"""Toggle the button."""
self.tk.call(self._w, 'toggle')
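# A minimal, commented Checkbutton sketch (illustrative comments only; the
# associated variable tracks the on/off state):
#
#     root = Tk()
#     var = IntVar()
#     cb = Checkbutton(root, text='verbose', variable=var,
#                      onvalue=1, offvalue=0)
#     cb.pack()
#     cb.select()          # switch on programmatically
#     print var.get()      # prints 1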
class Entry(Widget, XView):
"""Entry widget which allows to display simple text."""
def __init__(self, master=None, cnf={}, **kw):
"""Construct an entry widget with the parent MASTER.
Valid resource names: background, bd, bg, borderwidth, cursor,
exportselection, fg, font, foreground, highlightbackground,
highlightcolor, highlightthickness, insertbackground,
insertborderwidth, insertofftime, insertontime, insertwidth,
invalidcommand, invcmd, justify, relief, selectbackground,
selectborderwidth, selectforeground, show, state, takefocus,
textvariable, validate, validatecommand, vcmd, width,
xscrollcommand."""
Widget.__init__(self, master, 'entry', cnf, kw)
def delete(self, first, last=None):
"""Delete text from FIRST to LAST (not included)."""
self.tk.call(self._w, 'delete', first, last)
def get(self):
"""Return the text."""
return self.tk.call(self._w, 'get')
def icursor(self, index):
"""Insert cursor at INDEX."""
self.tk.call(self._w, 'icursor', index)
def index(self, index):
"""Return position of cursor."""
return getint(self.tk.call(
self._w, 'index', index))
def insert(self, index, string):
"""Insert STRING at INDEX."""
self.tk.call(self._w, 'insert', index, string)
def scan_mark(self, x):
"""Remember the current X, Y coordinates."""
self.tk.call(self._w, 'scan', 'mark', x)
def scan_dragto(self, x):
"""Adjust the view of the canvas to 10 times the
difference between X and Y and the coordinates given in
scan_mark."""
self.tk.call(self._w, 'scan', 'dragto', x)
def selection_adjust(self, index):
"""Adjust the end of the selection near the cursor to INDEX."""
self.tk.call(self._w, 'selection', 'adjust', index)
select_adjust = selection_adjust
def selection_clear(self):
"""Clear the selection if it is in this widget."""
self.tk.call(self._w, 'selection', 'clear')
select_clear = selection_clear
def selection_from(self, index):
"""Set the fixed end of a selection to INDEX."""
self.tk.call(self._w, 'selection', 'from', index)
select_from = selection_from
def selection_present(self):
"""Return True if there are characters selected in the entry, False
otherwise."""
return self.tk.getboolean(
self.tk.call(self._w, 'selection', 'present'))
select_present = selection_present
def selection_range(self, start, end):
"""Set the selection from START to END (not included)."""
self.tk.call(self._w, 'selection', 'range', start, end)
select_range = selection_range
def selection_to(self, index):
"""Set the variable end of a selection to INDEX."""
self.tk.call(self._w, 'selection', 'to', index)
select_to = selection_to
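# A minimal, commented Entry sketch (illustrative comments only):
#
#     root = Tk()
#     e = Entry(root, width=30)
#     e.pack()
#     e.insert(0, 'initial text')
#     e.selection_range(0, END)   # select the whole contents
#     print e.get()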
class Frame(Widget):
"""Frame widget which may contain other widgets and can have a 3D border."""
def __init__(self, master=None, cnf={}, **kw):
"""Construct a frame widget with the parent MASTER.
Valid resource names: background, bd, bg, borderwidth, class,
colormap, container, cursor, height, highlightbackground,
highlightcolor, highlightthickness, relief, takefocus, visual, width."""
cnf = _cnfmerge((cnf, kw))
extra = ()
if 'class_' in cnf:
extra = ('-class', cnf['class_'])
del cnf['class_']
elif 'class' in cnf:
extra = ('-class', cnf['class'])
del cnf['class']
Widget.__init__(self, master, 'frame', cnf, {}, extra)
class Label(Widget):
"""Label widget which can display text and bitmaps."""
def __init__(self, master=None, cnf={}, **kw):
"""Construct a label widget with the parent MASTER.
STANDARD OPTIONS
activebackground, activeforeground, anchor,
background, bitmap, borderwidth, cursor,
disabledforeground, font, foreground,
highlightbackground, highlightcolor,
highlightthickness, image, justify,
padx, pady, relief, takefocus, text,
textvariable, underline, wraplength
WIDGET-SPECIFIC OPTIONS
height, state, width
"""
Widget.__init__(self, master, 'label', cnf, kw)
class Listbox(Widget, XView, YView):
"""Listbox widget which can display a list of strings."""
def __init__(self, master=None, cnf={}, **kw):
"""Construct a listbox widget with the parent MASTER.
Valid resource names: background, bd, bg, borderwidth, cursor,
exportselection, fg, font, foreground, height, highlightbackground,
highlightcolor, highlightthickness, relief, selectbackground,
selectborderwidth, selectforeground, selectmode, setgrid, takefocus,
width, xscrollcommand, yscrollcommand, listvariable."""
Widget.__init__(self, master, 'listbox', cnf, kw)
def activate(self, index):
"""Activate item identified by INDEX."""
self.tk.call(self._w, 'activate', index)
def bbox(self, *args):
"""Return a tuple of X1,Y1,X2,Y2 coordinates for a rectangle
which encloses the item identified by index in ARGS."""
return self._getints(
self.tk.call((self._w, 'bbox') + args)) or None
def curselection(self):
"""Return list of indices of currently selected item."""
# XXX Ought to apply self._getints()...
return self.tk.splitlist(self.tk.call(
self._w, 'curselection'))
def delete(self, first, last=None):
"""Delete items from FIRST to LAST (not included)."""
self.tk.call(self._w, 'delete', first, last)
def get(self, first, last=None):
"""Get list of items from FIRST to LAST (not included)."""
if last:
return self.tk.splitlist(self.tk.call(
self._w, 'get', first, last))
else:
return self.tk.call(self._w, 'get', first)
def index(self, index):
"""Return index of item identified with INDEX."""
i = self.tk.call(self._w, 'index', index)
if i == 'none': return None
return getint(i)
def insert(self, index, *elements):
"""Insert ELEMENTS at INDEX."""
self.tk.call((self._w, 'insert', index) + elements)
def nearest(self, y):
"""Get index of item which is nearest to y coordinate Y."""
return getint(self.tk.call(
self._w, 'nearest', y))
def scan_mark(self, x, y):
"""Remember the current X, Y coordinates."""
self.tk.call(self._w, 'scan', 'mark', x, y)
def scan_dragto(self, x, y):
"""Adjust the view of the listbox to 10 times the
difference between X and Y and the coordinates given in
scan_mark."""
self.tk.call(self._w, 'scan', 'dragto', x, y)
def see(self, index):
"""Scroll such that INDEX is visible."""
self.tk.call(self._w, 'see', index)
def selection_anchor(self, index):
"""Set the fixed end oft the selection to INDEX."""
self.tk.call(self._w, 'selection', 'anchor', index)
select_anchor = selection_anchor
def selection_clear(self, first, last=None):
"""Clear the selection from FIRST to LAST (not included)."""
self.tk.call(self._w,
'selection', 'clear', first, last)
select_clear = selection_clear
def selection_includes(self, index):
"""Return 1 if INDEX is part of the selection."""
return self.tk.getboolean(self.tk.call(
self._w, 'selection', 'includes', index))
select_includes = selection_includes
def selection_set(self, first, last=None):
"""Set the selection from FIRST to LAST (not included) without
changing the currently selected elements."""
self.tk.call(self._w, 'selection', 'set', first, last)
select_set = selection_set
def size(self):
"""Return the number of elements in the listbox."""
return getint(self.tk.call(self._w, 'size'))
def itemcget(self, index, option):
"""Return the resource value for an ITEM and an OPTION."""
return self.tk.call(
(self._w, 'itemcget') + (index, '-'+option))
def itemconfigure(self, index, cnf=None, **kw):
"""Configure resources of an ITEM.
The values for resources are specified as keyword arguments.
To get an overview about the allowed keyword arguments
call the method without arguments.
Valid resource names: background, bg, foreground, fg,
selectbackground, selectforeground."""
return self._configure(('itemconfigure', index), cnf, kw)
itemconfig = itemconfigure
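# A minimal, commented Listbox sketch (illustrative comments only; note that
# curselection() returns indices as strings):
#
#     root = Tk()
#     lb = Listbox(root, selectmode=EXTENDED)
#     lb.pack()
#     for item in ['red', 'green', 'blue']:
#         lb.insert(END, item)
#     lb.selection_set(0, 1)        # select the first two items
#     print lb.curselection()       # e.g. ('0', '1')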
class Menu(Widget):
"""Menu widget which allows to display menu bars, pull-down menus and pop-up menus."""
def __init__(self, master=None, cnf={}, **kw):
"""Construct menu widget with the parent MASTER.
Valid resource names: activebackground, activeborderwidth,
activeforeground, background, bd, bg, borderwidth, cursor,
disabledforeground, fg, font, foreground, postcommand, relief,
selectcolor, takefocus, tearoff, tearoffcommand, title, type."""
Widget.__init__(self, master, 'menu', cnf, kw)
def tk_bindForTraversal(self):
pass # obsolete since Tk 4.0
def tk_mbPost(self):
self.tk.call('tk_mbPost', self._w)
def tk_mbUnpost(self):
self.tk.call('tk_mbUnpost')
def tk_traverseToMenu(self, char):
self.tk.call('tk_traverseToMenu', self._w, char)
def tk_traverseWithinMenu(self, char):
self.tk.call('tk_traverseWithinMenu', self._w, char)
def tk_getMenuButtons(self):
return self.tk.call('tk_getMenuButtons', self._w)
def tk_nextMenu(self, count):
self.tk.call('tk_nextMenu', count)
def tk_nextMenuEntry(self, count):
self.tk.call('tk_nextMenuEntry', count)
def tk_invokeMenu(self):
self.tk.call('tk_invokeMenu', self._w)
def tk_firstMenu(self):
self.tk.call('tk_firstMenu', self._w)
def tk_mbButtonDown(self):
self.tk.call('tk_mbButtonDown', self._w)
def tk_popup(self, x, y, entry=""):
"""Post the menu at position X,Y with entry ENTRY."""
self.tk.call('tk_popup', self._w, x, y, entry)
def activate(self, index):
"""Activate entry at INDEX."""
self.tk.call(self._w, 'activate', index)
def add(self, itemType, cnf={}, **kw):
"""Internal function."""
self.tk.call((self._w, 'add', itemType) +
self._options(cnf, kw))
def add_cascade(self, cnf={}, **kw):
"""Add hierarchical menu item."""
self.add('cascade', cnf or kw)
def add_checkbutton(self, cnf={}, **kw):
"""Add checkbutton menu item."""
self.add('checkbutton', cnf or kw)
def add_command(self, cnf={}, **kw):
"""Add command menu item."""
self.add('command', cnf or kw)
def add_radiobutton(self, cnf={}, **kw):
"""Addd radio menu item."""
self.add('radiobutton', cnf or kw)
def add_separator(self, cnf={}, **kw):
"""Add separator."""
self.add('separator', cnf or kw)
def insert(self, index, itemType, cnf={}, **kw):
"""Internal function."""
self.tk.call((self._w, 'insert', index, itemType) +
self._options(cnf, kw))
def insert_cascade(self, index, cnf={}, **kw):
"""Add hierarchical menu item at INDEX."""
self.insert(index, 'cascade', cnf or kw)
def insert_checkbutton(self, index, cnf={}, **kw):
"""Add checkbutton menu item at INDEX."""
self.insert(index, 'checkbutton', cnf or kw)
def insert_command(self, index, cnf={}, **kw):
"""Add command menu item at INDEX."""
self.insert(index, 'command', cnf or kw)
def insert_radiobutton(self, index, cnf={}, **kw):
"""Addd radio menu item at INDEX."""
self.insert(index, 'radiobutton', cnf or kw)
def insert_separator(self, index, cnf={}, **kw):
"""Add separator at INDEX."""
self.insert(index, 'separator', cnf or kw)
def delete(self, index1, index2=None):
"""Delete menu items between INDEX1 and INDEX2 (included)."""
if index2 is None:
index2 = index1
num_index1, num_index2 = self.index(index1), self.index(index2)
if (num_index1 is None) or (num_index2 is None):
num_index1, num_index2 = 0, -1
for i in range(num_index1, num_index2 + 1):
if 'command' in self.entryconfig(i):
c = str(self.entrycget(i, 'command'))
if c:
self.deletecommand(c)
self.tk.call(self._w, 'delete', index1, index2)
def entrycget(self, index, option):
"""Return the resource value of an menu item for OPTION at INDEX."""
return self.tk.call(self._w, 'entrycget', index, '-' + option)
def entryconfigure(self, index, cnf=None, **kw):
"""Configure a menu item at INDEX."""
return self._configure(('entryconfigure', index), cnf, kw)
entryconfig = entryconfigure
def index(self, index):
"""Return the index of a menu item identified by INDEX."""
i = self.tk.call(self._w, 'index', index)
if i == 'none': return None
return getint(i)
def invoke(self, index):
"""Invoke a menu item identified by INDEX and execute
the associated command."""
return self.tk.call(self._w, 'invoke', index)
def post(self, x, y):
"""Display a menu at position X,Y."""
self.tk.call(self._w, 'post', x, y)
def type(self, index):
"""Return the type of the menu item at INDEX."""
return self.tk.call(self._w, 'type', index)
def unpost(self):
"""Unmap a menu."""
self.tk.call(self._w, 'unpost')
def yposition(self, index):
"""Return the y-position of the topmost pixel of the menu item at INDEX."""
return getint(self.tk.call(
self._w, 'yposition', index))
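# A minimal, commented menu-bar sketch (illustrative comments only; a Menu
# attached via root.config(menu=...) becomes the window's menu bar):
#
#     root = Tk()
#     menubar = Menu(root)
#     filemenu = Menu(menubar, tearoff=0)
#     filemenu.add_command(label='Quit', command=root.quit)
#     menubar.add_cascade(label='File', menu=filemenu)
#     root.config(menu=menubar)
#     root.mainloop()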
class Menubutton(Widget):
"""Menubutton widget, obsolete since Tk8.0."""
def __init__(self, master=None, cnf={}, **kw):
Widget.__init__(self, master, 'menubutton', cnf, kw)
class Message(Widget):
"""Message widget to display multiline text. Obsolete since Label does it too."""
def __init__(self, master=None, cnf={}, **kw):
Widget.__init__(self, master, 'message', cnf, kw)
class Radiobutton(Widget):
"""Radiobutton widget which shows only one of several buttons in on-state."""
def __init__(self, master=None, cnf={}, **kw):
"""Construct a radiobutton widget with the parent MASTER.
Valid resource names: activebackground, activeforeground, anchor,
background, bd, bg, bitmap, borderwidth, command, cursor,
disabledforeground, fg, font, foreground, height,
highlightbackground, highlightcolor, highlightthickness, image,
indicatoron, justify, padx, pady, relief, selectcolor, selectimage,
state, takefocus, text, textvariable, underline, value, variable,
width, wraplength."""
Widget.__init__(self, master, 'radiobutton', cnf, kw)
def deselect(self):
"""Put the button in off-state."""
self.tk.call(self._w, 'deselect')
def flash(self):
"""Flash the button."""
self.tk.call(self._w, 'flash')
def invoke(self):
"""Toggle the button and invoke a command if given as resource."""
return self.tk.call(self._w, 'invoke')
def select(self):
"""Put the button in on-state."""
self.tk.call(self._w, 'select')
class Scale(Widget):
"""Scale widget which can display a numerical scale."""
def __init__(self, master=None, cnf={}, **kw):
"""Construct a scale widget with the parent MASTER.
Valid resource names: activebackground, background, bigincrement, bd,
bg, borderwidth, command, cursor, digits, fg, font, foreground, from,
highlightbackground, highlightcolor, highlightthickness, label,
length, orient, relief, repeatdelay, repeatinterval, resolution,
showvalue, sliderlength, sliderrelief, state, takefocus,
tickinterval, to, troughcolor, variable, width."""
Widget.__init__(self, master, 'scale', cnf, kw)
def get(self):
"""Get the current value as integer or float."""
value = self.tk.call(self._w, 'get')
try:
return getint(value)
except ValueError:
return getdouble(value)
def set(self, value):
"""Set the value to VALUE."""
self.tk.call(self._w, 'set', value)
def coords(self, value=None):
"""Return a tuple (X,Y) of the point along the centerline of the
trough that corresponds to VALUE or the current value if None is
given."""
return self._getints(self.tk.call(self._w, 'coords', value))
def identify(self, x, y):
"""Return where the point X,Y lies. Valid return values are "slider",
"though1" and "though2"."""
return self.tk.call(self._w, 'identify', x, y)
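# A minimal, commented Scale sketch (illustrative comments only; note the
# trailing underscore in from_, since 'from' is a reserved word):
#
#     root = Tk()
#     s = Scale(root, from_=0, to=100, orient=HORIZONTAL, resolution=5)
#     s.pack()
#     s.set(40)
#     print s.get()     # prints 40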
class Scrollbar(Widget):
"""Scrollbar widget which displays a slider at a certain position."""
def __init__(self, master=None, cnf={}, **kw):
"""Construct a scrollbar widget with the parent MASTER.
Valid resource names: activebackground, activerelief,
background, bd, bg, borderwidth, command, cursor,
elementborderwidth, highlightbackground,
highlightcolor, highlightthickness, jump, orient,
relief, repeatdelay, repeatinterval, takefocus,
troughcolor, width."""
Widget.__init__(self, master, 'scrollbar', cnf, kw)
def activate(self, index):
"""Display the element at INDEX with activebackground and activerelief.
INDEX can be "arrow1","slider" or "arrow2"."""
self.tk.call(self._w, 'activate', index)
def delta(self, deltax, deltay):
"""Return the fractional change of the scrollbar setting if it
would be moved by DELTAX or DELTAY pixels."""
return getdouble(
self.tk.call(self._w, 'delta', deltax, deltay))
def fraction(self, x, y):
"""Return the fractional value which corresponds to a slider
position of X,Y."""
return getdouble(self.tk.call(self._w, 'fraction', x, y))
def identify(self, x, y):
"""Return the element under position X,Y as one of
"arrow1","slider","arrow2" or ""."""
return self.tk.call(self._w, 'identify', x, y)
def get(self):
"""Return the current fractional values (upper and lower end)
of the slider position."""
return self._getdoubles(self.tk.call(self._w, 'get'))
def set(self, *args):
"""Set the fractional values of the slider position (upper and
lower ends as value between 0 and 1)."""
self.tk.call((self._w, 'set') + args)
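# A minimal, commented sketch wiring a Scrollbar to a Listbox (illustrative
# comments only; the scrollbar's command and the widget's yscrollcommand
# point at each other):
#
#     root = Tk()
#     sb = Scrollbar(root, orient=VERTICAL)
#     lb = Listbox(root, yscrollcommand=sb.set)
#     sb.config(command=lb.yview)
#     sb.pack(side=RIGHT, fill=Y)
#     lb.pack(side=LEFT, fill=BOTH, expand=1)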
class Text(Widget, XView, YView):
"""Text widget which can display text in various forms."""
def __init__(self, master=None, cnf={}, **kw):
"""Construct a text widget with the parent MASTER.
STANDARD OPTIONS
background, borderwidth, cursor,
exportselection, font, foreground,
highlightbackground, highlightcolor,
highlightthickness, insertbackground,
insertborderwidth, insertofftime,
insertontime, insertwidth, padx, pady,
relief, selectbackground,
selectborderwidth, selectforeground,
setgrid, takefocus,
xscrollcommand, yscrollcommand,
WIDGET-SPECIFIC OPTIONS
autoseparators, height, maxundo,
spacing1, spacing2, spacing3,
state, tabs, undo, width, wrap,
"""
Widget.__init__(self, master, 'text', cnf, kw)
def bbox(self, *args):
"""Return a tuple of (x,y,width,height) which gives the bounding
box of the visible part of the character at the index in ARGS."""
return self._getints(
self.tk.call((self._w, 'bbox') + args)) or None
def tk_textSelectTo(self, index):
self.tk.call('tk_textSelectTo', self._w, index)
def tk_textBackspace(self):
self.tk.call('tk_textBackspace', self._w)
def tk_textIndexCloser(self, a, b, c):
self.tk.call('tk_textIndexCloser', self._w, a, b, c)
def tk_textResetAnchor(self, index):
self.tk.call('tk_textResetAnchor', self._w, index)
def compare(self, index1, op, index2):
"""Return whether between index INDEX1 and index INDEX2 the
relation OP is satisfied. OP is one of <, <=, ==, >=, >, or !=."""
return self.tk.getboolean(self.tk.call(
self._w, 'compare', index1, op, index2))
def debug(self, boolean=None):
"""Turn on the internal consistency checks of the B-Tree inside the text
widget according to BOOLEAN."""
return self.tk.getboolean(self.tk.call(
self._w, 'debug', boolean))
def delete(self, index1, index2=None):
"""Delete the characters between INDEX1 and INDEX2 (not included)."""
self.tk.call(self._w, 'delete', index1, index2)
def dlineinfo(self, index):
"""Return tuple (x,y,width,height,baseline) giving the bounding box
and baseline position of the visible part of the line containing
the character at INDEX."""
return self._getints(self.tk.call(self._w, 'dlineinfo', index))
def dump(self, index1, index2=None, command=None, **kw):
"""Return the contents of the widget between index1 and index2.
        The type of contents returned is filtered based on the keyword
parameters; if 'all', 'image', 'mark', 'tag', 'text', or 'window' are
given and true, then the corresponding items are returned. The result
is a list of triples of the form (key, value, index). If none of the
keywords are true then 'all' is used by default.
If the 'command' argument is given, it is called once for each element
of the list of triples, with the values of each triple serving as the
arguments to the function. In this case the list is not returned."""
args = []
func_name = None
result = None
if not command:
# Never call the dump command without the -command flag, since the
# output could involve Tcl quoting and would be a pain to parse
# right. Instead just set the command to build a list of triples
# as if we had done the parsing.
result = []
def append_triple(key, value, index, result=result):
result.append((key, value, index))
command = append_triple
try:
if not isinstance(command, str):
func_name = command = self._register(command)
args += ["-command", command]
for key in kw:
if kw[key]: args.append("-" + key)
args.append(index1)
if index2:
args.append(index2)
self.tk.call(self._w, "dump", *args)
return result
finally:
if func_name:
self.deletecommand(func_name)
## new in tk8.4
def edit(self, *args):
"""Internal method
This method controls the undo mechanism and
the modified flag. The exact behavior of the
command depends on the option argument that
follows the edit argument. The following forms
of the command are currently supported:
edit_modified, edit_redo, edit_reset, edit_separator
and edit_undo
"""
return self.tk.call(self._w, 'edit', *args)
def edit_modified(self, arg=None):
"""Get or Set the modified flag
If arg is not specified, returns the modified
flag of the widget. The insert, delete, edit undo and
edit redo commands or the user can set or clear the
        modified flag. If arg is specified, sets the
modified flag of the widget to arg.
"""
return self.edit("modified", arg)
def edit_redo(self):
"""Redo the last undone edit
When the undo option is true, reapplies the last
undone edits provided no other edits were done since
then. Generates an error when the redo stack is empty.
Does nothing when the undo option is false.
"""
return self.edit("redo")
def edit_reset(self):
"""Clears the undo and redo stacks
"""
return self.edit("reset")
def edit_separator(self):
"""Inserts a separator (boundary) on the undo stack.
Does nothing when the undo option is false
"""
return self.edit("separator")
def edit_undo(self):
"""Undoes the last edit action
If the undo option is true. An edit action is defined
as all the insert and delete commands that are recorded
on the undo stack in between two separators. Generates
an error when the undo stack is empty. Does nothing
when the undo option is false
"""
return self.edit("undo")
def get(self, index1, index2=None):
"""Return the text from INDEX1 to INDEX2 (not included)."""
return self.tk.call(self._w, 'get', index1, index2)
# (Image commands are new in 8.0)
def image_cget(self, index, option):
"""Return the value of OPTION of an embedded image at INDEX."""
if option[:1] != "-":
option = "-" + option
if option[-1:] == "_":
option = option[:-1]
return self.tk.call(self._w, "image", "cget", index, option)
def image_configure(self, index, cnf=None, **kw):
"""Configure an embedded image at INDEX."""
return self._configure(('image', 'configure', index), cnf, kw)
def image_create(self, index, cnf={}, **kw):
"""Create an embedded image at INDEX."""
return self.tk.call(
self._w, "image", "create", index,
*self._options(cnf, kw))
def image_names(self):
"""Return all names of embedded images in this widget."""
return self.tk.call(self._w, "image", "names")
def index(self, index):
"""Return the index in the form line.char for INDEX."""
return str(self.tk.call(self._w, 'index', index))
def insert(self, index, chars, *args):
"""Insert CHARS before the characters at INDEX. An additional
tag can be given in ARGS. Additional CHARS and tags can follow in ARGS."""
self.tk.call((self._w, 'insert', index, chars) + args)
def mark_gravity(self, markName, direction=None):
"""Change the gravity of a mark MARKNAME to DIRECTION (LEFT or RIGHT).
Return the current value if None is given for DIRECTION."""
return self.tk.call(
(self._w, 'mark', 'gravity', markName, direction))
def mark_names(self):
"""Return all mark names."""
return self.tk.splitlist(self.tk.call(
self._w, 'mark', 'names'))
def mark_set(self, markName, index):
"""Set mark MARKNAME before the character at INDEX."""
self.tk.call(self._w, 'mark', 'set', markName, index)
def mark_unset(self, *markNames):
"""Delete all marks in MARKNAMES."""
self.tk.call((self._w, 'mark', 'unset') + markNames)
def mark_next(self, index):
"""Return the name of the next mark after INDEX."""
return self.tk.call(self._w, 'mark', 'next', index) or None
def mark_previous(self, index):
"""Return the name of the previous mark before INDEX."""
return self.tk.call(self._w, 'mark', 'previous', index) or None
def scan_mark(self, x, y):
"""Remember the current X, Y coordinates."""
self.tk.call(self._w, 'scan', 'mark', x, y)
def scan_dragto(self, x, y):
"""Adjust the view of the text to 10 times the
difference between X and Y and the coordinates given in
scan_mark."""
self.tk.call(self._w, 'scan', 'dragto', x, y)
def search(self, pattern, index, stopindex=None,
forwards=None, backwards=None, exact=None,
regexp=None, nocase=None, count=None, elide=None):
"""Search PATTERN beginning from INDEX until STOPINDEX.
Return the index of the first character of a match or an
empty string."""
args = [self._w, 'search']
if forwards: args.append('-forwards')
if backwards: args.append('-backwards')
if exact: args.append('-exact')
if regexp: args.append('-regexp')
if nocase: args.append('-nocase')
if elide: args.append('-elide')
if count: args.append('-count'); args.append(count)
if pattern and pattern[0] == '-': args.append('--')
args.append(pattern)
args.append(index)
if stopindex: args.append(stopindex)
return str(self.tk.call(tuple(args)))
def see(self, index):
"""Scroll such that the character at INDEX is visible."""
self.tk.call(self._w, 'see', index)
def tag_add(self, tagName, index1, *args):
"""Add tag TAGNAME to all characters between INDEX1 and index2 in ARGS.
Additional pairs of indices may follow in ARGS."""
self.tk.call(
(self._w, 'tag', 'add', tagName, index1) + args)
def tag_unbind(self, tagName, sequence, funcid=None):
"""Unbind for all characters with TAGNAME for event SEQUENCE the
function identified with FUNCID."""
self.tk.call(self._w, 'tag', 'bind', tagName, sequence, '')
if funcid:
self.deletecommand(funcid)
def tag_bind(self, tagName, sequence, func, add=None):
"""Bind to all characters with TAGNAME at event SEQUENCE a call to function FUNC.
An additional boolean parameter ADD specifies whether FUNC will be
called additionally to the other bound function or whether it will
replace the previous function. See bind for the return value."""
return self._bind((self._w, 'tag', 'bind', tagName),
sequence, func, add)
def tag_cget(self, tagName, option):
"""Return the value of OPTION for tag TAGNAME."""
if option[:1] != '-':
option = '-' + option
if option[-1:] == '_':
option = option[:-1]
return self.tk.call(self._w, 'tag', 'cget', tagName, option)
def tag_configure(self, tagName, cnf=None, **kw):
"""Configure a tag TAGNAME."""
return self._configure(('tag', 'configure', tagName), cnf, kw)
tag_config = tag_configure
def tag_delete(self, *tagNames):
"""Delete all tags in TAGNAMES."""
self.tk.call((self._w, 'tag', 'delete') + tagNames)
def tag_lower(self, tagName, belowThis=None):
"""Change the priority of tag TAGNAME such that it is lower
than the priority of BELOWTHIS."""
self.tk.call(self._w, 'tag', 'lower', tagName, belowThis)
def tag_names(self, index=None):
"""Return a list of all tag names."""
return self.tk.splitlist(
self.tk.call(self._w, 'tag', 'names', index))
def tag_nextrange(self, tagName, index1, index2=None):
"""Return a list of start and end index for the first sequence of
characters between INDEX1 and INDEX2 which all have tag TAGNAME.
The text is searched forward from INDEX1."""
return self.tk.splitlist(self.tk.call(
self._w, 'tag', 'nextrange', tagName, index1, index2))
def tag_prevrange(self, tagName, index1, index2=None):
"""Return a list of start and end index for the first sequence of
characters between INDEX1 and INDEX2 which all have tag TAGNAME.
The text is searched backwards from INDEX1."""
return self.tk.splitlist(self.tk.call(
self._w, 'tag', 'prevrange', tagName, index1, index2))
def tag_raise(self, tagName, aboveThis=None):
"""Change the priority of tag TAGNAME such that it is higher
than the priority of ABOVETHIS."""
self.tk.call(
self._w, 'tag', 'raise', tagName, aboveThis)
def tag_ranges(self, tagName):
"""Return a list of ranges of text which have tag TAGNAME."""
return self.tk.splitlist(self.tk.call(
self._w, 'tag', 'ranges', tagName))
def tag_remove(self, tagName, index1, index2=None):
"""Remove tag TAGNAME from all characters between INDEX1 and INDEX2."""
self.tk.call(
self._w, 'tag', 'remove', tagName, index1, index2)
def window_cget(self, index, option):
"""Return the value of OPTION of an embedded window at INDEX."""
if option[:1] != '-':
option = '-' + option
if option[-1:] == '_':
option = option[:-1]
return self.tk.call(self._w, 'window', 'cget', index, option)
def window_configure(self, index, cnf=None, **kw):
"""Configure an embedded window at INDEX."""
return self._configure(('window', 'configure', index), cnf, kw)
window_config = window_configure
def window_create(self, index, cnf={}, **kw):
"""Create a window at INDEX."""
self.tk.call(
(self._w, 'window', 'create', index)
+ self._options(cnf, kw))
def window_names(self):
"""Return all names of embedded windows in this widget."""
return self.tk.splitlist(
self.tk.call(self._w, 'window', 'names'))
def yview_pickplace(self, *what):
"""Obsolete function, use see."""
self.tk.call((self._w, 'yview', '-pickplace') + what)
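# A minimal, commented Text sketch (illustrative comments only; indices are
# 'line.column' strings and tags carry display options):
#
#     root = Tk()
#     t = Text(root, undo=1)
#     t.pack()
#     t.insert(END, 'Hello world\n')
#     t.tag_add('hl', '1.0', '1.5')            # tag the word 'Hello'
#     t.tag_configure('hl', background='yellow')
#     print t.get('1.0', END)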
class _setit:
"""Internal class. It wraps the command in the widget OptionMenu."""
def __init__(self, var, value, callback=None):
self.__value = value
self.__var = var
self.__callback = callback
def __call__(self, *args):
self.__var.set(self.__value)
if self.__callback:
self.__callback(self.__value, *args)
class OptionMenu(Menubutton):
"""OptionMenu which allows the user to select a value from a menu."""
def __init__(self, master, variable, value, *values, **kwargs):
"""Construct an optionmenu widget with the parent MASTER, with
the resource textvariable set to VARIABLE, the initially selected
value VALUE, the other menu values VALUES and an additional
keyword argument command."""
kw = {"borderwidth": 2, "textvariable": variable,
"indicatoron": 1, "relief": RAISED, "anchor": "c",
"highlightthickness": 2}
Widget.__init__(self, master, "menubutton", kw)
self.widgetName = 'tk_optionMenu'
menu = self.__menu = Menu(self, name="menu", tearoff=0)
self.menuname = menu._w
# 'command' is the only supported keyword
callback = kwargs.get('command')
if 'command' in kwargs:
del kwargs['command']
if kwargs:
raise TclError, 'unknown option -'+kwargs.keys()[0]
menu.add_command(label=value,
command=_setit(variable, value, callback))
for v in values:
menu.add_command(label=v,
command=_setit(variable, v, callback))
self["menu"] = menu
def __getitem__(self, name):
if name == 'menu':
return self.__menu
return Widget.__getitem__(self, name)
def destroy(self):
"""Destroy this widget and the associated menu."""
Menubutton.destroy(self)
self.__menu = None
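# A minimal, commented OptionMenu sketch (illustrative comments only; the
# variable holds the current selection):
#
#     root = Tk()
#     var = StringVar(root)
#     om = OptionMenu(root, var, 'one', 'two', 'three')
#     om.pack()
#     var.set('two')        # select programmatically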
class Image:
"""Base class for images."""
_last_id = 0
def __init__(self, imgtype, name=None, cnf={}, master=None, **kw):
self.name = None
if not master:
master = _default_root
if not master:
raise RuntimeError, 'Too early to create image'
self.tk = master.tk
if not name:
Image._last_id += 1
name = "pyimage%r" % (Image._last_id,) # tk itself would use image<x>
# The following is needed for systems where id(x)
# can return a negative number, such as Linux/m68k:
if name[0] == '-': name = '_' + name[1:]
if kw and cnf: cnf = _cnfmerge((cnf, kw))
elif kw: cnf = kw
options = ()
for k, v in cnf.items():
if hasattr(v, '__call__'):
v = self._register(v)
options = options + ('-'+k, v)
self.tk.call(('image', 'create', imgtype, name,) + options)
self.name = name
def __str__(self): return self.name
def __del__(self):
if self.name:
try:
self.tk.call('image', 'delete', self.name)
except TclError:
# May happen if the root was destroyed
pass
def __setitem__(self, key, value):
self.tk.call(self.name, 'configure', '-'+key, value)
def __getitem__(self, key):
return self.tk.call(self.name, 'configure', '-'+key)
def configure(self, **kw):
"""Configure the image."""
res = ()
for k, v in _cnfmerge(kw).items():
if v is not None:
if k[-1] == '_': k = k[:-1]
if hasattr(v, '__call__'):
v = self._register(v)
res = res + ('-'+k, v)
self.tk.call((self.name, 'config') + res)
config = configure
def height(self):
"""Return the height of the image."""
return getint(
self.tk.call('image', 'height', self.name))
def type(self):
"""Return the type of the imgage, e.g. "photo" or "bitmap"."""
return self.tk.call('image', 'type', self.name)
def width(self):
"""Return the width of the image."""
return getint(
self.tk.call('image', 'width', self.name))
class PhotoImage(Image):
"""Widget which can display colored images in GIF, PPM/PGM format."""
def __init__(self, name=None, cnf={}, master=None, **kw):
"""Create an image with NAME.
Valid resource names: data, format, file, gamma, height, palette,
width."""
Image.__init__(self, 'photo', name, cnf, master, **kw)
def blank(self):
"""Display a transparent image."""
self.tk.call(self.name, 'blank')
def cget(self, option):
"""Return the value of OPTION."""
return self.tk.call(self.name, 'cget', '-' + option)
# XXX config
def __getitem__(self, key):
return self.tk.call(self.name, 'cget', '-' + key)
# XXX copy -from, -to, ...?
def copy(self):
"""Return a new PhotoImage with the same image as this widget."""
destImage = PhotoImage()
self.tk.call(destImage, 'copy', self.name)
return destImage
def zoom(self,x,y=''):
"""Return a new PhotoImage with the same image as this widget
but zoom it with X and Y."""
destImage = PhotoImage()
if y=='': y=x
self.tk.call(destImage, 'copy', self.name, '-zoom',x,y)
return destImage
def subsample(self,x,y=''):
"""Return a new PhotoImage based on the same image as this widget
but use only every Xth or Yth pixel."""
destImage = PhotoImage()
if y=='': y=x
self.tk.call(destImage, 'copy', self.name, '-subsample',x,y)
return destImage
def get(self, x, y):
"""Return the color (red, green, blue) of the pixel at X,Y."""
return self.tk.call(self.name, 'get', x, y)
def put(self, data, to=None):
"""Put row formatted colors to image starting from
position TO, e.g. image.put("{red green} {blue yellow}", to=(4,6))"""
args = (self.name, 'put', data)
if to:
if to[0] == '-to':
to = to[1:]
args = args + ('-to',) + tuple(to)
self.tk.call(args)
# XXX read
def write(self, filename, format=None, from_coords=None):
"""Write image to file FILENAME in FORMAT starting from
position FROM_COORDS."""
args = (self.name, 'write', filename)
if format:
args = args + ('-format', format)
if from_coords:
args = args + ('-from',) + tuple(from_coords)
self.tk.call(args)
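# A minimal, commented PhotoImage sketch (illustrative comments only;
# 'logo.gif' is a placeholder file name -- keep a Python reference to the
# image or it will be garbage-collected and vanish from the display):
#
#     root = Tk()
#     img = PhotoImage(file='logo.gif')
#     small = img.subsample(2, 2)            # keep every 2nd pixel
#     Label(root, image=small).pack()
#     root.mainloop()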
class BitmapImage(Image):
"""Widget which can display a bitmap."""
def __init__(self, name=None, cnf={}, master=None, **kw):
"""Create a bitmap with NAME.
Valid resource names: background, data, file, foreground, maskdata, maskfile."""
Image.__init__(self, 'bitmap', name, cnf, master, **kw)
def image_names(): return _default_root.tk.call('image', 'names')
def image_types(): return _default_root.tk.call('image', 'types')
class Spinbox(Widget, XView):
"""spinbox widget."""
def __init__(self, master=None, cnf={}, **kw):
"""Construct a spinbox widget with the parent MASTER.
STANDARD OPTIONS
activebackground, background, borderwidth,
cursor, exportselection, font, foreground,
highlightbackground, highlightcolor,
highlightthickness, insertbackground,
insertborderwidth, insertofftime,
insertontime, insertwidth, justify, relief,
repeatdelay, repeatinterval,
            selectbackground, selectborderwidth,
            selectforeground, takefocus, textvariable,
            xscrollcommand.
WIDGET-SPECIFIC OPTIONS
buttonbackground, buttoncursor,
buttondownrelief, buttonuprelief,
command, disabledbackground,
disabledforeground, format, from,
invalidcommand, increment,
readonlybackground, state, to,
            validate, validatecommand, values,
width, wrap,
"""
Widget.__init__(self, master, 'spinbox', cnf, kw)
def bbox(self, index):
"""Return a tuple of X1,Y1,X2,Y2 coordinates for a
rectangle which encloses the character given by index.
The first two elements of the list give the x and y
coordinates of the upper-left corner of the screen
area covered by the character (in pixels relative
to the widget) and the last two elements give the
width and height of the character, in pixels. The
bounding box may refer to a region outside the
visible area of the window.
"""
return self.tk.call(self._w, 'bbox', index)
def delete(self, first, last=None):
"""Delete one or more elements of the spinbox.
First is the index of the first character to delete,
and last is the index of the character just after
the last one to delete. If last isn't specified it
defaults to first+1, i.e. a single character is
deleted. This command returns an empty string.
"""
return self.tk.call(self._w, 'delete', first, last)
def get(self):
"""Returns the spinbox's string"""
return self.tk.call(self._w, 'get')
def icursor(self, index):
"""Alter the position of the insertion cursor.
The insertion cursor will be displayed just before
the character given by index. Returns an empty string
"""
return self.tk.call(self._w, 'icursor', index)
def identify(self, x, y):
"""Returns the name of the widget at position x, y
Return value is one of: none, buttondown, buttonup, entry
"""
return self.tk.call(self._w, 'identify', x, y)
def index(self, index):
"""Returns the numerical index corresponding to index
"""
return self.tk.call(self._w, 'index', index)
def insert(self, index, s):
"""Insert string s at index
Returns an empty string.
"""
return self.tk.call(self._w, 'insert', index, s)
def invoke(self, element):
"""Causes the specified element to be invoked
The element could be buttondown or buttonup
triggering the action associated with it.
"""
return self.tk.call(self._w, 'invoke', element)
def scan(self, *args):
"""Internal function."""
return self._getints(
self.tk.call((self._w, 'scan') + args)) or ()
def scan_mark(self, x):
"""Records x and the current view in the spinbox window;
used in conjunction with later scan dragto commands.
Typically this command is associated with a mouse button
press in the widget. It returns an empty string.
"""
return self.scan("mark", x)
def scan_dragto(self, x):
"""Compute the difference between the given x argument
        and the x argument to the last scan mark command.
It then adjusts the view left or right by 10 times the
difference in x-coordinates. This command is typically
associated with mouse motion events in the widget, to
produce the effect of dragging the spinbox at high speed
through the window. The return value is an empty string.
"""
return self.scan("dragto", x)
def selection(self, *args):
"""Internal function."""
return self._getints(
self.tk.call((self._w, 'selection') + args)) or ()
def selection_adjust(self, index):
"""Locate the end of the selection nearest to the character
given by index,
        then adjust that end of the selection to be at index
(i.e including but not going beyond index). The other
end of the selection is made the anchor point for future
select to commands. If the selection isn't currently in
the spinbox, then a new selection is created to include
the characters between index and the most recent selection
anchor point, inclusive. Returns an empty string.
"""
return self.selection("adjust", index)
def selection_clear(self):
"""Clear the selection
If the selection isn't in this widget then the
command has no effect. Returns an empty string.
"""
return self.selection("clear")
def selection_element(self, element=None):
"""Sets or gets the currently selected element.
If a spinbutton element is specified, it will be
displayed depressed
"""
return self.selection("element", element)
###########################################################################
class LabelFrame(Widget):
"""labelframe widget."""
def __init__(self, master=None, cnf={}, **kw):
"""Construct a labelframe widget with the parent MASTER.
STANDARD OPTIONS
borderwidth, cursor, font, foreground,
highlightbackground, highlightcolor,
highlightthickness, padx, pady, relief,
takefocus, text
WIDGET-SPECIFIC OPTIONS
background, class, colormap, container,
height, labelanchor, labelwidget,
visual, width
"""
Widget.__init__(self, master, 'labelframe', cnf, kw)
########################################################################
class PanedWindow(Widget):
"""panedwindow widget."""
def __init__(self, master=None, cnf={}, **kw):
"""Construct a panedwindow widget with the parent MASTER.
STANDARD OPTIONS
background, borderwidth, cursor, height,
orient, relief, width
WIDGET-SPECIFIC OPTIONS
handlepad, handlesize, opaqueresize,
sashcursor, sashpad, sashrelief,
sashwidth, showhandle,
"""
Widget.__init__(self, master, 'panedwindow', cnf, kw)
def add(self, child, **kw):
"""Add a child widget to the panedwindow in a new pane.
The child argument is the name of the child widget
followed by pairs of arguments that specify how to
manage the windows. The possible options and values
are the ones accepted by the paneconfigure method.
"""
self.tk.call((self._w, 'add', child) + self._options(kw))
def remove(self, child):
"""Remove the pane containing child from the panedwindow
All geometry management options for child will be forgotten.
"""
self.tk.call(self._w, 'forget', child)
forget=remove
def identify(self, x, y):
"""Identify the panedwindow component at point x, y
If the point is over a sash or a sash handle, the result
is a two element list containing the index of the sash or
handle, and a word indicating whether it is over a sash
or a handle, such as {0 sash} or {2 handle}. If the point
is over any other part of the panedwindow, the result is
an empty list.
"""
return self.tk.call(self._w, 'identify', x, y)
def proxy(self, *args):
"""Internal function."""
return self._getints(
self.tk.call((self._w, 'proxy') + args)) or ()
def proxy_coord(self):
"""Return the x and y pair of the most recent proxy location
"""
return self.proxy("coord")
def proxy_forget(self):
"""Remove the proxy from the display.
"""
return self.proxy("forget")
def proxy_place(self, x, y):
"""Place the proxy at the given x and y coordinates.
"""
return self.proxy("place", x, y)
def sash(self, *args):
"""Internal function."""
return self._getints(
self.tk.call((self._w, 'sash') + args)) or ()
def sash_coord(self, index):
"""Return the current x and y pair for the sash given by index.
Index must be an integer between 0 and 1 less than the
number of panes in the panedwindow. The coordinates given are
        those of the top left corner of the region containing the sash.
        A sash may also be dragged via self.sash("dragto", index, x, y),
        which moves it by the difference between the given coordinates
        and those recorded by the last sash_mark call, returning an
        empty string.
"""
return self.sash("coord", index)
def sash_mark(self, index):
"""Records x and y for the sash given by index;
Used in conjunction with later dragto commands to move the sash.
"""
return self.sash("mark", index)
def sash_place(self, index, x, y):
"""Place the sash given by index at the given coordinates
"""
return self.sash("place", index, x, y)
def panecget(self, child, option):
"""Query a management option for window.
Option may be any value allowed by the paneconfigure subcommand
"""
return self.tk.call(
(self._w, 'panecget') + (child, '-'+option))
def paneconfigure(self, tagOrId, cnf=None, **kw):
"""Query or modify the management options for window.
If no option is specified, returns a list describing all
of the available options for pathName. If option is
specified with no value, then the command returns a list
describing the one named option (this list will be identical
to the corresponding sublist of the value returned if no
option is specified). If one or more option-value pairs are
specified, then the command modifies the given widget
option(s) to have the given value(s); in this case the
command returns an empty string. The following options
are supported:
after window
Insert the window after the window specified. window
should be the name of a window already managed by pathName.
before window
Insert the window before the window specified. window
should be the name of a window already managed by pathName.
height size
Specify a height for the window. The height will be the
outer dimension of the window including its border, if
any. If size is an empty string, or if -height is not
specified, then the height requested internally by the
window will be used initially; the height may later be
adjusted by the movement of sashes in the panedwindow.
Size may be any value accepted by Tk_GetPixels.
minsize n
Specifies that the size of the window cannot be made
less than n. This constraint only affects the size of
the widget in the paned dimension -- the x dimension
for horizontal panedwindows, the y dimension for
vertical panedwindows. May be any value accepted by
Tk_GetPixels.
padx n
Specifies a non-negative value indicating how much
extra space to leave on each side of the window in
the X-direction. The value may have any of the forms
accepted by Tk_GetPixels.
pady n
Specifies a non-negative value indicating how much
extra space to leave on each side of the window in
the Y-direction. The value may have any of the forms
accepted by Tk_GetPixels.
sticky style
If a window's pane is larger than the requested
dimensions of the window, this option may be used
to position (or stretch) the window within its pane.
Style is a string that contains zero or more of the
characters n, s, e or w. The string can optionally
            contain spaces or commas, but they are ignored. Each
letter refers to a side (north, south, east, or west)
that the window will "stick" to. If both n and s
(or e and w) are specified, the window will be
stretched to fill the entire height (or width) of
its cavity.
width size
Specify a width for the window. The width will be
the outer dimension of the window including its
border, if any. If size is an empty string, or
if -width is not specified, then the width requested
internally by the window will be used initially; the
width may later be adjusted by the movement of sashes
in the panedwindow. Size may be any value accepted by
Tk_GetPixels.
"""
if cnf is None and not kw:
cnf = {}
for x in self.tk.split(
self.tk.call(self._w,
'paneconfigure', tagOrId)):
cnf[x[0][1:]] = (x[0][1:],) + x[1:]
return cnf
if type(cnf) == StringType and not kw:
x = self.tk.split(self.tk.call(
self._w, 'paneconfigure', tagOrId, '-'+cnf))
return (x[0][1:],) + x[1:]
self.tk.call((self._w, 'paneconfigure', tagOrId) +
self._options(cnf, kw))
paneconfig = paneconfigure
def panes(self):
"""Returns an ordered list of the child panes."""
return self.tk.call(self._w, 'panes')
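# --- Editor's illustrative sketch (not part of the original module) ---
# A two-pane split using the add()/paneconfigure() options documented
# above; the minsize and sticky values are assumptions.
def _demo_panedwindow(master):
    """Split two labels horizontally inside a PanedWindow."""
    pw = PanedWindow(master, orient='horizontal')
    pw.add(Label(pw, text='left pane'), minsize=100)
    pw.add(Label(pw, text='right pane'), sticky='nsew')
    pw.pack(fill='both', expand=True)
    return pw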
######################################################################
# Extensions:
class Studbutton(Button):
def __init__(self, master=None, cnf={}, **kw):
Widget.__init__(self, master, 'studbutton', cnf, kw)
self.bind('<Any-Enter>', self.tkButtonEnter)
self.bind('<Any-Leave>', self.tkButtonLeave)
self.bind('<1>', self.tkButtonDown)
self.bind('<ButtonRelease-1>', self.tkButtonUp)
class Tributton(Button):
def __init__(self, master=None, cnf={}, **kw):
Widget.__init__(self, master, 'tributton', cnf, kw)
self.bind('<Any-Enter>', self.tkButtonEnter)
self.bind('<Any-Leave>', self.tkButtonLeave)
self.bind('<1>', self.tkButtonDown)
self.bind('<ButtonRelease-1>', self.tkButtonUp)
self['fg'] = self['bg']
self['activebackground'] = self['bg']
######################################################################
# Test:
def _test():
root = Tk()
text = "This is Tcl/Tk version %s" % TclVersion
if TclVersion >= 8.1:
try:
text = text + unicode("\nThis should be a cedilla: \347",
"iso-8859-1")
except NameError:
pass # no unicode support
label = Label(root, text=text)
label.pack()
test = Button(root, text="Click me!",
command=lambda root=root: root.test.configure(
text="[%s]" % root.test['text']))
test.pack()
root.test = test
quit = Button(root, text="QUIT", command=root.destroy)
quit.pack()
# The following three commands are needed so the window pops
# up on top on Windows...
root.iconify()
root.update()
root.deiconify()
root.mainloop()
if __name__ == '__main__':
_test()
| apache-2.0 |
satvikdhandhania/vit-11 | moca/mrs/legacy_api.py | 3 | 14520 | from __future__ import absolute_import
try:
import json
except ImportError:
import simplejson as json
import logging
import urllib2
import telnetlib
import urllib
from django.conf import settings
from django.template import loader
from django.http import HttpResponse
from django import forms
from django.core.mail import send_mail
from django.shortcuts import render_to_response
from util import enable_logging
from moca.mrs import openmrs, patient, sms
from moca.mrs.api import register_saved_procedure, register_binary, register_binary_chunk
from moca.mrs.models import Notification
def render_json_template(*args, **kwargs):
"""Renders a JSON template, and then calls render_json_response()."""
data = loader.render_to_string(*args, **kwargs)
return render_json_response(data)
def render_json_response(data):
"""Sends an HttpResponse with the X-JSON header and the right mimetype."""
resp = HttpResponse(data, mimetype=("application/json; charset=" +
settings.DEFAULT_CHARSET))
resp['X-JSON'] = data
return resp
def json_fail(message):
response = {
'status': 'FAILURE',
'data': message,
}
return json.dumps(response)
def json_succeed(data):
response = {
'status': 'SUCCESS',
'data': data,
}
return json.dumps(response)
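# Editor's illustrative sketch (not part of the original module): how the
# envelope helpers above are typically combined in a view; the payload and
# control flow are assumptions.
def _example_envelope(ok, payload):
    """Wrap payload in the success/failure envelope and build a response."""
    body = json_succeed(payload) if ok else json_fail(payload)
    return render_json_response(body)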
def validate_credentials(request):
try:
username = request.REQUEST.get("username", None)
password = request.REQUEST.get("password", None)
logging.info("username: " + username)
logging.info("pasword: " + password)
response = ''
omrs = openmrs.OpenMRS(username,password,
settings.OPENMRS_SERVER_URL)
if omrs.validate_credentials(username, password):
response = json_succeed("username and password validated!")
else:
response = json_fail("username and password combination incorrect!")
return render_json_response(response)
except Exception, e:
logging.error( "Exception in validate_credentials: " + str(e))
class ProcedureSubmitForm(forms.Form):
username = forms.CharField(required=True, max_length=256)
password = forms.CharField(required=True, max_length=256)
savedproc_guid = forms.CharField(required=True, max_length=512)
procedure_guid = forms.CharField(required=True, max_length=512)
responses = forms.CharField(required=True)
phone = forms.CharField(max_length=255)
@enable_logging
def procedure_submit(request):
try:
logging.info("Received saved procedure submission.")
for key,value in request.REQUEST.items():
print key,value
# if request.method != 'POST':
# return HttpResponse('get')
form = ProcedureSubmitForm(request.REQUEST)
response = ''
if form.is_valid():
result, message = register_saved_procedure(form.cleaned_data['savedproc_guid'],
form.cleaned_data['procedure_guid'],
form.cleaned_data['responses'],
form.cleaned_data['phone'],
form.cleaned_data['username'],
form.cleaned_data['password'])
if result:
response = json_succeed("Successfully saved the procedure: %s" % message)
logging.info("Saved procedure successfully registerd.")
else:
response = json_fail(message)
logging.error("Failed to register procedure: %s" % message)
else:
logging.error("Saved procedure submission was invalid, dumping REQUEST.")
for k,v in request.REQUEST.items():
logging.error("SavedProcedure argument %s:%s" % (k,v))
response = json_fail("Could not parse submission : missing parts or invalid data?")
except Exception, e:
error = "Exception : %s" % e
logging.error(error)
response = json_fail(error)
return render_json_response(response)
class BinaryChunkSubmitForm(forms.Form):
procedure_guid = forms.CharField(required=True, max_length=512)
element_id = forms.CharField(required=True)
element_type = forms.CharField(required=True)
binary_guid = forms.CharField(required=True)
file_size = forms.IntegerField(required=True)
byte_start = forms.IntegerField(required=True)
byte_end = forms.IntegerField(required=True)
#byte_data = forms.CharField(required=True)
byte_data = forms.FileField(required=True)
done = forms.BooleanField(initial=False, required=False)
@enable_logging
def binarychunk_submit(request):
"""Processes an individual chunk of binary data uploaded."""
response = ''
form = BinaryChunkSubmitForm(request.POST, request.FILES)
if form.is_valid():
logging.info("Received valid binarychunk form")
procedure_guid = form.cleaned_data['procedure_guid']
element_id = form.cleaned_data['element_id']
element_type = form.cleaned_data['element_type']
binary_guid = form.cleaned_data['binary_guid']
file_size = form.cleaned_data['file_size']
byte_start = form.cleaned_data['byte_start']
byte_end = form.cleaned_data['byte_end']
byte_data = form.cleaned_data['byte_data']
try:
result, message = register_binary_chunk(procedure_guid,
element_id,
element_type,
binary_guid,
file_size,
byte_start,
byte_end,
byte_data.chunks())
if result:
response = json_succeed("Successfully saved the binary chunk: %s" % message)
else:
response = json_fail("Failed to save the binary chunk: %s" % message)
except Exception, e:
logging.error("registering binary chunk failed: %s" % e)
response = json_fail("Registering binary chunk failed: %s" % e)
logging.info("Finished processing binarychunk form")
else:
logging.error("Received invalid binarychunk form")
for k,v in request.REQUEST.items():
if k == 'byte_data':
logging.debug("%s:(binary length %d)" % (k,len(v)))
else:
logging.debug("%s:%s" % (k,v))
response = json_fail("Could not parse submission. Missing parts?")
return render_json_response(response)
class BinaryChunkHackSubmitForm(forms.Form):
procedure_guid = forms.CharField(required=True, max_length=512)
element_id = forms.CharField(required=True)
element_type = forms.CharField(required=True)
binary_guid = forms.CharField(required=True)
file_size = forms.IntegerField(required=True)
byte_start = forms.IntegerField(required=True)
byte_end = forms.IntegerField(required=True)
byte_data = forms.CharField(required=True)
#byte_data = forms.FileField(required=True)
done = forms.BooleanField(initial=False, required=False)
@enable_logging
def binarychunk_hack_submit(request):
"""Processes an individual chunk of binary data uploaded."""
response = ''
form = BinaryChunkHackSubmitForm(request.POST, request.FILES)
if form.is_valid():
logging.info("Received valid binarychunk-hack form")
procedure_guid = form.cleaned_data['procedure_guid']
element_id = form.cleaned_data['element_id']
element_type = form.cleaned_data['element_type']
binary_guid = form.cleaned_data['binary_guid']
file_size = form.cleaned_data['file_size']
byte_start = form.cleaned_data['byte_start']
byte_end = form.cleaned_data['byte_end']
byte_data = form.cleaned_data['byte_data']
# This hack submits byte_data as base64 encoded, so decode it.
byte_data = byte_data.decode('base64')
try:
result, message = register_binary_chunk(procedure_guid,
element_id,
element_type,
binary_guid,
file_size,
byte_start,
byte_end,
[byte_data,])
if result:
response = json_succeed("Successfully saved the binary chunk: %s" % message)
else:
response = json_fail("Failed to save the binary chunk: %s" % message)
except Exception, e:
logging.error("registering binary chunk failed: %s" % e)
response = json_fail("Registering binary chunk failed: %s" % e)
logging.info("Finished processing binarychunk form")
else:
logging.error("Received invalid binarychunk form")
for k,v in request.REQUEST.items():
if k == 'byte_data':
logging.debug("%s:(binary length %d)" % (k,len(v)))
else:
logging.debug("%s:%s" % (k,v))
response = json_fail("Could not parse submission. Missing parts?")
logging.info("Sending response %s" % response)
return render_json_response(response)
class BinarySubmitForm(forms.Form):
procedure_guid = forms.CharField(required=True, max_length=512)
element_id = forms.CharField(required=True)
#data = forms.FileField(required=True)
@enable_logging
def binary_submit(request):
response = ''
form = BinarySubmitForm(request.REQUEST)
data = request.FILES.get('data',None)
if form.is_valid() and data:
logging.info("Received a valid Binary submission form")
element_id = form.cleaned_data['element_id']
procedure_guid = form.cleaned_data['procedure_guid']
register_binary(procedure_guid, element_id, data)
response = json_succeed("Successfully saved the binary")
logging.info("Done processing Binary submission form")
else:
logging.info("Received an invalid Binary submission form")
response = json_fail("Could not parse submission. Missing parts?")
return render_json_response(response)
class OpenMRSQueryForm(forms.Form):
username = forms.CharField(required=True, max_length=256)
password = forms.CharField(required=True, max_length=256)
@enable_logging
def patient_list(request):
logging.info("entering patient list proc")
username = request.REQUEST.get("username", None)
password = request.REQUEST.get("password", None)
omrs = openmrs.OpenMRS(username,password,
settings.OPENMRS_SERVER_URL)
try:
patients_xml = omrs.get_all_patients()
data = patient.parse_patient_list_xml(patients_xml)
logging.info("we finished getting the patient list")
response = json_succeed(data)
except Exception, e:
logging.error("Got exception while fetching patient list: %s" % e)
response = json_fail("Problem while getting patient list: %s" % e)
return render_json_response(response)
@enable_logging
def patient_get(request, id):
logging.info("entering patient get proc")
username = request.REQUEST.get("username", None)
password = request.REQUEST.get("password", None)
omrs = openmrs.OpenMRS(username,password,
settings.OPENMRS_SERVER_URL)
logging.info("About to getPatient")
try:
patient_xml = omrs.get_patient(id)
data = patient.parse_patient_xml(patient_xml)
response = json_succeed(data)
except Exception, e:
logging.error("Got error %s" % e)
response = json_fail("couldn't get patient")
logging.info("finished patient_get")
return render_json_response(response)
@enable_logging
def notification_submit(request):
phoneId = request.REQUEST.get("phoneIdentifier", None)
text = request.REQUEST.get("notificationText", None)
caseIdentifier = request.REQUEST.get("caseIdentifier", None)
patientIdentifier = request.REQUEST.get("patientIdentifier", None)
delivered = False
logging.info("Notification submit received")
for key,value in request.REQUEST.items():
logging.info("Notification submit %s:%s" % (key,value))
response = json_fail('Failed to register notification.')
if phoneId and text and caseIdentifier and patientIdentifier:
n = Notification(client=phoneId,
patient_id=patientIdentifier,
message=text,
procedure_id=caseIdentifier,
delivered=delivered)
n.save()
response = json_succeed('Successfully registered notification.')
try:
sms.send_sms_notification(n)
except Exception, e:
logging.error("Got error while trying to send notification: %s" % e)
return render_json_response(response)
def email_notification_submit(request):
addresses = request.REQUEST.get("emailAddresses",None)
caseIdentifier = request.REQUEST.get("caseIdentifier", None)
patientId = request.REQUEST.get("patientIdentifier", None)
subject = request.REQUEST.get("subject", "")
message = request.REQUEST.get("notificationText", "")
logging.info("Email notification submit received")
for key,value in request.REQUEST.items():
logging.info("Notification submit %s:%s" % (key,value))
response = json_fail('Failed to register email notification.')
    emailAddresses = None
    try:
        emailAddresses = json.loads(addresses)
    except Exception:
        response = json_fail('Got error when trying to parse email addresses.')
    if emailAddresses and caseIdentifier and patientId:
try:
send_mail(subject, message, '[email protected]',
emailAddresses, fail_silently=False)
response = json_succeed('Successfully registered email notification')
except Exception, e:
logging.error('Email could not be sent: %s' % e)
return render_json_response(response)
| bsd-3-clause |
xpansa/odoomrp-wip | procurement_service/models/sale_order.py | 21 | 1623 | # -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in root directory
##############################################################################
from openerp import models, api
class SaleOrder(models.Model):
_inherit = 'sale.order'
@api.one
def action_button_confirm(self):
procurement_obj = self.env['procurement.order']
procurement_group_obj = self.env['procurement.group']
res = super(SaleOrder, self).action_button_confirm()
for line in self.order_line:
valid = self._validate_service_product_for_procurement(
line.product_id)
if valid:
if not self.procurement_group_id:
vals = self._prepare_procurement_group(self)
group = procurement_group_obj.create(vals)
self.write({'procurement_group_id': group.id})
vals = self._prepare_order_line_procurement(
self, line, group_id=self.procurement_group_id.id)
vals['name'] = self.name + ' - ' + line.product_id.name
procurement_obj.create(vals)
return res
def _validate_service_product_for_procurement(self, product):
routes = product.route_ids.filtered(
lambda r: r.id in (self.env.ref('stock.route_warehouse0_mto').id,
self.env.ref('purchase.route_warehouse0_buy').id
))
return product.type == 'service' and len(routes) == 2
| agpl-3.0 |
andela/codango | codango/community/models.py | 1 | 3713 | from cloudinary.models import CloudinaryField
from django.contrib.auth.models import User
from django.db import models
from multiselectfield import MultiSelectField
group_permissions = (
('INVITE_MEMBER', 'Send invites'),
('DELETE_MEMBER', 'Remove members'),
('BLOCK_MEMBER', 'Block members'),
('SUSPEND_MEMBER', 'Suspend members'),
)
class TimeStampMixin(models.Model):
date_created = models.DateTimeField(auto_now_add=True)
date_modified = models.DateTimeField(auto_now=True)
class Meta:
abstract = True
class Tag(TimeStampMixin):
title = models.CharField(max_length=50)
def __str__(self):
return self.title
class Meta:
ordering = ['-date_modified']
class Permission(TimeStampMixin):
label = models.CharField(max_length=50)
description = models.TextField()
def __str__(self):
return self.label
class Meta:
ordering = ['-date_modified']
class Community(TimeStampMixin):
VISIBILITY_CHOICE = (
('none', 'None'),
('partial', 'Partial'),
('full', 'Full'),
)
name = models.CharField(max_length=50)
logo = CloudinaryField(
'logo', null=True)
description = models.TextField(max_length=1000)
private = models.BooleanField(
default=False, verbose_name='Private (Default is Public)')
visibility = models.CharField(
choices=VISIBILITY_CHOICE, max_length=30, default='full')
creator = models.ForeignKey(User, related_name='communities')
tags = models.ManyToManyField(Tag)
default_group_permissions = MultiSelectField(
choices=group_permissions, default=['BLOCK_MEMBER'],
verbose_name='Default Members Permissions')
def get_no_of_members(self):
return len(self.members.all())
def __str__(self):
return '{}, {} members'.format(self.name, self.get_no_of_members())
class Meta:
ordering = ['-date_modified']
unique_together = ('name', 'creator',)
class CommunityMember(TimeStampMixin):
STATUS_CHOICE = (
('pending', 'Pending'),
('approved', 'Approved'),
('declined', 'Declined'),
)
community = models.ForeignKey(
Community, on_delete=models.CASCADE, related_name='members')
user = models.ForeignKey(User, related_name='member')
invitor = models.ForeignKey(User, related_name='invited_by')
    status = models.CharField(
        choices=STATUS_CHOICE, max_length=20, default='pending')
permission = MultiSelectField(
choices=group_permissions, default=['BLOCK_MEMBER'])
def __str__(self):
return '{} ({})'.format(self.user.username, self.community.name)
class Meta:
ordering = ['-date_modified']
unique_together = ('community', 'user',)
class CommunityBlacklist(TimeStampMixin):
BLACKLIST_CHOICE = (
('block', 'Block'),
('suspend', 'Suspend'),
)
user = models.ForeignKey(User, related_name='community_blacklist')
blacklister = models.ForeignKey(User, related_name='community_blacklister')
    blacklist_type = models.CharField(
        choices=BLACKLIST_CHOICE, max_length=20, default='block')
community = models.ForeignKey(Community)
def __str__(self):
return '{} {}ed by {} in {}'.format(
self.user.username,
self.blacklist_type,
self.blacklister.username,
self.community.name)
class Meta:
ordering = ['-date_modified']
class AddOn(TimeStampMixin):
name = models.CharField(max_length=50)
communities = models.ManyToManyField(Community)
def __str__(self):
return '{}'.format(self.name)
class Meta:
ordering = ['name']
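# Editor's illustrative sketch (not part of the original module): creating
# a community and enrolling its creator as an approved member; the helper
# name and field values are assumptions.
def create_community_with_creator(user, name, description=''):
    """Create a Community and add its creator as an approved member."""
    community = Community.objects.create(
        name=name, description=description, creator=user)
    CommunityMember.objects.create(
        community=community, user=user, invitor=user, status='approved')
    return community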
| mit |
uberpye/gwdetchar | gwdetchar/io/tests/test_html.py | 1 | 22674 | # -*- coding: utf-8 -*-
# Copyright (C) Alex Urban (2019)
#
# This file is part of the GW DetChar python package.
#
# GW DetChar is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# GW DetChar is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with gwdetchar. If not, see <http://www.gnu.org/licenses/>.
"""Tests for `gwdetchar.io.html`
"""
import os
import sys
import shutil
import datetime
from pytz import reference
from getpass import getuser
from MarkupPy import markup
try:
from unittest import mock
except ImportError: # python < 3
import mock
import pytest
from matplotlib import use
use('Agg') # nopep8
from gwpy.segments import (Segment, DataQualityFlag)
from .. import html
from ..._version import get_versions
from ...utils import parse_html
__author__ = 'Alex Urban <[email protected]>'
# global test objects
VERSION = get_versions()['version']
COMMIT = get_versions()['full-revisionid']
NEW_BOOTSTRAP_PAGE = """<!DOCTYPE HTML>
<html lang="en">
<head>
<meta http-equiv="refresh" content="60" />
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
<meta content="width=device-width, initial-scale=1.0" name="viewport" />
<base href="{base}" />
<link href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.4/css/bootstrap.min.css" rel="stylesheet" type="text/css" media="all" />
<link href="https://cdnjs.cloudflare.com/ajax/libs/fancybox/2.1.5/jquery.fancybox.min.css" rel="stylesheet" type="text/css" media="all" />
<link href="https://fonts.googleapis.com/css?family=Roboto:300,400%7CRoboto+Mono" rel="stylesheet" type="text/css" media="all" />
<link href="static/bootstrap-ligo.min.css" rel="stylesheet" type="text/css" media="all" />
<link href="static/gwdetchar.min.css" rel="stylesheet" type="text/css" media="all" />
<script src="https://code.jquery.com/jquery-1.12.3.min.js" type="text/javascript"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/moment.js/2.13.0/moment.min.js" type="text/javascript"></script>
<script src="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.4/js/bootstrap.min.js" type="text/javascript"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/fancybox/2.1.5/jquery.fancybox.min.js" type="text/javascript"></script>
<script src="static/bootstrap-ligo.min.js" type="text/javascript"></script>
<script src="static/gwdetchar.min.js" type="text/javascript"></script>
</head>
<body>
<div class="container">
</body>
</html>""" # nopep8
TEST_CONFIGURATION = """[section]
key = value"""
ABOUT = """<div class="row">
<div class="col-md-12">
<h2>On the command-line</h2>
<p>This page was generated with the following command-line call:</p>
<div class="highlight" style="background: #f8f8f8"><pre style="line-height: 125%"><span></span>$ gwdetchar-scattering -i X1
</pre></div>
<p>The install path used was <code>{}</code>.</p>
<h2>Configuration files</h2>
<p>The following INI-format configuration file(s) were passed on the comand-line and are reproduced here in full:</p>
<div class="highlight" style="background: #f8f8f8"><pre style="line-height: 125%"><span></span><span style="color: #008000; font-weight: bold">[section]</span>
<span style="color: #7D9029">key</span> <span style="color: #666666">=</span> <span style="color: #BA2121">value</span>
</pre></div>
<h2>Environment</h2><table class="table table-hover table-condensed table-responsive" id="package-table"><caption>Table of packages installed in the production environment</caption><thead><tr><th scope="col">Name</th><th scope="col">Version</th></tr></thead><tbody><tr><td>gwdetchar</td><td>1.2.3</td></tr><tr><td>gwpy</td><td>1.0.0</td></tr></tbody></table><button class="btn btn-default btn-table" onclick="exportTableToCSV("package-table.csv", "package-table")">Export to CSV</button>
</div>
</div>""".format(sys.prefix) # nopep8
ABOUT_WITH_CONFIG_LIST = """<div class="row">
<div class="col-md-12">
<h2>On the command-line</h2>
<p>This page was generated with the following command-line call:</p>
<div class="highlight" style="background: #f8f8f8"><pre style="line-height: 125%"><span></span>$ gwdetchar-scattering -i X1
</pre></div>
<p>The install path used was <code>{}</code>.</p>
<h2>Configuration files</h2>
<p>The following INI-format configuration file(s) were passed on the comand-line and are reproduced here in full:</p>
<div class="panel-group" id="accordion">
<div class="panel panel-default">
<a href="#file0" data-toggle="collapse" data-parent="#accordion">
<div class="panel-heading">
<h4 class="panel-title">test.ini</h4>
</div>
</a>
<div id="file0" class="panel-collapse collapse">
<div class="panel-body">
<div class="highlight" style="background: #f8f8f8"><pre style="line-height: 125%"><span></span><span style="color: #008000; font-weight: bold">[section]</span>
<span style="color: #7D9029">key</span> <span style="color: #666666">=</span> <span style="color: #BA2121">value</span>
</pre></div>
</div>
</div>
</div>
</div>
<h2>Environment</h2><table class="table table-hover table-condensed table-responsive" id="package-table"><caption>Table of packages installed in the production environment</caption><thead><tr><th scope="col">Name</th><th scope="col">Version</th></tr></thead><tbody><tr><td>gwdetchar</td><td>1.2.3</td></tr><tr><td>gwpy</td><td>1.0.0</td></tr></tbody></table><button class="btn btn-default btn-table" onclick="exportTableToCSV("package-table.csv", "package-table")">Export to CSV</button>
</div>
</div>""".format(sys.prefix) # nopep8
HTML_FOOTER = """<footer class="footer">
<div class="container">
<div class="row">
<div class="col-md-12">
<p>This page was created by {user} at {date}.</p>
<p><a href="https://github.com/gwdetchar/gwdetchar/tree/%s" target="_blank">View gwdetchar-%s on GitHub</a> | <a href="https://github.com/gwdetchar/gwdetchar/issues" target="_blank">Report an issue</a></p>
</div>
</div>
</div>
</footer>""" % (COMMIT, VERSION) # nopep8
HTML_CLOSE = """</div>
%s
</body>
</html>""" % HTML_FOOTER # nopep8
FLAG_CONTENT = """<div class="panel panel-warning">
<div class="panel-heading">
<a class="panel-title" href="#flag0" data-toggle="collapse" data-parent="#accordion">X1:TEST_FLAG</a>
</div>
<div id="flag0" class="panel-collapse collapse">
<div class="panel-body">{plots}
{content}
</div>
</div>
</div>""" # nopep8
FLAG_HTML = FLAG_CONTENT.format(content="""<pre># seg\tstart\tstop\tduration
0\t0\t66\t66.0
</pre>""", plots='')
FLAG_HTML_WITH_PLOTS = FLAG_CONTENT.format(
content='<pre># seg\tstart\tstop\tduration\n0\t0\t66\t66.0\n</pre>',
plots='\n<a id="a_X1-TEST_FLAG_66" target="_blank" title="Known (small) '
'and active (large) analysis segments for X1:TEST_FLAG" '
'class="fancybox" href="plots/X1-TEST_FLAG-0-66.png" '
'data-fancybox-group="images">\n<img id="img_X1-TEST_FLAG_66" '
'alt="X1-TEST_FLAG-0-66.png" class="img-responsive" '
'src="plots/X1-TEST_FLAG-0-66.png" />\n</a>')
FLAG_HTML_NO_SEGMENTS = FLAG_CONTENT.format(
content='<p>No segments were found.</p>', plots='')
FLAG = DataQualityFlag(known=[(0, 66)], active=[(0, 66)], name='X1:TEST_FLAG')
OMEGA_SCAFFOLD = """<div class="panel well panel-default">
<div class="panel-heading clearfix">
<h3 class="panel-title"><a href="https://cis.ligo.org/channel/byname/X1:STRAIN" title="CIS entry for X1:STRAIN" style="font-family: Monaco, "Courier New", monospace; color: black;" target="_blank">X1:STRAIN</a></h3>
</div>
<ul class="list-group">
<li class="list-group-item">
<div class="container">
<div class="row">
<div class="pull-right">
<a href="./1126259462" class="text-dark">[full scan]</a>
</div>
<h4>1126259462</h4>
</div>
<div class="row">
<div class="col-sm-4">
<a href="./1126259462/plots/X1-STRAIN-qscan_whitened-1.png" id="a_X1-STRAIN_1" title="X1-STRAIN-qscan_whitened-1.png" class="fancybox" target="_blank" data-fancybox-group="images">
<img id="img_X1-STRAIN_1" alt="X1-STRAIN-qscan_whitened-1.png" class="img-responsive" src="./1126259462/plots/X1-STRAIN-qscan_whitened-1.png" />
</a>
</div>
<div class="col-sm-4">
<a href="./1126259462/plots/X1-STRAIN-qscan_whitened-4.png" id="a_X1-STRAIN_4" title="X1-STRAIN-qscan_whitened-4.png" class="fancybox" target="_blank" data-fancybox-group="images">
<img id="img_X1-STRAIN_4" alt="X1-STRAIN-qscan_whitened-4.png" class="img-responsive" src="./1126259462/plots/X1-STRAIN-qscan_whitened-4.png" />
</a>
</div>
<div class="col-sm-4">
<a href="./1126259462/plots/X1-STRAIN-qscan_whitened-16.png" id="a_X1-STRAIN_16" title="X1-STRAIN-qscan_whitened-16.png" class="fancybox" target="_blank" data-fancybox-group="images">
<img id="img_X1-STRAIN_16" alt="X1-STRAIN-qscan_whitened-16.png" class="img-responsive" src="./1126259462/plots/X1-STRAIN-qscan_whitened-16.png" />
</a>
</div>
</div>
</div>
</li>
</ul>
</div>""" # nopep8
# -- HTML unit tests ----------------------------------------------------------
def test_fancy_plot():
# create a dummy FancyPlot instance
test = html.FancyPlot('test.png')
    assert test.img == 'test.png'
    assert test.caption == 'test.png'
# check that its properties are unchanged when the argument
# to FancyPlot() is also a FancyPlot instance
test = html.FancyPlot(test)
    assert test.img == 'test.png'
    assert test.caption == 'test.png'
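def test_fancy_plot_caption():
    # editor's addition: a hedged sketch assuming FancyPlot accepts an
    # explicit caption keyword, consistent with the defaulting behaviour
    # exercised above
    test = html.FancyPlot('test.png', caption='A test image')
    assert test.img == 'test.png'
    assert test.caption == 'A test image'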
def test_finalize_static_urls(tmpdir):
base = str(tmpdir)
static = os.path.join(base, 'static')
css, js = html.finalize_static_urls(
static, base, html.CSS_FILES, html.JS_FILES)
assert css == [
'https://maxcdn.bootstrapcdn.com/bootstrap/3.3.4/css/'
'bootstrap.min.css', # nopep8
'https://cdnjs.cloudflare.com/ajax/libs/fancybox/2.1.5/'
'jquery.fancybox.min.css', # nopep8
'https://fonts.googleapis.com/css?'
'family=Roboto:300,400%7CRoboto+Mono', # nopep8
'static/bootstrap-ligo.min.css',
'static/gwdetchar.min.css']
assert js == [
'https://code.jquery.com/jquery-1.12.3.min.js',
'https://cdnjs.cloudflare.com/ajax/libs/moment.js/2.13.0/'
'moment.min.js', # nopep8
'https://maxcdn.bootstrapcdn.com/bootstrap/3.3.4/js/bootstrap.min.js',
'https://cdnjs.cloudflare.com/ajax/libs/fancybox/2.1.5/'
'jquery.fancybox.min.js', # nopep8
'static/bootstrap-ligo.min.js',
'static/gwdetchar.min.js']
shutil.rmtree(str(tmpdir), ignore_errors=True)
def test_new_bootstrap_page():
base = os.path.abspath(os.path.curdir)
page = html.new_bootstrap_page(base=base, topbtn=False, refresh=True)
assert parse_html(str(page)) == parse_html(
NEW_BOOTSTRAP_PAGE.format(base=base))
def test_navbar():
navbar = html.navbar(['test'], collapse=False)
assert parse_html(navbar) == parse_html(
'<header class="navbar navbar-fixed-top">\n'
'<div class="container">\n<div class="navbar-header">\n'
'</div>\n<nav>\n<ul class="nav navbar-nav">\n<li>\ntest\n</li>\n'
'</ul>\n</nav>\n</div>\n</header>')
def test_dropdown():
menu = html.dropdown('test', [])
assert parse_html(str(menu)) == parse_html(
'<a href="#" class="dropdown-toggle" data-toggle="dropdown">\n'
'test\n<b class="caret"></b>\n</a>\n<ul class="dropdown-menu">\n</ul>')
menu = html.dropdown('test', ['test', '#'], active=0)
assert parse_html(str(menu)) == parse_html(
'<a href="#" class="dropdown-toggle" data-toggle="dropdown">\n'
'test\n<b class="caret"></b>\n</a>\n<ul class="dropdown-menu">\n'
'<li class="active">\ntest\n</li>\n<li>\n#\n</li>\n</ul>')
menu = html.dropdown('test', ['test', '#'], active=[0, 1])
assert parse_html(str(menu)) == parse_html(
'<a href="#" class="dropdown-toggle" data-toggle="dropdown">\n'
'test\n<b class="caret"></b>\n</a>\n<ul class="dropdown-menu">\n'
'<li>\ntest\n</li>\n<li>\n#\n</li>\n</ul>')
def test_dropdown_link():
page = markup.page()
html.dropdown_link(page, None)
assert parse_html(str(page)) == parse_html(
'<li class="divider">\n</li>')
page = markup.page()
html.dropdown_link(page, 'test', active=True)
assert parse_html(str(page)) == parse_html(
'<li class="active">\ntest\n</li>')
page = markup.page()
html.dropdown_link(page, 'test')
assert parse_html(str(page)) == parse_html(
'<li>\ntest\n</li>')
def test_get_brand():
(brand, class_) = html.get_brand('H1', 'Test', 0, about='about')
assert class_ == 'navbar navbar-fixed-top navbar-h1'
assert parse_html(brand) == parse_html(
'<div class="navbar-brand">H1</div>\n'
'<div class="navbar-brand">Test</div>\n'
'<div class="btn-group pull-right ifo-links">\n'
'<a class="navbar-brand dropdown-toggle" href="#" '
'data-toggle="dropdown">\nLinks\n<b class="caret"></b>\n</a>\n'
'<ul class="dropdown-menu">\n'
'<li class="dropdown-header">Internal</li>\n'
'<li>\n<a href="about">About this page</a>\n</li>\n'
'<li class="divider"></li>\n'
'<li class="dropdown-header">External</li>\n'
'<li>\n<a href="https://ldas-jobs.ligo-wa.caltech.edu/~detchar/'
'summary/day/19800106" target="_blank">LHO Summary Pages</a>\n'
'</li>\n<li>\n<a href="https://alog.ligo-wa.caltech.edu/aLOG" '
'target="_blank">LHO Logbook</a>\n</li>\n</ul>\n</div>')
@mock.patch(
"gwdetchar.io.html.package_list",
return_value=[
{"name": "gwpy", "version": "1.0.0"},
{"name": "gwdetchar", "version": "1.2.3"},
],
)
def test_about_this_page(package_list, tmpdir):
outdir = str(tmpdir)
config_file = os.path.join(outdir, 'test.ini')
with open(config_file, 'w') as fobj:
fobj.write(TEST_CONFIGURATION)
testargs = ['/opt/bin/gwdetchar-scattering', '-i', 'X1']
with mock.patch.object(sys, 'argv', testargs):
# test with a single config file
about = html.about_this_page(config_file)
assert parse_html(about) == parse_html(ABOUT)
# test with a list of config files
about = html.about_this_page([config_file])
assert parse_html(about) == parse_html(ABOUT_WITH_CONFIG_LIST)
# clean up
shutil.rmtree(outdir, ignore_errors=True)
def test_write_param():
page = html.write_param('test', 'test')
assert parse_html(str(page)) == parse_html(
'<p>\n<strong>test: </strong>\ntest\n</p>')
def test_get_command_line():
testargs = ['/opt/bin/gwdetchar-conlog', '-i', 'X1']
with mock.patch.object(sys, 'argv', testargs):
cmdline = html.get_command_line()
assert parse_html(cmdline) == parse_html(
'<p>This page was generated with the following command-line call:'
'</p>\n<div class="highlight" style="background: #f8f8f8">'
'<pre style="line-height: 125%"><span></span>$ gwdetchar-conlog '
'-i X1\n</pre></div>\n\n<p>The install path used was <code>{}'
'</code>.</p>'.format(sys.prefix))
def test_get_command_line_module():
testargs = ['__main__.py', '--html-only']
with mock.patch.object(sys, 'argv', testargs):
cmdline = html.get_command_line()
assert parse_html(cmdline) == parse_html(
'<p>This page was generated with the following command-line call:'
'</p>\n<div class="highlight" style="background: #f8f8f8">'
'<pre style="line-height: 125%"><span></span>$ python -m '
'gwdetchar.io.tests.test_html\n</pre></div>\n\n'
'<p>The install path used was <code>{}</code>.</p>'.format(
sys.prefix))
@pytest.mark.parametrize('args, kwargs, result', [
(('test.html', 'Test link'), {},
'<a href="test.html" target="_blank">Test link</a>'),
(('test.html', 'Test link'), {'class_': 'test-case'},
'<a class="test-case" href="test.html" target="_blank">Test link</a>'),
])
def test_html_link(args, kwargs, result):
h1 = parse_html(html.html_link(*args, **kwargs))
h2 = parse_html(result)
assert h1 == h2
def test_cis_link():
h1 = parse_html(html.cis_link('X1:TEST-CHANNEL'))
h2 = parse_html(
'<a style="font-family: Monaco, "Courier New", '
'monospace; color: black;" href="https://cis.ligo.org/channel/byname/'
'X1:TEST-CHANNEL" target="_blank" title="CIS entry for '
'X1:TEST-CHANNEL">X1:TEST-CHANNEL</a>'
)
assert h1 == h2
def test_fancybox_img():
img = html.FancyPlot('X1-TEST_AUX-test-4.png')
out = html.fancybox_img(img)
assert parse_html(out) == parse_html(
'<a class="fancybox" href="X1-TEST_AUX-test-4.png" target="_blank" '
'data-fancybox-group="images" id="a_X1-TEST_AUX_4" '
'title="X1-TEST_AUX-test-4.png">\n'
'<img class="img-responsive" alt="X1-TEST_AUX-test-4.png" '
'src="X1-TEST_AUX-test-4.png" id="img_X1-TEST_AUX_4"/>\n'
'</a>')
def test_scaffold_plots():
h1 = parse_html(html.scaffold_plots([
html.FancyPlot('X1-TEST_AUX-test-4.png'),
html.FancyPlot('X1-TEST_AUX-test-16.png')], nperrow=2))
h2 = parse_html(
'<div class="row">\n'
'<div class="col-sm-6">\n'
'<a class="fancybox" href="X1-TEST_AUX-test-4.png" target="_blank" '
'id="a_X1-TEST_AUX_4" data-fancybox-group="images" '
'title="X1-TEST_AUX-test-4.png">\n'
'<img class="img-responsive" alt="X1-TEST_AUX-test-4.png" '
'id="img_X1-TEST_AUX_4" src="X1-TEST_AUX-test-4.png" />\n'
'</a>\n'
'</div>\n'
'<div class="col-sm-6">\n'
'<a class="fancybox" href="X1-TEST_AUX-test-16.png" target="_blank"'
' id="a_X1-TEST_AUX_16" data-fancybox-group="images" '
'title="X1-TEST_AUX-test-16.png">\n'
'<img class="img-responsive" alt="X1-TEST_AUX-test-16.png" '
'id="img_X1-TEST_AUX_16" src="X1-TEST_AUX-test-16.png" />\n'
'</a>\n'
'</div>\n'
'</div>')
assert h1 == h2
def test_write_arguments():
page = html.write_arguments([('test', 'test')], 0, 1, flag='X1:TEST')
assert '<h2 id="parameters">Parameters</h2>' in page
assert '<strong>Start time: </strong>\n0 (1980-01-06 00:00:00)' in page
assert '<strong>End time: </strong>\n1 (1980-01-06 00:00:01)' in page
assert '<strong>State flag: </strong>\nX1:TEST' in page
assert '<strong>test: </strong>\ntest' in page
assert '<strong>Command-line: </strong>' in page
def test_table():
headers = ['Test']
data = [['test']]
caption = 'This is a test table.'
page = html.table(headers=headers, data=data, caption=caption, id='test')
assert parse_html(page) == parse_html(
'<table class="table table-hover table-condensed table-responsive" '
'id="test"><caption>This is a test table.</caption><thead><tr>'
'<th scope="col">Test</th></tr></thead><tbody><tr><td>test</td></tr>'
'</tbody></table><button class="btn btn-default btn-table" '
'onclick="exportTableToCSV("test.csv", "test")">'
'Export to CSV</button>')
def test_write_flag_html():
page = html.write_flag_html(FLAG)
assert parse_html(str(page)) == parse_html(FLAG_HTML)
page2 = html.write_flag_html(
DataQualityFlag(known=[], active=[], name='X1:TEST_FLAG'))
assert parse_html(str(page2)) == parse_html(FLAG_HTML_NO_SEGMENTS)
def test_write_flag_html_with_plots(tmpdir):
tmpdir.mkdir('plots')
os.chdir(str(tmpdir))
page = html.write_flag_html(FLAG, span=Segment(0, 66), plotdir='plots')
assert parse_html(str(page)) == parse_html(FLAG_HTML_WITH_PLOTS)
shutil.rmtree(str(tmpdir), ignore_errors=True)
def test_scaffold_omega_scans():
times = [1126259462]
channel = 'X1:STRAIN'
page = html.scaffold_omega_scans(times, channel)
assert parse_html(page) == parse_html(OMEGA_SCAFFOLD)
def test_write_footer():
now = datetime.datetime.now()
tz = reference.LocalTimezone().tzname(now)
date = now.strftime('%H:%M {} on %d %B %Y'.format(tz))
out = html.write_footer()
assert parse_html(str(out)) == parse_html(
HTML_FOOTER.format(user=getuser(), date=date))
def test_close_page(tmpdir):
target = os.path.join(str(tmpdir), 'test.html')
now = datetime.datetime.now()
tz = reference.LocalTimezone().tzname(now)
date = now.strftime('%H:%M {} on %d %B %Y'.format(tz))
page = html.close_page(html.markup.page(), target)
assert parse_html(str(page)) == parse_html(
HTML_CLOSE.format(user=getuser(), date=str(date)))
assert os.path.isfile(target)
with open(target, 'r') as fp:
assert fp.read() == str(page)
shutil.rmtree(target, ignore_errors=True)
@mock.patch("{}.Path.is_dir".format(html.Path.__module__))
@mock.patch("subprocess.check_output", return_value="{\"key\": 0}")
@pytest.mark.parametrize("isdir, cmd", [
pytest.param(
False,
"{} -m pip list installed --format json".format(sys.executable),
id="pip",
),
pytest.param(
True,
"conda list --prefix {} --json".format(sys.prefix),
id="conda",
),
])
def test_package_list(check_output, is_dir, isdir, cmd):
is_dir.return_value = isdir
assert html.package_list() == {"key": 0}
check_output.assert_called_with(cmd.split())
@mock.patch(
"gwdetchar.io.html.package_list",
return_value=[
{"name": "gwpy", "version": "1.0.0"},
{"name": "gwdetchar", "version": "1.2.3"},
],
)
def test_package_table(package_list):
assert parse_html(
html.package_table(class_="test", caption="Test"),
) == parse_html(
"<h2>Environment</h2><table class=\"test\" id=\"package-table\">"
"<caption>Test</caption>"
"<thead>"
"<tr><th scope=\"col\">Name</th><th scope=\"col\">Version</th></tr>"
"</thead><tbody>"
"<tr><td>gwdetchar</td><td>1.2.3</td></tr>"
"<tr><td>gwpy</td><td>1.0.0</td></tr>"
"</tbody></table>"
"<button class=\"btn btn-default btn-table\" "
"onclick=\"exportTableToCSV("package-table.csv", "
""package-table")\">Export to CSV</button>",
)
| gpl-3.0 |
gspilio/nova | nova/manager.py | 2 | 12022 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Base Manager class.
Managers are responsible for a certain aspect of the system. It is a logical
grouping of code relating to a portion of the system. In general other
components should be using the manager to make changes to the components that
it is responsible for.
For example, other components that need to deal with volumes in some way,
should do so by calling methods on the VolumeManager instead of directly
changing fields in the database. This allows us to keep all of the code
relating to volumes in the same place.
We have adopted a basic strategy of Smart managers and dumb data, which means
rather than attaching methods to data objects, components should call manager
methods that act on the data.
Methods on managers that can be executed locally should be called directly. If
a particular method must execute on a remote host, this should be done via rpc
to the service that wraps the manager.
Managers should be responsible for most of the db access, and
non-implementation specific data. Anything implementation specific that can't
be generalized should be done by the Driver.
In general, we prefer to have one manager with multiple drivers for different
implementations, but sometimes it makes sense to have multiple managers. You
can think of it this way: Abstract different overall strategies at the manager
level(FlatNetwork vs VlanNetwork), and different implementations at the driver
level(LinuxNetDriver vs CiscoNetDriver).
Managers will often provide methods for initial setup of a host or periodic
tasks to a wrapping service.
This module provides Manager, a base class for managers.
"""
import datetime
import eventlet
from oslo.config import cfg
from nova.db import base
from nova import exception
from nova.openstack.common import log as logging
from nova.openstack.common.plugin import pluginmanager
from nova.openstack.common.rpc import dispatcher as rpc_dispatcher
from nova.openstack.common import timeutils
from nova.scheduler import rpcapi as scheduler_rpcapi
periodic_opts = [
cfg.BoolOpt('run_external_periodic_tasks',
default=True,
help=('Some periodic tasks can be run in a separate process. '
'Should we run them here?')),
]
CONF = cfg.CONF
CONF.register_opts(periodic_opts)
CONF.import_opt('host', 'nova.netconf')
LOG = logging.getLogger(__name__)
DEFAULT_INTERVAL = 60.0
def periodic_task(*args, **kwargs):
"""Decorator to indicate that a method is a periodic task.
This decorator can be used in two ways:
1. Without arguments '@periodic_task', this will be run on every cycle
of the periodic scheduler.
2. With arguments:
@periodic_task(spacing=N [, run_immediately=[True|False]])
this will be run on approximately every N seconds. If this number is
negative the periodic task will be disabled. If the run_immediately
argument is provided and has a value of 'True', the first run of the
task will be shortly after task scheduler starts. If
run_immediately is omitted or set to 'False', the first time the
task runs will be approximately N seconds after the task scheduler
starts.
"""
def decorator(f):
# Test for old style invocation
if 'ticks_between_runs' in kwargs:
raise exception.InvalidPeriodicTaskArg(arg='ticks_between_runs')
# Control if run at all
f._periodic_task = True
f._periodic_external_ok = kwargs.pop('external_process_ok', False)
if f._periodic_external_ok and not CONF.run_external_periodic_tasks:
f._periodic_enabled = False
else:
f._periodic_enabled = kwargs.pop('enabled', True)
# Control frequency
f._periodic_spacing = kwargs.pop('spacing', 0)
f._periodic_immediate = kwargs.pop('run_immediately', False)
if f._periodic_immediate:
f._periodic_last_run = None
else:
f._periodic_last_run = timeutils.utcnow()
return f
# NOTE(sirp): The `if` is necessary to allow the decorator to be used with
# and without parens.
#
# In the 'with-parens' case (with kwargs present), this function needs to
# return a decorator function since the interpreter will invoke it like:
#
# periodic_task(*args, **kwargs)(f)
#
# In the 'without-parens' case, the original function will be passed
# in as the first argument, like:
#
# periodic_task(f)
if kwargs:
return decorator
else:
return decorator(args[0])
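# Editor's illustrative sketch (not part of the original module): the two
# invocation styles accepted by the decorator above; the class and task
# names are assumptions.
def _example_periodic_tasks():
    """Return a class using both invocation styles of @periodic_task."""
    class _Example(object):
        @periodic_task
        def run_every_cycle(self, context):
            pass  # runs on every pass of the periodic scheduler
        @periodic_task(spacing=600, run_immediately=True)
        def run_every_ten_minutes(self, context):
            pass  # first run shortly after startup, then ~every 600s
    return _Example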
class ManagerMeta(type):
def __init__(cls, names, bases, dict_):
"""Metaclass that allows us to collect decorated periodic tasks."""
super(ManagerMeta, cls).__init__(names, bases, dict_)
# NOTE(sirp): if the attribute is not present then we must be the base
# class, so, go ahead an initialize it. If the attribute is present,
# then we're a subclass so make a copy of it so we don't step on our
# parent's toes.
try:
cls._periodic_tasks = cls._periodic_tasks[:]
except AttributeError:
cls._periodic_tasks = []
try:
cls._periodic_last_run = cls._periodic_last_run.copy()
except AttributeError:
cls._periodic_last_run = {}
try:
cls._periodic_spacing = cls._periodic_spacing.copy()
except AttributeError:
cls._periodic_spacing = {}
for value in cls.__dict__.values():
if getattr(value, '_periodic_task', False):
task = value
name = task.__name__
if task._periodic_spacing < 0:
LOG.info(_('Skipping periodic task %(task)s because '
'its interval is negative'),
{'task': name})
continue
if not task._periodic_enabled:
LOG.info(_('Skipping periodic task %(task)s because '
'it is disabled'),
{'task': name})
continue
# A periodic spacing of zero indicates that this task should
# be run every pass
if task._periodic_spacing == 0:
task._periodic_spacing = None
cls._periodic_tasks.append((name, task))
cls._periodic_spacing[name] = task._periodic_spacing
cls._periodic_last_run[name] = task._periodic_last_run
class Manager(base.Base):
__metaclass__ = ManagerMeta
# Set RPC API version to 1.0 by default.
RPC_API_VERSION = '1.0'
def __init__(self, host=None, db_driver=None):
if not host:
host = CONF.host
self.host = host
self.load_plugins()
self.backdoor_port = None
super(Manager, self).__init__(db_driver)
def load_plugins(self):
pluginmgr = pluginmanager.PluginManager('nova', self.__class__)
pluginmgr.load_plugins()
def create_rpc_dispatcher(self):
'''Get the rpc dispatcher for this manager.
If a manager would like to set an rpc API version, or support more than
one class as the target of rpc messages, override this method.
'''
return rpc_dispatcher.RpcDispatcher([self])
def periodic_tasks(self, context, raise_on_error=False):
"""Tasks to be run at a periodic interval."""
idle_for = DEFAULT_INTERVAL
for task_name, task in self._periodic_tasks:
full_task_name = '.'.join([self.__class__.__name__, task_name])
now = timeutils.utcnow()
spacing = self._periodic_spacing[task_name]
last_run = self._periodic_last_run[task_name]
# If a periodic task is _nearly_ due, then we'll run it early
if spacing is not None and last_run is not None:
due = last_run + datetime.timedelta(seconds=spacing)
if not timeutils.is_soon(due, 0.2):
idle_for = min(idle_for, timeutils.delta_seconds(now, due))
continue
if spacing is not None:
idle_for = min(idle_for, spacing)
LOG.debug(_("Running periodic task %(full_task_name)s"), locals())
self._periodic_last_run[task_name] = timeutils.utcnow()
try:
task(self, context)
except Exception as e:
if raise_on_error:
raise
LOG.exception(_("Error during %(full_task_name)s: %(e)s"),
locals())
eventlet.sleep(0)
return idle_for
def init_host(self):
"""Hook to do additional manager initialization when one requests
the service be started. This is called before any service record
is created.
Child classes should override this method.
"""
pass
def pre_start_hook(self, **kwargs):
"""Hook to provide the manager the ability to do additional
start-up work before any RPC queues/consumers are created. This is
called after other initialization has succeeded and a service
record is created.
Child classes should override this method.
"""
pass
def post_start_hook(self):
"""Hook to provide the manager the ability to do additional
start-up work immediately after a service creates RPC consumers
and starts 'running'.
Child classes should override this method.
"""
pass
class SchedulerDependentManager(Manager):
"""Periodically send capability updates to the Scheduler services.
Services that need to update the Scheduler of their capabilities
should derive from this class. Otherwise they can derive from
manager.Manager directly. Updates are only sent after
update_service_capabilities is called with non-None values.
"""
def __init__(self, host=None, db_driver=None, service_name='undefined'):
self.last_capabilities = None
self.service_name = service_name
self.scheduler_rpcapi = scheduler_rpcapi.SchedulerAPI()
super(SchedulerDependentManager, self).__init__(host, db_driver)
def load_plugins(self):
pluginmgr = pluginmanager.PluginManager('nova', self.service_name)
pluginmgr.load_plugins()
def update_service_capabilities(self, capabilities):
"""Remember these capabilities to send on next periodic update."""
if not isinstance(capabilities, list):
capabilities = [capabilities]
self.last_capabilities = capabilities
@periodic_task
def publish_service_capabilities(self, context):
"""Pass data back to the scheduler.
Called at a periodic interval. And also called via rpc soon after
the start of the scheduler.
"""
if self.last_capabilities:
LOG.debug(_('Notifying Schedulers of capabilities ...'))
self.scheduler_rpcapi.update_service_capabilities(context,
self.service_name, self.host, self.last_capabilities)
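# Editor's illustrative sketch (not part of the original module): a minimal
# consumer of the capability-reporting machinery above; the service name
# and capability payload are assumptions.
class _ExampleManager(SchedulerDependentManager):
    def __init__(self, host=None, db_driver=None):
        super(_ExampleManager, self).__init__(
            host, db_driver, service_name='example')
    def refresh_capabilities(self):
        # remembered here, then pushed by publish_service_capabilities()
        self.update_service_capabilities({'free_ram_mb': 1024})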
| apache-2.0 |
danielharbor/openerp | build/lib/openerp/addons/base/tests/test_osv.py | 446 | 4722 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2010 OpenERP S.A. http://www.openerp.com
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import unittest
from openerp.osv.query import Query
class QueryTestCase(unittest.TestCase):
def test_basic_query(self):
query = Query()
query.tables.extend(['"product_product"', '"product_template"'])
query.where_clause.append("product_product.template_id = product_template.id")
query.add_join(("product_template", "product_category", "categ_id", "id", "categ_id"), implicit=False, outer=False) # add normal join
query.add_join(("product_product", "res_user", "user_id", "id", "user_id"), implicit=False, outer=True) # outer join
self.assertEquals(query.get_sql()[0].strip(),
""""product_product" LEFT JOIN "res_user" as "product_product__user_id" ON ("product_product"."user_id" = "product_product__user_id"."id"),"product_template" JOIN "product_category" as "product_template__categ_id" ON ("product_template"."categ_id" = "product_template__categ_id"."id") """.strip())
self.assertEquals(query.get_sql()[1].strip(), """product_product.template_id = product_template.id""".strip())
def test_query_chained_explicit_joins(self):
query = Query()
query.tables.extend(['"product_product"', '"product_template"'])
query.where_clause.append("product_product.template_id = product_template.id")
query.add_join(("product_template", "product_category", "categ_id", "id", "categ_id"), implicit=False, outer=False) # add normal join
query.add_join(("product_template__categ_id", "res_user", "user_id", "id", "user_id"), implicit=False, outer=True) # CHAINED outer join
self.assertEquals(query.get_sql()[0].strip(),
""""product_product","product_template" JOIN "product_category" as "product_template__categ_id" ON ("product_template"."categ_id" = "product_template__categ_id"."id") LEFT JOIN "res_user" as "product_template__categ_id__user_id" ON ("product_template__categ_id"."user_id" = "product_template__categ_id__user_id"."id")""".strip())
self.assertEquals(query.get_sql()[1].strip(), """product_product.template_id = product_template.id""".strip())
def test_mixed_query_chained_explicit_implicit_joins(self):
query = Query()
query.tables.extend(['"product_product"', '"product_template"'])
query.where_clause.append("product_product.template_id = product_template.id")
query.add_join(("product_template", "product_category", "categ_id", "id", "categ_id"), implicit=False, outer=False) # add normal join
query.add_join(("product_template__categ_id", "res_user", "user_id", "id", "user_id"), implicit=False, outer=True) # CHAINED outer join
query.tables.append('"account.account"')
query.where_clause.append("product_category.expense_account_id = account_account.id") # additional implicit join
self.assertEquals(query.get_sql()[0].strip(),
""""product_product","product_template" JOIN "product_category" as "product_template__categ_id" ON ("product_template"."categ_id" = "product_template__categ_id"."id") LEFT JOIN "res_user" as "product_template__categ_id__user_id" ON ("product_template__categ_id"."user_id" = "product_template__categ_id__user_id"."id"),"account.account" """.strip())
self.assertEquals(query.get_sql()[1].strip(), """product_product.template_id = product_template.id AND product_category.expense_account_id = account_account.id""".strip())
def test_raise_missing_lhs(self):
query = Query()
query.tables.append('"product_product"')
self.assertRaises(AssertionError, query.add_join, ("product_template", "product_category", "categ_id", "id", "categ_id"), implicit=False, outer=False)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
virtualopensystems/neutron | neutron/plugins/brocade/nos/nctemplates.py | 8 | 6478 | # Copyright (c) 2013 Brocade Communications Systems, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# Authors:
# Varma Bhupatiraju (vbhupati@#brocade.com)
# Shiv Haris ([email protected])
"""NOS NETCONF XML Configuration Command Templates.
Interface Configuration Commands
"""
# Create VLAN (vlan_id)
CREATE_VLAN_INTERFACE = """
<config xmlns:xc="urn:ietf:params:xml:ns:netconf:base:1.0">
<interface-vlan xmlns="urn:brocade.com:mgmt:brocade-interface">
<interface>
<vlan>
<name>{vlan_id}</name>
</vlan>
</interface>
</interface-vlan>
</config>
"""
# Delete VLAN (vlan_id)
DELETE_VLAN_INTERFACE = """
<config xmlns:xc="urn:ietf:params:xml:ns:netconf:base:1.0">
<interface-vlan xmlns="urn:brocade.com:mgmt:brocade-interface">
<interface>
<vlan operation="delete">
<name>{vlan_id}</name>
</vlan>
</interface>
</interface-vlan>
</config>
"""
#
# AMPP Life-cycle Management Configuration Commands
#
# Create AMPP port-profile (port_profile_name)
CREATE_PORT_PROFILE = """
<config xmlns:xc="urn:ietf:params:xml:ns:netconf:base:1.0">
<port-profile xmlns="urn:brocade.com:mgmt:brocade-port-profile">
<name>{name}</name>
</port-profile>
</config>
"""
# Create VLAN sub-profile for port-profile (port_profile_name)
CREATE_VLAN_PROFILE_FOR_PORT_PROFILE = """
<config xmlns:xc="urn:ietf:params:xml:ns:netconf:base:1.0">
<port-profile xmlns="urn:brocade.com:mgmt:brocade-port-profile">
<name>{name}</name>
<vlan-profile/>
</port-profile>
</config>
"""
# Configure L2 mode for VLAN sub-profile (port_profile_name)
CONFIGURE_L2_MODE_FOR_VLAN_PROFILE = """
<config xmlns:xc="urn:ietf:params:xml:ns:netconf:base:1.0">
<port-profile xmlns="urn:brocade.com:mgmt:brocade-port-profile">
<name>{name}</name>
<vlan-profile>
<switchport/>
</vlan-profile>
</port-profile>
</config>
"""
# Configure trunk mode for VLAN sub-profile (port_profile_name)
CONFIGURE_TRUNK_MODE_FOR_VLAN_PROFILE = """
<config xmlns:xc="urn:ietf:params:xml:ns:netconf:base:1.0">
<port-profile xmlns="urn:brocade.com:mgmt:brocade-port-profile">
<name>{name}</name>
<vlan-profile>
<switchport>
<mode>
<vlan-mode>trunk</vlan-mode>
</mode>
</switchport>
</vlan-profile>
</port-profile>
</config>
"""
# Configure allowed VLANs for VLAN sub-profile
# (port_profile_name, allowed_vlan, native_vlan)
CONFIGURE_ALLOWED_VLANS_FOR_VLAN_PROFILE = """
<config xmlns:xc="urn:ietf:params:xml:ns:netconf:base:1.0">
<port-profile xmlns="urn:brocade.com:mgmt:brocade-port-profile">
<name>{name}</name>
<vlan-profile>
<switchport>
<trunk>
<allowed>
<vlan>
<add>{vlan_id}</add>
</vlan>
</allowed>
</trunk>
</switchport>
</vlan-profile>
</port-profile>
</config>
"""
# Delete port-profile (port_profile_name)
DELETE_PORT_PROFILE = """
<config xmlns:xc="urn:ietf:params:xml:ns:netconf:base:1.0">
<port-profile
xmlns="urn:brocade.com:mgmt:brocade-port-profile" operation="delete">
<name>{name}</name>
</port-profile>
</config>
"""
# Activate port-profile (port_profile_name)
ACTIVATE_PORT_PROFILE = """
<config xmlns:xc="urn:ietf:params:xml:ns:netconf:base:1.0">
<port-profile-global xmlns="urn:brocade.com:mgmt:brocade-port-profile">
<port-profile>
<name>{name}</name>
<activate/>
</port-profile>
</port-profile-global>
</config>
"""
# Deactivate port-profile (port_profile_name)
DEACTIVATE_PORT_PROFILE = """
<config xmlns:xc="urn:ietf:params:xml:ns:netconf:base:1.0">
<port-profile-global xmlns="urn:brocade.com:mgmt:brocade-port-profile">
<port-profile>
<name>{name}</name>
<activate
xmlns:nc="urn:ietf:params:xml:ns:netconf:base:1.0" nc:operation="delete" />
</port-profile>
</port-profile-global>
</config>
"""
# Associate MAC address to port-profile (port_profile_name, mac_address)
ASSOCIATE_MAC_TO_PORT_PROFILE = """
<config xmlns:xc="urn:ietf:params:xml:ns:netconf:base:1.0">
<port-profile-global xmlns="urn:brocade.com:mgmt:brocade-port-profile">
<port-profile>
<name>{name}</name>
<static>
<mac-address>{mac_address}</mac-address>
</static>
</port-profile>
</port-profile-global>
</config>
"""
# Dissociate MAC address from port-profile (port_profile_name, mac_address)
DISSOCIATE_MAC_FROM_PORT_PROFILE = """
<config xmlns:xc="urn:ietf:params:xml:ns:netconf:base:1.0">
<port-profile-global xmlns="urn:brocade.com:mgmt:brocade-port-profile">
<port-profile>
<name>{name}</name>
<static
xmlns:nc="urn:ietf:params:xml:ns:netconf:base:1.0" nc:operation="delete">
<mac-address>{mac_address}</mac-address>
</static>
</port-profile>
</port-profile-global>
</config>
"""
#
# Custom RPC Commands
#
#
# Constants
#
# Port profile naming convention for Neutron networks
OS_PORT_PROFILE_NAME = "openstack-profile-{id}"
# Port profile filter expressions
PORT_PROFILE_XPATH_FILTER = "/port-profile"
PORT_PROFILE_NAME_XPATH_FILTER = "/port-profile[name='{name}']"
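# Illustrative only (not part of the original module): the templates above are
# plain Python format strings, so a NETCONF request body can be produced with
# str.format; the identifiers below are hypothetical.
#
#   CREATE_VLAN_INTERFACE.format(vlan_id=1001)
#   ASSOCIATE_MAC_TO_PORT_PROFILE.format(
#       name=OS_PORT_PROFILE_NAME.format(id='some-network-id'),
#       mac_address='fa:16:3e:00:00:01')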
| apache-2.0 |
amcgee/pymomo | pymomo/config/site_scons/utilities.py | 2 | 2048 | #utilities.py
from SCons.Script import *
from SCons.Environment import Environment
import os
import fnmatch
import json as json
import sys
import os.path
import pic12
import StringIO
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
from pymomo.utilities import build
from pymomo.mib.config12 import MIB12Processor
def find_files(dirname, pattern):
"""
Recursively find all files matching pattern under path dirname
"""
matches = []
for root, dirnames, filenames in os.walk(dirname, followlinks=True):
for filename in fnmatch.filter(filenames, pattern):
matches.append(os.path.join(root,filename))
return matches
def build_includes(includes):
if isinstance(includes, basestring):
includes = [includes]
return ['-I"%s"' % x for x in includes]
def build_libdirs(libdirs):
if isinstance(libdirs, basestring):
libdirs = [libdirs]
return ['-L"%s"' % x for x in libdirs]
def build_staticlibs(libs, chip):
if isinstance(libs, basestring):
libs = [libs]
processed = []
for lib in libs:
#Allow specifying absolute libraries that don't get architectures
#appended
if lib[0] == '#':
processed.append(lib[1:])
else:
#Append chip type and suffix
proclib = "%s_%s" % (lib, chip.arch_name())
processed.append(proclib)
return ['-l%s' % x for x in processed]
def build_defines(defines):
return ['-D%s=%s' % (x,str(y)) for x,y in defines.iteritems()]
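# Illustrative only (not in the original source): the helpers above map plain
# Python values onto compiler-style flag lists, e.g.
#   build_includes(['inc', 'src/inc']) -> ['-I"inc"', '-I"src/inc"']
#   build_libdirs('lib')               -> ['-L"lib"']
#   build_defines({'DEBUG': 1})        -> ['-DDEBUG=1']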
def get_family(fam, modulefile=None):
return build.ChipFamily(fam, modulefile=modulefile)
class BufferedSpawn:
def __init__(self, env, logfile):
self.env = env
self.logfile = logfile
self.stderr = StringIO.StringIO()
self.stdout = StringIO.StringIO()
def spawn(self, sh, escape, cmd, args, env):
cmd_string = " ".join(args)
print cmd_string
self.stdout.write(cmd_string)
try:
retval = self.env['PSPAWN'](sh, escape, cmd, args, env, sys.stdout, sys.stderr)
except OSError, x:
if x.errno != 10:
raise x
print 'OSError Ignored on command: %s' % cmd_string
return retval
| lgpl-3.0 |
xurantju/angr | tests/manual_performance.py | 9 | 1101 | #!/usr/bin/env python
import time
#import nose
import logging
l = logging.getLogger("angr_tests.counter")
l.setLevel(logging.INFO)
try:
# pylint: disable=W0611,F0401
import standard_logging
import angr_debug
except ImportError:
pass
addresses_counter = {
'armel': None,
'armhf': None, # addr+1 to force thumb
'i386': None,
'mips': None,
'mipsel': None,
'ppc': None,
'ppc64': None,
'x86_64': None
}
import angr
import os
location = str(os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../binaries/tests'))
def run_counter(arch):
p = angr.Project(location + '/' + arch + '/counter')
pg = p.path_group()
start = time.time()
pg.step(n=1000)
end = time.time()
l.info("Time passed: %f seconds", end-start)
def test_counter():
for arch in addresses_counter:
yield run_counter, arch
if __name__ == "__main__":
import sys
if len(sys.argv) == 1:
for func, march in test_counter():
print 'testing ' + march
func(march)
else:
run_counter(sys.argv[1])
| bsd-2-clause |
broferek/ansible | test/units/modules/network/check_point/test_cp_mgmt_tag_facts.py | 19 | 2815 | # Ansible module to manage CheckPoint Firewall (c) 2019
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import pytest
from units.modules.utils import set_module_args, exit_json, fail_json, AnsibleExitJson
from ansible.module_utils import basic
from ansible.modules.network.check_point import cp_mgmt_tag_facts
OBJECT = {
"from": 1,
"to": 1,
"total": 6,
"objects": [
"53de74b7-8f19-4cbe-99fc-a81ef0759bad"
]
}
SHOW_PLURAL_PAYLOAD = {
'limit': 1,
'details_level': 'uid'
}
SHOW_SINGLE_PAYLOAD = {
'name': 'object_which_is_not_exist'
}
api_call_object = 'tag'
api_call_object_plural_version = 'tags'
failure_msg = '''{u'message': u'Requested object [object_which_is_not_exist] not found', u'code': u'generic_err_object_not_found'}'''
class TestCheckpointTagFacts(object):
module = cp_mgmt_tag_facts
@pytest.fixture(autouse=True)
def module_mock(self, mocker):
return mocker.patch.multiple(basic.AnsibleModule, exit_json=exit_json, fail_json=fail_json)
@pytest.fixture
def connection_mock(self, mocker):
connection_class_mock = mocker.patch('ansible.module_utils.network.checkpoint.checkpoint.Connection')
return connection_class_mock.return_value
def test_show_single_object_which_is_not_exist(self, mocker, connection_mock):
connection_mock.send_request.return_value = (404, failure_msg)
try:
result = self._run_module(SHOW_SINGLE_PAYLOAD)
except Exception as e:
result = e.args[0]
assert result['failed']
assert 'Checkpoint device returned error 404 with message ' + failure_msg == result['msg']
def test_show_few_objects(self, mocker, connection_mock):
connection_mock.send_request.return_value = (200, OBJECT)
result = self._run_module(SHOW_PLURAL_PAYLOAD)
assert not result['changed']
assert OBJECT == result['ansible_facts'][api_call_object_plural_version]
def _run_module(self, module_args):
set_module_args(module_args)
with pytest.raises(AnsibleExitJson) as ex:
self.module.main()
return ex.value.args[0]
| gpl-3.0 |
dacb/elvizCluster | unit_tests/test_elviz_abundance_utils.py | 2 | 1469 | import unittest
import pandas as pd
class testCompletenessOfSummarisedData(unittest.TestCase):
def test_animal_data(self):
"""
        Make sure each sample's fraction-of-abundance values sum very
        close to 1 (toy data set only).
"""
animal_df = pd.read_csv("./summarised_animals.txt", sep='\t')
sum_by_sample = animal_df.groupby(
['oxy', 'rep', 'week'])['fraction of reads'].sum()
self.assertTrue((sum_by_sample > 0.999).all())
self.assertTrue((sum_by_sample < 1.001).all())
class testAbundannceSummary(unittest.TestCase):
def test_summary_with_all_taxonomy_remaining(self):
"""
        Make sure each sample's fraction-of-abundance values sum very
        close to 1.
"""
summary_df = \
pd.read_csv("../results/reduced_data--all_taxonomy_remains.csv")
sum_by_sample = summary_df.groupby(
['oxy', 'rep', 'week'])['fraction of reads'].sum()
self.assertTrue((sum_by_sample > 0.999).all())
self.assertTrue((sum_by_sample < 1.001).all())
if __name__ == '__main__':
animal_df = pd.read_csv("./summarised_animals.txt", sep='\t')
print(animal_df.head())
sums = animal_df.groupby(
['oxy', 'rep', 'week'])['fraction of reads'].sum()
# make sure all the sums are 1:
print(sums)
# Run the Unit Tests
# Note: this has to be last or the stuff above won't run.
unittest.main()
| bsd-3-clause |
liavkoren/djangoDev | django/utils/deconstruct.py | 38 | 1087 | def deconstructible(*args, **kwargs):
"""
Class decorator that allow the decorated class to be serialized
by the migrations subsystem.
Accepts an optional kwarg `path` to specify the import path.
"""
path = kwargs.pop('path', None)
def decorator(klass):
def __new__(cls, *args, **kwargs):
# We capture the arguments to make returning them trivial
obj = super(klass, cls).__new__(cls)
obj._constructor_args = (args, kwargs)
return obj
def deconstruct(obj):
"""
Returns a 3-tuple of class import path, positional arguments,
and keyword arguments.
"""
return (
path or '%s.%s' % (obj.__class__.__module__, obj.__class__.__name__),
obj._constructor_args[0],
obj._constructor_args[1],
)
klass.__new__ = staticmethod(__new__)
klass.deconstruct = deconstruct
return klass
if not args:
return decorator
return decorator(*args, **kwargs)
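# Illustrative usage sketch (not part of the original module); the class name
# and attributes below are hypothetical:
#
#   @deconstructible(path='myapp.validators.RangeValidator')
#   class RangeValidator(object):
#       def __init__(self, low, high):
#           self.low, self.high = low, high
#
#   RangeValidator(1, 10).deconstruct()
#   # -> ('myapp.validators.RangeValidator', (1, 10), {})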
| bsd-3-clause |
y12uc231/edx-platform | cms/djangoapps/contentstore/tests/test_transcripts_utils.py | 16 | 19943 | # -*- coding: utf-8 -*-
""" Tests for transcripts_utils. """
import unittest
from uuid import uuid4
import copy
import textwrap
from mock import patch, Mock
from django.test.utils import override_settings
from django.conf import settings
from django.utils import translation
from nose.plugins.skip import SkipTest
from xmodule.modulestore.tests.factories import CourseFactory
from xmodule.contentstore.content import StaticContent
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.exceptions import NotFoundError
from xmodule.contentstore.django import contentstore
from xmodule.video_module import transcripts_utils
TEST_DATA_CONTENTSTORE = copy.deepcopy(settings.CONTENTSTORE)
TEST_DATA_CONTENTSTORE['DOC_STORE_CONFIG']['db'] = 'test_xcontent_%s' % uuid4().hex
class TestGenerateSubs(unittest.TestCase):
"""Tests for `generate_subs` function."""
def setUp(self):
super(TestGenerateSubs, self).setUp()
self.source_subs = {
'start': [100, 200, 240, 390, 1000],
'end': [200, 240, 380, 1000, 1500],
'text': [
'subs #1',
'subs #2',
'subs #3',
'subs #4',
'subs #5'
]
}
def test_generate_subs_increase_speed(self):
subs = transcripts_utils.generate_subs(2, 1, self.source_subs)
self.assertDictEqual(
subs,
{
'start': [200, 400, 480, 780, 2000],
'end': [400, 480, 760, 2000, 3000],
'text': ['subs #1', 'subs #2', 'subs #3', 'subs #4', 'subs #5']
}
)
def test_generate_subs_decrease_speed_1(self):
subs = transcripts_utils.generate_subs(0.5, 1, self.source_subs)
self.assertDictEqual(
subs,
{
'start': [50, 100, 120, 195, 500],
'end': [100, 120, 190, 500, 750],
'text': ['subs #1', 'subs #2', 'subs #3', 'subs #4', 'subs #5']
}
)
def test_generate_subs_decrease_speed_2(self):
"""Test for correct devision during `generate_subs` process."""
subs = transcripts_utils.generate_subs(1, 2, self.source_subs)
self.assertDictEqual(
subs,
{
'start': [50, 100, 120, 195, 500],
'end': [100, 120, 190, 500, 750],
'text': ['subs #1', 'subs #2', 'subs #3', 'subs #4', 'subs #5']
}
)
@override_settings(CONTENTSTORE=TEST_DATA_CONTENTSTORE)
class TestSaveSubsToStore(ModuleStoreTestCase):
"""Tests for `save_subs_to_store` function."""
org = 'MITx'
number = '999'
display_name = 'Test course'
def clear_subs_content(self):
"""Remove, if subtitles content exists."""
try:
content = contentstore().find(self.content_location)
contentstore().delete(content.location)
except NotFoundError:
pass
def setUp(self):
super(TestSaveSubsToStore, self).setUp()
self.course = CourseFactory.create(
org=self.org, number=self.number, display_name=self.display_name)
self.subs = {
'start': [100, 200, 240, 390, 1000],
'end': [200, 240, 380, 1000, 1500],
'text': [
'subs #1',
'subs #2',
'subs #3',
'subs #4',
'subs #5'
]
}
self.subs_id = str(uuid4())
filename = 'subs_{0}.srt.sjson'.format(self.subs_id)
self.content_location = StaticContent.compute_location(self.course.id, filename)
self.addCleanup(self.clear_subs_content)
# incorrect subs
self.unjsonable_subs = set([1]) # set can't be serialized
self.unjsonable_subs_id = str(uuid4())
filename_unjsonable = 'subs_{0}.srt.sjson'.format(self.unjsonable_subs_id)
self.content_location_unjsonable = StaticContent.compute_location(self.course.id, filename_unjsonable)
self.clear_subs_content()
def test_save_subs_to_store(self):
with self.assertRaises(NotFoundError):
contentstore().find(self.content_location)
result_location = transcripts_utils.save_subs_to_store(
self.subs,
self.subs_id,
self.course)
self.assertTrue(contentstore().find(self.content_location))
self.assertEqual(result_location, self.content_location)
def test_save_unjsonable_subs_to_store(self):
"""
        Ensures that subs that can't be dumped can't be found later.
"""
with self.assertRaises(NotFoundError):
contentstore().find(self.content_location_unjsonable)
with self.assertRaises(TypeError):
transcripts_utils.save_subs_to_store(
self.unjsonable_subs,
self.unjsonable_subs_id,
self.course)
with self.assertRaises(NotFoundError):
contentstore().find(self.content_location_unjsonable)
@override_settings(CONTENTSTORE=TEST_DATA_CONTENTSTORE)
class TestDownloadYoutubeSubs(ModuleStoreTestCase):
"""Tests for `download_youtube_subs` function."""
org = 'MITx'
number = '999'
display_name = 'Test course'
def clear_sub_content(self, subs_id):
"""
Remove, if subtitle content exists.
"""
filename = 'subs_{0}.srt.sjson'.format(subs_id)
content_location = StaticContent.compute_location(self.course.id, filename)
try:
content = contentstore().find(content_location)
contentstore().delete(content.location)
except NotFoundError:
pass
def clear_subs_content(self, youtube_subs):
"""
Remove, if subtitles content exists.
youtube_subs: dict of '{speed: youtube_id}' format for different speeds.
"""
for subs_id in youtube_subs.values():
self.clear_sub_content(subs_id)
def setUp(self):
super(TestDownloadYoutubeSubs, self).setUp()
self.course = CourseFactory.create(
org=self.org, number=self.number, display_name=self.display_name)
def test_success_downloading_subs(self):
response = textwrap.dedent("""<?xml version="1.0" encoding="utf-8" ?>
<transcript>
<text start="0" dur="0.27"></text>
<text start="0.27" dur="2.45">Test text 1.</text>
<text start="2.72">Test text 2.</text>
<text start="5.43" dur="1.73">Test text 3.</text>
</transcript>
""")
good_youtube_sub = 'good_id_2'
self.clear_sub_content(good_youtube_sub)
with patch('xmodule.video_module.transcripts_utils.requests.get') as mock_get:
mock_get.return_value = Mock(status_code=200, text=response, content=response)
# Check transcripts_utils.GetTranscriptsFromYouTubeException not thrown
transcripts_utils.download_youtube_subs(good_youtube_sub, self.course, settings)
mock_get.assert_any_call('http://video.google.com/timedtext', params={'lang': 'en', 'v': 'good_id_2'})
# Check asset status after import of transcript.
filename = 'subs_{0}.srt.sjson'.format(good_youtube_sub)
content_location = StaticContent.compute_location(self.course.id, filename)
self.assertTrue(contentstore().find(content_location))
self.clear_sub_content(good_youtube_sub)
def test_subs_for_html5_vid_with_periods(self):
"""
        This verifies a fix: subtitle files uploaded against an HTML5 video
        whose name contains periods previously caused incorrect subtitle
        name parsing.
"""
html5_ids = transcripts_utils.get_html5_ids(['foo.mp4', 'foo.1.bar.mp4', 'foo/bar/baz.1.4.mp4', 'foo'])
self.assertEqual(4, len(html5_ids))
self.assertEqual(html5_ids[0], 'foo')
self.assertEqual(html5_ids[1], 'foo.1.bar')
self.assertEqual(html5_ids[2], 'baz.1.4')
self.assertEqual(html5_ids[3], 'foo')
@patch('xmodule.video_module.transcripts_utils.requests.get')
def test_fail_downloading_subs(self, mock_get):
mock_get.return_value = Mock(status_code=404, text='Error 404')
bad_youtube_sub = 'BAD_YOUTUBE_ID2'
self.clear_sub_content(bad_youtube_sub)
with self.assertRaises(transcripts_utils.GetTranscriptsFromYouTubeException):
transcripts_utils.download_youtube_subs(bad_youtube_sub, self.course, settings)
# Check asset status after import of transcript.
filename = 'subs_{0}.srt.sjson'.format(bad_youtube_sub)
content_location = StaticContent.compute_location(
self.course.id, filename
)
with self.assertRaises(NotFoundError):
contentstore().find(content_location)
self.clear_sub_content(bad_youtube_sub)
def test_success_downloading_chinese_transcripts(self):
# Disabled 11/14/13
# This test is flakey because it performs an HTTP request on an external service
# Re-enable when `requests.get` is patched using `mock.patch`
raise SkipTest
good_youtube_sub = 'j_jEn79vS3g' # Chinese, utf-8
self.clear_sub_content(good_youtube_sub)
# Check transcripts_utils.GetTranscriptsFromYouTubeException not thrown
transcripts_utils.download_youtube_subs(good_youtube_sub, self.course, settings)
        # Check asset status after importing the transcript.
        filename = 'subs_{0}.srt.sjson'.format(good_youtube_sub)
        content_location = StaticContent.compute_location(
            self.course.id, filename
        )
        self.assertTrue(contentstore().find(content_location))
self.clear_sub_content(good_youtube_sub)
class TestGenerateSubsFromSource(TestDownloadYoutubeSubs):
"""Tests for `generate_subs_from_source` function."""
def test_success_generating_subs(self):
youtube_subs = {
0.5: 'JMD_ifUUfsU',
1.0: 'hI10vDNYz4M',
2.0: 'AKqURZnYqpk'
}
srt_filedata = textwrap.dedent("""
1
00:00:10,500 --> 00:00:13,000
Elephant's Dream
2
00:00:15,000 --> 00:00:18,000
At the left we can see...
""")
self.clear_subs_content(youtube_subs)
# Check transcripts_utils.TranscriptsGenerationException not thrown.
# Also checks that uppercase file extensions are supported.
transcripts_utils.generate_subs_from_source(youtube_subs, 'SRT', srt_filedata, self.course)
# Check assets status after importing subtitles.
for subs_id in youtube_subs.values():
filename = 'subs_{0}.srt.sjson'.format(subs_id)
content_location = StaticContent.compute_location(
self.course.id, filename
)
self.assertTrue(contentstore().find(content_location))
self.clear_subs_content(youtube_subs)
def test_fail_bad_subs_type(self):
youtube_subs = {
0.5: 'JMD_ifUUfsU',
1.0: 'hI10vDNYz4M',
2.0: 'AKqURZnYqpk'
}
srt_filedata = textwrap.dedent("""
1
00:00:10,500 --> 00:00:13,000
Elephant's Dream
2
00:00:15,000 --> 00:00:18,000
At the left we can see...
""")
with self.assertRaises(transcripts_utils.TranscriptsGenerationException) as cm:
transcripts_utils.generate_subs_from_source(youtube_subs, 'BAD_FORMAT', srt_filedata, self.course)
exception_message = cm.exception.message
self.assertEqual(exception_message, "We support only SubRip (*.srt) transcripts format.")
def test_fail_bad_subs_filedata(self):
youtube_subs = {
0.5: 'JMD_ifUUfsU',
1.0: 'hI10vDNYz4M',
2.0: 'AKqURZnYqpk'
}
srt_filedata = """BAD_DATA"""
with self.assertRaises(transcripts_utils.TranscriptsGenerationException) as cm:
transcripts_utils.generate_subs_from_source(youtube_subs, 'srt', srt_filedata, self.course)
exception_message = cm.exception.message
self.assertEqual(exception_message, "Something wrong with SubRip transcripts file during parsing.")
class TestGenerateSrtFromSjson(TestDownloadYoutubeSubs):
"""Tests for `generate_srt_from_sjson` function."""
def test_success_generating_subs(self):
sjson_subs = {
'start': [100, 200, 240, 390, 54000],
'end': [200, 240, 380, 1000, 78400],
'text': [
'subs #1',
'subs #2',
'subs #3',
'subs #4',
'subs #5'
]
}
srt_subs = transcripts_utils.generate_srt_from_sjson(sjson_subs, 1)
self.assertTrue(srt_subs)
expected_subs = [
'00:00:00,100 --> 00:00:00,200\nsubs #1',
'00:00:00,200 --> 00:00:00,240\nsubs #2',
'00:00:00,240 --> 00:00:00,380\nsubs #3',
'00:00:00,390 --> 00:00:01,000\nsubs #4',
'00:00:54,000 --> 00:01:18,400\nsubs #5',
]
for sub in expected_subs:
self.assertIn(sub, srt_subs)
def test_success_generating_subs_speed_up(self):
sjson_subs = {
'start': [100, 200, 240, 390, 54000],
'end': [200, 240, 380, 1000, 78400],
'text': [
'subs #1',
'subs #2',
'subs #3',
'subs #4',
'subs #5'
]
}
srt_subs = transcripts_utils.generate_srt_from_sjson(sjson_subs, 0.5)
self.assertTrue(srt_subs)
expected_subs = [
'00:00:00,050 --> 00:00:00,100\nsubs #1',
'00:00:00,100 --> 00:00:00,120\nsubs #2',
'00:00:00,120 --> 00:00:00,190\nsubs #3',
'00:00:00,195 --> 00:00:00,500\nsubs #4',
'00:00:27,000 --> 00:00:39,200\nsubs #5',
]
for sub in expected_subs:
self.assertIn(sub, srt_subs)
def test_success_generating_subs_speed_down(self):
sjson_subs = {
'start': [100, 200, 240, 390, 54000],
'end': [200, 240, 380, 1000, 78400],
'text': [
'subs #1',
'subs #2',
'subs #3',
'subs #4',
'subs #5'
]
}
srt_subs = transcripts_utils.generate_srt_from_sjson(sjson_subs, 2)
self.assertTrue(srt_subs)
expected_subs = [
'00:00:00,200 --> 00:00:00,400\nsubs #1',
'00:00:00,400 --> 00:00:00,480\nsubs #2',
'00:00:00,480 --> 00:00:00,760\nsubs #3',
'00:00:00,780 --> 00:00:02,000\nsubs #4',
'00:01:48,000 --> 00:02:36,800\nsubs #5',
]
for sub in expected_subs:
self.assertIn(sub, srt_subs)
def test_fail_generating_subs(self):
sjson_subs = {
'start': [100, 200],
'end': [100],
'text': [
'subs #1',
'subs #2'
]
}
srt_subs = transcripts_utils.generate_srt_from_sjson(sjson_subs, 1)
self.assertFalse(srt_subs)
class TestYoutubeTranscripts(unittest.TestCase):
"""
Tests for checking right datastructure returning when using youtube api.
"""
@patch('xmodule.video_module.transcripts_utils.requests.get')
def test_youtube_bad_status_code(self, mock_get):
mock_get.return_value = Mock(status_code=404, text='test')
youtube_id = 'bad_youtube_id'
with self.assertRaises(transcripts_utils.GetTranscriptsFromYouTubeException):
transcripts_utils.get_transcripts_from_youtube(youtube_id, settings, translation)
@patch('xmodule.video_module.transcripts_utils.requests.get')
def test_youtube_empty_text(self, mock_get):
mock_get.return_value = Mock(status_code=200, text='')
youtube_id = 'bad_youtube_id'
with self.assertRaises(transcripts_utils.GetTranscriptsFromYouTubeException):
transcripts_utils.get_transcripts_from_youtube(youtube_id, settings, translation)
def test_youtube_good_result(self):
response = textwrap.dedent("""<?xml version="1.0" encoding="utf-8" ?>
<transcript>
<text start="0" dur="0.27"></text>
<text start="0.27" dur="2.45">Test text 1.</text>
<text start="2.72">Test text 2.</text>
<text start="5.43" dur="1.73">Test text 3.</text>
</transcript>
""")
expected_transcripts = {
'start': [270, 2720, 5430],
'end': [2720, 2720, 7160],
'text': ['Test text 1.', 'Test text 2.', 'Test text 3.']
}
youtube_id = 'good_youtube_id'
with patch('xmodule.video_module.transcripts_utils.requests.get') as mock_get:
mock_get.return_value = Mock(status_code=200, text=response, content=response)
transcripts = transcripts_utils.get_transcripts_from_youtube(youtube_id, settings, translation)
self.assertEqual(transcripts, expected_transcripts)
mock_get.assert_called_with('http://video.google.com/timedtext', params={'lang': 'en', 'v': 'good_youtube_id'})
class TestTranscript(unittest.TestCase):
"""
Tests for Transcript class e.g. different transcript conversions.
"""
def setUp(self):
super(TestTranscript, self).setUp()
self.srt_transcript = textwrap.dedent("""\
0
00:00:10,500 --> 00:00:13,000
Elephant's Dream
1
00:00:15,000 --> 00:00:18,000
At the left we can see...
""")
self.sjson_transcript = textwrap.dedent("""\
{
"start": [
10500,
15000
],
"end": [
13000,
18000
],
"text": [
"Elephant's Dream",
"At the left we can see..."
]
}
""")
self.txt_transcript = u"Elephant's Dream\nAt the left we can see..."
def test_convert_srt_to_txt(self):
expected = self.txt_transcript
actual = transcripts_utils.Transcript.convert(self.srt_transcript, 'srt', 'txt')
self.assertEqual(actual, expected)
def test_convert_srt_to_srt(self):
expected = self.srt_transcript
actual = transcripts_utils.Transcript.convert(self.srt_transcript, 'srt', 'srt')
self.assertEqual(actual, expected)
def test_convert_sjson_to_txt(self):
expected = self.txt_transcript
actual = transcripts_utils.Transcript.convert(self.sjson_transcript, 'sjson', 'txt')
self.assertEqual(actual, expected)
def test_convert_sjson_to_srt(self):
expected = self.srt_transcript
actual = transcripts_utils.Transcript.convert(self.sjson_transcript, 'sjson', 'srt')
self.assertEqual(actual, expected)
def test_convert_srt_to_sjson(self):
with self.assertRaises(NotImplementedError):
transcripts_utils.Transcript.convert(self.srt_transcript, 'srt', 'sjson')
class TestSubsFilename(unittest.TestCase):
"""
    Tests for the subs_filename function.
"""
def test_unicode(self):
name = transcripts_utils.subs_filename(u"˙∆©ƒƒƒ")
self.assertEqual(name, u'subs_˙∆©ƒƒƒ.srt.sjson')
name = transcripts_utils.subs_filename(u"˙∆©ƒƒƒ", 'uk')
self.assertEqual(name, u'uk_subs_˙∆©ƒƒƒ.srt.sjson')
| agpl-3.0 |
jlegendary/nupic | nupic/regions/PyRegion.py | 8 | 11217 | # ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013-2014, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
"""
## @file
This file defines the base class for NuPIC 2 Python regions.
"""
import numpy
RealNumpyDType = numpy.float32
from abc import ABCMeta, abstractmethod
from nupic.support import getCallerInfo
def not_implemented(f):
"""A decorator that raises NotImplementedError exception when called
Keeps the docstring of the original function.
"""
def decorated(*args, **kw):
gci = getCallerInfo()
caller = gci[0] + '()'
if gci[2]:
caller = gci[2] + '.' + caller
message = 'The unimplemented method '
message += '%s() was called by %s' % (f.func_name, caller)
raise NotImplementedError(message)
  decorated.__doc__ = f.__doc__
return decorated
class PyRegion(object):
"""
PyRegion provides services to its sub-classes (the actual regions):
- Define and document the interface of a Python region
- Enforce implementation of required methods
- Default implementation for some methods
PyRegion is an abstract base class (http://docs.python.org/library/abc.html).
If a subclass doesn't implement all its abstract methods it can't be
  instantiated. Note that the signature of an implemented abstract method in the
subclass doesn't need to match the signature of the abstract method in the
base class. This is very important for __init__() in this case.
  The abstract methods (decorated with @abstractmethod) are:
* __init__
* initialize
* compute
In addition, PyRegion decorates some other methods with the
@not_implemented decorator. A sub-class may opt not to implement these
  methods, but if such a method is called then a NotImplementedError will be
raised. This is useful for methods like setParameterArray if a particular
subclass has no array parameters.
The not implemented methods (decorated with @not_implemented) are:
* getSpec (class method)
* setParameter
* setParameterArray
* getOutputElementCount
The getSpec is a class method, which is actually required but since it's
not an instance method the @abstractmethod decorator doesn't apply.
Finally, PyRegion provides reasonable default implementation to some methods.
Sub-classes may opt to override these methods or use the default
implementation (often recommended).
The implemented methods are:
* getParameter
* getParameterArray
* getParameterArrayCount
* executeMethod
"""
__metaclass__ = ABCMeta
@classmethod
@not_implemented
def getSpec(cls):
"""Returns the region spec for this region. The Region Spec is a dictionary
with the following keys:
description -- a string
singleNodeOnly -- a boolean (True if this Region supports only a single node)
inputs -- a dictionary in which the keys are the names of the inputs and
the values are dictionaries with these keys:
description - string
regionLevel -- True if this is a "region-level" input.
dataType - a string describing the data type, usually 'Real32'
count - the number of items in the input. 0 means unspecified.
        required -- boolean - whether the input must be connected
isDefaultInput -- must be True for exactly one input
requireSplitterMap -- [just set this to False.]
outputs -- a dictionary with similar structure to inputs. The keys
are:
description
dataType
count
regionLevel
isDefaultOutput
parameters -- a dictionary of dictionaries with the following keys:
description
dataType
count
constraints (optional)
accessMode (one of "ReadWrite", "Read", "Create")
This class method is called by NuPIC before creating a Region.
"""
@abstractmethod
def __init__(self, *args, **kwars):
"""Initialize the node with creation parameters from the node spec
Should be implemented by subclasses (unless there are no creation params)
"""
@abstractmethod
def initialize(self, inputs, outputs):
"""Initialize the node after the network is fully linked
It is called once by NuPIC before the first call to compute(). It is
a good place to perform one time initialization that depend on the inputs
and/or outputs. The region may also remember its inputs and outputs here
because they will not change.
inputs: dict of numpy arrays (one per input)
outputs: dict of numpy arrays (one per output)
"""
@abstractmethod
def compute(self, inputs, outputs):
"""Perform the main computation
This method is called in each iteration for each phase the node supports.
inputs: dict of numpy arrays (one per input)
outputs: dict of numpy arrays (one per output)
"""
@not_implemented
def getOutputElementCount(self, name):
"""Return the number of elements in the output of a single node
If the region has multiple nodes (all must have the same output
size) then just the number of output elements of a single node
should be returned.
name: the name of the output
"""
def getParameter(self, name, index):
"""Default implementation that return an attribute with the requested name
This method provides a default implementation of getParameter() that simply
returns an attribute with the parameter name. If the Region conceptually
contains multiple nodes with separate state the 'index' argument is used
to request a parameter of a specific node inside the region. In case of
a region-level parameter the index should be -1
The implementation prevents accessing parameters names that start with '_'.
It may be better to enforce this convention at the node spec level.
name: name of requested parameter
index: index of node inside the region (if relevant)
"""
if name.startswith('_'):
raise Exception('Parameter name must not start with an underscore')
value = getattr(self, name)
return value
def getParameterArrayCount(self, name, index):
"""Default implementation that return the length of the attribute
This default implementation goes hand in hand with getParameterArray().
If you override one of them in your subclass, you should probably override
both of them.
The implementation prevents accessing parameters names that start with '_'.
It may be better to enforce this convention at the node spec level.
name: name of requested parameter
index: index of node inside the region (if relevant)
"""
if name.startswith('_'):
raise Exception('Parameter name must not start with an underscore')
v = getattr(self, name)
    return len(v)
def getParameterArray(self, name, index, array):
"""Default implementation that return an attribute with the requested name
This method provides a default implementation of getParameterArray() that
returns an attribute with the parameter name. If the Region conceptually
contains multiple nodes with separate state the 'index' argument is used
to request a parameter of a specific node inside the region. The attribute
value is written into the output array. No type or sanity checks are
performed for performance reasons. If something goes awry it will result
in a low-level exception. If you are unhappy about it you can implement
your own getParameterArray() method in the subclass.
The implementation prevents accessing parameters names that start with '_'.
It may be better to enforce this convention at the node spec level.
name: name of requested parameter
index: index of node inside the region (if relevant)
array: output numpy array that the value is written to
"""
if name.startswith('_'):
raise Exception('Parameter name must not start with an underscore')
v = getattr(self, name)
# Not performing sanity checks for performance reasons.
#assert array.dtype == v.dtype
#assert len(array) == len(v)
array[:] = v
@not_implemented
def setParameter(self, name, index, value):
"""Set the value of a parameter
If the Region conceptually contains multiple nodes with separate state
    the 'index' argument is used to set a parameter of a specific node inside
the region.
name: name of requested parameter
index: index of node inside the region (if relevant)
value: the value to assign to the requested parameter
"""
@not_implemented
def setParameterArray(self, name, index, array):
"""Set the value of an array parameter
If the Region conceptually contains multiple nodes with separate state
    the 'index' argument is used to set a parameter of a specific node inside
the region.
name: name of requested parameter
index: index of node inside the region (if relevant)
array: the value to assign to the requested parameter (a numpy array)
"""
def serializeExtraData(self, filePath):
"""This method is called during network serialization with an external
filename that can be used to bypass pickle for saving large binary states.
filePath: full filepath and name
"""
pass
def deSerializeExtraData(self, filePath):
"""This method is called during network deserialization with an external
filename that can be used to bypass pickle for loading large binary states.
filePath: full filepath and name
"""
pass
def executeMethod(self, methodName, args):
"""Executes a method named 'methodName' with the specified arguments.
This method is called when the user executes a command as defined in
    the node spec. It provides a perfectly reasonable implementation
of the command mechanism. As a sub-class developer you just need to
implement a method for each command in the node spec. Note that due to
    the command mechanism only unnamed arguments are supported.
methodName: the name of the method that correspond to a command in the spec
args: list of arguments that will be passed to the method
"""
if not hasattr(self, methodName):
raise Exception('Missing command method: ' + methodName)
m = getattr(self, methodName)
if not hasattr(m, '__call__'):
raise Exception('Command: ' + methodName + ' must be callable')
return m(*args)
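# A minimal illustrative subclass (not part of the original file) showing one
# way to satisfy the interface documented above; the region, input, and output
# names are hypothetical.
class _IdentityRegion(PyRegion):
  """Toy region that copies its single input to its single output."""
  @classmethod
  def getSpec(cls):
    return dict(
      description='Pass-through region (illustrative only)',
      singleNodeOnly=True,
      inputs=dict(
        bottomUpIn=dict(description='Input vector', regionLevel=True,
                        dataType='Real32', count=0, required=True,
                        isDefaultInput=True, requireSplitterMap=False)),
      outputs=dict(
        bottomUpOut=dict(description='Copy of the input', dataType='Real32',
                         count=0, regionLevel=True, isDefaultOutput=True)),
      parameters=dict())
  def __init__(self, *args, **kwargs):
    pass
  def initialize(self, inputs, outputs):
    pass
  def compute(self, inputs, outputs):
    # Copy the (single) input array into the output array.
    outputs['bottomUpOut'][:] = inputs['bottomUpIn']
  def getOutputElementCount(self, name):
    # Each node emits one element in this toy example.
    return 1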
| gpl-3.0 |
CTSRD-SOAAP/chromium-42.0.2311.135 | tools/grit/grit/gather/admin_template_unittest.py | 61 | 3929 | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
'''Unit tests for the admin template gatherer.'''
import os
import sys
if __name__ == '__main__':
sys.path.append(os.path.join(os.path.dirname(__file__), '../..'))
import StringIO
import tempfile
import unittest
from grit.gather import admin_template
from grit import util
from grit import grd_reader
from grit import grit_runner
from grit.tool import build
class AdmGathererUnittest(unittest.TestCase):
def testParsingAndTranslating(self):
pseudofile = StringIO.StringIO(
'bingo bongo\n'
'ding dong\n'
'[strings] \n'
'whatcha="bingo bongo"\n'
'gotcha = "bingolabongola "the wise" fingulafongula" \n')
gatherer = admin_template.AdmGatherer(pseudofile)
gatherer.Parse()
self.failUnless(len(gatherer.GetCliques()) == 2)
self.failUnless(gatherer.GetCliques()[1].GetMessage().GetRealContent() ==
'bingolabongola "the wise" fingulafongula')
translation = gatherer.Translate('en')
self.failUnless(translation == gatherer.GetText().strip())
def testErrorHandling(self):
pseudofile = StringIO.StringIO(
'bingo bongo\n'
'ding dong\n'
'whatcha="bingo bongo"\n'
'gotcha = "bingolabongola "the wise" fingulafongula" \n')
gatherer = admin_template.AdmGatherer(pseudofile)
self.assertRaises(admin_template.MalformedAdminTemplateException,
gatherer.Parse)
_TRANSLATABLES_FROM_FILE = (
'Google', 'Google Desktop', 'Preferences',
'Controls Google Desktop preferences',
'Indexing and Capture Control',
'Controls what files, web pages, and other content will be indexed by Google Desktop.',
'Prevent indexing of email',
# there are lots more but we don't check any further
)
def VerifyCliquesFromAdmFile(self, cliques):
self.failUnless(len(cliques) > 20)
for clique, expected in zip(cliques, self._TRANSLATABLES_FROM_FILE):
text = clique.GetMessage().GetRealContent()
self.failUnless(text == expected)
def testFromFile(self):
fname = util.PathFromRoot('grit/testdata/GoogleDesktop.adm')
gatherer = admin_template.AdmGatherer(fname)
gatherer.Parse()
cliques = gatherer.GetCliques()
self.VerifyCliquesFromAdmFile(cliques)
def MakeGrd(self):
grd = grd_reader.Parse(StringIO.StringIO('''<?xml version="1.0" encoding="UTF-8"?>
<grit latest_public_release="2" source_lang_id="en-US" current_release="3">
<release seq="3">
<structures>
<structure type="admin_template" name="IDAT_GOOGLE_DESKTOP_SEARCH"
file="GoogleDesktop.adm" exclude_from_rc="true" />
<structure type="txt" name="BINGOBONGO"
file="README.txt" exclude_from_rc="true" />
</structures>
</release>
<outputs>
<output filename="de_res.rc" type="rc_all" lang="de" />
</outputs>
</grit>'''), util.PathFromRoot('grit/testdata'))
grd.SetOutputLanguage('en')
grd.RunGatherers()
return grd
def testInGrd(self):
grd = self.MakeGrd()
cliques = grd.children[0].children[0].children[0].GetCliques()
self.VerifyCliquesFromAdmFile(cliques)
def testFileIsOutput(self):
grd = self.MakeGrd()
dirname = tempfile.mkdtemp()
try:
tool = build.RcBuilder()
tool.o = grit_runner.Options()
tool.output_directory = dirname
tool.res = grd
tool.Process()
self.failUnless(os.path.isfile(
os.path.join(dirname, 'de_GoogleDesktop.adm')))
self.failUnless(os.path.isfile(
os.path.join(dirname, 'de_README.txt')))
finally:
for f in os.listdir(dirname):
os.unlink(os.path.join(dirname, f))
os.rmdir(dirname)
if __name__ == '__main__':
unittest.main()
| bsd-3-clause |
smi96/django-blog_website | lib/python2.7/site-packages/django/conf/locale/fi/formats.py | 504 | 1390 | # -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
from __future__ import unicode_literals
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = 'j. E Y'
TIME_FORMAT = 'G.i'
DATETIME_FORMAT = r'j. E Y \k\e\l\l\o G.i'
YEAR_MONTH_FORMAT = 'F Y'
MONTH_DAY_FORMAT = 'j. F'
SHORT_DATE_FORMAT = 'j.n.Y'
SHORT_DATETIME_FORMAT = 'j.n.Y G.i'
FIRST_DAY_OF_WEEK = 1 # Monday
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
DATE_INPUT_FORMATS = [
'%d.%m.%Y', # '20.3.2014'
'%d.%m.%y', # '20.3.14'
]
DATETIME_INPUT_FORMATS = [
'%d.%m.%Y %H.%M.%S', # '20.3.2014 14.30.59'
'%d.%m.%Y %H.%M.%S.%f', # '20.3.2014 14.30.59.000200'
'%d.%m.%Y %H.%M', # '20.3.2014 14.30'
'%d.%m.%Y', # '20.3.2014'
'%d.%m.%y %H.%M.%S', # '20.3.14 14.30.59'
'%d.%m.%y %H.%M.%S.%f', # '20.3.14 14.30.59.000200'
'%d.%m.%y %H.%M', # '20.3.14 14.30'
'%d.%m.%y', # '20.3.14'
]
TIME_INPUT_FORMATS = [
'%H.%M.%S', # '14.30.59'
'%H.%M.%S.%f', # '14.30.59.000200'
'%H.%M', # '14.30'
]
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '\xa0' # Non-breaking space
NUMBER_GROUPING = 3
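# Illustrative only (not part of the original module): the *_INPUT_FORMATS
# entries are plain strftime patterns, so a Finnish timestamp can be parsed
# directly with the standard library:
#
#   from datetime import datetime
#   datetime.strptime('20.3.2014 14.30', '%d.%m.%Y %H.%M')
#   # -> datetime.datetime(2014, 3, 20, 14, 30)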
| mit |
Trois-Six/ansible-modules-core | cloud/rackspace/rax_cdb.py | 28 | 8017 | #!/usr/bin/python -tt
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# This is a DOCUMENTATION stub specific to this module, it extends
# a documentation fragment located in ansible.utils.module_docs_fragments
DOCUMENTATION = '''
---
module: rax_cdb
short_description: create/delete or resize a Rackspace Cloud Databases instance
description:
    - creates / deletes or resizes a Rackspace Cloud Databases instance
and optionally waits for it to be 'running'. The name option needs to be
unique since it's used to identify the instance.
version_added: "1.8"
options:
name:
description:
- Name of the databases server instance
default: null
flavor:
description:
- flavor to use for the instance 1 to 6 (i.e. 512MB to 16GB)
default: 1
volume:
description:
- Volume size of the database 1-150GB
default: 2
cdb_type:
description:
- type of instance (i.e. MySQL, MariaDB, Percona)
default: MySQL
version_added: "2.0"
cdb_version:
description:
- version of database (MySQL supports 5.1 and 5.6, MariaDB supports 10, Percona supports 5.6)
choices: ['5.1', '5.6', '10']
version_added: "2.0"
state:
description:
- Indicate desired state of the resource
choices: ['present', 'absent']
default: present
wait:
description:
- wait for the instance to be in state 'running' before returning
default: "no"
choices: [ "yes", "no" ]
wait_timeout:
description:
- how long before wait gives up, in seconds
default: 300
author: "Simon JAILLET (@jails)"
extends_documentation_fragment: rackspace
'''
EXAMPLES = '''
- name: Build a Cloud Databases
gather_facts: False
tasks:
- name: Server build request
local_action:
module: rax_cdb
credentials: ~/.raxpub
region: IAD
name: db-server1
flavor: 1
volume: 2
cdb_type: MySQL
cdb_version: 5.6
wait: yes
state: present
register: rax_db_server
'''
try:
import pyrax
HAS_PYRAX = True
except ImportError:
HAS_PYRAX = False
def find_instance(name):
cdb = pyrax.cloud_databases
instances = cdb.list()
if instances:
for instance in instances:
if instance.name == name:
return instance
return False
def save_instance(module, name, flavor, volume, cdb_type, cdb_version, wait,
wait_timeout):
for arg, value in dict(name=name, flavor=flavor,
volume=volume, type=cdb_type, version=cdb_version
).iteritems():
if not value:
module.fail_json(msg='%s is required for the "rax_cdb"'
' module' % arg)
if not (volume >= 1 and volume <= 150):
module.fail_json(msg='volume is required to be between 1 and 150')
cdb = pyrax.cloud_databases
flavors = []
for item in cdb.list_flavors():
flavors.append(item.id)
if not (flavor in flavors):
module.fail_json(msg='unexisting flavor reference "%s"' % str(flavor))
changed = False
instance = find_instance(name)
if not instance:
action = 'create'
try:
instance = cdb.create(name=name, flavor=flavor, volume=volume,
type=cdb_type, version=cdb_version)
except Exception, e:
module.fail_json(msg='%s' % e.message)
else:
changed = True
else:
action = None
if instance.volume.size != volume:
action = 'resize'
if instance.volume.size > volume:
module.fail_json(changed=False, action=action,
msg='The new volume size must be larger than '
'the current volume size',
cdb=rax_to_dict(instance))
instance.resize_volume(volume)
changed = True
if int(instance.flavor.id) != flavor:
action = 'resize'
pyrax.utils.wait_until(instance, 'status', 'ACTIVE',
attempts=wait_timeout)
instance.resize(flavor)
changed = True
if wait:
pyrax.utils.wait_until(instance, 'status', 'ACTIVE',
attempts=wait_timeout)
if wait and instance.status != 'ACTIVE':
module.fail_json(changed=changed, action=action,
cdb=rax_to_dict(instance),
msg='Timeout waiting for "%s" databases instance to '
'be created' % name)
module.exit_json(changed=changed, action=action, cdb=rax_to_dict(instance))
def delete_instance(module, name, wait, wait_timeout):
if not name:
module.fail_json(msg='name is required for the "rax_cdb" module')
changed = False
instance = find_instance(name)
if not instance:
module.exit_json(changed=False, action='delete')
try:
instance.delete()
except Exception, e:
module.fail_json(msg='%s' % e.message)
else:
changed = True
if wait:
pyrax.utils.wait_until(instance, 'status', 'SHUTDOWN',
attempts=wait_timeout)
if wait and instance.status != 'SHUTDOWN':
module.fail_json(changed=changed, action='delete',
cdb=rax_to_dict(instance),
msg='Timeout waiting for "%s" databases instance to '
'be deleted' % name)
module.exit_json(changed=changed, action='delete',
cdb=rax_to_dict(instance))
def rax_cdb(module, state, name, flavor, volume, cdb_type, cdb_version, wait,
wait_timeout):
# act on the state
if state == 'present':
save_instance(module, name, flavor, volume, cdb_type, cdb_version, wait,
wait_timeout)
elif state == 'absent':
delete_instance(module, name, wait, wait_timeout)
def main():
argument_spec = rax_argument_spec()
argument_spec.update(
dict(
name=dict(type='str', required=True),
flavor=dict(type='int', default=1),
volume=dict(type='int', default=2),
cdb_type=dict(type='str', default='MySQL'),
cdb_version=dict(type='str', default='5.6'),
state=dict(default='present', choices=['present', 'absent']),
wait=dict(type='bool', default=False),
wait_timeout=dict(type='int', default=300),
)
)
module = AnsibleModule(
argument_spec=argument_spec,
required_together=rax_required_together(),
)
if not HAS_PYRAX:
module.fail_json(msg='pyrax is required for this module')
name = module.params.get('name')
flavor = module.params.get('flavor')
volume = module.params.get('volume')
    cdb_type = module.params.get('cdb_type')
    cdb_version = module.params.get('cdb_version')
state = module.params.get('state')
wait = module.params.get('wait')
wait_timeout = module.params.get('wait_timeout')
setup_rax_module(module, pyrax)
rax_cdb(module, state, name, flavor, volume, cdb_type, cdb_version, wait, wait_timeout)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.rax import *
# invoke the module
main()
| gpl-3.0 |
xxd3vin/spp-sdk | opt/Python27/Lib/lib-tk/Tix.py | 4 | 77781 | # -*-mode: python; fill-column: 75; tab-width: 8; coding: iso-latin-1-unix -*-
#
# $Id: Tix.py 81008 2010-05-08 20:59:42Z benjamin.peterson $
#
# Tix.py -- Tix widget wrappers.
#
# For Tix, see http://tix.sourceforge.net
#
# - Sudhir Shenoy ([email protected]), Dec. 1995.
# based on an idea of Jean-Marc Lugrin ([email protected])
#
# NOTE: In order to minimize changes to Tkinter.py, some of the code here
# (TixWidget.__init__) has been taken from Tkinter (Widget.__init__)
# and will break if there are major changes in Tkinter.
#
# The Tix widgets are represented by a class hierarchy in python with proper
# inheritance of base classes.
#
# As a result after creating a 'w = StdButtonBox', I can write
# w.ok['text'] = 'Who Cares'
# or w.ok['bg'] = w['bg']
# or even w.ok.invoke()
# etc.
#
# Compare the demo tixwidgets.py to the original Tcl program and you will
# appreciate the advantages.
#
from Tkinter import *
from Tkinter import _flatten, _cnfmerge, _default_root
# WARNING - TkVersion is a limited precision floating point number
if TkVersion < 3.999:
raise ImportError, "This version of Tix.py requires Tk 4.0 or higher"
import _tkinter # If this fails your Python may not be configured for Tk
# Some more constants (for consistency with Tkinter)
WINDOW = 'window'
TEXT = 'text'
STATUS = 'status'
IMMEDIATE = 'immediate'
IMAGE = 'image'
IMAGETEXT = 'imagetext'
BALLOON = 'balloon'
AUTO = 'auto'
ACROSSTOP = 'acrosstop'
# A few useful constants for the Grid widget
ASCII = 'ascii'
CELL = 'cell'
COLUMN = 'column'
DECREASING = 'decreasing'
INCREASING = 'increasing'
INTEGER = 'integer'
MAIN = 'main'
MAX = 'max'
REAL = 'real'
ROW = 'row'
S_REGION = 's-region'
X_REGION = 'x-region'
Y_REGION = 'y-region'
# Some constants used by Tkinter dooneevent()
TCL_DONT_WAIT = 1 << 1
TCL_WINDOW_EVENTS = 1 << 2
TCL_FILE_EVENTS = 1 << 3
TCL_TIMER_EVENTS = 1 << 4
TCL_IDLE_EVENTS = 1 << 5
TCL_ALL_EVENTS = 0
# BEWARE - this is implemented by copying some code from the Widget class
# in Tkinter (to override Widget initialization) and is therefore
# liable to break.
import Tkinter, os
# Could probably add this to Tkinter.Misc
class tixCommand:
"""The tix commands provide access to miscellaneous elements
of Tix's internal state and the Tix application context.
Most of the information manipulated by these commands pertains
to the application as a whole, or to a screen or
display, rather than to a particular window.
    This is a mixin class, assumed to be mixed into Tkinter.Tk,
    which supports the self.tk.call method.
"""
def tix_addbitmapdir(self, directory):
"""Tix maintains a list of directories under which
the tix_getimage and tix_getbitmap commands will
search for image files. The standard bitmap directory
is $TIX_LIBRARY/bitmaps. The addbitmapdir command
adds directory into this list. By using this
        command, the image files of an application can
also be located using the tix_getimage or tix_getbitmap
command.
"""
return self.tk.call('tix', 'addbitmapdir', directory)
def tix_cget(self, option):
"""Returns the current value of the configuration
option given by option. Option may be any of the
options described in the CONFIGURATION OPTIONS section.
"""
return self.tk.call('tix', 'cget', option)
def tix_configure(self, cnf=None, **kw):
"""Query or modify the configuration options of the Tix application
        context. If no option is specified, returns a dictionary of all the
available options. If option is specified with no value, then the
command returns a list describing the one named option (this list
will be identical to the corresponding sublist of the value
returned if no option is specified). If one or more option-value
pairs are specified, then the command modifies the given option(s)
to have the given value(s); in this case the command returns an
empty string. Option may be any of the configuration options.
"""
# Copied from Tkinter.py
if kw:
cnf = _cnfmerge((cnf, kw))
elif cnf:
cnf = _cnfmerge(cnf)
if cnf is None:
cnf = {}
for x in self.tk.split(self.tk.call('tix', 'configure')):
cnf[x[0][1:]] = (x[0][1:],) + x[1:]
return cnf
if isinstance(cnf, StringType):
x = self.tk.split(self.tk.call('tix', 'configure', '-'+cnf))
return (x[0][1:],) + x[1:]
return self.tk.call(('tix', 'configure') + self._options(cnf))
def tix_filedialog(self, dlgclass=None):
"""Returns the file selection dialog that may be shared among
different calls from this application. This command will create a
file selection dialog widget when it is called the first time. This
dialog will be returned by all subsequent calls to tix_filedialog.
An optional dlgclass parameter can be passed to specified what type
of file selection dialog widget is desired. Possible options are
        tixFileSelectDialog or tixExFileSelectDialog.
"""
if dlgclass is not None:
return self.tk.call('tix', 'filedialog', dlgclass)
else:
return self.tk.call('tix', 'filedialog')
def tix_getbitmap(self, name):
"""Locates a bitmap file of the name name.xpm or name in one of the
bitmap directories (see the tix_addbitmapdir command above). By
using tix_getbitmap, you can avoid hard coding the pathnames of the
bitmap files in your application. When successful, it returns the
complete pathname of the bitmap file, prefixed with the character
'@'. The returned value can be used to configure the -bitmap
        option of the Tk and Tix widgets.
"""
return self.tk.call('tix', 'getbitmap', name)
def tix_getimage(self, name):
"""Locates an image file of the name name.xpm, name.xbm or name.ppm
in one of the bitmap directories (see the addbitmapdir command
above). If more than one file with the same name (but different
extensions) exist, then the image type is chosen according to the
depth of the X display: xbm images are chosen on monochrome
displays and color images are chosen on color displays. By using
        tix_getimage, you can avoid hard coding the pathnames of the
image files in your application. When successful, this command
returns the name of the newly created image, which can be used to
configure the -image option of the Tk and Tix widgets.
"""
return self.tk.call('tix', 'getimage', name)
def tix_option_get(self, name):
"""Gets the options manitained by the Tix
scheme mechanism. Available options include:
active_bg active_fg bg
bold_font dark1_bg dark1_fg
dark2_bg dark2_fg disabled_fg
fg fixed_font font
inactive_bg inactive_fg input1_bg
input2_bg italic_font light1_bg
light1_fg light2_bg light2_fg
menu_font output1_bg output2_bg
select_bg select_fg selector
"""
# could use self.tk.globalgetvar('tixOption', name)
return self.tk.call('tix', 'option', 'get', name)
def tix_resetoptions(self, newScheme, newFontSet, newScmPrio=None):
"""Resets the scheme and fontset of the Tix application to
newScheme and newFontSet, respectively. This affects only those
widgets created after this call. Therefore, it is best to call the
resetoptions command before the creation of any widgets in a Tix
application.
The optional parameter newScmPrio can be given to reset the
priority level of the Tk options set by the Tix schemes.
Because of the way Tk handles the X option database, after Tix has
        been imported and initialized, it is not possible to reset the color
schemes and font sets using the tix config command. Instead, the
tix_resetoptions command must be used.
"""
if newScmPrio is not None:
return self.tk.call('tix', 'resetoptions', newScheme, newFontSet, newScmPrio)
else:
return self.tk.call('tix', 'resetoptions', newScheme, newFontSet)
class Tk(Tkinter.Tk, tixCommand):
"""Toplevel widget of Tix which represents mostly the main window
of an application. It has an associated Tcl interpreter."""
def __init__(self, screenName=None, baseName=None, className='Tix'):
Tkinter.Tk.__init__(self, screenName, baseName, className)
tixlib = os.environ.get('TIX_LIBRARY')
self.tk.eval('global auto_path; lappend auto_path [file dir [info nameof]]')
if tixlib is not None:
self.tk.eval('global auto_path; lappend auto_path {%s}' % tixlib)
self.tk.eval('global tcl_pkgPath; lappend tcl_pkgPath {%s}' % tixlib)
# Load Tix - this should work dynamically or statically
# If it's static, tcl/tix8.1/pkgIndex.tcl should have
# 'load {} Tix'
# If it's dynamic under Unix, tcl/tix8.1/pkgIndex.tcl should have
# 'load libtix8.1.8.3.so Tix'
self.tk.eval('package require Tix')
def destroy(self):
        # For safety, remove any delete_window binding before destroy
self.protocol("WM_DELETE_WINDOW", "")
Tkinter.Tk.destroy(self)
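# Illustrative sketch (not part of the original module, never executed on
# import): creating a Tix root window and calling a couple of the tixCommand
# methods mixed in above.  It assumes a Tcl/Tk installation with the Tix
# package available; the stock image name 'folder' is an assumption.
def _example_tix_root():
    root = Tk()                          # also runs 'package require Tix'
    img = root.tix_getimage('folder')    # look up a stock Tix image by name
    Label(root, image=img).pack()
    root.mainloop()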
# The Tix 'tixForm' geometry manager
class Form:
"""The Tix Form geometry manager
Widgets can be arranged by specifying attachments to other widgets.
See Tix documentation for complete details"""
def config(self, cnf={}, **kw):
self.tk.call('tixForm', self._w, *self._options(cnf, kw))
form = config
def __setitem__(self, key, value):
Form.form(self, {key: value})
def check(self):
return self.tk.call('tixForm', 'check', self._w)
def forget(self):
self.tk.call('tixForm', 'forget', self._w)
def grid(self, xsize=0, ysize=0):
if (not xsize) and (not ysize):
x = self.tk.call('tixForm', 'grid', self._w)
y = self.tk.splitlist(x)
z = ()
for x in y:
z = z + (self.tk.getint(x),)
return z
return self.tk.call('tixForm', 'grid', self._w, xsize, ysize)
def info(self, option=None):
if not option:
return self.tk.call('tixForm', 'info', self._w)
if option[0] != '-':
option = '-' + option
return self.tk.call('tixForm', 'info', self._w, option)
def slaves(self):
return map(self._nametowidget,
self.tk.splitlist(
self.tk.call(
'tixForm', 'slaves', self._w)))
Tkinter.Widget.__bases__ = Tkinter.Widget.__bases__ + (Form,)
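# Illustrative sketch (never executed on import): because of the mixin
# above, every widget gains a form() method.  Attachment values such as 0
# (pixels from the parent's edge), -1 (from the opposite edge) and '%50'
# (percentage of the parent) follow the tixForm syntax; the concrete
# layout below is only an assumption for demonstration.
def _example_form():
    root = Tk()
    left = Button(root, text='left half')
    right = Button(root, text='right half')
    left.form(top=0, left=0, right='%50', bottom=-1)
    right.form(top=0, left='%50', right=-1, bottom=-1)
    root.mainloop()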
class TixWidget(Tkinter.Widget):
"""A TixWidget class is used to package all (or most) Tix widgets.
Widget initialization is extended in two ways:
1) It is possible to give a list of options which must be part of
the creation command (so called Tix 'static' options). These cannot be
given as a 'config' command later.
2) It is possible to give the name of an existing TK widget. These are
child widgets created automatically by a Tix mega-widget. The Tk call
to create these widgets is therefore bypassed in TixWidget.__init__
Both options are for use by subclasses only.
"""
def __init__ (self, master=None, widgetName=None,
static_options=None, cnf={}, kw={}):
# Merge keywords and dictionary arguments
if kw:
cnf = _cnfmerge((cnf, kw))
else:
cnf = _cnfmerge(cnf)
# Move static options into extra. static_options must be
# a list of keywords (or None).
extra=()
# 'options' is always a static option
if static_options:
static_options.append('options')
else:
static_options = ['options']
for k,v in cnf.items()[:]:
if k in static_options:
extra = extra + ('-' + k, v)
del cnf[k]
self.widgetName = widgetName
Widget._setup(self, master, cnf)
# If widgetName is None, this is a dummy creation call where the
# corresponding Tk widget has already been created by Tix
if widgetName:
self.tk.call(widgetName, self._w, *extra)
# Non-static options - to be done via a 'config' command
if cnf:
Widget.config(self, cnf)
# Dictionary to hold subwidget names for easier access. We can't
# use the children list because the public Tix names may not be the
# same as the pathname component
self.subwidget_list = {}
# We set up an attribute access function so that it is possible to
# do w.ok['text'] = 'Hello' rather than w.subwidget('ok')['text'] = 'Hello'
# when w is a StdButtonBox.
# We can even do w.ok.invoke() because w.ok is subclassed from the
# Button class if you go through the proper constructors
def __getattr__(self, name):
if name in self.subwidget_list:
return self.subwidget_list[name]
raise AttributeError, name
def set_silent(self, value):
"""Set a variable without calling its action routine"""
self.tk.call('tixSetSilent', self._w, value)
def subwidget(self, name):
"""Return the named subwidget (which must have been created by
the sub-class)."""
n = self._subwidget_name(name)
if not n:
raise TclError, "Subwidget " + name + " not child of " + self._name
# Remove header of name and leading dot
n = n[len(self._w)+1:]
return self._nametowidget(n)
def subwidgets_all(self):
"""Return all subwidgets."""
names = self._subwidget_names()
if not names:
return []
retlist = []
for name in names:
name = name[len(self._w)+1:]
try:
retlist.append(self._nametowidget(name))
except:
# some of the widgets are unknown e.g. border in LabelFrame
pass
return retlist
def _subwidget_name(self,name):
"""Get a subwidget name (returns a String, not a Widget !)"""
try:
return self.tk.call(self._w, 'subwidget', name)
except TclError:
return None
def _subwidget_names(self):
"""Return the name of all subwidgets."""
try:
x = self.tk.call(self._w, 'subwidgets', '-all')
return self.tk.split(x)
except TclError:
return None
def config_all(self, option, value):
"""Set configuration options for all subwidgets (and self)."""
if option == '':
return
elif not isinstance(option, StringType):
option = repr(option)
if not isinstance(value, StringType):
value = repr(value)
names = self._subwidget_names()
for name in names:
self.tk.call(name, 'configure', '-' + option, value)
# These are missing from Tkinter
def image_create(self, imgtype, cnf={}, master=None, **kw):
if not master:
master = Tkinter._default_root
if not master:
raise RuntimeError, 'Too early to create image'
if kw and cnf: cnf = _cnfmerge((cnf, kw))
elif kw: cnf = kw
options = ()
for k, v in cnf.items():
if hasattr(v, '__call__'):
v = self._register(v)
options = options + ('-'+k, v)
return master.tk.call(('image', 'create', imgtype,) + options)
def image_delete(self, imgname):
try:
self.tk.call('image', 'delete', imgname)
except TclError:
# May happen if the root was destroyed
pass
# Subwidgets are child widgets created automatically by mega-widgets.
# In python, we have to create these subwidgets manually to mirror their
# existence in Tk/Tix.
class TixSubWidget(TixWidget):
"""Subwidget class.
This is used to mirror child widgets automatically created
by Tix/Tk as part of a mega-widget in Python (which is not informed
of this)"""
def __init__(self, master, name,
destroy_physically=1, check_intermediate=1):
if check_intermediate:
path = master._subwidget_name(name)
try:
path = path[len(master._w)+1:]
plist = path.split('.')
except:
plist = []
if not check_intermediate:
# immediate descendant
TixWidget.__init__(self, master, None, None, {'name' : name})
else:
# Ensure that the intermediate widgets exist
parent = master
for i in range(len(plist) - 1):
n = '.'.join(plist[:i+1])
try:
w = master._nametowidget(n)
parent = w
except KeyError:
# Create the intermediate widget
parent = TixSubWidget(parent, plist[i],
destroy_physically=0,
check_intermediate=0)
# The Tk widget name is in plist, not in name
if plist:
name = plist[-1]
TixWidget.__init__(self, parent, None, None, {'name' : name})
self.destroy_physically = destroy_physically
def destroy(self):
# For some widgets e.g., a NoteBook, when we call destructors,
# we must be careful not to destroy the frame widget since this
# also destroys the parent NoteBook thus leading to an exception
# in Tkinter when it finally calls Tcl to destroy the NoteBook
for c in self.children.values(): c.destroy()
if self._name in self.master.children:
del self.master.children[self._name]
if self._name in self.master.subwidget_list:
del self.master.subwidget_list[self._name]
if self.destroy_physically:
# This is bypassed only for a few widgets
self.tk.call('destroy', self._w)
# Useful func. to split Tcl lists and return as a dict. From Tkinter.py
def _lst2dict(lst):
dict = {}
for x in lst:
dict[x[0][1:]] = (x[0][1:],) + x[1:]
return dict
# Useful class to create a display style - later shared by many items.
# Contributed by Steffen Kremser
class DisplayStyle:
"""DisplayStyle - handle configuration options shared by
(multiple) Display Items"""
def __init__(self, itemtype, cnf={}, **kw):
master = _default_root # global from Tkinter
if not master and 'refwindow' in cnf: master=cnf['refwindow']
elif not master and 'refwindow' in kw: master= kw['refwindow']
elif not master: raise RuntimeError, "Too early to create display style: no root window"
self.tk = master.tk
self.stylename = self.tk.call('tixDisplayStyle', itemtype,
*self._options(cnf,kw) )
def __str__(self):
return self.stylename
def _options(self, cnf, kw):
if kw and cnf:
cnf = _cnfmerge((cnf, kw))
elif kw:
cnf = kw
opts = ()
for k, v in cnf.items():
opts = opts + ('-'+k, v)
return opts
def delete(self):
self.tk.call(self.stylename, 'delete')
def __setitem__(self,key,value):
self.tk.call(self.stylename, 'configure', '-%s'%key, value)
def config(self, cnf={}, **kw):
return _lst2dict(
self.tk.split(
self.tk.call(
self.stylename, 'configure', *self._options(cnf,kw))))
def __getitem__(self,key):
return self.tk.call(self.stylename, 'cget', '-%s'%key)
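# Illustrative sketch (never executed on import): a DisplayStyle can be
# shared by many display items, e.g. HList entries.  The -refwindow option
# and the foreground option for text items follow the Tix documentation,
# but the concrete values here are assumptions.
def _example_display_style():
    root = Tk()
    hlist = HList(root)      # HList is defined further down in this module
    hlist.pack()
    blue = DisplayStyle(TEXT, refwindow=root, foreground='blue')
    hlist.add('top', itemtype=TEXT, text='styled entry', style=blue)
    root.mainloop()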
######################################################
### The Tix Widget classes - in alphabetical order ###
######################################################
class Balloon(TixWidget):
"""Balloon help widget.
Subwidget Class
--------- -----
label Label
message Message"""
# FIXME: It should inherit -superclass tixShell
def __init__(self, master=None, cnf={}, **kw):
# static seem to be -installcolormap -initwait -statusbar -cursor
static = ['options', 'installcolormap', 'initwait', 'statusbar',
'cursor']
TixWidget.__init__(self, master, 'tixBalloon', static, cnf, kw)
self.subwidget_list['label'] = _dummyLabel(self, 'label',
destroy_physically=0)
self.subwidget_list['message'] = _dummyLabel(self, 'message',
destroy_physically=0)
def bind_widget(self, widget, cnf={}, **kw):
"""Bind balloon widget to another.
One balloon widget may be bound to several widgets at the same time"""
self.tk.call(self._w, 'bind', widget._w, *self._options(cnf, kw))
def unbind_widget(self, widget):
self.tk.call(self._w, 'unbind', widget._w)
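# Illustrative sketch (never executed on import): binding a Balloon to a
# widget.  The -balloonmsg option comes from the tixBalloon documentation;
# the message text is an assumption.
def _example_balloon():
    root = Tk()
    btn = Button(root, text='Quit', command=root.destroy)
    btn.pack()
    tip = Balloon(root)
    tip.bind_widget(btn, balloonmsg='Close the window')
    root.mainloop()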
class ButtonBox(TixWidget):
"""ButtonBox - A container for pushbuttons.
Subwidgets are the buttons added with the add method.
"""
def __init__(self, master=None, cnf={}, **kw):
TixWidget.__init__(self, master, 'tixButtonBox',
['orientation', 'options'], cnf, kw)
def add(self, name, cnf={}, **kw):
"""Add a button with given name to box."""
btn = self.tk.call(self._w, 'add', name, *self._options(cnf, kw))
self.subwidget_list[name] = _dummyButton(self, name)
return btn
def invoke(self, name):
if name in self.subwidget_list:
self.tk.call(self._w, 'invoke', name)
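# Illustrative sketch (never executed on import): each add() call creates a
# button subwidget that is afterwards reachable as an attribute (box.ok,
# box.cancel); the button names and commands below are assumptions.
def _example_buttonbox():
    root = Tk()
    box = ButtonBox(root, orientation=HORIZONTAL)
    box.add('ok', text='OK', command=root.destroy)
    box.add('cancel', text='Cancel', command=root.destroy)
    box.pack(fill=X)
    root.mainloop()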
class ComboBox(TixWidget):
"""ComboBox - an Entry field with a dropdown menu. The user can select a
    choice by either typing in the entry subwidget or selecting from the
listbox subwidget.
Subwidget Class
--------- -----
entry Entry
arrow Button
slistbox ScrolledListBox
tick Button
cross Button : present if created with the fancy option"""
# FIXME: It should inherit -superclass tixLabelWidget
def __init__ (self, master=None, cnf={}, **kw):
TixWidget.__init__(self, master, 'tixComboBox',
['editable', 'dropdown', 'fancy', 'options'],
cnf, kw)
self.subwidget_list['label'] = _dummyLabel(self, 'label')
self.subwidget_list['entry'] = _dummyEntry(self, 'entry')
self.subwidget_list['arrow'] = _dummyButton(self, 'arrow')
self.subwidget_list['slistbox'] = _dummyScrolledListBox(self,
'slistbox')
try:
self.subwidget_list['tick'] = _dummyButton(self, 'tick')
self.subwidget_list['cross'] = _dummyButton(self, 'cross')
except TypeError:
# unavailable when -fancy not specified
pass
# align
def add_history(self, str):
self.tk.call(self._w, 'addhistory', str)
def append_history(self, str):
self.tk.call(self._w, 'appendhistory', str)
def insert(self, index, str):
self.tk.call(self._w, 'insert', index, str)
def pick(self, index):
self.tk.call(self._w, 'pick', index)
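# Illustrative sketch (never executed on import): a drop-down ComboBox
# whose -command callback receives the selected value.  The option names
# follow the tixComboBox documentation; the items are assumptions.
def _example_combobox():
    root = Tk()
    def on_pick(value):
        root.title(value)                # show the chosen value
    cb = ComboBox(root, label='Colour:', dropdown=1, editable=1,
                  command=on_pick)
    for colour in ('red', 'green', 'blue'):
        cb.insert(END, colour)
    cb.pack()
    root.mainloop()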
class Control(TixWidget):
"""Control - An entry field with value change arrows. The user can
adjust the value by pressing the two arrow buttons or by entering
the value directly into the entry. The new value will be checked
against the user-defined upper and lower limits.
Subwidget Class
--------- -----
incr Button
decr Button
entry Entry
label Label"""
# FIXME: It should inherit -superclass tixLabelWidget
def __init__ (self, master=None, cnf={}, **kw):
TixWidget.__init__(self, master, 'tixControl', ['options'], cnf, kw)
self.subwidget_list['incr'] = _dummyButton(self, 'incr')
self.subwidget_list['decr'] = _dummyButton(self, 'decr')
self.subwidget_list['label'] = _dummyLabel(self, 'label')
self.subwidget_list['entry'] = _dummyEntry(self, 'entry')
def decrement(self):
self.tk.call(self._w, 'decr')
def increment(self):
self.tk.call(self._w, 'incr')
def invoke(self):
self.tk.call(self._w, 'invoke')
def update(self):
self.tk.call(self._w, 'update')
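# Illustrative sketch (never executed on import): a Control is a spin-box
# style entry; -min, -max and -integer are documented tixControl options,
# while the limits chosen here are assumptions.
def _example_control():
    root = Tk()
    ctl = Control(root, label='Count:', integer=1, min=0, max=10)
    ctl.pack()
    root.mainloop()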
class DirList(TixWidget):
"""DirList - displays a list view of a directory, its previous
directories and its sub-directories. The user can choose one of
the directories displayed in the list or change to another directory.
Subwidget Class
--------- -----
hlist HList
hsb Scrollbar
vsb Scrollbar"""
# FIXME: It should inherit -superclass tixScrolledHList
def __init__(self, master, cnf={}, **kw):
TixWidget.__init__(self, master, 'tixDirList', ['options'], cnf, kw)
self.subwidget_list['hlist'] = _dummyHList(self, 'hlist')
self.subwidget_list['vsb'] = _dummyScrollbar(self, 'vsb')
self.subwidget_list['hsb'] = _dummyScrollbar(self, 'hsb')
def chdir(self, dir):
self.tk.call(self._w, 'chdir', dir)
class DirTree(TixWidget):
"""DirTree - Directory Listing in a hierarchical view.
Displays a tree view of a directory, its previous directories and its
sub-directories. The user can choose one of the directories displayed
in the list or change to another directory.
Subwidget Class
--------- -----
hlist HList
hsb Scrollbar
vsb Scrollbar"""
# FIXME: It should inherit -superclass tixScrolledHList
def __init__(self, master, cnf={}, **kw):
TixWidget.__init__(self, master, 'tixDirTree', ['options'], cnf, kw)
self.subwidget_list['hlist'] = _dummyHList(self, 'hlist')
self.subwidget_list['vsb'] = _dummyScrollbar(self, 'vsb')
self.subwidget_list['hsb'] = _dummyScrollbar(self, 'hsb')
def chdir(self, dir):
self.tk.call(self._w, 'chdir', dir)
class DirSelectBox(TixWidget):
"""DirSelectBox - Motif style file select box.
It is generally used for
the user to choose a file. FileSelectBox stores the files mostly
recently selected into a ComboBox widget so that they can be quickly
selected again.
Subwidget Class
--------- -----
selection ComboBox
filter ComboBox
dirlist ScrolledListBox
filelist ScrolledListBox"""
def __init__(self, master, cnf={}, **kw):
TixWidget.__init__(self, master, 'tixDirSelectBox', ['options'], cnf, kw)
self.subwidget_list['dirlist'] = _dummyDirList(self, 'dirlist')
self.subwidget_list['dircbx'] = _dummyFileComboBox(self, 'dircbx')
class ExFileSelectBox(TixWidget):
"""ExFileSelectBox - MS Windows style file select box.
    It provides a convenient method for the user to select files.
Subwidget Class
--------- -----
cancel Button
ok Button
hidden Checkbutton
types ComboBox
dir ComboBox
file ComboBox
dirlist ScrolledListBox
filelist ScrolledListBox"""
def __init__(self, master, cnf={}, **kw):
TixWidget.__init__(self, master, 'tixExFileSelectBox', ['options'], cnf, kw)
self.subwidget_list['cancel'] = _dummyButton(self, 'cancel')
self.subwidget_list['ok'] = _dummyButton(self, 'ok')
self.subwidget_list['hidden'] = _dummyCheckbutton(self, 'hidden')
self.subwidget_list['types'] = _dummyComboBox(self, 'types')
self.subwidget_list['dir'] = _dummyComboBox(self, 'dir')
self.subwidget_list['dirlist'] = _dummyDirList(self, 'dirlist')
self.subwidget_list['file'] = _dummyComboBox(self, 'file')
self.subwidget_list['filelist'] = _dummyScrolledListBox(self, 'filelist')
def filter(self):
self.tk.call(self._w, 'filter')
def invoke(self):
self.tk.call(self._w, 'invoke')
# Should inherit from a Dialog class
class DirSelectDialog(TixWidget):
"""The DirSelectDialog widget presents the directories in the file
system in a dialog window. The user can use this dialog window to
navigate through the file system to select the desired directory.
Subwidgets Class
---------- -----
dirbox DirSelectDialog"""
# FIXME: It should inherit -superclass tixDialogShell
def __init__(self, master, cnf={}, **kw):
TixWidget.__init__(self, master, 'tixDirSelectDialog',
['options'], cnf, kw)
self.subwidget_list['dirbox'] = _dummyDirSelectBox(self, 'dirbox')
# cancel and ok buttons are missing
def popup(self):
self.tk.call(self._w, 'popup')
def popdown(self):
self.tk.call(self._w, 'popdown')
# Should inherit from a Dialog class
class ExFileSelectDialog(TixWidget):
"""ExFileSelectDialog - MS Windows style file select dialog.
    It provides a convenient method for the user to select files.
Subwidgets Class
---------- -----
fsbox ExFileSelectBox"""
# FIXME: It should inherit -superclass tixDialogShell
def __init__(self, master, cnf={}, **kw):
TixWidget.__init__(self, master, 'tixExFileSelectDialog',
['options'], cnf, kw)
self.subwidget_list['fsbox'] = _dummyExFileSelectBox(self, 'fsbox')
def popup(self):
self.tk.call(self._w, 'popup')
def popdown(self):
self.tk.call(self._w, 'popdown')
class FileSelectBox(TixWidget):
"""ExFileSelectBox - Motif style file select box.
It is generally used for
the user to choose a file. FileSelectBox stores the files mostly
recently selected into a ComboBox widget so that they can be quickly
selected again.
Subwidget Class
--------- -----
selection ComboBox
filter ComboBox
dirlist ScrolledListBox
filelist ScrolledListBox"""
def __init__(self, master, cnf={}, **kw):
TixWidget.__init__(self, master, 'tixFileSelectBox', ['options'], cnf, kw)
self.subwidget_list['dirlist'] = _dummyScrolledListBox(self, 'dirlist')
self.subwidget_list['filelist'] = _dummyScrolledListBox(self, 'filelist')
self.subwidget_list['filter'] = _dummyComboBox(self, 'filter')
self.subwidget_list['selection'] = _dummyComboBox(self, 'selection')
def apply_filter(self): # name of subwidget is same as command
self.tk.call(self._w, 'filter')
def invoke(self):
self.tk.call(self._w, 'invoke')
# Should inherit from a Dialog class
class FileSelectDialog(TixWidget):
"""FileSelectDialog - Motif style file select dialog.
Subwidgets Class
---------- -----
btns StdButtonBox
fsbox FileSelectBox"""
# FIXME: It should inherit -superclass tixStdDialogShell
def __init__(self, master, cnf={}, **kw):
TixWidget.__init__(self, master, 'tixFileSelectDialog',
['options'], cnf, kw)
self.subwidget_list['btns'] = _dummyStdButtonBox(self, 'btns')
self.subwidget_list['fsbox'] = _dummyFileSelectBox(self, 'fsbox')
def popup(self):
self.tk.call(self._w, 'popup')
def popdown(self):
self.tk.call(self._w, 'popdown')
class FileEntry(TixWidget):
"""FileEntry - Entry field with button that invokes a FileSelectDialog.
The user can type in the filename manually. Alternatively, the user can
press the button widget that sits next to the entry, which will bring
up a file selection dialog.
Subwidgets Class
---------- -----
button Button
entry Entry"""
# FIXME: It should inherit -superclass tixLabelWidget
def __init__(self, master, cnf={}, **kw):
TixWidget.__init__(self, master, 'tixFileEntry',
['dialogtype', 'options'], cnf, kw)
self.subwidget_list['button'] = _dummyButton(self, 'button')
self.subwidget_list['entry'] = _dummyEntry(self, 'entry')
def invoke(self):
self.tk.call(self._w, 'invoke')
def file_dialog(self):
# FIXME: return python object
pass
class HList(TixWidget, XView, YView):
"""HList - Hierarchy display widget can be used to display any data
that have a hierarchical structure, for example, file system directory
trees. The list entries are indented and connected by branch lines
    according to their places in the hierarchy.
Subwidgets - None"""
def __init__ (self,master=None,cnf={}, **kw):
TixWidget.__init__(self, master, 'tixHList',
['columns', 'options'], cnf, kw)
def add(self, entry, cnf={}, **kw):
return self.tk.call(self._w, 'add', entry, *self._options(cnf, kw))
def add_child(self, parent=None, cnf={}, **kw):
if not parent:
parent = ''
return self.tk.call(
self._w, 'addchild', parent, *self._options(cnf, kw))
def anchor_set(self, entry):
self.tk.call(self._w, 'anchor', 'set', entry)
def anchor_clear(self):
self.tk.call(self._w, 'anchor', 'clear')
def column_width(self, col=0, width=None, chars=None):
if not chars:
return self.tk.call(self._w, 'column', 'width', col, width)
else:
return self.tk.call(self._w, 'column', 'width', col,
'-char', chars)
def delete_all(self):
self.tk.call(self._w, 'delete', 'all')
def delete_entry(self, entry):
self.tk.call(self._w, 'delete', 'entry', entry)
def delete_offsprings(self, entry):
self.tk.call(self._w, 'delete', 'offsprings', entry)
def delete_siblings(self, entry):
self.tk.call(self._w, 'delete', 'siblings', entry)
def dragsite_set(self, index):
self.tk.call(self._w, 'dragsite', 'set', index)
def dragsite_clear(self):
self.tk.call(self._w, 'dragsite', 'clear')
def dropsite_set(self, index):
self.tk.call(self._w, 'dropsite', 'set', index)
def dropsite_clear(self):
self.tk.call(self._w, 'dropsite', 'clear')
def header_create(self, col, cnf={}, **kw):
self.tk.call(self._w, 'header', 'create', col, *self._options(cnf, kw))
def header_configure(self, col, cnf={}, **kw):
if cnf is None:
return _lst2dict(
self.tk.split(
self.tk.call(self._w, 'header', 'configure', col)))
self.tk.call(self._w, 'header', 'configure', col,
*self._options(cnf, kw))
def header_cget(self, col, opt):
return self.tk.call(self._w, 'header', 'cget', col, opt)
def header_exists(self, col):
return self.tk.call(self._w, 'header', 'exists', col)
def header_delete(self, col):
self.tk.call(self._w, 'header', 'delete', col)
def header_size(self, col):
return self.tk.call(self._w, 'header', 'size', col)
def hide_entry(self, entry):
self.tk.call(self._w, 'hide', 'entry', entry)
def indicator_create(self, entry, cnf={}, **kw):
self.tk.call(
self._w, 'indicator', 'create', entry, *self._options(cnf, kw))
def indicator_configure(self, entry, cnf={}, **kw):
if cnf is None:
return _lst2dict(
self.tk.split(
self.tk.call(self._w, 'indicator', 'configure', entry)))
self.tk.call(
self._w, 'indicator', 'configure', entry, *self._options(cnf, kw))
def indicator_cget(self, entry, opt):
return self.tk.call(self._w, 'indicator', 'cget', entry, opt)
def indicator_exists(self, entry):
return self.tk.call (self._w, 'indicator', 'exists', entry)
def indicator_delete(self, entry):
self.tk.call(self._w, 'indicator', 'delete', entry)
def indicator_size(self, entry):
return self.tk.call(self._w, 'indicator', 'size', entry)
def info_anchor(self):
return self.tk.call(self._w, 'info', 'anchor')
def info_bbox(self, entry):
return self._getints(
self.tk.call(self._w, 'info', 'bbox', entry)) or None
def info_children(self, entry=None):
c = self.tk.call(self._w, 'info', 'children', entry)
return self.tk.splitlist(c)
def info_data(self, entry):
return self.tk.call(self._w, 'info', 'data', entry)
def info_dragsite(self):
return self.tk.call(self._w, 'info', 'dragsite')
def info_dropsite(self):
return self.tk.call(self._w, 'info', 'dropsite')
def info_exists(self, entry):
return self.tk.call(self._w, 'info', 'exists', entry)
def info_hidden(self, entry):
return self.tk.call(self._w, 'info', 'hidden', entry)
def info_next(self, entry):
return self.tk.call(self._w, 'info', 'next', entry)
def info_parent(self, entry):
return self.tk.call(self._w, 'info', 'parent', entry)
def info_prev(self, entry):
return self.tk.call(self._w, 'info', 'prev', entry)
def info_selection(self):
c = self.tk.call(self._w, 'info', 'selection')
return self.tk.splitlist(c)
def item_cget(self, entry, col, opt):
return self.tk.call(self._w, 'item', 'cget', entry, col, opt)
def item_configure(self, entry, col, cnf={}, **kw):
if cnf is None:
return _lst2dict(
self.tk.split(
self.tk.call(self._w, 'item', 'configure', entry, col)))
self.tk.call(self._w, 'item', 'configure', entry, col,
*self._options(cnf, kw))
def item_create(self, entry, col, cnf={}, **kw):
self.tk.call(
self._w, 'item', 'create', entry, col, *self._options(cnf, kw))
def item_exists(self, entry, col):
return self.tk.call(self._w, 'item', 'exists', entry, col)
def item_delete(self, entry, col):
self.tk.call(self._w, 'item', 'delete', entry, col)
def entrycget(self, entry, opt):
return self.tk.call(self._w, 'entrycget', entry, opt)
def entryconfigure(self, entry, cnf={}, **kw):
if cnf is None:
return _lst2dict(
self.tk.split(
self.tk.call(self._w, 'entryconfigure', entry)))
self.tk.call(self._w, 'entryconfigure', entry,
*self._options(cnf, kw))
def nearest(self, y):
return self.tk.call(self._w, 'nearest', y)
def see(self, entry):
self.tk.call(self._w, 'see', entry)
def selection_clear(self, cnf={}, **kw):
self.tk.call(self._w, 'selection', 'clear', *self._options(cnf, kw))
def selection_includes(self, entry):
return self.tk.call(self._w, 'selection', 'includes', entry)
def selection_set(self, first, last=None):
self.tk.call(self._w, 'selection', 'set', first, last)
def show_entry(self, entry):
return self.tk.call(self._w, 'show', 'entry', entry)
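# Illustrative sketch (never executed on import): building a small
# hierarchy in an HList.  Entry path names use '.' (the default HList
# separator); the labels are assumptions.
def _example_hlist():
    root = Tk()
    h = HList(root)
    h.pack(fill=BOTH, expand=1)
    h.add('fruit', text='Fruit')
    h.add('fruit.apple', text='Apple')
    h.add('fruit.pear', text='Pear')
    root.mainloop()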
class InputOnly(TixWidget):
"""InputOnly - Invisible widget. Unix only.
Subwidgets - None"""
def __init__ (self,master=None,cnf={}, **kw):
TixWidget.__init__(self, master, 'tixInputOnly', None, cnf, kw)
class LabelEntry(TixWidget):
"""LabelEntry - Entry field with label. Packages an entry widget
    and a label into one mega widget. It can be used to simplify
    the creation of ``entry-form'' style interfaces.
Subwidgets Class
---------- -----
label Label
entry Entry"""
def __init__ (self,master=None,cnf={}, **kw):
TixWidget.__init__(self, master, 'tixLabelEntry',
['labelside','options'], cnf, kw)
self.subwidget_list['label'] = _dummyLabel(self, 'label')
self.subwidget_list['entry'] = _dummyEntry(self, 'entry')
class LabelFrame(TixWidget):
"""LabelFrame - Labelled Frame container. Packages a frame widget
and a label into one mega widget. To create widgets inside a
LabelFrame widget, one creates the new widgets relative to the
frame subwidget and manage them inside the frame subwidget.
Subwidgets Class
---------- -----
label Label
frame Frame"""
def __init__ (self,master=None,cnf={}, **kw):
TixWidget.__init__(self, master, 'tixLabelFrame',
['labelside','options'], cnf, kw)
self.subwidget_list['label'] = _dummyLabel(self, 'label')
self.subwidget_list['frame'] = _dummyFrame(self, 'frame')
class ListNoteBook(TixWidget):
"""A ListNoteBook widget is very similar to the TixNoteBook widget:
it can be used to display many windows in a limited space using a
notebook metaphor. The notebook is divided into a stack of pages
(windows). At one time only one of these pages can be shown.
The user can navigate through these pages by
choosing the name of the desired page in the hlist subwidget."""
def __init__(self, master, cnf={}, **kw):
TixWidget.__init__(self, master, 'tixListNoteBook', ['options'], cnf, kw)
# Is this necessary? It's not an exposed subwidget in Tix.
self.subwidget_list['pane'] = _dummyPanedWindow(self, 'pane',
destroy_physically=0)
self.subwidget_list['hlist'] = _dummyHList(self, 'hlist')
self.subwidget_list['shlist'] = _dummyScrolledHList(self, 'shlist')
def add(self, name, cnf={}, **kw):
self.tk.call(self._w, 'add', name, *self._options(cnf, kw))
self.subwidget_list[name] = TixSubWidget(self, name)
return self.subwidget_list[name]
def page(self, name):
return self.subwidget(name)
def pages(self):
# Can't call subwidgets_all directly because we don't want .nbframe
names = self.tk.split(self.tk.call(self._w, 'pages'))
ret = []
for x in names:
ret.append(self.subwidget(x))
return ret
def raise_page(self, name): # raise is a python keyword
self.tk.call(self._w, 'raise', name)
class Meter(TixWidget):
"""The Meter widget can be used to show the progress of a background
job which may take a long time to execute.
"""
def __init__(self, master=None, cnf={}, **kw):
TixWidget.__init__(self, master, 'tixMeter',
['options'], cnf, kw)
class NoteBook(TixWidget):
"""NoteBook - Multi-page container widget (tabbed notebook metaphor).
Subwidgets Class
---------- -----
nbframe NoteBookFrame
<pages> page widgets added dynamically with the add method"""
def __init__ (self,master=None,cnf={}, **kw):
TixWidget.__init__(self,master,'tixNoteBook', ['options'], cnf, kw)
self.subwidget_list['nbframe'] = TixSubWidget(self, 'nbframe',
destroy_physically=0)
def add(self, name, cnf={}, **kw):
self.tk.call(self._w, 'add', name, *self._options(cnf, kw))
self.subwidget_list[name] = TixSubWidget(self, name)
return self.subwidget_list[name]
def delete(self, name):
self.tk.call(self._w, 'delete', name)
self.subwidget_list[name].destroy()
del self.subwidget_list[name]
def page(self, name):
return self.subwidget(name)
def pages(self):
# Can't call subwidgets_all directly because we don't want .nbframe
names = self.tk.split(self.tk.call(self._w, 'pages'))
ret = []
for x in names:
ret.append(self.subwidget(x))
return ret
def raise_page(self, name): # raise is a python keyword
self.tk.call(self._w, 'raise', name)
def raised(self):
return self.tk.call(self._w, 'raised')
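# Illustrative sketch (never executed on import): pages are created with
# add() and filled by packing ordinary widgets into the returned page
# frame; the page names and labels are assumptions.
def _example_notebook():
    root = Tk()
    nb = NoteBook(root)
    nb.pack(fill=BOTH, expand=1)
    page = nb.add('hello', label='Hello')
    Label(page, text='Hello, world').pack()
    nb.add('empty', label='Empty')
    nb.raise_page('hello')
    root.mainloop()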
class NoteBookFrame(TixWidget):
# FIXME: This is dangerous to expose to be called on its own.
pass
class OptionMenu(TixWidget):
"""OptionMenu - creates a menu button of options.
Subwidget Class
--------- -----
menubutton Menubutton
menu Menu"""
def __init__(self, master, cnf={}, **kw):
TixWidget.__init__(self, master, 'tixOptionMenu',
['labelside', 'options'], cnf, kw)
self.subwidget_list['menubutton'] = _dummyMenubutton(self, 'menubutton')
self.subwidget_list['menu'] = _dummyMenu(self, 'menu')
def add_command(self, name, cnf={}, **kw):
self.tk.call(self._w, 'add', 'command', name, *self._options(cnf, kw))
def add_separator(self, name, cnf={}, **kw):
self.tk.call(self._w, 'add', 'separator', name, *self._options(cnf, kw))
def delete(self, name):
self.tk.call(self._w, 'delete', name)
def disable(self, name):
self.tk.call(self._w, 'disable', name)
def enable(self, name):
self.tk.call(self._w, 'enable', name)
class PanedWindow(TixWidget):
"""PanedWindow - Multi-pane container widget
allows the user to interactively manipulate the sizes of several
    panes. The panes can be arranged either vertically or horizontally. The
user changes the sizes of the panes by dragging the resize handle
between two panes.
Subwidgets Class
---------- -----
<panes> g/p widgets added dynamically with the add method."""
def __init__(self, master, cnf={}, **kw):
TixWidget.__init__(self, master, 'tixPanedWindow', ['orientation', 'options'], cnf, kw)
# add delete forget panecget paneconfigure panes setsize
def add(self, name, cnf={}, **kw):
self.tk.call(self._w, 'add', name, *self._options(cnf, kw))
self.subwidget_list[name] = TixSubWidget(self, name,
check_intermediate=0)
return self.subwidget_list[name]
def delete(self, name):
self.tk.call(self._w, 'delete', name)
self.subwidget_list[name].destroy()
del self.subwidget_list[name]
def forget(self, name):
self.tk.call(self._w, 'forget', name)
def panecget(self, entry, opt):
return self.tk.call(self._w, 'panecget', entry, opt)
def paneconfigure(self, entry, cnf={}, **kw):
if cnf is None:
return _lst2dict(
self.tk.split(
self.tk.call(self._w, 'paneconfigure', entry)))
self.tk.call(self._w, 'paneconfigure', entry, *self._options(cnf, kw))
def panes(self):
names = self.tk.splitlist(self.tk.call(self._w, 'panes'))
return [self.subwidget(x) for x in names]
class PopupMenu(TixWidget):
"""PopupMenu widget can be used as a replacement of the tk_popup command.
    The advantage of the Tix PopupMenu widget is that it requires less
    application code to manipulate.
Subwidgets Class
---------- -----
menubutton Menubutton
menu Menu"""
# FIXME: It should inherit -superclass tixShell
def __init__(self, master, cnf={}, **kw):
TixWidget.__init__(self, master, 'tixPopupMenu', ['options'], cnf, kw)
self.subwidget_list['menubutton'] = _dummyMenubutton(self, 'menubutton')
self.subwidget_list['menu'] = _dummyMenu(self, 'menu')
def bind_widget(self, widget):
self.tk.call(self._w, 'bind', widget._w)
def unbind_widget(self, widget):
self.tk.call(self._w, 'unbind', widget._w)
def post_widget(self, widget, x, y):
self.tk.call(self._w, 'post', widget._w, x, y)
class ResizeHandle(TixWidget):
"""Internal widget to draw resize handles on Scrolled widgets."""
def __init__(self, master, cnf={}, **kw):
# There seems to be a Tix bug rejecting the configure method
# Let's try making the flags -static
flags = ['options', 'command', 'cursorfg', 'cursorbg',
'handlesize', 'hintcolor', 'hintwidth',
'x', 'y']
# In fact, x y height width are configurable
TixWidget.__init__(self, master, 'tixResizeHandle',
flags, cnf, kw)
def attach_widget(self, widget):
self.tk.call(self._w, 'attachwidget', widget._w)
def detach_widget(self, widget):
self.tk.call(self._w, 'detachwidget', widget._w)
def hide(self, widget):
self.tk.call(self._w, 'hide', widget._w)
def show(self, widget):
self.tk.call(self._w, 'show', widget._w)
class ScrolledHList(TixWidget):
"""ScrolledHList - HList with automatic scrollbars."""
# FIXME: It should inherit -superclass tixScrolledWidget
def __init__(self, master, cnf={}, **kw):
TixWidget.__init__(self, master, 'tixScrolledHList', ['options'],
cnf, kw)
self.subwidget_list['hlist'] = _dummyHList(self, 'hlist')
self.subwidget_list['vsb'] = _dummyScrollbar(self, 'vsb')
self.subwidget_list['hsb'] = _dummyScrollbar(self, 'hsb')
class ScrolledListBox(TixWidget):
"""ScrolledListBox - Listbox with automatic scrollbars."""
# FIXME: It should inherit -superclass tixScrolledWidget
def __init__(self, master, cnf={}, **kw):
TixWidget.__init__(self, master, 'tixScrolledListBox', ['options'], cnf, kw)
self.subwidget_list['listbox'] = _dummyListbox(self, 'listbox')
self.subwidget_list['vsb'] = _dummyScrollbar(self, 'vsb')
self.subwidget_list['hsb'] = _dummyScrollbar(self, 'hsb')
class ScrolledText(TixWidget):
"""ScrolledText - Text with automatic scrollbars."""
# FIXME: It should inherit -superclass tixScrolledWidget
def __init__(self, master, cnf={}, **kw):
TixWidget.__init__(self, master, 'tixScrolledText', ['options'], cnf, kw)
self.subwidget_list['text'] = _dummyText(self, 'text')
self.subwidget_list['vsb'] = _dummyScrollbar(self, 'vsb')
self.subwidget_list['hsb'] = _dummyScrollbar(self, 'hsb')
class ScrolledTList(TixWidget):
"""ScrolledTList - TList with automatic scrollbars."""
# FIXME: It should inherit -superclass tixScrolledWidget
def __init__(self, master, cnf={}, **kw):
TixWidget.__init__(self, master, 'tixScrolledTList', ['options'],
cnf, kw)
self.subwidget_list['tlist'] = _dummyTList(self, 'tlist')
self.subwidget_list['vsb'] = _dummyScrollbar(self, 'vsb')
self.subwidget_list['hsb'] = _dummyScrollbar(self, 'hsb')
class ScrolledWindow(TixWidget):
"""ScrolledWindow - Window with automatic scrollbars."""
# FIXME: It should inherit -superclass tixScrolledWidget
def __init__(self, master, cnf={}, **kw):
TixWidget.__init__(self, master, 'tixScrolledWindow', ['options'], cnf, kw)
self.subwidget_list['window'] = _dummyFrame(self, 'window')
self.subwidget_list['vsb'] = _dummyScrollbar(self, 'vsb')
self.subwidget_list['hsb'] = _dummyScrollbar(self, 'hsb')
class Select(TixWidget):
"""Select - Container of button subwidgets. It can be used to provide
radio-box or check-box style of selection options for the user.
Subwidgets are buttons added dynamically using the add method."""
# FIXME: It should inherit -superclass tixLabelWidget
def __init__(self, master, cnf={}, **kw):
TixWidget.__init__(self, master, 'tixSelect',
['allowzero', 'radio', 'orientation', 'labelside',
'options'],
cnf, kw)
self.subwidget_list['label'] = _dummyLabel(self, 'label')
def add(self, name, cnf={}, **kw):
self.tk.call(self._w, 'add', name, *self._options(cnf, kw))
self.subwidget_list[name] = _dummyButton(self, name)
return self.subwidget_list[name]
def invoke(self, name):
self.tk.call(self._w, 'invoke', name)
class Shell(TixWidget):
"""Toplevel window.
Subwidgets - None"""
def __init__ (self,master=None,cnf={}, **kw):
TixWidget.__init__(self, master, 'tixShell', ['options', 'title'], cnf, kw)
class DialogShell(TixWidget):
"""Toplevel window, with popup popdown and center methods.
It tells the window manager that it is a dialog window and should be
    treated specially. The exact treatment depends on the window
    manager.
Subwidgets - None"""
# FIXME: It should inherit from Shell
def __init__ (self,master=None,cnf={}, **kw):
TixWidget.__init__(self, master,
'tixDialogShell',
['options', 'title', 'mapped',
'minheight', 'minwidth',
'parent', 'transient'], cnf, kw)
def popdown(self):
self.tk.call(self._w, 'popdown')
def popup(self):
self.tk.call(self._w, 'popup')
def center(self):
self.tk.call(self._w, 'center')
class StdButtonBox(TixWidget):
"""StdButtonBox - Standard Button Box (OK, Apply, Cancel and Help) """
def __init__(self, master=None, cnf={}, **kw):
TixWidget.__init__(self, master, 'tixStdButtonBox',
['orientation', 'options'], cnf, kw)
self.subwidget_list['ok'] = _dummyButton(self, 'ok')
self.subwidget_list['apply'] = _dummyButton(self, 'apply')
self.subwidget_list['cancel'] = _dummyButton(self, 'cancel')
self.subwidget_list['help'] = _dummyButton(self, 'help')
def invoke(self, name):
if name in self.subwidget_list:
self.tk.call(self._w, 'invoke', name)
class TList(TixWidget, XView, YView):
"""TList - Hierarchy display widget which can be
used to display data in a tabular format. The list entries of a TList
widget are similar to the entries in the Tk listbox widget. The main
differences are (1) the TList widget can display the list entries in a
two dimensional format and (2) you can use graphical images as well as
multiple colors and fonts for the list entries.
Subwidgets - None"""
def __init__ (self,master=None,cnf={}, **kw):
TixWidget.__init__(self, master, 'tixTList', ['options'], cnf, kw)
def active_set(self, index):
self.tk.call(self._w, 'active', 'set', index)
def active_clear(self):
self.tk.call(self._w, 'active', 'clear')
def anchor_set(self, index):
self.tk.call(self._w, 'anchor', 'set', index)
def anchor_clear(self):
self.tk.call(self._w, 'anchor', 'clear')
def delete(self, from_, to=None):
self.tk.call(self._w, 'delete', from_, to)
def dragsite_set(self, index):
self.tk.call(self._w, 'dragsite', 'set', index)
def dragsite_clear(self):
self.tk.call(self._w, 'dragsite', 'clear')
def dropsite_set(self, index):
self.tk.call(self._w, 'dropsite', 'set', index)
def dropsite_clear(self):
self.tk.call(self._w, 'dropsite', 'clear')
def insert(self, index, cnf={}, **kw):
self.tk.call(self._w, 'insert', index, *self._options(cnf, kw))
def info_active(self):
return self.tk.call(self._w, 'info', 'active')
def info_anchor(self):
return self.tk.call(self._w, 'info', 'anchor')
def info_down(self, index):
return self.tk.call(self._w, 'info', 'down', index)
def info_left(self, index):
return self.tk.call(self._w, 'info', 'left', index)
def info_right(self, index):
return self.tk.call(self._w, 'info', 'right', index)
def info_selection(self):
c = self.tk.call(self._w, 'info', 'selection')
return self.tk.splitlist(c)
def info_size(self):
return self.tk.call(self._w, 'info', 'size')
def info_up(self, index):
return self.tk.call(self._w, 'info', 'up', index)
def nearest(self, x, y):
return self.tk.call(self._w, 'nearest', x, y)
def see(self, index):
self.tk.call(self._w, 'see', index)
def selection_clear(self, cnf={}, **kw):
self.tk.call(self._w, 'selection', 'clear', *self._options(cnf, kw))
def selection_includes(self, index):
return self.tk.call(self._w, 'selection', 'includes', index)
def selection_set(self, first, last=None):
self.tk.call(self._w, 'selection', 'set', first, last)
class Tree(TixWidget):
"""Tree - The tixTree widget can be used to display hierachical
data in a tree form. The user can adjust
the view of the tree by opening or closing parts of the tree."""
# FIXME: It should inherit -superclass tixScrolledWidget
def __init__(self, master=None, cnf={}, **kw):
TixWidget.__init__(self, master, 'tixTree',
['options'], cnf, kw)
self.subwidget_list['hlist'] = _dummyHList(self, 'hlist')
self.subwidget_list['vsb'] = _dummyScrollbar(self, 'vsb')
self.subwidget_list['hsb'] = _dummyScrollbar(self, 'hsb')
def autosetmode(self):
'''This command calls the setmode method for all the entries in this
Tree widget: if an entry has no child entries, its mode is set to
none. Otherwise, if the entry has any hidden child entries, its mode is
set to open; otherwise its mode is set to close.'''
self.tk.call(self._w, 'autosetmode')
def close(self, entrypath):
'''Close the entry given by entryPath if its mode is close.'''
self.tk.call(self._w, 'close', entrypath)
def getmode(self, entrypath):
'''Returns the current mode of the entry given by entryPath.'''
return self.tk.call(self._w, 'getmode', entrypath)
def open(self, entrypath):
'''Open the entry given by entryPath if its mode is open.'''
self.tk.call(self._w, 'open', entrypath)
def setmode(self, entrypath, mode='none'):
'''This command is used to indicate whether the entry given by
entryPath has children entries and whether the children are visible. mode
must be one of open, close or none. If mode is set to open, a (+)
indicator is drawn next to the entry. If mode is set to close, a (-)
indicator is drawn next to the entry. If mode is set to none, no
indicators will be drawn for this entry. The default mode is none. The
open mode indicates the entry has hidden children and this entry can be
opened by the user. The close mode indicates that all the children of the
entry are now visible and the entry can be closed by the user.'''
self.tk.call(self._w, 'setmode', entrypath, mode)
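# Illustrative sketch (never executed on import): entries are added through
# the hlist subwidget and autosetmode() then draws the (+)/(-) indicators;
# the entry paths are assumptions.
def _example_tree():
    root = Tk()
    tree = Tree(root)
    tree.pack(fill=BOTH, expand=1)
    tree.hlist.add('top', text='Top')
    tree.hlist.add('top.child', text='Child')
    tree.autosetmode()
    root.mainloop()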
# Could try subclassing Tree for CheckList - would need another arg to init
class CheckList(TixWidget):
"""The CheckList widget
displays a list of items to be selected by the user. CheckList acts
similarly to the Tk checkbutton or radiobutton widgets, except it is
capable of handling many more items than checkbuttons or radiobuttons.
"""
# FIXME: It should inherit -superclass tixTree
def __init__(self, master=None, cnf={}, **kw):
TixWidget.__init__(self, master, 'tixCheckList',
['options', 'radio'], cnf, kw)
self.subwidget_list['hlist'] = _dummyHList(self, 'hlist')
self.subwidget_list['vsb'] = _dummyScrollbar(self, 'vsb')
self.subwidget_list['hsb'] = _dummyScrollbar(self, 'hsb')
def autosetmode(self):
'''This command calls the setmode method for all the entries in this
Tree widget: if an entry has no child entries, its mode is set to
none. Otherwise, if the entry has any hidden child entries, its mode is
set to open; otherwise its mode is set to close.'''
self.tk.call(self._w, 'autosetmode')
def close(self, entrypath):
'''Close the entry given by entryPath if its mode is close.'''
self.tk.call(self._w, 'close', entrypath)
def getmode(self, entrypath):
'''Returns the current mode of the entry given by entryPath.'''
return self.tk.call(self._w, 'getmode', entrypath)
def open(self, entrypath):
'''Open the entry given by entryPath if its mode is open.'''
self.tk.call(self._w, 'open', entrypath)
def getselection(self, mode='on'):
        '''Returns a list of items whose status matches mode. If mode is
        not specified, the list of items in the "on" status will be returned.
        Mode can be on, off or default.'''
c = self.tk.split(self.tk.call(self._w, 'getselection', mode))
return self.tk.splitlist(c)
def getstatus(self, entrypath):
'''Returns the current status of entryPath.'''
return self.tk.call(self._w, 'getstatus', entrypath)
def setstatus(self, entrypath, mode='on'):
        '''Sets the status of entryPath to mode. A bitmap will be
        displayed next to the entry according to whether its status is
        on, off or default.'''
self.tk.call(self._w, 'setstatus', entrypath, mode)
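# Illustrative sketch (never executed on import): CheckList entries are
# conventionally created with itemtype=IMAGETEXT before setstatus() attaches
# a check indicator to them; the item names are assumptions.
def _example_checklist():
    root = Tk()
    cl = CheckList(root)
    cl.pack(fill=BOTH, expand=1)
    cl.hlist.add('spam', itemtype=IMAGETEXT, text='Spam')
    cl.hlist.add('eggs', itemtype=IMAGETEXT, text='Eggs')
    cl.setstatus('spam', 'on')
    cl.setstatus('eggs', 'off')
    root.mainloop()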
###########################################################################
### The subclassing below is used to instantiate the subwidgets in each ###
### mega widget. This allows us to access their methods directly. ###
###########################################################################
class _dummyButton(Button, TixSubWidget):
def __init__(self, master, name, destroy_physically=1):
TixSubWidget.__init__(self, master, name, destroy_physically)
class _dummyCheckbutton(Checkbutton, TixSubWidget):
def __init__(self, master, name, destroy_physically=1):
TixSubWidget.__init__(self, master, name, destroy_physically)
class _dummyEntry(Entry, TixSubWidget):
def __init__(self, master, name, destroy_physically=1):
TixSubWidget.__init__(self, master, name, destroy_physically)
class _dummyFrame(Frame, TixSubWidget):
def __init__(self, master, name, destroy_physically=1):
TixSubWidget.__init__(self, master, name, destroy_physically)
class _dummyLabel(Label, TixSubWidget):
def __init__(self, master, name, destroy_physically=1):
TixSubWidget.__init__(self, master, name, destroy_physically)
class _dummyListbox(Listbox, TixSubWidget):
def __init__(self, master, name, destroy_physically=1):
TixSubWidget.__init__(self, master, name, destroy_physically)
class _dummyMenu(Menu, TixSubWidget):
def __init__(self, master, name, destroy_physically=1):
TixSubWidget.__init__(self, master, name, destroy_physically)
class _dummyMenubutton(Menubutton, TixSubWidget):
def __init__(self, master, name, destroy_physically=1):
TixSubWidget.__init__(self, master, name, destroy_physically)
class _dummyScrollbar(Scrollbar, TixSubWidget):
def __init__(self, master, name, destroy_physically=1):
TixSubWidget.__init__(self, master, name, destroy_physically)
class _dummyText(Text, TixSubWidget):
def __init__(self, master, name, destroy_physically=1):
TixSubWidget.__init__(self, master, name, destroy_physically)
class _dummyScrolledListBox(ScrolledListBox, TixSubWidget):
def __init__(self, master, name, destroy_physically=1):
TixSubWidget.__init__(self, master, name, destroy_physically)
self.subwidget_list['listbox'] = _dummyListbox(self, 'listbox')
self.subwidget_list['vsb'] = _dummyScrollbar(self, 'vsb')
self.subwidget_list['hsb'] = _dummyScrollbar(self, 'hsb')
class _dummyHList(HList, TixSubWidget):
def __init__(self, master, name, destroy_physically=1):
TixSubWidget.__init__(self, master, name, destroy_physically)
class _dummyScrolledHList(ScrolledHList, TixSubWidget):
def __init__(self, master, name, destroy_physically=1):
TixSubWidget.__init__(self, master, name, destroy_physically)
self.subwidget_list['hlist'] = _dummyHList(self, 'hlist')
self.subwidget_list['vsb'] = _dummyScrollbar(self, 'vsb')
self.subwidget_list['hsb'] = _dummyScrollbar(self, 'hsb')
class _dummyTList(TList, TixSubWidget):
def __init__(self, master, name, destroy_physically=1):
TixSubWidget.__init__(self, master, name, destroy_physically)
class _dummyComboBox(ComboBox, TixSubWidget):
def __init__(self, master, name, destroy_physically=1):
        # The original code passed the list ['fancy', destroy_physically] as
        # the destroy_physically argument, which is always true; pass the
        # flag itself instead.
        TixSubWidget.__init__(self, master, name, destroy_physically)
self.subwidget_list['label'] = _dummyLabel(self, 'label')
self.subwidget_list['entry'] = _dummyEntry(self, 'entry')
self.subwidget_list['arrow'] = _dummyButton(self, 'arrow')
self.subwidget_list['slistbox'] = _dummyScrolledListBox(self,
'slistbox')
try:
self.subwidget_list['tick'] = _dummyButton(self, 'tick')
#cross Button : present if created with the fancy option
self.subwidget_list['cross'] = _dummyButton(self, 'cross')
except TypeError:
# unavailable when -fancy not specified
pass
class _dummyDirList(DirList, TixSubWidget):
def __init__(self, master, name, destroy_physically=1):
TixSubWidget.__init__(self, master, name, destroy_physically)
self.subwidget_list['hlist'] = _dummyHList(self, 'hlist')
self.subwidget_list['vsb'] = _dummyScrollbar(self, 'vsb')
self.subwidget_list['hsb'] = _dummyScrollbar(self, 'hsb')
class _dummyDirSelectBox(DirSelectBox, TixSubWidget):
def __init__(self, master, name, destroy_physically=1):
TixSubWidget.__init__(self, master, name, destroy_physically)
self.subwidget_list['dirlist'] = _dummyDirList(self, 'dirlist')
self.subwidget_list['dircbx'] = _dummyFileComboBox(self, 'dircbx')
class _dummyExFileSelectBox(ExFileSelectBox, TixSubWidget):
def __init__(self, master, name, destroy_physically=1):
TixSubWidget.__init__(self, master, name, destroy_physically)
self.subwidget_list['cancel'] = _dummyButton(self, 'cancel')
self.subwidget_list['ok'] = _dummyButton(self, 'ok')
self.subwidget_list['hidden'] = _dummyCheckbutton(self, 'hidden')
self.subwidget_list['types'] = _dummyComboBox(self, 'types')
self.subwidget_list['dir'] = _dummyComboBox(self, 'dir')
self.subwidget_list['dirlist'] = _dummyScrolledListBox(self, 'dirlist')
self.subwidget_list['file'] = _dummyComboBox(self, 'file')
self.subwidget_list['filelist'] = _dummyScrolledListBox(self, 'filelist')
class _dummyFileSelectBox(FileSelectBox, TixSubWidget):
def __init__(self, master, name, destroy_physically=1):
TixSubWidget.__init__(self, master, name, destroy_physically)
self.subwidget_list['dirlist'] = _dummyScrolledListBox(self, 'dirlist')
self.subwidget_list['filelist'] = _dummyScrolledListBox(self, 'filelist')
self.subwidget_list['filter'] = _dummyComboBox(self, 'filter')
self.subwidget_list['selection'] = _dummyComboBox(self, 'selection')
class _dummyFileComboBox(ComboBox, TixSubWidget):
def __init__(self, master, name, destroy_physically=1):
TixSubWidget.__init__(self, master, name, destroy_physically)
self.subwidget_list['dircbx'] = _dummyComboBox(self, 'dircbx')
class _dummyStdButtonBox(StdButtonBox, TixSubWidget):
def __init__(self, master, name, destroy_physically=1):
TixSubWidget.__init__(self, master, name, destroy_physically)
self.subwidget_list['ok'] = _dummyButton(self, 'ok')
self.subwidget_list['apply'] = _dummyButton(self, 'apply')
self.subwidget_list['cancel'] = _dummyButton(self, 'cancel')
self.subwidget_list['help'] = _dummyButton(self, 'help')
class _dummyNoteBookFrame(NoteBookFrame, TixSubWidget):
def __init__(self, master, name, destroy_physically=0):
TixSubWidget.__init__(self, master, name, destroy_physically)
class _dummyPanedWindow(PanedWindow, TixSubWidget):
def __init__(self, master, name, destroy_physically=1):
TixSubWidget.__init__(self, master, name, destroy_physically)
########################
### Utility Routines ###
########################
#mike Should tixDestroy be exposed as a wrapper? - but not for widgets.
def OptionName(widget):
'''Returns the qualified path name for the widget. Normally used to set
default options for subwidgets. See tixwidgets.py'''
return widget.tk.call('tixOptionName', widget._w)
# Called with a dictionary argument of the form
# {'*.c':'C source files', '*.txt':'Text Files', '*':'All files'}
# returns a string which can be used to configure the fsbox file types
# in an ExFileSelectBox. i.e.,
# '{{*} {* - All files}} {{*.c} {*.c - C source files}} {{*.txt} {*.txt - Text Files}}'
def FileTypeList(dict):
s = ''
for type in dict.keys():
s = s + '{{' + type + '} {' + type + ' - ' + dict[type] + '}} '
return s
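# Illustrative sketch (never executed on import): FileTypeList turns a
# mapping of glob patterns into the Tcl list expected by the -filetypes
# option of the file select boxes.  The sample patterns are assumptions,
# and the ordering of the result follows the dictionary's iteration order.
def _example_filetypelist():
    types = FileTypeList({'*.py': 'Python files', '*': 'All files'})
    # one possible result:
    # '{{*.py} {*.py - Python files}} {{*} {* - All files}} '
    return types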
# Still to be done:
# tixIconView
class CObjView(TixWidget):
"""This file implements the Canvas Object View widget. This is a base
class of IconView. It implements automatic placement/adjustment of the
scrollbars according to the canvas objects inside the canvas subwidget.
The scrollbars are adjusted so that the canvas is just large enough
to see all the objects.
"""
# FIXME: It should inherit -superclass tixScrolledWidget
pass
class Grid(TixWidget, XView, YView):
'''The Tix Grid command creates a new window and makes it into a
    tixGrid widget. Additional options may be specified on the command
line or in the option database to configure aspects such as its cursor
and relief.
A Grid widget displays its contents in a two dimensional grid of cells.
Each cell may contain one Tix display item, which may be in text,
graphics or other formats. See the DisplayStyle class for more information
about Tix display items. Individual cells, or groups of cells, can be
    formatted with a wide range of attributes, such as color, relief and
border.
Subwidgets - None'''
# valid specific resources as of Tk 8.4
# editdonecmd, editnotifycmd, floatingcols, floatingrows, formatcmd,
# highlightbackground, highlightcolor, leftmargin, itemtype, selectmode,
# selectunit, topmargin,
def __init__(self, master=None, cnf={}, **kw):
static= []
self.cnf= cnf
TixWidget.__init__(self, master, 'tixGrid', static, cnf, kw)
# valid options as of Tk 8.4
# anchor, bdtype, cget, configure, delete, dragsite, dropsite, entrycget,
# edit, entryconfigure, format, geometryinfo, info, index, move, nearest,
# selection, set, size, unset, xview, yview
def anchor_clear(self):
"""Removes the selection anchor."""
self.tk.call(self, 'anchor', 'clear')
def anchor_get(self):
"Get the (x,y) coordinate of the current anchor cell"
return self._getints(self.tk.call(self, 'anchor', 'get'))
def anchor_set(self, x, y):
"""Set the selection anchor to the cell at (x, y)."""
self.tk.call(self, 'anchor', 'set', x, y)
def delete_row(self, from_, to=None):
"""Delete rows between from_ and to inclusive.
If to is not provided, delete only row at from_"""
if to is None:
self.tk.call(self, 'delete', 'row', from_)
else:
self.tk.call(self, 'delete', 'row', from_, to)
def delete_column(self, from_, to=None):
"""Delete columns between from_ and to inclusive.
If to is not provided, delete only column at from_"""
if to is None:
self.tk.call(self, 'delete', 'column', from_)
else:
self.tk.call(self, 'delete', 'column', from_, to)
def edit_apply(self):
"""If any cell is being edited, de-highlight the cell and applies
the changes."""
self.tk.call(self, 'edit', 'apply')
def edit_set(self, x, y):
"""Highlights the cell at (x, y) for editing, if the -editnotify
command returns True for this cell."""
self.tk.call(self, 'edit', 'set', x, y)
def entrycget(self, x, y, option):
"Get the option value for cell at (x,y)"
if option and option[0] != '-':
option = '-' + option
return self.tk.call(self, 'entrycget', x, y, option)
def entryconfigure(self, x, y, cnf=None, **kw):
return self._configure(('entryconfigure', x, y), cnf, kw)
# def format
# def index
def info_exists(self, x, y):
"Return True if display item exists at (x,y)"
return self._getboolean(self.tk.call(self, 'info', 'exists', x, y))
def info_bbox(self, x, y):
# This seems to always return '', at least for 'text' displayitems
return self.tk.call(self, 'info', 'bbox', x, y)
def move_column(self, from_, to, offset):
"""Moves the the range of columns from position FROM through TO by
the distance indicated by OFFSET. For example, move_column(2, 4, 1)
moves the columns 2,3,4 to columns 3,4,5."""
self.tk.call(self, 'move', 'column', from_, to, offset)
def move_row(self, from_, to, offset):
"""Moves the the range of rows from position FROM through TO by
the distance indicated by OFFSET.
For example, move_row(2, 4, 1) moves the rows 2,3,4 to rows 3,4,5."""
self.tk.call(self, 'move', 'row', from_, to, offset)
def nearest(self, x, y):
"Return coordinate of cell nearest pixel coordinate (x,y)"
return self._getints(self.tk.call(self, 'nearest', x, y))
# def selection adjust
# def selection clear
# def selection includes
# def selection set
# def selection toggle
def set(self, x, y, itemtype=None, **kw):
args= self._options(self.cnf, kw)
if itemtype is not None:
args= ('-itemtype', itemtype) + args
self.tk.call(self, 'set', x, y, *args)
def size_column(self, index, **kw):
"""Queries or sets the size of the column given by
INDEX. INDEX may be any non-negative
integer that gives the position of a given column.
INDEX can also be the string "default"; in this case, this command
queries or sets the default size of all columns.
When no option-value pair is given, this command returns a tuple
containing the current size setting of the given column. When
option-value pairs are given, the corresponding options of the
size setting of the given column are changed. Options may be one
        of the following:
pad0 pixels
Specifies the paddings to the left of a column.
pad1 pixels
Specifies the paddings to the right of a column.
size val
                    Specifies the width of a column.
                    Val may be: "auto" -- the width of the column is set to
                    the widest cell in the column; a valid Tk screen distance
                    unit; or a real number followed by the word chars
                    (e.g. 3.4chars) that sets the width of the column to the
                    given number of characters."""
return self.tk.split(self.tk.call(self._w, 'size', 'column', index,
*self._options({}, kw)))
def size_row(self, index, **kw):
"""Queries or sets the size of the row given by
INDEX. INDEX may be any non-negative
        integer that gives the position of a given row.
INDEX can also be the string "default"; in this case, this command
queries or sets the default size of all rows.
        When no option-value pair is given, this command returns a list
        containing the current size setting of the given row. When option-value
        pairs are given, the corresponding options of the size setting of the
        given row are changed. Options may be one of the following:
pad0 pixels
Specifies the paddings to the top of a row.
pad1 pixels
                    Specifies the paddings to the bottom of a row.
size val
Specifies the height of a row.
Val may be: "auto" -- the height of the row is set the
the highest cell in the row; a valid Tk screen distance
unit; or a real number following by the word chars
(e.g. 3.4chars) that sets the height of the row to the
given number of characters."""
return self.tk.split(self.tk.call(
self, 'size', 'row', index, *self._options({}, kw)))
def unset(self, x, y):
"""Clears the cell at (x, y) by removing its display item."""
self.tk.call(self._w, 'unset', x, y)
class ScrolledGrid(Grid):
'''Scrolled Grid widgets'''
# FIXME: It should inherit -superclass tixScrolledWidget
def __init__(self, master=None, cnf={}, **kw):
static= []
self.cnf= cnf
TixWidget.__init__(self, master, 'tixScrolledGrid', static, cnf, kw)
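# Hedged demo (requires a running Tix interpreter; the Tix-aware Tk class
# assumed here is defined elsewhere in this module): put one text item
# into a ScrolledGrid and auto-size its column.
if __name__ == '__main__':
    root = Tk()
    sgrid = ScrolledGrid(root)
    sgrid.pack(fill='both', expand=1)
    sgrid.set(0, 0, itemtype='text', text='hello')
    sgrid.size_column(0, size='auto')
    root.mainloop()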
| mit |
VitalPet/account-financial-reporting | account_financial_report/report/__init__.py | 38 | 1484 | # -*- encoding: utf-8 -*-
###########################################################################
# Module Writen to OpenERP, Open Source Management Solution
# Copyright (C) OpenERP Venezuela (<http://openerp.com.ve>).
# All Rights Reserved
# Credits######################################################
# Coded by: Humberto Arocha [email protected]
# Angelica Barrios [email protected]
# Jordi Esteve <[email protected]>
# Planned by: Humberto Arocha
# Financed by: LUBCAN COL S.A.S http://www.lubcancol.com
# Audited by: Humberto Arocha [email protected]
#############################################################################
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
##############################################################################
from . import parser
| agpl-3.0 |
srcLurker/home-assistant | homeassistant/components/qwikswitch.py | 9 | 6190 | """
Support for Qwikswitch devices.
For more details about this component, please refer to the documentation at
https://home-assistant.io/components/qwikswitch/
"""
import logging
import voluptuous as vol
from homeassistant.const import (EVENT_HOMEASSISTANT_START,
EVENT_HOMEASSISTANT_STOP)
from homeassistant.helpers.discovery import load_platform
from homeassistant.components.light import (ATTR_BRIGHTNESS,
SUPPORT_BRIGHTNESS, Light)
from homeassistant.components.switch import SwitchDevice
DOMAIN = 'qwikswitch'
REQUIREMENTS = ['https://github.com/kellerza/pyqwikswitch/archive/v0.4.zip'
'#pyqwikswitch==0.4']
_LOGGER = logging.getLogger(__name__)
CV_DIM_VALUE = vol.All(vol.Coerce(float), vol.Range(min=1, max=3))
CONFIG_SCHEMA = vol.Schema({
DOMAIN: vol.Schema({
vol.Required('url', default='http://127.0.0.1:2020'): vol.Coerce(str),
vol.Optional('dimmer_adjust', default=1): CV_DIM_VALUE,
vol.Optional('button_events'): vol.Coerce(str)
})}, extra=vol.ALLOW_EXTRA)
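# Example configuration.yaml entry (values hypothetical):
#
# qwikswitch:
#   url: http://127.0.0.1:2020
#   dimmer_adjust: 1.4
#   button_events: TOGGLE,SCENE EXE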
QSUSB = {}
SUPPORT_QWIKSWITCH = SUPPORT_BRIGHTNESS
class QSToggleEntity(object):
"""Representation of a Qwikswitch Entity.
Implement base QS methods. Modeled around HA ToggleEntity[1] & should only
be used in a class that extends both QSToggleEntity *and* ToggleEntity.
Implemented:
- QSLight extends QSToggleEntity and Light[2] (ToggleEntity[1])
- QSSwitch extends QSToggleEntity and SwitchDevice[3] (ToggleEntity[1])
[1] /helpers/entity.py
[2] /components/light/__init__.py
[3] /components/switch/__init__.py
"""
def __init__(self, qsitem, qsusb):
"""Initialize the ToggleEntity."""
from pyqwikswitch import (QS_ID, QS_NAME, QSType, PQS_VALUE, PQS_TYPE)
self._id = qsitem[QS_ID]
self._name = qsitem[QS_NAME]
self._value = qsitem[PQS_VALUE]
self._qsusb = qsusb
self._dim = qsitem[PQS_TYPE] == QSType.dimmer
QSUSB[self._id] = self
@property
def brightness(self):
"""Return the brightness of this light between 0..100."""
return self._value if self._dim else None
# pylint: disable=no-self-use
@property
def should_poll(self):
"""No polling needed."""
return False
@property
def name(self):
"""Return the name of the light."""
return self._name
@property
def is_on(self):
"""Check if device is on (non-zero)."""
return self._value > 0
def update_value(self, value):
"""Decode the QSUSB value and update the Home assistant state."""
if value != self._value:
self._value = value
# pylint: disable=no-member
super().update_ha_state() # Part of Entity/ToggleEntity
return self._value
def turn_on(self, **kwargs):
"""Turn the device on."""
newvalue = 255
if ATTR_BRIGHTNESS in kwargs:
newvalue = kwargs[ATTR_BRIGHTNESS]
if self._qsusb.set(self._id, round(min(newvalue, 255)/2.55)) >= 0:
self.update_value(newvalue)
# pylint: disable=unused-argument
def turn_off(self, **kwargs):
"""Turn the device off."""
if self._qsusb.set(self._id, 0) >= 0:
self.update_value(0)
class QSSwitch(QSToggleEntity, SwitchDevice):
"""Switch based on a Qwikswitch relay module."""
pass
class QSLight(QSToggleEntity, Light):
"""Light based on a Qwikswitch relay/dimmer module."""
@property
def supported_features(self):
"""Flag supported features."""
return SUPPORT_QWIKSWITCH
def setup(hass, config):
"""Setup the QSUSB component."""
from pyqwikswitch import (QSUsb, CMD_BUTTONS, QS_NAME, QS_ID, QS_CMD,
PQS_VALUE, PQS_TYPE, QSType)
# Override which cmd's in /&listen packets will fire events
# By default only buttons of type [TOGGLE,SCENE EXE,LEVEL]
cmd_buttons = config[DOMAIN].get('button_events', ','.join(CMD_BUTTONS))
cmd_buttons = cmd_buttons.split(',')
url = config[DOMAIN]['url']
dimmer_adjust = config[DOMAIN]['dimmer_adjust']
qsusb = QSUsb(url, _LOGGER, dimmer_adjust)
def _stop(event):
"""Stop the listener queue and clean up."""
nonlocal qsusb
qsusb.stop()
qsusb = None
global QSUSB
QSUSB = {}
_LOGGER.info("Waiting for long poll to QSUSB to time out")
hass.bus.listen(EVENT_HOMEASSISTANT_STOP, _stop)
# Discover all devices in QSUSB
devices = qsusb.devices()
QSUSB['switch'] = []
QSUSB['light'] = []
for item in devices:
if item[PQS_TYPE] == QSType.relay and (item[QS_NAME].lower()
.endswith(' switch')):
item[QS_NAME] = item[QS_NAME][:-7] # Remove ' switch' postfix
QSUSB['switch'].append(QSSwitch(item, qsusb))
elif item[PQS_TYPE] in [QSType.relay, QSType.dimmer]:
QSUSB['light'].append(QSLight(item, qsusb))
else:
_LOGGER.warning("Ignored unknown QSUSB device: %s", item)
# Load platforms
for comp_name in ('switch', 'light'):
if len(QSUSB[comp_name]) > 0:
load_platform(hass, comp_name, 'qwikswitch', {}, config)
def qs_callback(item):
"""Typically a button press or update signal."""
if qsusb is None: # Shutting down
_LOGGER.info("Done")
return
# If button pressed, fire a hass event
if item.get(QS_CMD, '') in cmd_buttons:
hass.bus.fire('qwikswitch.button.' + item.get(QS_ID, '@no_id'))
return
# Update all ha_objects
qsreply = qsusb.devices()
if qsreply is False:
return
for item in qsreply:
if item[QS_ID] in QSUSB:
QSUSB[item[QS_ID]].update_value(
round(min(item[PQS_VALUE], 100) * 2.55))
def _start(event):
"""Start listening."""
qsusb.listen(callback=qs_callback, timeout=30)
hass.bus.listen_once(EVENT_HOMEASSISTANT_START, _start)
return True
| mit |
ghedsouza/django | tests/template_tests/syntax_tests/test_filter_syntax.py | 29 | 9422 | from django.template import TemplateSyntaxError
from django.test import SimpleTestCase
from ..utils import SomeClass, SomeOtherException, UTF8Class, setup
class FilterSyntaxTests(SimpleTestCase):
@setup({'filter-syntax01': '{{ var|upper }}'})
def test_filter_syntax01(self):
"""
Basic filter usage
"""
output = self.engine.render_to_string('filter-syntax01', {"var": "Django is the greatest!"})
self.assertEqual(output, "DJANGO IS THE GREATEST!")
@setup({'filter-syntax02': '{{ var|upper|lower }}'})
def test_filter_syntax02(self):
"""
Chained filters
"""
output = self.engine.render_to_string('filter-syntax02', {"var": "Django is the greatest!"})
self.assertEqual(output, "django is the greatest!")
@setup({'filter-syntax03': '{{ var |upper }}'})
def test_filter_syntax03(self):
"""
Allow spaces before the filter pipe
"""
output = self.engine.render_to_string('filter-syntax03', {'var': 'Django is the greatest!'})
self.assertEqual(output, 'DJANGO IS THE GREATEST!')
@setup({'filter-syntax04': '{{ var| upper }}'})
def test_filter_syntax04(self):
"""
Allow spaces after the filter pipe
"""
output = self.engine.render_to_string('filter-syntax04', {'var': 'Django is the greatest!'})
self.assertEqual(output, 'DJANGO IS THE GREATEST!')
@setup({'filter-syntax05': '{{ var|does_not_exist }}'})
def test_filter_syntax05(self):
"""
Raise TemplateSyntaxError for a nonexistent filter
"""
with self.assertRaises(TemplateSyntaxError):
self.engine.get_template('filter-syntax05')
@setup({'filter-syntax06': '{{ var|fil(ter) }}'})
def test_filter_syntax06(self):
"""
Raise TemplateSyntaxError when trying to access a filter containing
an illegal character
"""
with self.assertRaises(TemplateSyntaxError):
self.engine.get_template('filter-syntax06')
@setup({'filter-syntax07': "{% nothing_to_see_here %}"})
def test_filter_syntax07(self):
"""
Raise TemplateSyntaxError for invalid block tags
"""
with self.assertRaises(TemplateSyntaxError):
self.engine.get_template('filter-syntax07')
@setup({'filter-syntax08': "{% %}"})
def test_filter_syntax08(self):
"""
Raise TemplateSyntaxError for empty block tags
"""
with self.assertRaisesMessage(TemplateSyntaxError, 'Empty block tag on line 1'):
self.engine.get_template('filter-syntax08')
@setup({'filter-syntax08-multi-line': "line 1\nline 2\nline 3{% %}\nline 4\nline 5"})
def test_filter_syntax08_multi_line(self):
"""
Raise TemplateSyntaxError for empty block tags in templates with
multiple lines.
"""
with self.assertRaisesMessage(TemplateSyntaxError, 'Empty block tag on line 3'):
self.engine.get_template('filter-syntax08-multi-line')
@setup({'filter-syntax09': '{{ var|cut:"o"|upper|lower }}'})
def test_filter_syntax09(self):
"""
Chained filters, with an argument to the first one
"""
output = self.engine.render_to_string('filter-syntax09', {'var': 'Foo'})
self.assertEqual(output, 'f')
@setup({'filter-syntax10': r'{{ var|default_if_none:" endquote\" hah" }}'})
def test_filter_syntax10(self):
"""
Literal string as argument is always "safe" from auto-escaping.
"""
output = self.engine.render_to_string('filter-syntax10', {"var": None})
self.assertEqual(output, ' endquote" hah')
@setup({'filter-syntax11': r'{{ var|default_if_none:var2 }}'})
def test_filter_syntax11(self):
"""
Variable as argument
"""
output = self.engine.render_to_string('filter-syntax11', {"var": None, "var2": "happy"})
self.assertEqual(output, 'happy')
@setup({'filter-syntax12': r'{{ var|yesno:"yup,nup,mup" }} {{ var|yesno }}'})
def test_filter_syntax12(self):
"""
Default argument testing
"""
output = self.engine.render_to_string('filter-syntax12', {"var": True})
self.assertEqual(output, 'yup yes')
@setup({'filter-syntax13': r'1{{ var.method3 }}2'})
def test_filter_syntax13(self):
"""
Fail silently for methods that raise an exception with a
`silent_variable_failure` attribute
"""
output = self.engine.render_to_string('filter-syntax13', {"var": SomeClass()})
if self.engine.string_if_invalid:
self.assertEqual(output, "1INVALID2")
else:
self.assertEqual(output, "12")
@setup({'filter-syntax14': r'1{{ var.method4 }}2'})
def test_filter_syntax14(self):
"""
In methods that raise an exception without a
`silent_variable_attribute` set to True, the exception propagates
"""
with self.assertRaises(SomeOtherException):
self.engine.render_to_string('filter-syntax14', {"var": SomeClass()})
@setup({'filter-syntax15': r'{{ var|default_if_none:"foo\bar" }}'})
def test_filter_syntax15(self):
"""
Escaped backslash in argument
"""
output = self.engine.render_to_string('filter-syntax15', {"var": None})
self.assertEqual(output, r'foo\bar')
@setup({'filter-syntax16': r'{{ var|default_if_none:"foo\now" }}'})
def test_filter_syntax16(self):
"""
Escaped backslash using known escape char
"""
output = self.engine.render_to_string('filter-syntax16', {"var": None})
self.assertEqual(output, r'foo\now')
@setup({'filter-syntax17': r'{{ var|join:"" }}'})
def test_filter_syntax17(self):
"""
Empty strings can be passed as arguments to filters
"""
output = self.engine.render_to_string('filter-syntax17', {'var': ['a', 'b', 'c']})
self.assertEqual(output, 'abc')
@setup({'filter-syntax18': r'{{ var }}'})
def test_filter_syntax18(self):
"""
Strings are converted to bytestrings in the final output.
"""
output = self.engine.render_to_string('filter-syntax18', {'var': UTF8Class()})
self.assertEqual(output, '\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111')
@setup({'filter-syntax19': '{{ var|truncatewords:1 }}'})
def test_filter_syntax19(self):
"""
Numbers as filter arguments should work
"""
output = self.engine.render_to_string('filter-syntax19', {"var": "hello world"})
self.assertEqual(output, "hello ...")
@setup({'filter-syntax20': '{{ ""|default_if_none:"was none" }}'})
def test_filter_syntax20(self):
"""
Filters should accept empty string constants
"""
output = self.engine.render_to_string('filter-syntax20')
self.assertEqual(output, "")
@setup({'filter-syntax21': r'1{{ var.silent_fail_key }}2'})
def test_filter_syntax21(self):
"""
Fail silently for non-callable attribute and dict lookups which
raise an exception with a "silent_variable_failure" attribute
"""
output = self.engine.render_to_string('filter-syntax21', {"var": SomeClass()})
if self.engine.string_if_invalid:
self.assertEqual(output, "1INVALID2")
else:
self.assertEqual(output, "12")
@setup({'filter-syntax22': r'1{{ var.silent_fail_attribute }}2'})
def test_filter_syntax22(self):
"""
Fail silently for non-callable attribute and dict lookups which
raise an exception with a `silent_variable_failure` attribute
"""
output = self.engine.render_to_string('filter-syntax22', {"var": SomeClass()})
if self.engine.string_if_invalid:
self.assertEqual(output, "1INVALID2")
else:
self.assertEqual(output, "12")
@setup({'filter-syntax23': r'1{{ var.noisy_fail_key }}2'})
def test_filter_syntax23(self):
"""
In attribute and dict lookups that raise an unexpected exception
without a `silent_variable_attribute` set to True, the exception
propagates
"""
with self.assertRaises(SomeOtherException):
self.engine.render_to_string('filter-syntax23', {"var": SomeClass()})
@setup({'filter-syntax24': r'1{{ var.noisy_fail_attribute }}2'})
def test_filter_syntax24(self):
"""
In attribute and dict lookups that raise an unexpected exception
without a `silent_variable_attribute` set to True, the exception
propagates
"""
with self.assertRaises(SomeOtherException):
self.engine.render_to_string('filter-syntax24', {"var": SomeClass()})
@setup({'filter-syntax25': '{{ var.attribute_error_attribute }}'})
def test_filter_syntax25(self):
"""
#16383 - Attribute errors from an @property value should be
reraised.
"""
with self.assertRaises(AttributeError):
self.engine.render_to_string('filter-syntax25', {'var': SomeClass()})
@setup({'template': '{{ var.type_error_attribute }}'})
def test_type_error_attribute(self):
with self.assertRaises(TypeError):
self.engine.render_to_string('template', {'var': SomeClass()})
| bsd-3-clause |
tracierenea/gnuradio | gr-vocoder/python/vocoder/qa_g721_vocoder.py | 57 | 1573 | #!/usr/bin/env python
#
# Copyright 2011,2013 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
from gnuradio import gr, gr_unittest, vocoder, blocks
class test_g721_vocoder (gr_unittest.TestCase):
def setUp (self):
self.tb = gr.top_block()
def tearDown (self):
self.tb = None
def test001_module_load (self):
data = (8,24,36,52,56,64,76,88,104,124,132,148,172,
196,220,244,280,320,372,416,468,524,580,648)
src = blocks.vector_source_s(data)
enc = vocoder.g721_encode_sb()
dec = vocoder.g721_decode_bs()
snk = blocks.vector_sink_s()
self.tb.connect(src, enc, dec, snk)
self.tb.run()
actual_result = snk.data()
self.assertEqual(data, actual_result)
if __name__ == '__main__':
gr_unittest.run(test_g721_vocoder, "test_g721_vocoder.xml")
| gpl-3.0 |
phalax4/CarnotKE | jyhton/lib-python/2.7/encodings/iso8859_16.py | 593 | 13813 | """ Python Character Mapping Codec iso8859_16 generated from 'MAPPINGS/ISO8859/8859-16.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
def encode(self,input,errors='strict'):
return codecs.charmap_encode(input,errors,encoding_table)
def decode(self,input,errors='strict'):
return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
return codecs.charmap_encode(input,self.errors,encoding_table)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
def decode(self, input, final=False):
return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='iso8859-16',
encode=Codec().encode,
decode=Codec().decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
### Decoding Table
decoding_table = (
u'\x00' # 0x00 -> NULL
u'\x01' # 0x01 -> START OF HEADING
u'\x02' # 0x02 -> START OF TEXT
u'\x03' # 0x03 -> END OF TEXT
u'\x04' # 0x04 -> END OF TRANSMISSION
u'\x05' # 0x05 -> ENQUIRY
u'\x06' # 0x06 -> ACKNOWLEDGE
u'\x07' # 0x07 -> BELL
u'\x08' # 0x08 -> BACKSPACE
u'\t' # 0x09 -> HORIZONTAL TABULATION
u'\n' # 0x0A -> LINE FEED
u'\x0b' # 0x0B -> VERTICAL TABULATION
u'\x0c' # 0x0C -> FORM FEED
u'\r' # 0x0D -> CARRIAGE RETURN
u'\x0e' # 0x0E -> SHIFT OUT
u'\x0f' # 0x0F -> SHIFT IN
u'\x10' # 0x10 -> DATA LINK ESCAPE
u'\x11' # 0x11 -> DEVICE CONTROL ONE
u'\x12' # 0x12 -> DEVICE CONTROL TWO
u'\x13' # 0x13 -> DEVICE CONTROL THREE
u'\x14' # 0x14 -> DEVICE CONTROL FOUR
u'\x15' # 0x15 -> NEGATIVE ACKNOWLEDGE
u'\x16' # 0x16 -> SYNCHRONOUS IDLE
u'\x17' # 0x17 -> END OF TRANSMISSION BLOCK
u'\x18' # 0x18 -> CANCEL
u'\x19' # 0x19 -> END OF MEDIUM
u'\x1a' # 0x1A -> SUBSTITUTE
u'\x1b' # 0x1B -> ESCAPE
u'\x1c' # 0x1C -> FILE SEPARATOR
u'\x1d' # 0x1D -> GROUP SEPARATOR
u'\x1e' # 0x1E -> RECORD SEPARATOR
u'\x1f' # 0x1F -> UNIT SEPARATOR
u' ' # 0x20 -> SPACE
u'!' # 0x21 -> EXCLAMATION MARK
u'"' # 0x22 -> QUOTATION MARK
u'#' # 0x23 -> NUMBER SIGN
u'$' # 0x24 -> DOLLAR SIGN
u'%' # 0x25 -> PERCENT SIGN
u'&' # 0x26 -> AMPERSAND
u"'" # 0x27 -> APOSTROPHE
u'(' # 0x28 -> LEFT PARENTHESIS
u')' # 0x29 -> RIGHT PARENTHESIS
u'*' # 0x2A -> ASTERISK
u'+' # 0x2B -> PLUS SIGN
u',' # 0x2C -> COMMA
u'-' # 0x2D -> HYPHEN-MINUS
u'.' # 0x2E -> FULL STOP
u'/' # 0x2F -> SOLIDUS
u'0' # 0x30 -> DIGIT ZERO
u'1' # 0x31 -> DIGIT ONE
u'2' # 0x32 -> DIGIT TWO
u'3' # 0x33 -> DIGIT THREE
u'4' # 0x34 -> DIGIT FOUR
u'5' # 0x35 -> DIGIT FIVE
u'6' # 0x36 -> DIGIT SIX
u'7' # 0x37 -> DIGIT SEVEN
u'8' # 0x38 -> DIGIT EIGHT
u'9' # 0x39 -> DIGIT NINE
u':' # 0x3A -> COLON
u';' # 0x3B -> SEMICOLON
u'<' # 0x3C -> LESS-THAN SIGN
u'=' # 0x3D -> EQUALS SIGN
u'>' # 0x3E -> GREATER-THAN SIGN
u'?' # 0x3F -> QUESTION MARK
u'@' # 0x40 -> COMMERCIAL AT
u'A' # 0x41 -> LATIN CAPITAL LETTER A
u'B' # 0x42 -> LATIN CAPITAL LETTER B
u'C' # 0x43 -> LATIN CAPITAL LETTER C
u'D' # 0x44 -> LATIN CAPITAL LETTER D
u'E' # 0x45 -> LATIN CAPITAL LETTER E
u'F' # 0x46 -> LATIN CAPITAL LETTER F
u'G' # 0x47 -> LATIN CAPITAL LETTER G
u'H' # 0x48 -> LATIN CAPITAL LETTER H
u'I' # 0x49 -> LATIN CAPITAL LETTER I
u'J' # 0x4A -> LATIN CAPITAL LETTER J
u'K' # 0x4B -> LATIN CAPITAL LETTER K
u'L' # 0x4C -> LATIN CAPITAL LETTER L
u'M' # 0x4D -> LATIN CAPITAL LETTER M
u'N' # 0x4E -> LATIN CAPITAL LETTER N
u'O' # 0x4F -> LATIN CAPITAL LETTER O
u'P' # 0x50 -> LATIN CAPITAL LETTER P
u'Q' # 0x51 -> LATIN CAPITAL LETTER Q
u'R' # 0x52 -> LATIN CAPITAL LETTER R
u'S' # 0x53 -> LATIN CAPITAL LETTER S
u'T' # 0x54 -> LATIN CAPITAL LETTER T
u'U' # 0x55 -> LATIN CAPITAL LETTER U
u'V' # 0x56 -> LATIN CAPITAL LETTER V
u'W' # 0x57 -> LATIN CAPITAL LETTER W
u'X' # 0x58 -> LATIN CAPITAL LETTER X
u'Y' # 0x59 -> LATIN CAPITAL LETTER Y
u'Z' # 0x5A -> LATIN CAPITAL LETTER Z
u'[' # 0x5B -> LEFT SQUARE BRACKET
u'\\' # 0x5C -> REVERSE SOLIDUS
u']' # 0x5D -> RIGHT SQUARE BRACKET
u'^' # 0x5E -> CIRCUMFLEX ACCENT
u'_' # 0x5F -> LOW LINE
u'`' # 0x60 -> GRAVE ACCENT
u'a' # 0x61 -> LATIN SMALL LETTER A
u'b' # 0x62 -> LATIN SMALL LETTER B
u'c' # 0x63 -> LATIN SMALL LETTER C
u'd' # 0x64 -> LATIN SMALL LETTER D
u'e' # 0x65 -> LATIN SMALL LETTER E
u'f' # 0x66 -> LATIN SMALL LETTER F
u'g' # 0x67 -> LATIN SMALL LETTER G
u'h' # 0x68 -> LATIN SMALL LETTER H
u'i' # 0x69 -> LATIN SMALL LETTER I
u'j' # 0x6A -> LATIN SMALL LETTER J
u'k' # 0x6B -> LATIN SMALL LETTER K
u'l' # 0x6C -> LATIN SMALL LETTER L
u'm' # 0x6D -> LATIN SMALL LETTER M
u'n' # 0x6E -> LATIN SMALL LETTER N
u'o' # 0x6F -> LATIN SMALL LETTER O
u'p' # 0x70 -> LATIN SMALL LETTER P
u'q' # 0x71 -> LATIN SMALL LETTER Q
u'r' # 0x72 -> LATIN SMALL LETTER R
u's' # 0x73 -> LATIN SMALL LETTER S
u't' # 0x74 -> LATIN SMALL LETTER T
u'u' # 0x75 -> LATIN SMALL LETTER U
u'v' # 0x76 -> LATIN SMALL LETTER V
u'w' # 0x77 -> LATIN SMALL LETTER W
u'x' # 0x78 -> LATIN SMALL LETTER X
u'y' # 0x79 -> LATIN SMALL LETTER Y
u'z' # 0x7A -> LATIN SMALL LETTER Z
u'{' # 0x7B -> LEFT CURLY BRACKET
u'|' # 0x7C -> VERTICAL LINE
u'}' # 0x7D -> RIGHT CURLY BRACKET
u'~' # 0x7E -> TILDE
u'\x7f' # 0x7F -> DELETE
u'\x80' # 0x80 -> <control>
u'\x81' # 0x81 -> <control>
u'\x82' # 0x82 -> <control>
u'\x83' # 0x83 -> <control>
u'\x84' # 0x84 -> <control>
u'\x85' # 0x85 -> <control>
u'\x86' # 0x86 -> <control>
u'\x87' # 0x87 -> <control>
u'\x88' # 0x88 -> <control>
u'\x89' # 0x89 -> <control>
u'\x8a' # 0x8A -> <control>
u'\x8b' # 0x8B -> <control>
u'\x8c' # 0x8C -> <control>
u'\x8d' # 0x8D -> <control>
u'\x8e' # 0x8E -> <control>
u'\x8f' # 0x8F -> <control>
u'\x90' # 0x90 -> <control>
u'\x91' # 0x91 -> <control>
u'\x92' # 0x92 -> <control>
u'\x93' # 0x93 -> <control>
u'\x94' # 0x94 -> <control>
u'\x95' # 0x95 -> <control>
u'\x96' # 0x96 -> <control>
u'\x97' # 0x97 -> <control>
u'\x98' # 0x98 -> <control>
u'\x99' # 0x99 -> <control>
u'\x9a' # 0x9A -> <control>
u'\x9b' # 0x9B -> <control>
u'\x9c' # 0x9C -> <control>
u'\x9d' # 0x9D -> <control>
u'\x9e' # 0x9E -> <control>
u'\x9f' # 0x9F -> <control>
u'\xa0' # 0xA0 -> NO-BREAK SPACE
u'\u0104' # 0xA1 -> LATIN CAPITAL LETTER A WITH OGONEK
u'\u0105' # 0xA2 -> LATIN SMALL LETTER A WITH OGONEK
u'\u0141' # 0xA3 -> LATIN CAPITAL LETTER L WITH STROKE
u'\u20ac' # 0xA4 -> EURO SIGN
u'\u201e' # 0xA5 -> DOUBLE LOW-9 QUOTATION MARK
u'\u0160' # 0xA6 -> LATIN CAPITAL LETTER S WITH CARON
u'\xa7' # 0xA7 -> SECTION SIGN
u'\u0161' # 0xA8 -> LATIN SMALL LETTER S WITH CARON
u'\xa9' # 0xA9 -> COPYRIGHT SIGN
u'\u0218' # 0xAA -> LATIN CAPITAL LETTER S WITH COMMA BELOW
u'\xab' # 0xAB -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\u0179' # 0xAC -> LATIN CAPITAL LETTER Z WITH ACUTE
u'\xad' # 0xAD -> SOFT HYPHEN
u'\u017a' # 0xAE -> LATIN SMALL LETTER Z WITH ACUTE
u'\u017b' # 0xAF -> LATIN CAPITAL LETTER Z WITH DOT ABOVE
u'\xb0' # 0xB0 -> DEGREE SIGN
u'\xb1' # 0xB1 -> PLUS-MINUS SIGN
u'\u010c' # 0xB2 -> LATIN CAPITAL LETTER C WITH CARON
u'\u0142' # 0xB3 -> LATIN SMALL LETTER L WITH STROKE
u'\u017d' # 0xB4 -> LATIN CAPITAL LETTER Z WITH CARON
u'\u201d' # 0xB5 -> RIGHT DOUBLE QUOTATION MARK
u'\xb6' # 0xB6 -> PILCROW SIGN
u'\xb7' # 0xB7 -> MIDDLE DOT
u'\u017e' # 0xB8 -> LATIN SMALL LETTER Z WITH CARON
u'\u010d' # 0xB9 -> LATIN SMALL LETTER C WITH CARON
u'\u0219' # 0xBA -> LATIN SMALL LETTER S WITH COMMA BELOW
u'\xbb' # 0xBB -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\u0152' # 0xBC -> LATIN CAPITAL LIGATURE OE
u'\u0153' # 0xBD -> LATIN SMALL LIGATURE OE
u'\u0178' # 0xBE -> LATIN CAPITAL LETTER Y WITH DIAERESIS
u'\u017c' # 0xBF -> LATIN SMALL LETTER Z WITH DOT ABOVE
u'\xc0' # 0xC0 -> LATIN CAPITAL LETTER A WITH GRAVE
u'\xc1' # 0xC1 -> LATIN CAPITAL LETTER A WITH ACUTE
u'\xc2' # 0xC2 -> LATIN CAPITAL LETTER A WITH CIRCUMFLEX
u'\u0102' # 0xC3 -> LATIN CAPITAL LETTER A WITH BREVE
u'\xc4' # 0xC4 -> LATIN CAPITAL LETTER A WITH DIAERESIS
u'\u0106' # 0xC5 -> LATIN CAPITAL LETTER C WITH ACUTE
u'\xc6' # 0xC6 -> LATIN CAPITAL LETTER AE
u'\xc7' # 0xC7 -> LATIN CAPITAL LETTER C WITH CEDILLA
u'\xc8' # 0xC8 -> LATIN CAPITAL LETTER E WITH GRAVE
u'\xc9' # 0xC9 -> LATIN CAPITAL LETTER E WITH ACUTE
u'\xca' # 0xCA -> LATIN CAPITAL LETTER E WITH CIRCUMFLEX
u'\xcb' # 0xCB -> LATIN CAPITAL LETTER E WITH DIAERESIS
u'\xcc' # 0xCC -> LATIN CAPITAL LETTER I WITH GRAVE
u'\xcd' # 0xCD -> LATIN CAPITAL LETTER I WITH ACUTE
u'\xce' # 0xCE -> LATIN CAPITAL LETTER I WITH CIRCUMFLEX
u'\xcf' # 0xCF -> LATIN CAPITAL LETTER I WITH DIAERESIS
u'\u0110' # 0xD0 -> LATIN CAPITAL LETTER D WITH STROKE
u'\u0143' # 0xD1 -> LATIN CAPITAL LETTER N WITH ACUTE
u'\xd2' # 0xD2 -> LATIN CAPITAL LETTER O WITH GRAVE
u'\xd3' # 0xD3 -> LATIN CAPITAL LETTER O WITH ACUTE
u'\xd4' # 0xD4 -> LATIN CAPITAL LETTER O WITH CIRCUMFLEX
u'\u0150' # 0xD5 -> LATIN CAPITAL LETTER O WITH DOUBLE ACUTE
u'\xd6' # 0xD6 -> LATIN CAPITAL LETTER O WITH DIAERESIS
u'\u015a' # 0xD7 -> LATIN CAPITAL LETTER S WITH ACUTE
u'\u0170' # 0xD8 -> LATIN CAPITAL LETTER U WITH DOUBLE ACUTE
u'\xd9' # 0xD9 -> LATIN CAPITAL LETTER U WITH GRAVE
u'\xda' # 0xDA -> LATIN CAPITAL LETTER U WITH ACUTE
u'\xdb' # 0xDB -> LATIN CAPITAL LETTER U WITH CIRCUMFLEX
u'\xdc' # 0xDC -> LATIN CAPITAL LETTER U WITH DIAERESIS
u'\u0118' # 0xDD -> LATIN CAPITAL LETTER E WITH OGONEK
u'\u021a' # 0xDE -> LATIN CAPITAL LETTER T WITH COMMA BELOW
u'\xdf' # 0xDF -> LATIN SMALL LETTER SHARP S
u'\xe0' # 0xE0 -> LATIN SMALL LETTER A WITH GRAVE
u'\xe1' # 0xE1 -> LATIN SMALL LETTER A WITH ACUTE
u'\xe2' # 0xE2 -> LATIN SMALL LETTER A WITH CIRCUMFLEX
u'\u0103' # 0xE3 -> LATIN SMALL LETTER A WITH BREVE
u'\xe4' # 0xE4 -> LATIN SMALL LETTER A WITH DIAERESIS
u'\u0107' # 0xE5 -> LATIN SMALL LETTER C WITH ACUTE
u'\xe6' # 0xE6 -> LATIN SMALL LETTER AE
u'\xe7' # 0xE7 -> LATIN SMALL LETTER C WITH CEDILLA
u'\xe8' # 0xE8 -> LATIN SMALL LETTER E WITH GRAVE
u'\xe9' # 0xE9 -> LATIN SMALL LETTER E WITH ACUTE
u'\xea' # 0xEA -> LATIN SMALL LETTER E WITH CIRCUMFLEX
u'\xeb' # 0xEB -> LATIN SMALL LETTER E WITH DIAERESIS
u'\xec' # 0xEC -> LATIN SMALL LETTER I WITH GRAVE
u'\xed' # 0xED -> LATIN SMALL LETTER I WITH ACUTE
u'\xee' # 0xEE -> LATIN SMALL LETTER I WITH CIRCUMFLEX
u'\xef' # 0xEF -> LATIN SMALL LETTER I WITH DIAERESIS
u'\u0111' # 0xF0 -> LATIN SMALL LETTER D WITH STROKE
u'\u0144' # 0xF1 -> LATIN SMALL LETTER N WITH ACUTE
u'\xf2' # 0xF2 -> LATIN SMALL LETTER O WITH GRAVE
u'\xf3' # 0xF3 -> LATIN SMALL LETTER O WITH ACUTE
u'\xf4' # 0xF4 -> LATIN SMALL LETTER O WITH CIRCUMFLEX
u'\u0151' # 0xF5 -> LATIN SMALL LETTER O WITH DOUBLE ACUTE
u'\xf6' # 0xF6 -> LATIN SMALL LETTER O WITH DIAERESIS
u'\u015b' # 0xF7 -> LATIN SMALL LETTER S WITH ACUTE
u'\u0171' # 0xF8 -> LATIN SMALL LETTER U WITH DOUBLE ACUTE
u'\xf9' # 0xF9 -> LATIN SMALL LETTER U WITH GRAVE
u'\xfa' # 0xFA -> LATIN SMALL LETTER U WITH ACUTE
u'\xfb' # 0xFB -> LATIN SMALL LETTER U WITH CIRCUMFLEX
u'\xfc' # 0xFC -> LATIN SMALL LETTER U WITH DIAERESIS
u'\u0119' # 0xFD -> LATIN SMALL LETTER E WITH OGONEK
u'\u021b' # 0xFE -> LATIN SMALL LETTER T WITH COMMA BELOW
u'\xff' # 0xFF -> LATIN SMALL LETTER Y WITH DIAERESIS
)
### Encoding table
encoding_table=codecs.charmap_build(decoding_table)
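### Usage sketch
# Round-trip check against this module's own tables: 0xA4 maps to the
# euro sign in ISO 8859-16 (see the table entries above).
if __name__ == '__main__':
    assert codecs.charmap_decode('\xa4', 'strict', decoding_table)[0] == u'\u20ac'
    assert codecs.charmap_encode(u'\u20ac', 'strict', encoding_table)[0] == '\xa4'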
| apache-2.0 |
supermeng/console | apis/utils.py | 1 | 7361 | # -*- coding: utf-8
import requests
import tarfile
import yaml
import json
import subprocess
import re
from retrying import retry
from time import gmtime, strftime
import docker
from docker.types import IPAMConfig, IPAMPool
from cStringIO import StringIO
from commons.miscs import NoAvailableImages
import commons.utils
from commons.settings import (PRIVATE_REGISTRY, DOCKER_BASE_URL, DEBUG,
ETCD_AUTHORITY, CALICO_NETWORK,
SYSTEM_VOLUMES, CALICO_RULE_KEY,
DOMAIN, EXTRA_DOMAINS)
from log import logger
from .calico import (calico_profile_rule_add)
import pycalico.datastore_datatypes
from django.utils.dateparse import parse_datetime
from pytz import timezone
def read_from_etcd(key):
return commons.utils.read_from_etcd(key, ETCD_AUTHORITY)
def set_value_to_etcd(key, value):
return commons.utils.set_value_to_etcd(key, value, ETCD_AUTHORITY)
def delete_from_etcd(key, recursive=False, dir=False):
return commons.utils.delete_from_etcd(key, ETCD_AUTHORITY, recursive=recursive, dir=dir)
def get_domains():
return [DOMAIN] + EXTRA_DOMAINS
VALID_TAG_PATTERN = re.compile(r"^(meta)-(?P<meta_version>\S+-\S+)$")
def get_meta_version_from_tag(tag):
if tag is None:
return None
    x = VALID_TAG_PATTERN.match(tag)
if x:
return x.group('meta_version')
else:
return None
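# Tag-convention sketch (tag values hypothetical): images are tagged
# 'meta-<version>-<hash>' and everything after 'meta-' is the meta
# version; tags that don't match the pattern yield None.
if __name__ == '__main__':
    assert get_meta_version_from_tag('meta-1489817191-abcdef0') == '1489817191-abcdef0'
    assert get_meta_version_from_tag('release-1489817191-abcdef0') is None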
def calico_rule(rule):
c_rule = pycalico.datastore_datatypes.Rule()
for k, v in rule.iteritems():
c_rule[k] = v
return c_rule
def get_calico_default_rules():
inbound_rules, outbound_rules = [], []
try:
etcd_result = read_from_etcd(CALICO_RULE_KEY)
calico_rules = json.loads(
etcd_result.value) # pylint: disable=no-member
for rule in calico_rules["outbound_rules"]:
outbound_rules.append(calico_rule(rule))
for rule in calico_rules["inbound_rules"]:
inbound_rules.append(calico_rule(rule))
except Exception, e:
logger.error("error parsing calico default rule : %s" % str(e))
return [], []
return outbound_rules, inbound_rules
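# For reference, the etcd value at CALICO_RULE_KEY is expected to be JSON
# shaped like (rule fields hypothetical):
#   {"inbound_rules":  [{"action": "allow", "protocol": "tcp"}],
#    "outbound_rules": [{"action": "allow"}]}
# Each rule dict is copied field-by-field onto a pycalico Rule above.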
def add_calico_profile_for_app(calico_profile):
if not docker_network_exists(calico_profile):
logger.info("ready creating docker network for profile %s" %
calico_profile)
docker_network_add(calico_profile)
outbound_rules, inbound_rules = get_calico_default_rules()
for rule in reversed(outbound_rules):
calico_profile_rule_add(calico_profile, "outbound_rules", rule)
for rule in reversed(inbound_rules):
calico_profile_rule_add(calico_profile, "inbound_rules", rule)
return True
return False
def get_docker_client(docker_base_url):
return docker.DockerClient(base_url=docker_base_url).api
def normalize_meta_version(meta_version):
return meta_version.replace("meta-", "").replace("build-", "").replace("release-", "")
def gen_image_name(app, meta_version, phase='meta', registry=None):
if not registry:
registry = PRIVATE_REGISTRY
return "%s/%s:%s-%s" % (registry, app, phase, meta_version)
def _is_registry_auth_open(registry=None):
if not registry:
registry = PRIVATE_REGISTRY
url = "http://%s/v2/" % registry
r = requests.get(url)
if r.status_code == 401:
return True
else:
return False
def _get_registry_access_header(app, registry):
if _is_registry_auth_open(registry):
from authorize.models import Authorize
jwt = Authorize.get_jwt_with_appname(app)
header = {'Authorization': 'Bearer %s' % jwt}
else:
header = {}
return header
def search_images_from_registry(app, registry=None):
if not registry:
registry = PRIVATE_REGISTRY
url = "http://%s/v2/%s/tags/list" % (registry, app)
header = _get_registry_access_header(app, registry)
r = requests.get(url, headers=header)
if r.status_code != 200:
raise NoAvailableImages("no images here: %s" % url)
else:
return r.json()
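# For reference, the registry's /v2/<app>/tags/list endpoint returns JSON
# shaped like {"name": "<app>", "tags": ["meta-...", "release-..."]}
# (tag values hypothetical); a 401 means token auth is enabled and the
# Authorization header built above is required.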
def get_meta_from_registry(app, meta_version, registry=None):
logger.debug("ready get meta version %s for app %s from registry" %
(meta_version, app))
meta_version = normalize_meta_version(meta_version)
if not registry:
registry = PRIVATE_REGISTRY
try:
y = None
c = None
cli = None
cli = get_docker_client(DOCKER_BASE_URL)
        # TODO check if the image already exists
cli.pull(
repository="%s/%s" % (registry, app),
tag="meta-%s" % (meta_version, ),
insecure_registry=True
)
image = "%s/%s:meta-%s" % (registry, app, meta_version)
command = '/bin/sleep 0.1'
c = cli.create_container(image=image, command=command)
r = cli.get_archive(container=c.get('Id'), path='/lain.yaml')
tar = tarfile.open(fileobj=StringIO(r[0].data))
f = tar.extractfile('lain.yaml')
y = yaml.safe_load(f.read())
except Exception, e:
logger.error("fail get yaml from %s %s: %s" % (app, meta_version, e))
raise Exception("fail get yaml from %s %s: %s" %
(app, meta_version, e))
finally:
if cli and isinstance(c, dict) and c.get('Id'):
cli.remove_container(container=c.get('Id'), v=True)
return y
def shell(cmd):
    """Run a shell command and return (retcode, output)."""
    try:
        output = subprocess.check_output(
            cmd, stderr=subprocess.STDOUT, shell=True)
        return (0, output)
    except subprocess.CalledProcessError as e:
        # Non-zero exit: return the exit code and the captured output
        # instead of swallowing the error in a bare except/finally.
        return (e.returncode, e.output)
class CalicoException(Exception):
pass
def docker_network_exists(name):
cli = get_docker_client(DOCKER_BASE_URL)
try:
cli.inspect_network(name)
except docker.errors.APIError as e:
# Forward compatibility for some exceptions raise.
# Fixed bug for docker 2.1.0 e.status_code (ugly)
if e.response is not None and e.response.status_code == 404:
return False
raise e
return True
def docker_network_add(name):
cli = get_docker_client(DOCKER_BASE_URL)
ipam_pool = IPAMPool(subnet=CALICO_NETWORK)
ipam_config = IPAMConfig(driver="calico-ipam", pool_configs=[ipam_pool])
result = cli.create_network(name, driver="calico", ipam=ipam_config)
logger.info("create docker network for app %s : %s" % (name, result))
def docker_network_remove(name):
cli = get_docker_client(DOCKER_BASE_URL)
cli.remove_network(name)
def get_system_volumes_from_etcd(appname):
return SYSTEM_VOLUMES.get(appname, [])
def get_current_time():
return strftime("%Y-%m-%d %H:%M:%S", gmtime())
def orc_convert_time_from_deployd(d_time):
c_times = d_time.split("T")
if len(c_times) <= 1:
return d_time
else:
return "%s %s" % (c_times[0], c_times[1].split('.')[0])
def convert_time_from_deployd(d_time):
t_time = parse_datetime(d_time)
tzchina = timezone('Asia/Shanghai')
utc = timezone('UTC')
t_time = t_time.replace(tzinfo=utc).astimezone(tzchina)
try:
return t_time.strftime('%Y-%m-%d %H:%M:%S')
except Exception as e:
logger.error("strftime error:%s d_time:%s", str(e), d_time)
return orc_convert_time_from_deployd(d_time)
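# Conversion sketch (timestamp hypothetical): deployd reports UTC
# ISO-8601 strings, which are shifted to Asia/Shanghai (UTC+8).
if __name__ == '__main__':
    print(convert_time_from_deployd('2017-03-01T04:00:00.000000Z'))
    # expected: '2017-03-01 12:00:00'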
| mit |
fivejjs/GPy | GPy/testing/likelihood_tests.py | 4 | 30412 | # Copyright (c) 2014, Alan Saul
# Licensed under the BSD 3-clause license (see LICENSE.txt)
import numpy as np
import unittest
import GPy
from GPy.models import GradientChecker
import functools
import inspect
from GPy.likelihoods import link_functions
from GPy.core.parameterization import Param
from functools import partial
#np.random.seed(300)
#np.random.seed(7)
#np.seterr(divide='raise')
def dparam_partial(inst_func, *args):
"""
    If we have an instance method that needs to be called but that doesn't
    take the parameter we wish to vary for checkgrad, then this function
    will change the parameter directly on the instance.
    inst_func: should be an instance method of the object that we would like
    to change
    param: the parameter value that will be set on the instance
args: anything else that needs to be given to the function (for example
the f or Y that are being used in the function whilst we tweak the
param
"""
def param_func(param_val, param_name, inst_func, args):
#inst_func.im_self._set_params(param)
#inst_func.im_self.add_parameter(Param(param_name, param_val))
inst_func.im_self[param_name] = param_val
return inst_func(*args)
return functools.partial(param_func, inst_func=inst_func, args=args)
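# Self-contained sketch of dparam_partial's plumbing, using a stub object
# in place of a real GPy likelihood (all names and values hypothetical).
if __name__ == '__main__':
    class _Stub(object):
        scale = 1.0
        def __setitem__(self, name, val):
            setattr(self, name, val)
        def logpdf(self, f, y):
            return self.scale * (f - y)
    _pf = dparam_partial(_Stub().logpdf, 2.0, 1.0)
    # set 'scale' to 3.0, then evaluate logpdf(2.0, 1.0) == 3.0 * 1.0
    assert _pf(3.0, 'scale') == 3.0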
def dparam_checkgrad(func, dfunc, params, params_names, args, constraints=None, randomize=False, verbose=False):
"""
checkgrad expects a f: R^N -> R^1 and df: R^N -> R^N
However if we are holding other parameters fixed and moving something else
We need to check the gradient of each of the fixed parameters
    (f and y for example) separately, whilst moving another parameter.
Otherwise f: gives back R^N and
df: gives back R^NxM where M is
The number of parameters and N is the number of data
Need to take a slice out from f and a slice out of df
"""
print "\n{} likelihood: {} vs {}".format(func.im_self.__class__.__name__,
func.__name__, dfunc.__name__)
partial_f = dparam_partial(func, *args)
partial_df = dparam_partial(dfunc, *args)
gradchecking = True
zipped_params = zip(params, params_names)
for param_ind, (param_val, param_name) in enumerate(zipped_params):
#Check one parameter at a time, make sure it is 2d (as some gradients only return arrays) then strip out the parameter
fnum = np.atleast_2d(partial_f(param_val, param_name))[:, param_ind].shape[0]
dfnum = np.atleast_2d(partial_df(param_val, param_name))[:, param_ind].shape[0]
for fixed_val in range(dfnum):
#dlik and dlik_dvar gives back 1 value for each
f_ind = min(fnum, fixed_val+1) - 1
print "fnum: {} dfnum: {} f_ind: {} fixed_val: {}".format(fnum, dfnum, f_ind, fixed_val)
#Make grad checker with this param moving, note that set_params is NOT being called
            #The parameter is being set directly with __setitem__
#Check only the parameter and function value we wish to check at a time
grad = GradientChecker(lambda p_val: np.atleast_2d(partial_f(p_val, param_name))[f_ind, param_ind],
lambda p_val: np.atleast_2d(partial_df(p_val, param_name))[fixed_val, param_ind],
param_val, [param_name])
if constraints is not None:
for constrain_param, constraint in constraints:
if grad.grep_param_names(constrain_param):
constraint(constrain_param, grad)
else:
print "parameter didn't exist"
print constrain_param, " ", constraint
if randomize:
grad.randomize()
if verbose:
print grad
grad.checkgrad(verbose=1)
if not grad.checkgrad(verbose=True):
gradchecking = False
return gradchecking
from nose.tools import with_setup
class TestNoiseModels(object):
"""
Generic model checker
"""
def setUp(self):
self.N = 15
self.D = 3
self.X = np.random.rand(self.N, self.D)*10
self.real_std = 0.1
noise = np.random.randn(*self.X[:, 0].shape)*self.real_std
self.Y = (np.sin(self.X[:, 0]*2*np.pi) + noise)[:, None]
self.f = np.random.rand(self.N, 1)
self.binary_Y = np.asarray(np.random.rand(self.N) > 0.5, dtype=np.int)[:, None]
self.positive_Y = np.exp(self.Y.copy())
tmp = np.round(self.X[:, 0]*3-3)[:, None] + np.random.randint(0,3, self.X.shape[0])[:, None]
self.integer_Y = np.where(tmp > 0, tmp, 0)
self.var = 0.2
self.var = np.random.rand(1)
#Make a bigger step as lower bound can be quite curved
self.step = 1e-4
def tearDown(self):
self.Y = None
self.f = None
self.X = None
def test_scale2_models(self):
self.setUp()
####################################################
# Constraint wrappers so we can just list them off #
####################################################
def constrain_fixed(regex, model):
model[regex].constrain_fixed()
def constrain_negative(regex, model):
model[regex].constrain_negative()
def constrain_positive(regex, model):
model[regex].constrain_positive()
def constrain_bounded(regex, model, lower, upper):
"""
Used like: partial(constrain_bounded, lower=0, upper=1)
"""
model[regex].constrain_bounded(lower, upper)
"""
Dictionary where we nest models we would like to check
Name: {
"model": model_instance,
"grad_params": {
"names": [names_of_params_we_want, to_grad_check],
"vals": [values_of_params, to_start_at],
"constrain": [constraint_wrappers, listed_here]
},
"laplace": boolean_of_whether_model_should_work_for_laplace,
"ep": boolean_of_whether_model_should_work_for_laplace,
"link_f_constraints": [constraint_wrappers, listed_here]
}
"""
noise_models = {"Student_t_default": {
"model": GPy.likelihoods.StudentT(deg_free=5, sigma2=self.var),
"grad_params": {
"names": [".*t_scale2"],
"vals": [self.var],
"constraints": [(".*t_scale2", constrain_positive), (".*deg_free", constrain_fixed)]
#"constraints": [("t_scale2", constrain_positive), ("deg_free", partial(constrain_fixed, value=5))]
},
"laplace": True
},
"Student_t_1_var": {
"model": GPy.likelihoods.StudentT(deg_free=5, sigma2=self.var),
"grad_params": {
"names": [".*t_scale2"],
"vals": [1.0],
"constraints": [(".*t_scale2", constrain_positive), (".*deg_free", constrain_fixed)]
},
"laplace": True
},
"Student_t_small_deg_free": {
"model": GPy.likelihoods.StudentT(deg_free=1.5, sigma2=self.var),
"grad_params": {
"names": [".*t_scale2"],
"vals": [self.var],
"constraints": [(".*t_scale2", constrain_positive), (".*deg_free", constrain_fixed)]
},
"laplace": True
},
"Student_t_small_var": {
"model": GPy.likelihoods.StudentT(deg_free=5, sigma2=self.var),
"grad_params": {
"names": [".*t_scale2"],
"vals": [0.001],
"constraints": [(".*t_scale2", constrain_positive), (".*deg_free", constrain_fixed)]
},
"laplace": True
},
"Student_t_large_var": {
"model": GPy.likelihoods.StudentT(deg_free=5, sigma2=self.var),
"grad_params": {
"names": [".*t_scale2"],
"vals": [10.0],
"constraints": [(".*t_scale2", constrain_positive), (".*deg_free", constrain_fixed)]
},
"laplace": True
},
"Student_t_approx_gauss": {
"model": GPy.likelihoods.StudentT(deg_free=1000, sigma2=self.var),
"grad_params": {
"names": [".*t_scale2"],
"vals": [self.var],
"constraints": [(".*t_scale2", constrain_positive), (".*deg_free", constrain_fixed)]
},
"laplace": True
},
"Student_t_log": {
"model": GPy.likelihoods.StudentT(gp_link=link_functions.Log(), deg_free=5, sigma2=self.var),
"grad_params": {
"names": [".*t_scale2"],
"vals": [self.var],
"constraints": [(".*t_scale2", constrain_positive), (".*deg_free", constrain_fixed)]
},
"laplace": True
},
"Gaussian_default": {
"model": GPy.likelihoods.Gaussian(variance=self.var),
"grad_params": {
"names": [".*variance"],
"vals": [self.var],
"constraints": [(".*variance", constrain_positive)]
},
"laplace": True,
"ep": False # FIXME: Should be True when we have it working again
},
#"Gaussian_log": {
#"model": GPy.likelihoods.gaussian(gp_link=link_functions.Log(), variance=self.var, D=self.D, N=self.N),
#"grad_params": {
#"names": ["noise_model_variance"],
#"vals": [self.var],
#"constraints": [constrain_positive]
#},
#"laplace": True
#},
#"Gaussian_probit": {
#"model": GPy.likelihoods.gaussian(gp_link=link_functions.Probit(), variance=self.var, D=self.D, N=self.N),
#"grad_params": {
#"names": ["noise_model_variance"],
#"vals": [self.var],
#"constraints": [constrain_positive]
#},
#"laplace": True
#},
#"Gaussian_log_ex": {
#"model": GPy.likelihoods.gaussian(gp_link=link_functions.Log_ex_1(), variance=self.var, D=self.D, N=self.N),
#"grad_params": {
#"names": ["noise_model_variance"],
#"vals": [self.var],
#"constraints": [constrain_positive]
#},
#"laplace": True
#},
"Bernoulli_default": {
"model": GPy.likelihoods.Bernoulli(),
"link_f_constraints": [partial(constrain_bounded, lower=0, upper=1)],
"laplace": True,
"Y": self.binary_Y,
"ep": False # FIXME: Should be True when we have it working again
},
"Exponential_default": {
"model": GPy.likelihoods.Exponential(),
"link_f_constraints": [constrain_positive],
"Y": self.positive_Y,
"laplace": True,
},
"Poisson_default": {
"model": GPy.likelihoods.Poisson(),
"link_f_constraints": [constrain_positive],
"Y": self.integer_Y,
"laplace": True,
"ep": False #Should work though...
}#,
#GAMMA needs some work!"Gamma_default": {
#"model": GPy.likelihoods.Gamma(),
#"link_f_constraints": [constrain_positive],
#"Y": self.positive_Y,
#"laplace": True
#}
}
for name, attributes in noise_models.iteritems():
model = attributes["model"]
if "grad_params" in attributes:
params = attributes["grad_params"]
param_vals = params["vals"]
param_names= params["names"]
param_constraints = params["constraints"]
else:
params = []
param_vals = []
param_names = []
constrain_positive = []
param_constraints = [] # ??? TODO: Saul to Fix.
if "link_f_constraints" in attributes:
link_f_constraints = attributes["link_f_constraints"]
else:
link_f_constraints = []
if "Y" in attributes:
Y = attributes["Y"].copy()
else:
Y = self.Y.copy()
if "f" in attributes:
f = attributes["f"].copy()
else:
f = self.f.copy()
if "laplace" in attributes:
laplace = attributes["laplace"]
else:
laplace = False
if "ep" in attributes:
ep = attributes["ep"]
else:
ep = False
#if len(param_vals) > 1:
#raise NotImplementedError("Cannot support multiple params in likelihood yet!")
#Required by all
#Normal derivatives
yield self.t_logpdf, model, Y, f
yield self.t_dlogpdf_df, model, Y, f
yield self.t_d2logpdf_df2, model, Y, f
#Link derivatives
yield self.t_dlogpdf_dlink, model, Y, f, link_f_constraints
yield self.t_d2logpdf_dlink2, model, Y, f, link_f_constraints
if laplace:
#Laplace only derivatives
yield self.t_d3logpdf_df3, model, Y, f
yield self.t_d3logpdf_dlink3, model, Y, f, link_f_constraints
#Params
yield self.t_dlogpdf_dparams, model, Y, f, param_vals, param_names, param_constraints
yield self.t_dlogpdf_df_dparams, model, Y, f, param_vals, param_names, param_constraints
yield self.t_d2logpdf2_df2_dparams, model, Y, f, param_vals, param_names, param_constraints
#Link params
yield self.t_dlogpdf_link_dparams, model, Y, f, param_vals, param_names, param_constraints
yield self.t_dlogpdf_dlink_dparams, model, Y, f, param_vals, param_names, param_constraints
yield self.t_d2logpdf2_dlink2_dparams, model, Y, f, param_vals, param_names, param_constraints
#laplace likelihood gradcheck
yield self.t_laplace_fit_rbf_white, model, self.X, Y, f, self.step, param_vals, param_names, param_constraints
if ep:
#ep likelihood gradcheck
yield self.t_ep_fit_rbf_white, model, self.X, Y, f, self.step, param_vals, param_names, param_constraints
self.tearDown()
#############
# dpdf_df's #
#############
@with_setup(setUp, tearDown)
def t_logpdf(self, model, Y, f):
print "\n{}".format(inspect.stack()[0][3])
print model
#print model._get_params()
np.testing.assert_almost_equal(
model.pdf(f.copy(), Y.copy()).prod(),
np.exp(model.logpdf(f.copy(), Y.copy()).sum())
)
@with_setup(setUp, tearDown)
def t_dlogpdf_df(self, model, Y, f):
print "\n{}".format(inspect.stack()[0][3])
self.description = "\n{}".format(inspect.stack()[0][3])
logpdf = functools.partial(model.logpdf, y=Y)
dlogpdf_df = functools.partial(model.dlogpdf_df, y=Y)
grad = GradientChecker(logpdf, dlogpdf_df, f.copy(), 'g')
grad.randomize()
print model
assert grad.checkgrad(verbose=1)
@with_setup(setUp, tearDown)
def t_d2logpdf_df2(self, model, Y, f):
print "\n{}".format(inspect.stack()[0][3])
dlogpdf_df = functools.partial(model.dlogpdf_df, y=Y)
d2logpdf_df2 = functools.partial(model.d2logpdf_df2, y=Y)
grad = GradientChecker(dlogpdf_df, d2logpdf_df2, f.copy(), 'g')
grad.randomize()
print model
assert grad.checkgrad(verbose=1)
@with_setup(setUp, tearDown)
def t_d3logpdf_df3(self, model, Y, f):
print "\n{}".format(inspect.stack()[0][3])
d2logpdf_df2 = functools.partial(model.d2logpdf_df2, y=Y)
d3logpdf_df3 = functools.partial(model.d3logpdf_df3, y=Y)
grad = GradientChecker(d2logpdf_df2, d3logpdf_df3, f.copy(), 'g')
grad.randomize()
print model
assert grad.checkgrad(verbose=1)
##############
# df_dparams #
##############
@with_setup(setUp, tearDown)
def t_dlogpdf_dparams(self, model, Y, f, params, params_names, param_constraints):
print "\n{}".format(inspect.stack()[0][3])
print model
assert (
dparam_checkgrad(model.logpdf, model.dlogpdf_dtheta,
params, params_names, args=(f, Y), constraints=param_constraints,
randomize=False, verbose=True)
)
@with_setup(setUp, tearDown)
def t_dlogpdf_df_dparams(self, model, Y, f, params, params_names, param_constraints):
print "\n{}".format(inspect.stack()[0][3])
print model
assert (
dparam_checkgrad(model.dlogpdf_df, model.dlogpdf_df_dtheta,
params, params_names, args=(f, Y), constraints=param_constraints,
randomize=False, verbose=True)
)
@with_setup(setUp, tearDown)
def t_d2logpdf2_df2_dparams(self, model, Y, f, params, params_names, param_constraints):
print "\n{}".format(inspect.stack()[0][3])
print model
assert (
dparam_checkgrad(model.d2logpdf_df2, model.d2logpdf_df2_dtheta,
params, params_names, args=(f, Y), constraints=param_constraints,
randomize=False, verbose=True)
)
################
# dpdf_dlink's #
################
@with_setup(setUp, tearDown)
def t_dlogpdf_dlink(self, model, Y, f, link_f_constraints):
print "\n{}".format(inspect.stack()[0][3])
logpdf = functools.partial(model.logpdf_link, y=Y)
dlogpdf_dlink = functools.partial(model.dlogpdf_dlink, y=Y)
grad = GradientChecker(logpdf, dlogpdf_dlink, f.copy(), 'g')
#Apply constraints to link_f values
for constraint in link_f_constraints:
constraint('g', grad)
grad.randomize()
print grad
print model
assert grad.checkgrad(verbose=1)
@with_setup(setUp, tearDown)
def t_d2logpdf_dlink2(self, model, Y, f, link_f_constraints):
print "\n{}".format(inspect.stack()[0][3])
dlogpdf_dlink = functools.partial(model.dlogpdf_dlink, y=Y)
d2logpdf_dlink2 = functools.partial(model.d2logpdf_dlink2, y=Y)
grad = GradientChecker(dlogpdf_dlink, d2logpdf_dlink2, f.copy(), 'g')
#Apply constraints to link_f values
for constraint in link_f_constraints:
constraint('g', grad)
grad.randomize()
print grad
print model
assert grad.checkgrad(verbose=1)
@with_setup(setUp, tearDown)
def t_d3logpdf_dlink3(self, model, Y, f, link_f_constraints):
print "\n{}".format(inspect.stack()[0][3])
d2logpdf_dlink2 = functools.partial(model.d2logpdf_dlink2, y=Y)
d3logpdf_dlink3 = functools.partial(model.d3logpdf_dlink3, y=Y)
grad = GradientChecker(d2logpdf_dlink2, d3logpdf_dlink3, f.copy(), 'g')
#Apply constraints to link_f values
for constraint in link_f_constraints:
constraint('g', grad)
grad.randomize()
print grad
print model
assert grad.checkgrad(verbose=1)
#################
# dlink_dparams #
#################
@with_setup(setUp, tearDown)
def t_dlogpdf_link_dparams(self, model, Y, f, params, param_names, param_constraints):
print "\n{}".format(inspect.stack()[0][3])
print model
assert (
dparam_checkgrad(model.logpdf_link, model.dlogpdf_link_dtheta,
params, param_names, args=(f, Y), constraints=param_constraints,
randomize=False, verbose=True)
)
@with_setup(setUp, tearDown)
def t_dlogpdf_dlink_dparams(self, model, Y, f, params, param_names, param_constraints):
print "\n{}".format(inspect.stack()[0][3])
print model
assert (
dparam_checkgrad(model.dlogpdf_dlink, model.dlogpdf_dlink_dtheta,
params, param_names, args=(f, Y), constraints=param_constraints,
randomize=False, verbose=True)
)
@with_setup(setUp, tearDown)
def t_d2logpdf2_dlink2_dparams(self, model, Y, f, params, param_names, param_constraints):
print "\n{}".format(inspect.stack()[0][3])
print model
assert (
dparam_checkgrad(model.d2logpdf_dlink2, model.d2logpdf_dlink2_dtheta,
params, param_names, args=(f, Y), constraints=param_constraints,
randomize=False, verbose=True)
)
################
# laplace test #
################
@with_setup(setUp, tearDown)
def t_laplace_fit_rbf_white(self, model, X, Y, f, step, param_vals, param_names, constraints):
print "\n{}".format(inspect.stack()[0][3])
#Normalize
Y = Y/Y.max()
white_var = 1e-6
kernel = GPy.kern.RBF(X.shape[1]) + GPy.kern.White(X.shape[1])
laplace_likelihood = GPy.inference.latent_function_inference.Laplace()
m = GPy.core.GP(X.copy(), Y.copy(), kernel, likelihood=model, inference_method=laplace_likelihood)
m['.*white'].constrain_fixed(white_var)
#Set constraints
for constrain_param, constraint in constraints:
constraint(constrain_param, m)
print m
m.randomize()
#Set params
for param_num in range(len(param_names)):
name = param_names[param_num]
m[name] = param_vals[param_num]
#m.optimize(max_iters=8)
print m
#if not m.checkgrad(step=step):
#m.checkgrad(verbose=1, step=step)
#NOTE this test appears to be stochastic for some likelihoods (student t?)
# appears to all be working in test mode right now...
#if isinstance(model, GPy.likelihoods.StudentT):
assert m.checkgrad(verbose=1, step=step)
###########
# EP test #
###########
@with_setup(setUp, tearDown)
def t_ep_fit_rbf_white(self, model, X, Y, f, step, param_vals, param_names, constraints):
print "\n{}".format(inspect.stack()[0][3])
#Normalize
Y = Y/Y.max()
white_var = 1e-6
kernel = GPy.kern.RBF(X.shape[1]) + GPy.kern.White(X.shape[1])
ep_inf = GPy.inference.latent_function_inference.EP()
m = GPy.core.GP(X.copy(), Y.copy(), kernel=kernel, likelihood=model, inference_method=ep_inf)
m['.*white'].constrain_fixed(white_var)
for param_num in range(len(param_names)):
name = param_names[param_num]
m[name] = param_vals[param_num]
constraints[param_num](name, m)
m.randomize()
print m
assert m.checkgrad(verbose=1, step=step)
class LaplaceTests(unittest.TestCase):
"""
Specific likelihood tests, not general enough for the above tests
"""
def setUp(self):
self.N = 5
self.D = 3
self.X = np.random.rand(self.N, self.D)*10
self.real_std = 0.1
noise = np.random.randn(*self.X[:, 0].shape)*self.real_std
self.Y = (np.sin(self.X[:, 0]*2*np.pi) + noise)[:, None]
self.f = np.random.rand(self.N, 1)
self.var = 0.2
self.var = np.random.rand(1)
self.stu_t = GPy.likelihoods.StudentT(deg_free=5, sigma2=self.var)
        #TODO: gaussians with a non-Identity link, e.g. self.gauss = GPy.likelihoods.Gaussian(gp_link=link_functions.Log(), variance=self.var)
self.gauss = GPy.likelihoods.Gaussian(variance=self.var)
#Make a bigger step as lower bound can be quite curved
self.step = 1e-6
def tearDown(self):
self.stu_t = None
self.gauss = None
self.Y = None
self.f = None
self.X = None
def test_gaussian_d2logpdf_df2_2(self):
print "\n{}".format(inspect.stack()[0][3])
self.Y = None
self.N = 2
self.D = 1
self.X = np.linspace(0, self.D, self.N)[:, None]
self.real_std = 0.2
noise = np.random.randn(*self.X.shape)*self.real_std
self.Y = np.sin(self.X*2*np.pi) + noise
self.f = np.random.rand(self.N, 1)
dlogpdf_df = functools.partial(self.gauss.dlogpdf_df, y=self.Y)
d2logpdf_df2 = functools.partial(self.gauss.d2logpdf_df2, y=self.Y)
grad = GradientChecker(dlogpdf_df, d2logpdf_df2, self.f.copy(), 'g')
grad.randomize()
self.assertTrue(grad.checkgrad(verbose=1))
def test_laplace_log_likelihood(self):
debug = False
real_std = 0.1
initial_var_guess = 0.5
#Start a function, any function
X = np.linspace(0.0, np.pi*2, 100)[:, None]
Y = np.sin(X) + np.random.randn(*X.shape)*real_std
Y = Y/Y.max()
#Yc = Y.copy()
#Yc[75:80] += 1
kernel1 = GPy.kern.RBF(X.shape[1]) + GPy.kern.White(X.shape[1])
#FIXME: Make sure you can copy kernels when params is fixed
#kernel2 = kernel1.copy()
kernel2 = GPy.kern.RBF(X.shape[1]) + GPy.kern.White(X.shape[1])
gauss_distr1 = GPy.likelihoods.Gaussian(variance=initial_var_guess)
exact_inf = GPy.inference.latent_function_inference.ExactGaussianInference()
m1 = GPy.core.GP(X, Y.copy(), kernel=kernel1, likelihood=gauss_distr1, inference_method=exact_inf)
m1['.*white'].constrain_fixed(1e-6)
m1['.*rbf.variance'] = initial_var_guess
m1['.*rbf.variance'].constrain_bounded(1e-4, 10)
m1.randomize()
gauss_distr2 = GPy.likelihoods.Gaussian(variance=initial_var_guess)
laplace_inf = GPy.inference.latent_function_inference.Laplace()
m2 = GPy.core.GP(X, Y.copy(), kernel=kernel2, likelihood=gauss_distr2, inference_method=laplace_inf)
m2['.*white'].constrain_fixed(1e-6)
m2['.*rbf.variance'].constrain_bounded(1e-4, 10)
m2.randomize()
if debug:
print m1
print m2
optimizer = 'scg'
print "Gaussian"
m1.optimize(optimizer, messages=debug)
print "Laplace Gaussian"
m2.optimize(optimizer, messages=debug)
if debug:
print m1
print m2
m2[:] = m1[:]
#Predict for training points to get posterior mean and variance
post_mean, post_var = m1.predict(X)
post_mean_approx, post_var_approx, = m2.predict(X)
if debug:
import pylab as pb
pb.figure(5)
pb.title('posterior means')
pb.scatter(X, post_mean, c='g')
pb.scatter(X, post_mean_approx, c='r', marker='x')
pb.figure(6)
pb.title('plot_f')
m1.plot_f(fignum=6)
m2.plot_f(fignum=6)
fig, axes = pb.subplots(2, 1)
            fig.suptitle('Covariance matrices')
a1 = pb.subplot(121)
a1.matshow(m1.likelihood.covariance_matrix)
a2 = pb.subplot(122)
a2.matshow(m2.likelihood.covariance_matrix)
pb.figure(8)
pb.scatter(X, m1.likelihood.Y, c='g')
pb.scatter(X, m2.likelihood.Y, c='r', marker='x')
#Check Y's are the same
np.testing.assert_almost_equal(m1.Y, m2.Y, decimal=5)
#Check marginals are the same
np.testing.assert_almost_equal(m1.log_likelihood(), m2.log_likelihood(), decimal=2)
#Check marginals are the same with random
m1.randomize()
m2[:] = m1[:]
np.testing.assert_almost_equal(m1.log_likelihood(), m2.log_likelihood(), decimal=2)
#Check they are checkgradding
#m1.checkgrad(verbose=1)
#m2.checkgrad(verbose=1)
self.assertTrue(m1.checkgrad(verbose=True))
self.assertTrue(m2.checkgrad(verbose=True))
if __name__ == "__main__":
print "Running unit tests"
unittest.main()
| bsd-3-clause |
Anveling/sp17-i524 | project/S17-IR-P013/code/weather_data_analysis/run/wda_mapper.py | 19 | 2000 | #!/usr/bin/env python
import sys
import logging
import iu.i524.S17IRP013.hadoop.hbase_to_hdfs as h2h
DEFAULT_STATION_ID = 'DST:IND000DEF'
logging.basicConfig(format='%(asctime)s %(message)s',
                    datefmt='%m/%d/%Y %I:%M:%S %p',
                    filename='wda_app.log',
                    level=logging.DEBUG)
def get_default_result():
result = dict()
result['TMAX'] = [DEFAULT_STATION_ID,0]
result['PRCP'] = [DEFAULT_STATION_ID,0]
result['TAVG'] = [DEFAULT_STATION_ID,0]
result['TMIN'] = [DEFAULT_STATION_ID,100]
return result
def compare_props(prop,result):
logging.info(prop)
if prop['parameter'] == 'TMAX':
if float(prop['value']) > float(result['TMAX'][1]) or result['TMAX'][1] == 0:
result['TMAX'][0] = prop['station_id']
result['TMAX'][1] = prop['value']
elif prop['parameter'] == 'TAVG':
if float(prop['value']) > float(result['TAVG'][1]) or result['TAVG'][1] == 0:
result['TAVG'][0] = prop['station_id']
result['TAVG'][1] = prop['value']
elif prop['parameter'] == 'PRCP':
if float(prop['value']) > float(result['PRCP'][1]) or result['PRCP'][1] == 0:
result['PRCP'][0] = prop['station_id']
result['PRCP'][1] = prop['value']
elif prop['parameter'] == 'TMIN':
if float(prop['value']) < float(result['TMIN'][1]) or result['TMIN'][1] == 0:
result['TMIN'][0] = prop['station_id']
result['TMIN'][1] = prop['value']
return result
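# Hypothetical Hadoop Streaming invocation for this mapper (jar path and the
# reducer name are illustrative, not taken from this repo):
#   hadoop jar hadoop-streaming.jar \
#       -input /wda/year_months -output /wda/results \
#       -mapper wda_mapper.py -reducer wda_reducer.py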
# input comes from STDIN (standard input)
index = 0
for year_month in sys.stdin:
year_month = year_month.strip()
data_list = h2h.find_by_id(row_key=year_month)
    tmax = 70
    tmin = -70
    tavg = 0
    prcp = 0
result = get_default_result()
## Run analysis
for prop in data_list:
result = compare_props(prop=prop,result=result)
#print '%s\t%s' % (index, str(result))
print str(result)
| apache-2.0 |
Mattze96/youtube-dl | test/helper.py | 63 | 8488 | from __future__ import unicode_literals
import errno
import io
import hashlib
import json
import os.path
import re
import types
import sys
import youtube_dl.extractor
from youtube_dl import YoutubeDL
from youtube_dl.utils import (
compat_str,
preferredencoding,
write_string,
)
def get_params(override=None):
PARAMETERS_FILE = os.path.join(os.path.dirname(os.path.abspath(__file__)),
"parameters.json")
with io.open(PARAMETERS_FILE, encoding='utf-8') as pf:
parameters = json.load(pf)
if override:
parameters.update(override)
return parameters
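# For example, get_params({'skip_download': True}) merges the override into
# the defaults loaded from parameters.json (the key name is illustrative).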
def try_rm(filename):
""" Remove a file if it exists """
try:
os.remove(filename)
except OSError as ose:
if ose.errno != errno.ENOENT:
raise
def report_warning(message):
'''
    Print the message to stderr; it will be prefixed with 'WARNING:'.
    If stderr is a tty file, the 'WARNING:' will be colored.
'''
if sys.stderr.isatty() and os.name != 'nt':
_msg_header = '\033[0;33mWARNING:\033[0m'
else:
_msg_header = 'WARNING:'
output = '%s %s\n' % (_msg_header, message)
if 'b' in getattr(sys.stderr, 'mode', '') or sys.version_info[0] < 3:
output = output.encode(preferredencoding())
sys.stderr.write(output)
class FakeYDL(YoutubeDL):
def __init__(self, override=None):
        # Different instances of the downloader can't share the same dictionary;
        # some tests set the "sublang" parameter, which would break the md5 checks.
params = get_params(override=override)
super(FakeYDL, self).__init__(params, auto_init=False)
self.result = []
def to_screen(self, s, skip_eol=None):
print(s)
def trouble(self, s, tb=None):
raise Exception(s)
def download(self, x):
self.result.append(x)
def expect_warning(self, regex):
# Silence an expected warning matching a regex
old_report_warning = self.report_warning
def report_warning(self, message):
if re.match(regex, message):
return
old_report_warning(message)
self.report_warning = types.MethodType(report_warning, self)
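    # Illustrative use: ydl.expect_warning(r'.*fallback.*') silences any
    # warning matching that regex for the rest of the test.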
def gettestcases(include_onlymatching=False):
for ie in youtube_dl.extractor.gen_extractors():
for tc in ie.get_testcases(include_onlymatching):
yield tc
md5 = lambda s: hashlib.md5(s.encode('utf-8')).hexdigest()
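# e.g. md5('foo') == 'acbd18db4cc2f85cedef654fccc4a4d8'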
def expect_info_dict(self, got_dict, expected_dict):
for info_field, expected in expected_dict.items():
if isinstance(expected, compat_str) and expected.startswith('re:'):
got = got_dict.get(info_field)
match_str = expected[len('re:'):]
match_rex = re.compile(match_str)
self.assertTrue(
isinstance(got, compat_str),
'Expected a %s object, but got %s for field %s' % (
compat_str.__name__, type(got).__name__, info_field))
self.assertTrue(
match_rex.match(got),
'field %s (value: %r) should match %r' % (info_field, got, match_str))
elif isinstance(expected, compat_str) and expected.startswith('startswith:'):
got = got_dict.get(info_field)
start_str = expected[len('startswith:'):]
self.assertTrue(
isinstance(got, compat_str),
'Expected a %s object, but got %s for field %s' % (
compat_str.__name__, type(got).__name__, info_field))
self.assertTrue(
got.startswith(start_str),
'field %s (value: %r) should start with %r' % (info_field, got, start_str))
elif isinstance(expected, compat_str) and expected.startswith('contains:'):
got = got_dict.get(info_field)
contains_str = expected[len('contains:'):]
self.assertTrue(
isinstance(got, compat_str),
'Expected a %s object, but got %s for field %s' % (
compat_str.__name__, type(got).__name__, info_field))
self.assertTrue(
contains_str in got,
'field %s (value: %r) should contain %r' % (info_field, got, contains_str))
elif isinstance(expected, type):
got = got_dict.get(info_field)
self.assertTrue(isinstance(got, expected),
'Expected type %r for field %s, but got value %r of type %r' % (expected, info_field, got, type(got)))
else:
if isinstance(expected, compat_str) and expected.startswith('md5:'):
got = 'md5:' + md5(got_dict.get(info_field))
elif isinstance(expected, compat_str) and expected.startswith('mincount:'):
got = got_dict.get(info_field)
self.assertTrue(
isinstance(got, (list, dict)),
'Expected field %s to be a list or a dict, but it is of type %s' % (
info_field, type(got).__name__))
expected_num = int(expected.partition(':')[2])
assertGreaterEqual(
self, len(got), expected_num,
'Expected %d items in field %s, but only got %d' % (
expected_num, info_field, len(got)
)
)
continue
else:
got = got_dict.get(info_field)
self.assertEqual(expected, got,
'invalid value for field %s, expected %r, got %r' % (info_field, expected, got))
# Check for the presence of mandatory fields
if got_dict.get('_type') not in ('playlist', 'multi_video'):
for key in ('id', 'url', 'title', 'ext'):
self.assertTrue(got_dict.get(key), 'Missing mandatory field %s' % key)
# Check for mandatory fields that are automatically set by YoutubeDL
for key in ['webpage_url', 'extractor', 'extractor_key']:
self.assertTrue(got_dict.get(key), 'Missing field: %s' % key)
# Are checkable fields missing from the test case definition?
test_info_dict = dict((key, value if not isinstance(value, compat_str) or len(value) < 250 else 'md5:' + md5(value))
for key, value in got_dict.items()
if value and key in ('id', 'title', 'description', 'uploader', 'upload_date', 'timestamp', 'uploader_id', 'location', 'age_limit'))
missing_keys = set(test_info_dict.keys()) - set(expected_dict.keys())
if missing_keys:
def _repr(v):
if isinstance(v, compat_str):
return "'%s'" % v.replace('\\', '\\\\').replace("'", "\\'").replace('\n', '\\n')
else:
return repr(v)
info_dict_str = ''
if len(missing_keys) != len(expected_dict):
info_dict_str += ''.join(
' %s: %s,\n' % (_repr(k), _repr(v))
for k, v in test_info_dict.items() if k not in missing_keys)
if info_dict_str:
info_dict_str += '\n'
info_dict_str += ''.join(
' %s: %s,\n' % (_repr(k), _repr(test_info_dict[k]))
for k in missing_keys)
write_string(
'\n\'info_dict\': {\n' + info_dict_str + '},\n', out=sys.stderr)
self.assertFalse(
missing_keys,
'Missing keys in test definition: %s' % (
', '.join(sorted(missing_keys))))
def assertRegexpMatches(self, text, regexp, msg=None):
if hasattr(self, 'assertRegexp'):
return self.assertRegexp(text, regexp, msg)
else:
m = re.match(regexp, text)
if not m:
note = 'Regexp didn\'t match: %r not found' % (regexp)
if len(text) < 1000:
note += ' in %r' % text
if msg is None:
msg = note
else:
msg = note + ', ' + msg
self.assertTrue(m, msg)
def assertGreaterEqual(self, got, expected, msg=None):
if not (got >= expected):
if msg is None:
msg = '%r not greater than or equal to %r' % (got, expected)
self.assertTrue(got >= expected, msg)
def expect_warnings(ydl, warnings_re):
real_warning = ydl.report_warning
def _report_warning(w):
if not any(re.search(w_re, w) for w_re in warnings_re):
real_warning(w)
ydl.report_warning = _report_warning
| unlicense |
stackforge/tricircle | tricircle/network/local_l3_plugin.py | 1 | 2505 | # Copyright 2015 Huawei Technologies Co., Ltd.
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import orm
from neutron_lib import constants
from neutron_lib.exceptions import l3 as l3_exc
from neutron.db.models import l3 as l3_models
from neutron.db import models_v2
from neutron.services.l3_router import l3_router_plugin
class TricircleL3Plugin(l3_router_plugin.L3RouterPlugin):
    # Override the original implementation to allow associating a floating ip
    # with a port whose network is not attached to the router. Tricircle will
    # configure extra routes to guarantee packets can reach the port.
def get_router_for_floatingip(self, context, internal_port,
internal_subnet, external_network_id):
"""Find a router to handle the floating-ip association.
:param internal_port: The port for the fixed-ip.
:param internal_subnet: The subnet for the fixed-ip.
:param external_network_id: The external network for floating-ip.
:raises: ExternalGatewayForFloatingIPNotFound if no suitable router
is found.
"""
router_port = l3_models.RouterPort
gw_port = orm.aliased(models_v2.Port, name="gw_port")
router_port_qry = context.session.query(
router_port.router_id
).join(gw_port, gw_port.device_id == router_port.router_id).filter(
gw_port.network_id == external_network_id,
gw_port.device_owner == constants.DEVICE_OWNER_ROUTER_GW
).distinct()
first_router_id = None
for router in router_port_qry:
if not first_router_id:
first_router_id = router.router_id
if first_router_id:
return first_router_id
raise l3_exc.ExternalGatewayForFloatingIPNotFound(
subnet_id=internal_subnet['id'],
external_network_id=external_network_id,
port_id=internal_port['id'])
| apache-2.0 |
technicalpickles/zulip | zerver/lib/bugdown/testing_mocks.py | 124 | 8065 | from __future__ import absolute_import
import ujson
NORMAL_TWEET = """{
"coordinates": null,
"created_at": "Sat Sep 10 22:23:38 +0000 2011",
"truncated": false,
"favorited": false,
"id_str": "112652479837110273",
"in_reply_to_user_id_str": "783214",
"text": "@twitter meets @seepicturely at #tcdisrupt cc.@boscomonkey @episod http://t.co/6J2EgYM",
"contributors": null,
"id": 112652479837110273,
"retweet_count": 0,
"in_reply_to_status_id_str": null,
"geo": null,
"retweeted": false,
"possibly_sensitive": false,
"in_reply_to_user_id": 783214,
"user": {
"profile_sidebar_border_color": "eeeeee",
"profile_background_tile": true,
"profile_sidebar_fill_color": "efefef",
"name": "Eoin McMillan ",
"profile_image_url": "http://a1.twimg.com/profile_images/1380912173/Screen_shot_2011-06-03_at_7.35.36_PM_normal.png",
"created_at": "Mon May 16 20:07:59 +0000 2011",
"location": "Twitter",
"profile_link_color": "009999",
"follow_request_sent": null,
"is_translator": false,
"id_str": "299862462",
"favourites_count": 0,
"default_profile": false,
"url": "http://www.eoin.me",
"contributors_enabled": false,
"id": 299862462,
"utc_offset": null,
"profile_image_url_https": "https://si0.twimg.com/profile_images/1380912173/Screen_shot_2011-06-03_at_7.35.36_PM_normal.png",
"profile_use_background_image": true,
"listed_count": 0,
"followers_count": 9,
"lang": "en",
"profile_text_color": "333333",
"protected": false,
"profile_background_image_url_https": "https://si0.twimg.com/images/themes/theme14/bg.gif",
"description": "Eoin's photography account. See @mceoin for tweets.",
"geo_enabled": false,
"verified": false,
"profile_background_color": "131516",
"time_zone": null,
"notifications": null,
"statuses_count": 255,
"friends_count": 0,
"default_profile_image": false,
"profile_background_image_url": "http://a1.twimg.com/images/themes/theme14/bg.gif",
"screen_name": "imeoin",
"following": null,
"show_all_inline_media": false
},
"in_reply_to_screen_name": "twitter",
"in_reply_to_status_id": null,
"user_mentions": [
{
"screen_name": "twitter",
"name": "Twitter",
"id": 1
},
{
"screen_name": "seepicturely",
"name": "Seepicturely",
"id": 2
},
{
"screen_name": "boscomonkey",
"name": "Bosco So",
"id": 3
},
{
"screen_name": "episod",
"name": "Taylor Singletary",
"id": 4
}
],
"urls": {
"http://t.co/6J2EgYM": "http://instagram.com/p/MuW67/"
}
}"""
MENTION_IN_LINK_TWEET = """{
"coordinates": null,
"created_at": "Sat Sep 10 22:23:38 +0000 2011",
"truncated": false,
"favorited": false,
"id_str": "112652479837110273",
"in_reply_to_user_id_str": "783214",
"text": "http://t.co/@foo",
"contributors": null,
"id": 112652479837110273,
"retweet_count": 0,
"in_reply_to_status_id_str": null,
"geo": null,
"retweeted": false,
"possibly_sensitive": false,
"in_reply_to_user_id": 783214,
"user": {
"profile_sidebar_border_color": "eeeeee",
"profile_background_tile": true,
"profile_sidebar_fill_color": "efefef",
"name": "Eoin McMillan ",
"profile_image_url": "http://a1.twimg.com/profile_images/1380912173/Screen_shot_2011-06-03_at_7.35.36_PM_normal.png",
"created_at": "Mon May 16 20:07:59 +0000 2011",
"location": "Twitter",
"profile_link_color": "009999",
"follow_request_sent": null,
"is_translator": false,
"id_str": "299862462",
"favourites_count": 0,
"default_profile": false,
"url": "http://www.eoin.me",
"contributors_enabled": false,
"id": 299862462,
"utc_offset": null,
"profile_image_url_https": "https://si0.twimg.com/profile_images/1380912173/Screen_shot_2011-06-03_at_7.35.36_PM_normal.png",
"profile_use_background_image": true,
"listed_count": 0,
"followers_count": 9,
"lang": "en",
"profile_text_color": "333333",
"protected": false,
"profile_background_image_url_https": "https://si0.twimg.com/images/themes/theme14/bg.gif",
"description": "Eoin's photography account. See @mceoin for tweets.",
"geo_enabled": false,
"verified": false,
"profile_background_color": "131516",
"time_zone": null,
"notifications": null,
"statuses_count": 255,
"friends_count": 0,
"default_profile_image": false,
"profile_background_image_url": "http://a1.twimg.com/images/themes/theme14/bg.gif",
"screen_name": "imeoin",
"following": null,
"show_all_inline_media": false
},
"in_reply_to_screen_name": "twitter",
"in_reply_to_status_id": null,
"user_mentions": [
{
"screen_name": "foo",
"name": "Foo",
"id": 1
}
],
"urls": {
"http://t.co/@foo": "http://foo.com"
}
}"""
MEDIA_TWEET = """{
"coordinates": null,
"created_at": "Sat Sep 10 22:23:38 +0000 2011",
"truncated": false,
"favorited": false,
"id_str": "112652479837110273",
"in_reply_to_user_id_str": "783214",
"text": "http://t.co/xo7pAhK6n3",
"contributors": null,
"id": 112652479837110273,
"retweet_count": 0,
"in_reply_to_status_id_str": null,
"geo": null,
"retweeted": false,
"possibly_sensitive": false,
"in_reply_to_user_id": 783214,
"user": {
"profile_sidebar_border_color": "eeeeee",
"profile_background_tile": true,
"profile_sidebar_fill_color": "efefef",
"name": "Eoin McMillan ",
"profile_image_url": "http://a1.twimg.com/profile_images/1380912173/Screen_shot_2011-06-03_at_7.35.36_PM_normal.png",
"created_at": "Mon May 16 20:07:59 +0000 2011",
"location": "Twitter",
"profile_link_color": "009999",
"follow_request_sent": null,
"is_translator": false,
"id_str": "299862462",
"favourites_count": 0,
"default_profile": false,
"url": "http://www.eoin.me",
"contributors_enabled": false,
"id": 299862462,
"utc_offset": null,
"profile_image_url_https": "https://si0.twimg.com/profile_images/1380912173/Screen_shot_2011-06-03_at_7.35.36_PM_normal.png",
"profile_use_background_image": true,
"listed_count": 0,
"followers_count": 9,
"lang": "en",
"profile_text_color": "333333",
"protected": false,
"profile_background_image_url_https": "https://si0.twimg.com/images/themes/theme14/bg.gif",
"description": "Eoin's photography account. See @mceoin for tweets.",
"geo_enabled": false,
"verified": false,
"profile_background_color": "131516",
"time_zone": null,
"notifications": null,
"statuses_count": 255,
"friends_count": 0,
"default_profile_image": false,
"profile_background_image_url": "http://a1.twimg.com/images/themes/theme14/bg.gif",
"screen_name": "imeoin",
"following": null,
"show_all_inline_media": false
},
"in_reply_to_screen_name": "twitter",
"in_reply_to_status_id": null,
"media": [
{
"display_url": "pic.twitter.com/xo7pAhK6n3",
"expanded_url": "http://twitter.com/NEVNBoston/status/421654515616849920/photo/1",
"id": 421654515495211010,
"id_str": "421654515495211010",
"indices": [121, 143],
"media_url": "http://pbs.twimg.com/media/BdoEjD4IEAIq86Z.jpg",
"media_url_https": "https://pbs.twimg.com/media/BdoEjD4IEAIq86Z.jpg",
"sizes": {"large": {"h": 700, "resize": "fit", "w": 1024},
"medium": {"h": 410, "resize": "fit", "w": 599},
"small": {"h": 232, "resize": "fit", "w": 340},
"thumb": {"h": 150, "resize": "crop", "w": 150}},
"type": "photo",
"url": "http://t.co/xo7pAhK6n3"}
]
}"""
def twitter(tweet_id):
if tweet_id in ["112652479837110273", "287977969287315456", "287977969287315457"]:
return ujson.loads(NORMAL_TWEET)
elif tweet_id == "287977969287315458":
return ujson.loads(MENTION_IN_LINK_TWEET)
elif tweet_id == "287977969287315459":
return ujson.loads(MEDIA_TWEET)
else:
return None
| apache-2.0 |
totolef/Sick-beard | sickbeard/notifiers/libnotify.py | 49 | 4677 | # Author: Nic Wolfe <[email protected]>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of Sick Beard.
#
# Sick Beard is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Sick Beard is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Sick Beard. If not, see <http://www.gnu.org/licenses/>.
import os
import cgi
import sickbeard
from sickbeard import logger, common
def diagnose():
'''
Check the environment for reasons libnotify isn't working. Return a
user-readable message indicating possible issues.
'''
try:
import pynotify #@UnusedImport
except ImportError:
return (u"<p>Error: pynotify isn't installed. On Ubuntu/Debian, install the "
u"<a href=\"apt:python-notify\">python-notify</a> package.")
if 'DISPLAY' not in os.environ and 'DBUS_SESSION_BUS_ADDRESS' not in os.environ:
return (u"<p>Error: Environment variables DISPLAY and DBUS_SESSION_BUS_ADDRESS "
u"aren't set. libnotify will only work when you run Sick Beard "
u"from a desktop login.")
try:
import dbus
except ImportError:
pass
else:
try:
bus = dbus.SessionBus()
except dbus.DBusException, e:
return (u"<p>Error: unable to connect to D-Bus session bus: <code>%s</code>."
u"<p>Are you running Sick Beard in a desktop session?") % (cgi.escape(e),)
try:
bus.get_object('org.freedesktop.Notifications',
'/org/freedesktop/Notifications')
except dbus.DBusException, e:
return (u"<p>Error: there doesn't seem to be a notification daemon available: <code>%s</code> "
u"<p>Try installing notification-daemon or notify-osd.") % (cgi.escape(e),)
return u"<p>Error: Unable to send notification."
class LibnotifyNotifier:
def __init__(self):
self.pynotify = None
self.gobject = None
def init_pynotify(self):
if self.pynotify is not None:
return True
try:
import pynotify
except ImportError:
logger.log(u"Unable to import pynotify. libnotify notifications won't work.")
return False
try:
import gobject
except ImportError:
logger.log(u"Unable to import gobject. We can't catch a GError in display.")
return False
if not pynotify.init('Sick Beard'):
logger.log(u"Initialization of pynotify failed. libnotify notifications won't work.")
return False
self.pynotify = pynotify
self.gobject = gobject
return True
def notify_snatch(self, ep_name):
if sickbeard.LIBNOTIFY_NOTIFY_ONSNATCH:
self._notify(common.notifyStrings[common.NOTIFY_SNATCH], ep_name)
def notify_download(self, ep_name):
if sickbeard.LIBNOTIFY_NOTIFY_ONDOWNLOAD:
self._notify(common.notifyStrings[common.NOTIFY_DOWNLOAD], ep_name)
def notify_subtitle_download(self, ep_name, lang):
if sickbeard.LIBNOTIFY_NOTIFY_ONSUBTITLEDOWNLOAD:
self._notify(common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD], ep_name + ": " + lang)
def test_notify(self):
return self._notify('Test notification', "This is a test notification from Sick Beard", force=True)
def _notify(self, title, message, force=False):
if not sickbeard.USE_LIBNOTIFY and not force:
return False
if not self.init_pynotify():
return False
# Can't make this a global constant because PROG_DIR isn't available
# when the module is imported.
icon_path = os.path.join(sickbeard.PROG_DIR, "data/images/sickbeard_touch_icon.png")
icon_uri = 'file://' + os.path.abspath(icon_path)
# If the session bus can't be acquired here a bunch of warning messages
# will be printed but the call to show() will still return True.
# pynotify doesn't seem too keen on error handling.
n = self.pynotify.Notification(title, message, icon_uri)
try:
return n.show()
except self.gobject.GError:
return False
notifier = LibnotifyNotifier
| gpl-3.0 |
maximmbed/mbed | tools/host_tests/hello_auto.py | 122 | 1148 | """
mbed SDK
Copyright (c) 2011-2013 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
class HelloTest():
HELLO_WORLD = "Hello World"
def test(self, selftest):
c = selftest.mbed.serial_readline()
if c is None:
return selftest.RESULT_IO_SERIAL
selftest.notify("Read %d bytes:"% len(c))
selftest.notify(c.strip())
result = True
        # The output may be prefixed with a target ID, so check for containment
        # rather than strict equality.
if len(c) < len(self.HELLO_WORLD):
result = False
else:
result = self.HELLO_WORLD in c
return selftest.RESULT_SUCCESS if result else selftest.RESULT_FAILURE
| apache-2.0 |
rkokkelk/CouchPotatoServer | libs/tornado/testing.py | 22 | 26389 | #!/usr/bin/env python
"""Support classes for automated testing.
* `AsyncTestCase` and `AsyncHTTPTestCase`: Subclasses of unittest.TestCase
with additional support for testing asynchronous (`.IOLoop` based) code.
* `ExpectLog` and `LogTrapTestCase`: Make test logs less spammy.
* `main()`: A simple test runner (wrapper around unittest.main()) with support
for the tornado.autoreload module to rerun the tests when code changes.
"""
from __future__ import absolute_import, division, print_function, with_statement
try:
from tornado import gen
from tornado.httpclient import AsyncHTTPClient
from tornado.httpserver import HTTPServer
from tornado.simple_httpclient import SimpleAsyncHTTPClient
from tornado.ioloop import IOLoop, TimeoutError
from tornado import netutil
except ImportError:
# These modules are not importable on app engine. Parts of this module
# won't work, but e.g. LogTrapTestCase and main() will.
AsyncHTTPClient = None
gen = None
HTTPServer = None
IOLoop = None
netutil = None
SimpleAsyncHTTPClient = None
from tornado.log import gen_log, app_log
from tornado.stack_context import ExceptionStackContext
from tornado.util import raise_exc_info, basestring_type
import functools
import logging
import os
import re
import signal
import socket
import sys
import types
try:
from cStringIO import StringIO # py2
except ImportError:
from io import StringIO # py3
# Tornado's own test suite requires the updated unittest module
# (either py27+ or unittest2) so tornado.test.util enforces
# this requirement, but for other users of tornado.testing we want
# to allow the older version if unittest2 is not available.
if sys.version_info >= (3,):
# On python 3, mixing unittest2 and unittest (including doctest)
# doesn't seem to work, so always use unittest.
import unittest
else:
# On python 2, prefer unittest2 when available.
try:
import unittest2 as unittest
except ImportError:
import unittest
_next_port = 10000
def get_unused_port():
"""Returns a (hopefully) unused port number.
This function does not guarantee that the port it returns is available,
only that a series of get_unused_port calls in a single process return
distinct ports.
.. deprecated::
Use bind_unused_port instead, which is guaranteed to find an unused port.
"""
global _next_port
port = _next_port
_next_port = _next_port + 1
return port
def bind_unused_port():
"""Binds a server socket to an available port on localhost.
Returns a tuple (socket, port).
"""
[sock] = netutil.bind_sockets(None, 'localhost', family=socket.AF_INET)
port = sock.getsockname()[1]
return sock, port
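# Typical use (mirroring AsyncHTTPTestCase.setUp below):
#   sock, port = bind_unused_port()
#   server.add_sockets([sock])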
def get_async_test_timeout():
"""Get the global timeout setting for async tests.
Returns a float, the timeout in seconds.
.. versionadded:: 3.1
"""
try:
return float(os.environ.get('ASYNC_TEST_TIMEOUT'))
except (ValueError, TypeError):
        return 5.0
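# e.g. "ASYNC_TEST_TIMEOUT=10 python -m tornado.test.runtests" raises the
# timeout for every async test to ten seconds.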
class _TestMethodWrapper(object):
"""Wraps a test method to raise an error if it returns a value.
This is mainly used to detect undecorated generators (if a test
method yields it must use a decorator to consume the generator),
but will also detect other kinds of return values (these are not
necessarily errors, but we alert anyway since there is no good
reason to return a value from a test.
"""
def __init__(self, orig_method):
self.orig_method = orig_method
def __call__(self, *args, **kwargs):
result = self.orig_method(*args, **kwargs)
if isinstance(result, types.GeneratorType):
raise TypeError("Generator test methods should be decorated with "
"tornado.testing.gen_test")
elif result is not None:
raise ValueError("Return value from test method ignored: %r" %
result)
def __getattr__(self, name):
"""Proxy all unknown attributes to the original method.
This is important for some of the decorators in the `unittest`
module, such as `unittest.skipIf`.
"""
return getattr(self.orig_method, name)
class AsyncTestCase(unittest.TestCase):
"""`~unittest.TestCase` subclass for testing `.IOLoop`-based
asynchronous code.
The unittest framework is synchronous, so the test must be
complete by the time the test method returns. This means that
asynchronous code cannot be used in quite the same way as usual.
To write test functions that use the same ``yield``-based patterns
used with the `tornado.gen` module, decorate your test methods
with `tornado.testing.gen_test` instead of
`tornado.gen.coroutine`. This class also provides the `stop()`
and `wait()` methods for a more manual style of testing. The test
method itself must call ``self.wait()``, and asynchronous
callbacks should call ``self.stop()`` to signal completion.
By default, a new `.IOLoop` is constructed for each test and is available
as ``self.io_loop``. This `.IOLoop` should be used in the construction of
HTTP clients/servers, etc. If the code being tested requires a
global `.IOLoop`, subclasses should override `get_new_ioloop` to return it.
The `.IOLoop`'s ``start`` and ``stop`` methods should not be
called directly. Instead, use `self.stop <stop>` and `self.wait
<wait>`. Arguments passed to ``self.stop`` are returned from
``self.wait``. It is possible to have multiple ``wait``/``stop``
cycles in the same test.
Example::
# This test uses coroutine style.
class MyTestCase(AsyncTestCase):
@tornado.testing.gen_test
def test_http_fetch(self):
client = AsyncHTTPClient(self.io_loop)
response = yield client.fetch("http://www.tornadoweb.org")
# Test contents of response
self.assertIn("FriendFeed", response.body)
# This test uses argument passing between self.stop and self.wait.
class MyTestCase2(AsyncTestCase):
def test_http_fetch(self):
client = AsyncHTTPClient(self.io_loop)
client.fetch("http://www.tornadoweb.org/", self.stop)
response = self.wait()
# Test contents of response
self.assertIn("FriendFeed", response.body)
# This test uses an explicit callback-based style.
class MyTestCase3(AsyncTestCase):
def test_http_fetch(self):
client = AsyncHTTPClient(self.io_loop)
client.fetch("http://www.tornadoweb.org/", self.handle_fetch)
self.wait()
def handle_fetch(self, response):
# Test contents of response (failures and exceptions here
# will cause self.wait() to throw an exception and end the
# test).
# Exceptions thrown here are magically propagated to
# self.wait() in test_http_fetch() via stack_context.
self.assertIn("FriendFeed", response.body)
self.stop()
"""
def __init__(self, methodName='runTest', **kwargs):
super(AsyncTestCase, self).__init__(methodName, **kwargs)
self.__stopped = False
self.__running = False
self.__failure = None
self.__stop_args = None
self.__timeout = None
# It's easy to forget the @gen_test decorator, but if you do
# the test will silently be ignored because nothing will consume
# the generator. Replace the test method with a wrapper that will
# make sure it's not an undecorated generator.
setattr(self, methodName, _TestMethodWrapper(getattr(self, methodName)))
def setUp(self):
super(AsyncTestCase, self).setUp()
self.io_loop = self.get_new_ioloop()
self.io_loop.make_current()
def tearDown(self):
self.io_loop.clear_current()
if (not IOLoop.initialized() or
self.io_loop is not IOLoop.instance()):
# Try to clean up any file descriptors left open in the ioloop.
# This avoids leaks, especially when tests are run repeatedly
# in the same process with autoreload (because curl does not
# set FD_CLOEXEC on its file descriptors)
self.io_loop.close(all_fds=True)
super(AsyncTestCase, self).tearDown()
# In case an exception escaped or the StackContext caught an exception
# when there wasn't a wait() to re-raise it, do so here.
# This is our last chance to raise an exception in a way that the
# unittest machinery understands.
self.__rethrow()
def get_new_ioloop(self):
"""Creates a new `.IOLoop` for this test. May be overridden in
subclasses for tests that require a specific `.IOLoop` (usually
the singleton `.IOLoop.instance()`).
"""
return IOLoop()
def _handle_exception(self, typ, value, tb):
if self.__failure is None:
self.__failure = (typ, value, tb)
else:
app_log.error("multiple unhandled exceptions in test",
exc_info=(typ, value, tb))
self.stop()
return True
def __rethrow(self):
if self.__failure is not None:
failure = self.__failure
self.__failure = None
raise_exc_info(failure)
def run(self, result=None):
with ExceptionStackContext(self._handle_exception):
super(AsyncTestCase, self).run(result)
# As a last resort, if an exception escaped super.run() and wasn't
# re-raised in tearDown, raise it here. This will cause the
# unittest run to fail messily, but that's better than silently
# ignoring an error.
self.__rethrow()
def stop(self, _arg=None, **kwargs):
"""Stops the `.IOLoop`, causing one pending (or future) call to `wait()`
to return.
Keyword arguments or a single positional argument passed to `stop()` are
saved and will be returned by `wait()`.
"""
assert _arg is None or not kwargs
self.__stop_args = kwargs or _arg
if self.__running:
self.io_loop.stop()
self.__running = False
self.__stopped = True
def wait(self, condition=None, timeout=None):
"""Runs the `.IOLoop` until stop is called or timeout has passed.
In the event of a timeout, an exception will be thrown. The
default timeout is 5 seconds; it may be overridden with a
``timeout`` keyword argument or globally with the
``ASYNC_TEST_TIMEOUT`` environment variable.
If ``condition`` is not None, the `.IOLoop` will be restarted
after `stop()` until ``condition()`` returns true.
.. versionchanged:: 3.1
Added the ``ASYNC_TEST_TIMEOUT`` environment variable.
"""
if timeout is None:
timeout = get_async_test_timeout()
if not self.__stopped:
if timeout:
def timeout_func():
try:
raise self.failureException(
'Async operation timed out after %s seconds' %
timeout)
except Exception:
self.__failure = sys.exc_info()
self.stop()
self.__timeout = self.io_loop.add_timeout(self.io_loop.time() + timeout, timeout_func)
while True:
self.__running = True
self.io_loop.start()
if (self.__failure is not None or
condition is None or condition()):
break
if self.__timeout is not None:
self.io_loop.remove_timeout(self.__timeout)
self.__timeout = None
assert self.__stopped
self.__stopped = False
self.__rethrow()
result = self.__stop_args
self.__stop_args = None
return result
class AsyncHTTPTestCase(AsyncTestCase):
"""A test case that starts up an HTTP server.
Subclasses must override `get_app()`, which returns the
`tornado.web.Application` (or other `.HTTPServer` callback) to be tested.
Tests will typically use the provided ``self.http_client`` to fetch
URLs from this server.
Example::
class MyHTTPTest(AsyncHTTPTestCase):
def get_app(self):
return Application([('/', MyHandler)...])
def test_homepage(self):
# The following two lines are equivalent to
# response = self.fetch('/')
# but are shown in full here to demonstrate explicit use
# of self.stop and self.wait.
self.http_client.fetch(self.get_url('/'), self.stop)
response = self.wait()
# test contents of response
"""
def setUp(self):
super(AsyncHTTPTestCase, self).setUp()
sock, port = bind_unused_port()
self.__port = port
self.http_client = self.get_http_client()
self._app = self.get_app()
self.http_server = self.get_http_server()
self.http_server.add_sockets([sock])
def get_http_client(self):
return AsyncHTTPClient(io_loop=self.io_loop)
def get_http_server(self):
return HTTPServer(self._app, io_loop=self.io_loop,
**self.get_httpserver_options())
def get_app(self):
"""Should be overridden by subclasses to return a
`tornado.web.Application` or other `.HTTPServer` callback.
"""
raise NotImplementedError()
def fetch(self, path, **kwargs):
"""Convenience method to synchronously fetch a url.
The given path will be appended to the local server's host and
port. Any additional kwargs will be passed directly to
`.AsyncHTTPClient.fetch` (and so could be used to pass
``method="POST"``, ``body="..."``, etc).
"""
self.http_client.fetch(self.get_url(path), self.stop, **kwargs)
return self.wait()
def get_httpserver_options(self):
"""May be overridden by subclasses to return additional
keyword arguments for the server.
"""
return {}
def get_http_port(self):
"""Returns the port used by the server.
A new port is chosen for each test.
"""
return self.__port
def get_protocol(self):
return 'http'
def get_url(self, path):
"""Returns an absolute url for the given path on the test server."""
return '%s://localhost:%s%s' % (self.get_protocol(),
self.get_http_port(), path)
def tearDown(self):
self.http_server.stop()
self.io_loop.run_sync(self.http_server.close_all_connections,
timeout=get_async_test_timeout())
if (not IOLoop.initialized() or
self.http_client.io_loop is not IOLoop.instance()):
self.http_client.close()
super(AsyncHTTPTestCase, self).tearDown()
class AsyncHTTPSTestCase(AsyncHTTPTestCase):
"""A test case that starts an HTTPS server.
Interface is generally the same as `AsyncHTTPTestCase`.
"""
def get_http_client(self):
# Some versions of libcurl have deadlock bugs with ssl,
# so always run these tests with SimpleAsyncHTTPClient.
return SimpleAsyncHTTPClient(io_loop=self.io_loop, force_instance=True,
defaults=dict(validate_cert=False))
def get_httpserver_options(self):
return dict(ssl_options=self.get_ssl_options())
def get_ssl_options(self):
"""May be overridden by subclasses to select SSL options.
By default includes a self-signed testing certificate.
"""
# Testing keys were generated with:
# openssl req -new -keyout tornado/test/test.key -out tornado/test/test.crt -nodes -days 3650 -x509
module_dir = os.path.dirname(__file__)
return dict(
certfile=os.path.join(module_dir, 'test', 'test.crt'),
keyfile=os.path.join(module_dir, 'test', 'test.key'))
def get_protocol(self):
return 'https'
def gen_test(func=None, timeout=None):
"""Testing equivalent of ``@gen.coroutine``, to be applied to test methods.
``@gen.coroutine`` cannot be used on tests because the `.IOLoop` is not
already running. ``@gen_test`` should be applied to test methods
on subclasses of `AsyncTestCase`.
Example::
class MyTest(AsyncHTTPTestCase):
@gen_test
def test_something(self):
response = yield gen.Task(self.fetch('/'))
By default, ``@gen_test`` times out after 5 seconds. The timeout may be
overridden globally with the ``ASYNC_TEST_TIMEOUT`` environment variable,
or for each test with the ``timeout`` keyword argument::
class MyTest(AsyncHTTPTestCase):
@gen_test(timeout=10)
def test_something_slow(self):
response = yield gen.Task(self.fetch('/'))
.. versionadded:: 3.1
The ``timeout`` argument and ``ASYNC_TEST_TIMEOUT`` environment
variable.
.. versionchanged:: 4.0
The wrapper now passes along ``*args, **kwargs`` so it can be used
on functions with arguments.
"""
if timeout is None:
timeout = get_async_test_timeout()
def wrap(f):
# Stack up several decorators to allow us to access the generator
# object itself. In the innermost wrapper, we capture the generator
# and save it in an attribute of self. Next, we run the wrapped
# function through @gen.coroutine. Finally, the coroutine is
# wrapped again to make it synchronous with run_sync.
#
# This is a good case study arguing for either some sort of
# extensibility in the gen decorators or cancellation support.
@functools.wraps(f)
def pre_coroutine(self, *args, **kwargs):
result = f(self, *args, **kwargs)
if isinstance(result, types.GeneratorType):
self._test_generator = result
else:
self._test_generator = None
return result
coro = gen.coroutine(pre_coroutine)
@functools.wraps(coro)
def post_coroutine(self, *args, **kwargs):
try:
return self.io_loop.run_sync(
functools.partial(coro, self, *args, **kwargs),
timeout=timeout)
except TimeoutError as e:
# run_sync raises an error with an unhelpful traceback.
# If we throw it back into the generator the stack trace
# will be replaced by the point where the test is stopped.
self._test_generator.throw(e)
# In case the test contains an overly broad except clause,
# we may get back here. In this case re-raise the original
# exception, which is better than nothing.
raise
return post_coroutine
if func is not None:
# Used like:
# @gen_test
# def f(self):
# pass
return wrap(func)
else:
# Used like @gen_test(timeout=10)
return wrap
# Without this attribute, nosetests will try to run gen_test as a test
# anywhere it is imported.
gen_test.__test__ = False
class LogTrapTestCase(unittest.TestCase):
"""A test case that captures and discards all logging output
if the test passes.
Some libraries can produce a lot of logging output even when
the test succeeds, so this class can be useful to minimize the noise.
Simply use it as a base class for your test case. It is safe to combine
with AsyncTestCase via multiple inheritance
(``class MyTestCase(AsyncHTTPTestCase, LogTrapTestCase):``)
This class assumes that only one log handler is configured and
that it is a `~logging.StreamHandler`. This is true for both
`logging.basicConfig` and the "pretty logging" configured by
`tornado.options`. It is not compatible with other log buffering
mechanisms, such as those provided by some test runners.
"""
def run(self, result=None):
logger = logging.getLogger()
if not logger.handlers:
logging.basicConfig()
handler = logger.handlers[0]
if (len(logger.handlers) > 1 or
not isinstance(handler, logging.StreamHandler)):
# Logging has been configured in a way we don't recognize,
# so just leave it alone.
super(LogTrapTestCase, self).run(result)
return
old_stream = handler.stream
try:
handler.stream = StringIO()
gen_log.info("RUNNING TEST: " + str(self))
old_error_count = len(result.failures) + len(result.errors)
super(LogTrapTestCase, self).run(result)
new_error_count = len(result.failures) + len(result.errors)
if new_error_count != old_error_count:
old_stream.write(handler.stream.getvalue())
finally:
handler.stream = old_stream
class ExpectLog(logging.Filter):
"""Context manager to capture and suppress expected log output.
Useful to make tests of error conditions less noisy, while still
leaving unexpected log entries visible. *Not thread safe.*
Usage::
with ExpectLog('tornado.application', "Uncaught exception"):
error_response = self.fetch("/some_page")
"""
def __init__(self, logger, regex, required=True):
"""Constructs an ExpectLog context manager.
:param logger: Logger object (or name of logger) to watch. Pass
an empty string to watch the root logger.
:param regex: Regular expression to match. Any log entries on
the specified logger that match this regex will be suppressed.
        :param required: If true, an exception will be raised if the end of
the ``with`` statement is reached without matching any log entries.
"""
if isinstance(logger, basestring_type):
logger = logging.getLogger(logger)
self.logger = logger
self.regex = re.compile(regex)
self.required = required
self.matched = False
def filter(self, record):
message = record.getMessage()
if self.regex.match(message):
self.matched = True
return False
return True
def __enter__(self):
self.logger.addFilter(self)
def __exit__(self, typ, value, tb):
self.logger.removeFilter(self)
if not typ and self.required and not self.matched:
raise Exception("did not get expected log message")
def main(**kwargs):
"""A simple test runner.
This test runner is essentially equivalent to `unittest.main` from
the standard library, but adds support for tornado-style option
parsing and log formatting.
The easiest way to run a test is via the command line::
python -m tornado.testing tornado.test.stack_context_test
See the standard library unittest module for ways in which tests can
be specified.
Projects with many tests may wish to define a test script like
``tornado/test/runtests.py``. This script should define a method
``all()`` which returns a test suite and then call
`tornado.testing.main()`. Note that even when a test script is
used, the ``all()`` test suite may be overridden by naming a
single test on the command line::
# Runs all tests
python -m tornado.test.runtests
# Runs one test
python -m tornado.test.runtests tornado.test.stack_context_test
Additional keyword arguments passed through to ``unittest.main()``.
For example, use ``tornado.testing.main(verbosity=2)``
to show many test details as they are run.
See http://docs.python.org/library/unittest.html#unittest.main
for full argument list.
"""
from tornado.options import define, options, parse_command_line
define('exception_on_interrupt', type=bool, default=True,
help=("If true (default), ctrl-c raises a KeyboardInterrupt "
"exception. This prints a stack trace but cannot interrupt "
"certain operations. If false, the process is more reliably "
"killed, but does not print a stack trace."))
# support the same options as unittest's command-line interface
define('verbose', type=bool)
define('quiet', type=bool)
define('failfast', type=bool)
define('catch', type=bool)
define('buffer', type=bool)
argv = [sys.argv[0]] + parse_command_line(sys.argv)
if not options.exception_on_interrupt:
signal.signal(signal.SIGINT, signal.SIG_DFL)
if options.verbose is not None:
kwargs['verbosity'] = 2
if options.quiet is not None:
kwargs['verbosity'] = 0
if options.failfast is not None:
kwargs['failfast'] = True
if options.catch is not None:
kwargs['catchbreak'] = True
if options.buffer is not None:
kwargs['buffer'] = True
if __name__ == '__main__' and len(argv) == 1:
print("No tests specified", file=sys.stderr)
sys.exit(1)
try:
# In order to be able to run tests by their fully-qualified name
# on the command line without importing all tests here,
# module must be set to None. Python 3.2's unittest.main ignores
# defaultTest if no module is given (it tries to do its own
# test discovery, which is incompatible with auto2to3), so don't
# set module if we're not asking for a specific test.
if len(argv) > 1:
unittest.main(module=None, argv=argv, **kwargs)
else:
unittest.main(defaultTest="all", argv=argv, **kwargs)
except SystemExit as e:
if e.code == 0:
gen_log.info('PASS')
else:
gen_log.error('FAIL')
raise
if __name__ == '__main__':
main()
| gpl-3.0 |
miminar/openshift-ansible | roles/openshift_openstack/library/os_lbaas_deletion.py | 15 | 3149 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright 2018 Red Hat, Inc. and/or its affiliates
# and other contributors as indicated by the @author tags.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=unused-wildcard-import,wildcard-import,unused-import,redefined-builtin
''' os_lbaas_deletion '''
import keystoneauth1
from oslo_serialization import jsonutils
from ansible.module_utils.basic import AnsibleModule
try:
import shade
HAS_SHADE = True
except ImportError:
HAS_SHADE = False
DOCUMENTATION = '''
---
module: os_lbaas_deletion
short_description: Delete LBaaS created by Kuryr
description:
- Delete all the LBaaS created by Kuryr with the cascade flag
author:
- "Luis Tomas Bolivar <[email protected]>"
'''
RETURN = '''
'''
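# Example task using this module (illustrative; the variable name is not
# taken from the role's playbooks):
#   - os_lbaas_deletion:
#       lbaas_annotation: "{{ kuryr_svc.metadata.annotations }}"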
def main():
''' Main module function '''
module = AnsibleModule(
argument_spec=dict(
lbaas_annotation=dict(default=False, type='dict'),
),
supports_check_mode=True,
)
if not HAS_SHADE:
module.fail_json(msg='shade is required for this module')
try:
cloud = shade.openstack_cloud()
# pylint: disable=broad-except
except Exception:
module.fail_json(msg='Failed to connect to the cloud')
try:
adapter = keystoneauth1.adapter.Adapter(
session=cloud.keystone_session,
service_type=cloud.cloud_config.get_service_type('load-balancer'),
interface=cloud.cloud_config.get_interface('load-balancer'),
endpoint_override=cloud.cloud_config.get_endpoint('load-balancer'),
version=cloud.cloud_config.get_api_version('load-balancer'))
# pylint: disable=broad-except
except Exception:
module.fail_json(msg='Failed to get an adapter to talk to the Octavia '
'API')
try:
lbaas_state = (
module.params['lbaas_annotation'][
'openstack.org/kuryr-lbaas-state'])
# pylint: disable=broad-except
except Exception:
        module.exit_json(changed=True, msg='No information about the lbaas to '
'delete')
lbaas_data = jsonutils.loads(lbaas_state)['versioned_object.data'][
'loadbalancer']
lbaas_id = lbaas_data['versioned_object.data']['id']
try:
adapter.delete(
'/v2.0/lbaas/loadbalancers/' + lbaas_id + '?cascade=True')
# pylint: disable=broad-except
except Exception:
module.fail_json(msg='Failed to delete Octavia LBaaS with cascade '
'flag')
module.exit_json(
changed=True)
if __name__ == '__main__':
main()
| apache-2.0 |
CingHu/neutron-ustack | neutron/plugins/ml2/drivers/arista/arista_l3_driver.py | 3 | 18083 | # Copyright 2014 Arista Networks, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Sukhdev Kapur, Arista Networks, Inc.
#
import hashlib
import socket
import struct
import jsonrpclib
from oslo.config import cfg
from neutron import context as nctx
from neutron.db import db_base_plugin_v2
from neutron.openstack.common import log as logging
from neutron.plugins.ml2.drivers.arista import exceptions as arista_exc
LOG = logging.getLogger(__name__)
EOS_UNREACHABLE_MSG = _('Unable to reach EOS')
DEFAULT_VLAN = 1
MLAG_SWITCHES = 2
VIRTUAL_ROUTER_MAC = '00:11:22:33:44:55'
IPV4_BITS = 32
IPV6_BITS = 128
router_in_vrf = {
'router': {'create': ['vrf definition {0}',
'rd {1}',
'exit'],
'delete': ['no vrf definition {0}']},
'interface': {'add': ['ip routing vrf {1}',
'vlan {0}',
'exit',
'interface vlan {0}',
'vrf forwarding {1}',
'ip address {2}'],
'remove': ['no interface vlan {0}']}}
router_in_default_vrf = {
    'router': {'create': [],  # Placeholder for now.
               'delete': []},  # Placeholder for now.
'interface': {'add': ['ip routing',
'vlan {0}',
'exit',
'interface vlan {0}',
'ip address {2}'],
'remove': ['no interface vlan {0}']}}
router_in_default_vrf_v6 = {
'router': {'create': [],
'delete': []},
'interface': {'add': ['ipv6 unicast-routing',
'vlan {0}',
'exit',
'interface vlan {0}',
'ipv6 enable',
'ipv6 address {2}'],
'remove': ['no interface vlan {0}']}}
additional_cmds_for_mlag = {
'router': {'create': ['ip virtual-router mac-address {0}'],
'delete': ['no ip virtual-router mac-address']},
'interface': {'add': ['ip virtual-router address {0}'],
'remove': []}}
additional_cmds_for_mlag_v6 = {
'router': {'create': [],
'delete': []},
'interface': {'add': ['ipv6 virtual-router address {0}'],
'remove': []}}
class AristaL3Driver(object):
"""Wraps Arista JSON RPC.
All communications between Neutron and EOS are over JSON RPC.
EOS - operating system used on Arista hardware
Command API - JSON RPC API provided by Arista EOS
"""
def __init__(self):
self._servers = []
self._hosts = []
self.interfaceDict = None
self._validate_config()
host = cfg.CONF.l3_arista.primary_l3_host
self._hosts.append(host)
self._servers.append(jsonrpclib.Server(self._eapi_host_url(host)))
self.mlag_configured = cfg.CONF.l3_arista.mlag_config
self.use_vrf = cfg.CONF.l3_arista.use_vrf
if self.mlag_configured:
host = cfg.CONF.l3_arista.secondary_l3_host
self._hosts.append(host)
self._servers.append(jsonrpclib.Server(self._eapi_host_url(host)))
self._additionalRouterCmdsDict = additional_cmds_for_mlag['router']
self._additionalInterfaceCmdsDict = (
additional_cmds_for_mlag['interface'])
if self.use_vrf:
self.routerDict = router_in_vrf['router']
self.interfaceDict = router_in_vrf['interface']
else:
self.routerDict = router_in_default_vrf['router']
self.interfaceDict = router_in_default_vrf['interface']
def _eapi_host_url(self, host):
user = cfg.CONF.l3_arista.primary_l3_host_username
pwd = cfg.CONF.l3_arista.primary_l3_host_password
eapi_server_url = ('https://%s:%s@%s/command-api' %
(user, pwd, host))
return eapi_server_url
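    # The resulting endpoint has the form
    # https://<user>:<password>@<host>/command-api, i.e. Arista's eAPI URL.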
def _validate_config(self):
if cfg.CONF.l3_arista.get('primary_l3_host') == '':
msg = _('Required option primary_l3_host is not set')
LOG.error(msg)
raise arista_exc.AristaSevicePluginConfigError(msg=msg)
if cfg.CONF.l3_arista.get('mlag_config'):
if cfg.CONF.l3_arista.get('use_vrf'):
                # This is an invalid/unsupported configuration
                msg = _('VRFs are not supported in MLAG config mode')
LOG.error(msg)
raise arista_exc.AristaSevicePluginConfigError(msg=msg)
if cfg.CONF.l3_arista.get('secondary_l3_host') == '':
msg = _('Required option secondary_l3_host is not set')
LOG.error(msg)
raise arista_exc.AristaSevicePluginConfigError(msg=msg)
if cfg.CONF.l3_arista.get('primary_l3_host_username') == '':
msg = _('Required option primary_l3_host_username is not set')
LOG.error(msg)
raise arista_exc.AristaSevicePluginConfigError(msg=msg)
def create_router_on_eos(self, router_name, rdm, server):
"""Creates a router on Arista HW Device.
:param router_name: globally unique identifier for router/VRF
:param rdm: A value generated by hashing router name
:param server: Server endpoint on the Arista switch to be configured
"""
cmds = []
rd = "%s:%s" % (rdm, rdm)
for c in self.routerDict['create']:
cmds.append(c.format(router_name, rd))
if self.mlag_configured:
mac = VIRTUAL_ROUTER_MAC
for c in self._additionalRouterCmdsDict['create']:
cmds.append(c.format(mac))
self._run_openstack_l3_cmds(cmds, server)
def delete_router_from_eos(self, router_name, server):
"""Deletes a router from Arista HW Device.
:param router_name: globally unique identifier for router/VRF
:param server: Server endpoint on the Arista switch to be configured
"""
cmds = []
for c in self.routerDict['delete']:
cmds.append(c.format(router_name))
if self.mlag_configured:
for c in self._additionalRouterCmdsDict['delete']:
cmds.append(c)
self._run_openstack_l3_cmds(cmds, server)
def _select_dicts(self, ipv):
if self.use_vrf:
self.interfaceDict = router_in_vrf['interface']
else:
if ipv == 6:
                # For IPv6, use IPv6 commands
self.interfaceDict = router_in_default_vrf_v6['interface']
self._additionalInterfaceCmdsDict = (
additional_cmds_for_mlag_v6['interface'])
else:
self.interfaceDict = router_in_default_vrf['interface']
self._additionalInterfaceCmdsDict = (
additional_cmds_for_mlag['interface'])
def add_interface_to_router(self, segment_id,
router_name, gip, router_ip, mask, server):
"""Adds an interface to existing HW router on Arista HW device.
:param segment_id: VLAN Id associated with interface that is added
:param router_name: globally unique identifier for router/VRF
:param gip: Gateway IP associated with the subnet
:param router_ip: IP address of the router
:param mask: subnet mask to be used
:param server: Server endpoint on the Arista switch to be configured
"""
if not segment_id:
segment_id = DEFAULT_VLAN
cmds = []
for c in self.interfaceDict['add']:
if self.mlag_configured:
ip = router_ip
else:
ip = gip + '/' + mask
cmds.append(c.format(segment_id, router_name, ip))
if self.mlag_configured:
for c in self._additionalInterfaceCmdsDict['add']:
cmds.append(c.format(gip))
self._run_openstack_l3_cmds(cmds, server)
def delete_interface_from_router(self, segment_id, router_name, server):
"""Deletes an interface from existing HW router on Arista HW device.
:param segment_id: VLAN Id associated with interface that is added
:param router_name: globally unique identifier for router/VRF
:param server: Server endpoint on the Arista switch to be configured
"""
if not segment_id:
segment_id = DEFAULT_VLAN
cmds = []
for c in self.interfaceDict['remove']:
cmds.append(c.format(segment_id))
self._run_openstack_l3_cmds(cmds, server)
def create_router(self, context, tenant_id, router):
"""Creates a router on Arista Switch.
Deals with multiple configurations - such as Router per VRF,
a router in default VRF, Virtual Router in MLAG configurations
"""
if router:
router_name = self._arista_router_name(tenant_id, router['name'])
rdm = str(int(hashlib.sha256(router_name).hexdigest(),
16) % 6553)
for s in self._servers:
self.create_router_on_eos(router_name, rdm, s)
def delete_router(self, context, tenant_id, router_id, router):
"""Deletes a router from Arista Switch."""
if router:
for s in self._servers:
self.delete_router_from_eos(self._arista_router_name(
tenant_id, router['name']), s)
def update_router(self, context, router_id, original_router, new_router):
"""Updates a router which is already created on Arista Switch.
TODO: (Sukhdev) - to be implemented in next release.
"""
pass
def add_router_interface(self, context, router_info):
"""Adds an interface to a router created on Arista HW router.
This deals with both IPv6 and IPv4 configurations.
"""
if router_info:
self._select_dicts(router_info['ip_version'])
cidr = router_info['cidr']
subnet_mask = cidr.split('/')[1]
router_name = self._arista_router_name(router_info['tenant_id'],
router_info['name'])
if self.mlag_configured:
# For MLAG, we send a specific IP address as opposed to cidr
# For now, we are using x.x.x.253 and x.x.x.254 as virtual IP
for i, server in enumerate(self._servers):
                    # get the appropriate virtual IP address for this router
router_ip = self._get_router_ip(cidr, i,
router_info['ip_version'])
self.add_interface_to_router(router_info['seg_id'],
router_name,
router_info['gip'],
router_ip, subnet_mask,
server)
else:
for s in self._servers:
self.add_interface_to_router(router_info['seg_id'],
router_name,
router_info['gip'],
None, subnet_mask, s)
def remove_router_interface(self, context, router_info):
"""Removes previously configured interface from router on Arista HW.
This deals with both IPv6 and IPv4 configurations.
"""
if router_info:
router_name = self._arista_router_name(router_info['tenant_id'],
router_info['name'])
for s in self._servers:
self.delete_interface_from_router(router_info['seg_id'],
router_name, s)
def _run_openstack_l3_cmds(self, commands, server):
"""Execute/sends a CAPI (Command API) command to EOS.
In this method, list of commands is appended with prefix and
postfix commands - to make is understandble by EOS.
:param commands : List of command to be executed on EOS.
:param server: Server endpoint on the Arista switch to be configured
"""
command_start = ['enable', 'configure']
command_end = ['exit']
full_command = command_start + commands + command_end
LOG.info(_('Executing command on Arista EOS: %s'), full_command)
try:
# this returns array of return values for every command in
# full_command list
ret = server.runCmds(version=1, cmds=full_command)
LOG.info(_('Results of execution on Arista EOS: %s'), ret)
except Exception:
            msg = (_('Error occurred while trying to execute '
'commands %(cmd)s on EOS %(host)s') %
{'cmd': full_command, 'host': server})
LOG.exception(msg)
raise arista_exc.AristaServicePluginRpcError(msg=msg)
def _arista_router_name(self, tenant_id, name):
        # Use a unique name so that OpenStack-created routers/SVIs
        # can be distinguished from the user-created routers/SVIs
        # on Arista HW.
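        # e.g. _arista_router_name('abc123', 'r1') -> 'OS-abc123-r1'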
return 'OS' + '-' + tenant_id + '-' + name
def _get_binary_from_ipv4(self, ip_addr):
return struct.unpack("!L", socket.inet_pton(socket.AF_INET,
ip_addr))[0]
def _get_binary_from_ipv6(self, ip_addr):
hi, lo = struct.unpack("!QQ", socket.inet_pton(socket.AF_INET6,
ip_addr))
return (hi << 64) | lo
def _get_ipv4_from_binary(self, bin_addr):
return socket.inet_ntop(socket.AF_INET, struct.pack("!L", bin_addr))
def _get_ipv6_from_binary(self, bin_addr):
hi = bin_addr >> 64
        lo = bin_addr & 0xFFFFFFFFFFFFFFFF  # low 64 bits
return socket.inet_ntop(socket.AF_INET6, struct.pack("!QQ", hi, lo))
def _get_router_ip(self, cidr, ip_count, ip_ver):
""" For a given IP subnet and IP version type, generate IP for router.
This method takes the network address (cidr) and selects an
IP address that should be assigned to virtual router running
on multiple switches. It uses upper addresses in a subnet address
as IP for the router. Each instace of the router, on each switch,
requires uniqe IP address. For example in IPv4 case, on a 255
subnet, it will pick X.X.X.254 as first addess, X.X.X.253 for next,
and so on.
"""
start_ip = MLAG_SWITCHES + ip_count
network_addr, prefix = cidr.split('/')
if ip_ver == 4:
bits = IPV4_BITS
ip = self._get_binary_from_ipv4(network_addr)
elif ip_ver == 6:
bits = IPV6_BITS
ip = self._get_binary_from_ipv6(network_addr)
mask = (pow(2, bits) - 1) << (bits - int(prefix))
network_addr = ip & mask
router_ip = pow(2, bits - int(prefix)) - start_ip
router_ip = network_addr | router_ip
if ip_ver == 4:
return self._get_ipv4_from_binary(router_ip) + '/' + prefix
else:
return self._get_ipv6_from_binary(router_ip) + '/' + prefix
class NeutronNets(db_base_plugin_v2.NeutronDbPluginV2):
"""Access to Neutron DB.
    Provides access to the Neutron databases for all provisioned
    networks as well as ports. This data is used during the synchronization
    of the DB between the ML2 Mechanism Driver and Arista EOS.
    Names of the networks and ports are not stored in the Arista repository;
    they are pulled from the Neutron DB.
"""
def __init__(self):
self.admin_ctx = nctx.get_admin_context()
def get_all_networks_for_tenant(self, tenant_id):
filters = {'tenant_id': [tenant_id]}
return super(NeutronNets,
self).get_networks(self.admin_ctx, filters=filters) or []
def get_all_ports_for_tenant(self, tenant_id):
filters = {'tenant_id': [tenant_id]}
return super(NeutronNets,
self).get_ports(self.admin_ctx, filters=filters) or []
def _get_network(self, tenant_id, network_id):
filters = {'tenant_id': [tenant_id],
'id': [network_id]}
return super(NeutronNets,
self).get_networks(self.admin_ctx, filters=filters) or []
def get_subnet_info(self, subnet_id):
subnet = self.get_subnet(subnet_id)
return subnet
def get_subnet_ip_version(self, subnet_id):
subnet = self.get_subnet(subnet_id)
return subnet['ip_version']
def get_subnet_gateway_ip(self, subnet_id):
subnet = self.get_subnet(subnet_id)
return subnet['gateway_ip']
def get_subnet_cidr(self, subnet_id):
subnet = self.get_subnet(subnet_id)
return subnet['cidr']
def get_network_id(self, subnet_id):
subnet = self.get_subnet(subnet_id)
return subnet['network_id']
def get_network_id_from_port_id(self, port_id):
port = self.get_port(port_id)
return port['network_id']
def get_subnet(self, subnet_id):
return super(NeutronNets,
self).get_subnet(self.admin_ctx, subnet_id) or []
def get_port(self, port_id):
return super(NeutronNets,
self).get_port(self.admin_ctx, port_id) or []
| apache-2.0 |
fginter/twitter_pp | filter_tw.py | 1 | 1551 | #-*- coding: utf-8 -*-
import json
import sys
import re
import codecs
out8=codecs.getwriter("utf-8")(sys.stdout)
focus_h_tags=set((u"pakolaiset",u"refugeecrisis",u"syrianrefugees",u"syria",u"syyria",u"pakolaiskriisi",u"migrantcrisis",u"refugeeswelcome",u"migrants",u"réfugiés",u"syrie"))
def keep(tweet):
"""in goes one tweet parsed json
returns True/False do we want it or not"""
htags=set(h["text"].lower() for h in tweet["entities"]["hashtags"])
return htags&focus_h_tags
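# For example (hypothetical tweet): hashtags {'Syria', 'news'} lower-case to
# {'syria', 'news'}, which intersects focus_h_tags as {'syria'} (truthy -> kept);
# a tweet with no focus hashtag yields an empty set (falsy -> filtered out).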
ws_re=re.compile(r"\s+")
def print_tweet(t):
l=t["lang"]
ul=t["user"]["lang"]
tstamp=t["created_at"]
geo=t["coordinates"]
ugeo=t["user"]["location"]
if not ugeo or not ugeo.strip():
ugeo=u"NA"
else:
ugeo=ugeo.strip()
place=t["place"]
if not geo:
geo=u"NA"
else:
geo=u"%f,%f"%tuple(geo["coordinates"])
if not place:
place=u"NA"
else:
place=place["full_name"]
htags=u",".join(sorted(set(h["text"].lower() for h in t["entities"]["hashtags"])))
txt=ws_re.sub(u" ",t["text"])
print >> out8, u"\t".join((l,tstamp,geo,place,ul,ugeo,htags,txt))
print >> out8, u"\t".join((u"Lang",u"Time",u"TweetGeo",u"TweetPlace",u"UserLang",u"UserPlace",u"HashTags",u"Text"))
with open("/dev/stdin","r") as f_in:
tweet_list=json.load(f_in)
print >> sys.stderr, "Tweets:", len(tweet_list)
counter=0
for t in tweet_list:
if not keep(t):
continue
print_tweet(t)
counter+=1
print >> sys.stderr, "Matched:", counter
| mit |
parlar/calls2xls | external/CrossMap/usr/lib64/python2.7/site-packages/bx/seq/seq_tests.py | 7 | 2063 | """
Tests for `bx.seq.seq`.
"""
import unittest
import os.path
import sys
import bx.seq, fasta_tests, nib_tests, qdna_tests
test_fa = "test_data/seq_tests/test.fa"
test2_fa = "test_data/seq_tests/test2.fa"
test_nib = "test_data/seq_tests/test.nib"
test_qdna = "test_data/seq_tests/test.qdna"
valid_fasta = fasta_tests.valid_seq
valid_nib = nib_tests.valid_seq
valid_qdna = qdna_tests.valid_seq
# Same sequences as stored in test2.fa
valid2_fa = [("apple", "GGCGCTGCGATAAGGTTGCGACAACACGGACCTTCTTTTGCCTACCTCTGTTCTTGGCACG"),
("orange", "CGTGCCGAGAACAGAAAATACGCCGGGCGGTGCAGTAGTATCTTGGTATCCGATATGCAGG"),
("grapefruit", "CCTGCATATCGACTAGTACACCCTCCCGAGGTACCCCACCCATCCCTCTTTTCTCGGCGCG")]
class SEQTestCase (unittest.TestCase):
def test_get_fasta (self):
fastafile = bx.seq.seq_file (file (test_fa, "rb"))
check_get (fastafile, valid_fasta, 3, 40)
def test_get_nib (self):
nibfile = bx.seq.seq_file (file (test_nib, "rb"))
check_get (nibfile, valid_nib, 3, 40)
def test_get_qdna (self):
qdnafile = bx.seq.seq_file (file (test_qdna, "rb"))
check_get (qdnafile, valid_qdna, 3, 40)
def test_get_reader (self):
reader = bx.seq.seq_reader (file (test2_fa, "rb"))
for (ix,seq) in enumerate(reader):
assert (ix < len(valid2_fa)), "FastaReader returns too many sequences"
text = "%s" % seq
fields = text.split()
            assert (len(fields) == 2), "SeqReader.__str__ returns incorrect sequence string \"%s\" (%d fields)" % (text, len(fields))
assert (fields[0] == valid2_fa[ix][0]), "FastaReader returned the wrong name (%s,%s)" % (fields[0],valid2_fa[ix][0])
assert (fields[1] == valid2_fa[ix][1]), "FastaReader returned the wrong text (%s,%s)" % (fields[1],valid2_fa[ix][1])
def check_get (seqfile, valid_seq, start, len):
assert seqfile.get (start, len) == valid_seq[start:start+len]
test_classes = [SEQTestCase]
suite = unittest.TestSuite ([unittest.makeSuite (c) for c in test_classes])
| mit |
boneyao/sentry | src/sentry/migrations/0174_auto__del_field_projectkey_user_added.py | 34 | 37246 | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Deleting field 'ProjectKey.user_added'
db.delete_column(u'sentry_projectkey', 'user_added_id')
def backwards(self, orm):
# Adding field 'ProjectKey.user_added'
db.add_column(u'sentry_projectkey', 'user_added',
self.gf('sentry.db.models.fields.foreignkey.FlexibleForeignKey')(related_name='keys_added_set', null=True, to=orm['sentry.User']),
keep_default=False)
models = {
'sentry.accessgroup': {
'Meta': {'unique_together': "(('team', 'name'),)", 'object_name': 'AccessGroup'},
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {'null': 'True', 'blank': 'True'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'managed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'members': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['sentry.User']", 'symmetrical': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'projects': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['sentry.Project']", 'symmetrical': 'False'}),
'team': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Team']"}),
'type': ('sentry.db.models.fields.bounded.BoundedIntegerField', [], {'default': '50'})
},
'sentry.activity': {
'Meta': {'object_name': 'Activity'},
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {'null': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'event': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Event']", 'null': 'True'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']", 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ident': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'type': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'null': 'True'})
},
'sentry.alert': {
'Meta': {'object_name': 'Alert'},
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {'null': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']", 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'related_groups': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'related_alerts'", 'symmetrical': 'False', 'through': "orm['sentry.AlertRelatedGroup']", 'to': "orm['sentry.Group']"}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'})
},
'sentry.alertrelatedgroup': {
'Meta': {'unique_together': "(('group', 'alert'),)", 'object_name': 'AlertRelatedGroup'},
'alert': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Alert']"}),
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {'null': 'True'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'})
},
'sentry.apikey': {
'Meta': {'object_name': 'ApiKey'},
'allowed_origins': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'}),
'label': ('django.db.models.fields.CharField', [], {'default': "'Default'", 'max_length': '64', 'blank': 'True'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'key_set'", 'to': "orm['sentry.Organization']"}),
'scopes': ('django.db.models.fields.BigIntegerField', [], {'default': 'None'}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'})
},
'sentry.auditlogentry': {
'Meta': {'object_name': 'AuditLogEntry'},
'actor': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'audit_actors'", 'to': "orm['sentry.User']"}),
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'event': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ip_address': ('django.db.models.fields.GenericIPAddressField', [], {'max_length': '39', 'null': 'True'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}),
'target_object': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'target_user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'blank': 'True', 'related_name': "'audit_targets'", 'null': 'True', 'to': "orm['sentry.User']"})
},
'sentry.authidentity': {
'Meta': {'unique_together': "(('auth_provider', 'ident'), ('auth_provider', 'user'))", 'object_name': 'AuthIdentity'},
'auth_provider': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.AuthProvider']"}),
'data': ('jsonfield.fields.JSONField', [], {'default': '{}'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ident': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'last_synced': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_verified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"})
},
'sentry.authprovider': {
'Meta': {'object_name': 'AuthProvider'},
'config': ('jsonfield.fields.JSONField', [], {'default': '{}'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'default_global_access': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'default_role': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '50'}),
'default_teams': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['sentry.Team']", 'symmetrical': 'False', 'blank': 'True'}),
'flags': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'last_sync': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']", 'unique': 'True'}),
'provider': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'sync_time': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'})
},
'sentry.broadcast': {
'Meta': {'object_name': 'Broadcast'},
'badge': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'blank': 'True'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
'link': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'message': ('django.db.models.fields.CharField', [], {'max_length': '256'})
},
'sentry.event': {
'Meta': {'unique_together': "(('project', 'event_id'),)", 'object_name': 'Event', 'db_table': "'sentry_message'", 'index_together': "(('group', 'datetime'),)"},
'checksum': ('django.db.models.fields.CharField', [], {'max_length': '32', 'db_index': 'True'}),
'data': ('sentry.db.models.fields.node.NodeField', [], {'null': 'True', 'blank': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'event_id': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'db_column': "'message_id'"}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'blank': 'True', 'related_name': "'event_set'", 'null': 'True', 'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'num_comments': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'null': 'True'}),
'platform': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'time_spent': ('sentry.db.models.fields.bounded.BoundedIntegerField', [], {'null': 'True'})
},
'sentry.eventmapping': {
'Meta': {'unique_together': "(('project', 'event_id'),)", 'object_name': 'EventMapping'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'event_id': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"})
},
'sentry.file': {
'Meta': {'object_name': 'File'},
'checksum': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True'}),
'headers': ('jsonfield.fields.JSONField', [], {'default': '{}'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'path': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'size': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'storage': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True'}),
'storage_options': ('jsonfield.fields.JSONField', [], {'default': '{}'}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '64'})
},
'sentry.group': {
'Meta': {'unique_together': "(('project', 'checksum'),)", 'object_name': 'Group', 'db_table': "'sentry_groupedmessage'"},
'active_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
'checksum': ('django.db.models.fields.CharField', [], {'max_length': '32', 'db_index': 'True'}),
'culprit': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'db_column': "'view'", 'blank': 'True'}),
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {'null': 'True', 'blank': 'True'}),
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'is_public': ('django.db.models.fields.NullBooleanField', [], {'default': 'False', 'null': 'True', 'blank': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'level': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '40', 'db_index': 'True', 'blank': 'True'}),
'logger': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '64', 'db_index': 'True', 'blank': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'num_comments': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'null': 'True'}),
'platform': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'resolved_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
'score': ('sentry.db.models.fields.bounded.BoundedIntegerField', [], {'default': '0'}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
'time_spent_count': ('sentry.db.models.fields.bounded.BoundedIntegerField', [], {'default': '0'}),
'time_spent_total': ('sentry.db.models.fields.bounded.BoundedIntegerField', [], {'default': '0'}),
'times_seen': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '1', 'db_index': 'True'})
},
'sentry.groupassignee': {
'Meta': {'object_name': 'GroupAssignee', 'db_table': "'sentry_groupasignee'"},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'assignee_set'", 'unique': 'True', 'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'assignee_set'", 'to': "orm['sentry.Project']"}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'sentry_assignee_set'", 'to': "orm['sentry.User']"})
},
'sentry.groupbookmark': {
'Meta': {'unique_together': "(('project', 'user', 'group'),)", 'object_name': 'GroupBookmark'},
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'bookmark_set'", 'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'bookmark_set'", 'to': "orm['sentry.Project']"}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'sentry_bookmark_set'", 'to': "orm['sentry.User']"})
},
'sentry.grouphash': {
'Meta': {'unique_together': "(('project', 'hash'),)", 'object_name': 'GroupHash'},
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']", 'null': 'True'}),
'hash': ('django.db.models.fields.CharField', [], {'max_length': '32', 'db_index': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'})
},
'sentry.groupmeta': {
'Meta': {'unique_together': "(('group', 'key'),)", 'object_name': 'GroupMeta'},
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'value': ('django.db.models.fields.TextField', [], {})
},
'sentry.grouprulestatus': {
'Meta': {'unique_together': "(('rule', 'group'),)", 'object_name': 'GroupRuleStatus'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'last_active': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'rule': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Rule']"}),
'status': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'})
},
'sentry.groupseen': {
'Meta': {'unique_together': "(('user', 'group'),)", 'object_name': 'GroupSeen'},
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'db_index': 'False'})
},
'sentry.grouptagkey': {
'Meta': {'unique_together': "(('project', 'group', 'key'),)", 'object_name': 'GroupTagKey'},
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'values_seen': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'})
},
'sentry.grouptagvalue': {
'Meta': {'unique_together': "(('project', 'key', 'value', 'group'),)", 'object_name': 'GroupTagValue', 'db_table': "'sentry_messagefiltervalue'"},
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'grouptag'", 'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'grouptag'", 'null': 'True', 'to': "orm['sentry.Project']"}),
'times_seen': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'sentry.helppage': {
'Meta': {'object_name': 'HelpPage'},
'content': ('django.db.models.fields.TextField', [], {}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'is_visible': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64', 'unique': 'True', 'null': 'True'}),
'priority': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '50'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '64'})
},
'sentry.lostpasswordhash': {
'Meta': {'object_name': 'LostPasswordHash'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'hash': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'unique': 'True'})
},
'sentry.option': {
'Meta': {'object_name': 'Option'},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '64'}),
'last_updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'value': ('sentry.db.models.fields.pickle.UnicodePickledObjectField', [], {})
},
'sentry.organization': {
'Meta': {'object_name': 'Organization'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'flags': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'members': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'org_memberships'", 'symmetrical': 'False', 'through': "orm['sentry.OrganizationMember']", 'to': "orm['sentry.User']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'owner': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'})
},
'sentry.organizationaccessrequest': {
'Meta': {'unique_together': "(('team', 'member'),)", 'object_name': 'OrganizationAccessRequest'},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'member': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.OrganizationMember']"}),
'team': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Team']"})
},
'sentry.organizationmember': {
'Meta': {'unique_together': "(('organization', 'user'), ('organization', 'email'))", 'object_name': 'OrganizationMember'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
'flags': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'has_global_access': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'member_set'", 'to': "orm['sentry.Organization']"}),
'teams': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['sentry.Team']", 'symmetrical': 'False', 'through': "orm['sentry.OrganizationMemberTeam']", 'blank': 'True'}),
'type': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '50'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'blank': 'True', 'related_name': "'sentry_orgmember_set'", 'null': 'True', 'to': "orm['sentry.User']"})
},
'sentry.organizationmemberteam': {
'Meta': {'unique_together': "(('team', 'organizationmember'),)", 'object_name': 'OrganizationMemberTeam', 'db_table': "'sentry_organizationmember_teams'"},
'id': ('sentry.db.models.fields.bounded.BoundedAutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'organizationmember': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.OrganizationMember']"}),
'team': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Team']"})
},
'sentry.pendingteammember': {
'Meta': {'unique_together': "(('team', 'email'),)", 'object_name': 'PendingTeamMember'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'team': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'pending_member_set'", 'to': "orm['sentry.Team']"}),
'type': ('sentry.db.models.fields.bounded.BoundedIntegerField', [], {'default': '50'})
},
'sentry.project': {
'Meta': {'unique_together': "(('team', 'slug'), ('organization', 'slug'))", 'object_name': 'Project'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}),
'platform': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
'public': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'null': 'True'}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
'team': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Team']"})
},
'sentry.projectkey': {
'Meta': {'object_name': 'ProjectKey'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'key_set'", 'to': "orm['sentry.Project']"}),
'public_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'unique': 'True', 'null': 'True'}),
'roles': ('django.db.models.fields.BigIntegerField', [], {'default': '1'}),
'secret_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'unique': 'True', 'null': 'True'}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'})
},
'sentry.projectoption': {
'Meta': {'unique_together': "(('project', 'key'),)", 'object_name': 'ProjectOption', 'db_table': "'sentry_projectoptions'"},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'value': ('sentry.db.models.fields.pickle.UnicodePickledObjectField', [], {})
},
'sentry.release': {
'Meta': {'unique_together': "(('project', 'version'),)", 'object_name': 'Release'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'version': ('django.db.models.fields.CharField', [], {'max_length': '64'})
},
'sentry.releasefile': {
'Meta': {'unique_together': "(('release', 'ident'),)", 'object_name': 'ReleaseFile'},
'file': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.File']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ident': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'name': ('django.db.models.fields.TextField', [], {}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'release': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Release']"})
},
'sentry.rule': {
'Meta': {'object_name': 'Rule'},
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"})
},
'sentry.tagkey': {
'Meta': {'unique_together': "(('project', 'key'),)", 'object_name': 'TagKey', 'db_table': "'sentry_filterkey'"},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}),
'values_seen': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'})
},
'sentry.tagvalue': {
'Meta': {'unique_together': "(('project', 'key', 'value'),)", 'object_name': 'TagValue', 'db_table': "'sentry_filtervalue'"},
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {'null': 'True', 'blank': 'True'}),
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'times_seen': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'sentry.team': {
'Meta': {'unique_together': "(('organization', 'slug'),)", 'object_name': 'Team'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50'}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'})
},
'sentry.user': {
'Meta': {'object_name': 'User', 'db_table': "'auth_user'"},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedAutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_managed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'})
},
'sentry.useroption': {
'Meta': {'unique_together': "(('user', 'project', 'key'),)", 'object_name': 'UserOption'},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"}),
'value': ('sentry.db.models.fields.pickle.UnicodePickledObjectField', [], {})
}
}
complete_apps = ['sentry'] | bsd-3-clause |
darron/dd-agent | checks.d/sqlserver.py | 22 | 17085 | '''
Check the performance counters from SQL Server
See http://blogs.msdn.com/b/psssql/archive/2013/09/23/interpreting-the-counter-values-from-sys-dm-os-performance-counters.aspx
for information on how to report the metrics available in the sys.dm_os_performance_counters table
'''
# stdlib
import traceback
# 3rd party
import adodbapi
# project
from checks import AgentCheck
ALL_INSTANCES = 'ALL'
VALID_METRIC_TYPES = ('gauge', 'rate', 'histogram')
# Constant for SQLServer cntr_type
PERF_LARGE_RAW_BASE = 1073939712
PERF_RAW_LARGE_FRACTION = 537003264
PERF_AVERAGE_BULK = 1073874176
PERF_COUNTER_BULK_COUNT = 272696576
PERF_COUNTER_LARGE_RAWCOUNT = 65792
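# How these map to reported values (per the MSDN article referenced above and
# the metric_type_mapping in typed_metric below): PERF_COUNTER_BULK_COUNT is
# reported as a per-second rate, PERF_COUNTER_LARGE_RAWCOUNT as a plain gauge,
# and PERF_RAW_LARGE_FRACTION / PERF_AVERAGE_BULK as value-over-base ratios.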
# Queries
COUNTER_TYPE_QUERY = '''select distinct cntr_type
from sys.dm_os_performance_counters
where counter_name = ?;'''
BASE_NAME_QUERY = '''select distinct counter_name
from sys.dm_os_performance_counters
where (counter_name=? or counter_name=?
or counter_name=?) and cntr_type=%s;''' % PERF_LARGE_RAW_BASE
INSTANCES_QUERY = '''select instance_name
from sys.dm_os_performance_counters
where counter_name=? and instance_name!='_Total';'''
VALUE_AND_BASE_QUERY = '''select cntr_value
from sys.dm_os_performance_counters
where (counter_name=? or counter_name=?)
and instance_name=?
order by cntr_type;'''
class SQLConnectionError(Exception):
"""
Exception raised for SQL instance connection issues
"""
pass
class SQLServer(AgentCheck):
SOURCE_TYPE_NAME = 'sql server'
SERVICE_CHECK_NAME = 'sqlserver.can_connect'
# FIXME: 6.x, set default to 5s (like every check)
DEFAULT_COMMAND_TIMEOUT = 30
METRICS = [
('sqlserver.buffer.cache_hit_ratio', 'Buffer cache hit ratio', ''), # RAW_LARGE_FRACTION
('sqlserver.buffer.page_life_expectancy', 'Page life expectancy', ''), # LARGE_RAWCOUNT
('sqlserver.stats.batch_requests', 'Batch Requests/sec', ''), # BULK_COUNT
('sqlserver.stats.sql_compilations', 'SQL Compilations/sec', ''), # BULK_COUNT
('sqlserver.stats.sql_recompilations', 'SQL Re-Compilations/sec', ''), # BULK_COUNT
('sqlserver.stats.connections', 'User Connections', ''), # LARGE_RAWCOUNT
('sqlserver.stats.lock_waits', 'Lock Waits/sec', '_Total'), # BULK_COUNT
('sqlserver.access.page_splits', 'Page Splits/sec', ''), # BULK_COUNT
('sqlserver.stats.procs_blocked', 'Processes blocked', ''), # LARGE_RAWCOUNT
('sqlserver.buffer.checkpoint_pages', 'Checkpoint pages/sec', '') # BULK_COUNT
]
def __init__(self, name, init_config, agentConfig, instances=None):
AgentCheck.__init__(self, name, init_config, agentConfig, instances)
# Cache connections
self.connections = {}
self.failed_connections = {}
self.instances_metrics = {}
# Pre-process the list of metrics to collect
custom_metrics = init_config.get('custom_metrics', [])
for instance in instances:
try:
self._make_metric_list_to_collect(instance, custom_metrics)
except SQLConnectionError:
self.log.exception("Skipping SQL Server instance")
continue
def _make_metric_list_to_collect(self, instance, custom_metrics):
"""
        Store the list of metrics to collect by instance_key.
        Will also create and cache connections to query the db.
"""
metrics_to_collect = []
for name, counter_name, instance_name in self.METRICS:
try:
sql_type, base_name = self.get_sql_type(instance, counter_name)
metrics_to_collect.append(self.typed_metric(name,
counter_name,
base_name,
None,
sql_type,
instance_name,
None))
except SQLConnectionError:
raise
except Exception:
self.log.warning("Can't load the metric %s, ignoring", name, exc_info=True)
continue
# Load any custom metrics from conf.d/sqlserver.yaml
for row in custom_metrics:
user_type = row.get('type')
if user_type is not None and user_type not in VALID_METRIC_TYPES:
self.log.error('%s has an invalid metric type: %s', row['name'], user_type)
sql_type = None
try:
if user_type is None:
sql_type, base_name = self.get_sql_type(instance, row['counter_name'])
except Exception:
self.log.warning("Can't load the metric %s, ignoring", row['name'], exc_info=True)
continue
metrics_to_collect.append(self.typed_metric(row['name'],
row['counter_name'],
base_name,
user_type,
sql_type,
row.get('instance_name', ''),
row.get('tag_by', None)))
instance_key = self._conn_key(instance)
self.instances_metrics[instance_key] = metrics_to_collect
def typed_metric(self, dd_name, sql_name, base_name, user_type, sql_type, instance_name, tag_by):
'''
Create the appropriate SqlServerMetric object, each implementing its method to
fetch the metrics properly.
If a `type` was specified in the config, it is used to report the value
directly fetched from SQLServer. Otherwise, it is decided based on the
sql_type, according to microsoft's documentation.
'''
metric_type_mapping = {
PERF_COUNTER_BULK_COUNT: (self.rate, SqlSimpleMetric),
PERF_COUNTER_LARGE_RAWCOUNT: (self.gauge, SqlSimpleMetric),
PERF_LARGE_RAW_BASE: (self.gauge, SqlSimpleMetric),
PERF_RAW_LARGE_FRACTION: (self.gauge, SqlFractionMetric),
PERF_AVERAGE_BULK: (self.gauge, SqlIncrFractionMetric)
}
if user_type is not None:
# user type overrides any other value
metric_type = getattr(self, user_type)
cls = SqlSimpleMetric
else:
metric_type, cls = metric_type_mapping[sql_type]
return cls(dd_name, sql_name, base_name,
metric_type, instance_name, tag_by, self.log)
def _get_access_info(self, instance):
''' Convenience method to extract info from instance
'''
host = instance.get('host', '127.0.0.1,1433')
username = instance.get('username')
password = instance.get('password')
database = instance.get('database', 'master')
return host, username, password, database
def _conn_key(self, instance):
''' Return a key to use for the connection cache
'''
host, username, password, database = self._get_access_info(instance)
return '%s:%s:%s:%s' % (host, username, password, database)
def _conn_string(self, instance):
''' Return a connection string to use with adodbapi
'''
host, username, password, database = self._get_access_info(instance)
conn_str = 'Provider=SQLOLEDB;Data Source=%s;Initial Catalog=%s;' \
% (host, database)
if username:
conn_str += 'User ID=%s;' % (username)
if password:
conn_str += 'Password=%s;' % (password)
if not username and not password:
conn_str += 'Integrated Security=SSPI;'
return conn_str
def get_cursor(self, instance, cache_failure=False):
'''
        Return a cursor to execute queries against the db.
        Connections are cached in the self.connections dict
'''
conn_key = self._conn_key(instance)
host = instance.get('host')
database = instance.get('database')
service_check_tags = [
'host:%s' % host,
'db:%s' % database
]
if conn_key in self.failed_connections:
raise self.failed_connections[conn_key]
if conn_key not in self.connections:
try:
conn = adodbapi.connect(
self._conn_string(instance),
timeout=int(instance.get('command_timeout',
self.DEFAULT_COMMAND_TIMEOUT))
)
self.connections[conn_key] = conn
self.service_check(self.SERVICE_CHECK_NAME, AgentCheck.OK, tags=service_check_tags)
except Exception:
cx = "%s - %s" % (host, database)
message = "Unable to connect to SQL Server for instance %s." % cx
self.service_check(self.SERVICE_CHECK_NAME, AgentCheck.CRITICAL,
tags=service_check_tags, message=message)
password = instance.get('password')
tracebk = traceback.format_exc()
if password is not None:
tracebk = tracebk.replace(password, "*" * 6)
# Avoid multiple connection timeouts (too slow):
# save the exception, re-raise it when needed
cxn_failure_exp = SQLConnectionError("%s \n %s" % (message, tracebk))
if cache_failure:
self.failed_connections[conn_key] = cxn_failure_exp
raise cxn_failure_exp
conn = self.connections[conn_key]
cursor = conn.cursor()
return cursor
def get_sql_type(self, instance, counter_name):
'''
Return the type of the performance counter so that we can report it to
Datadog correctly
If the sql_type is one that needs a base (PERF_RAW_LARGE_FRACTION and
PERF_AVERAGE_BULK), the name of the base counter will also be returned
'''
cursor = self.get_cursor(instance, cache_failure=True)
cursor.execute(COUNTER_TYPE_QUERY, (counter_name,))
(sql_type,) = cursor.fetchone()
if sql_type == PERF_LARGE_RAW_BASE:
self.log.warning("Metric %s is of type Base and shouldn't be reported this way",
counter_name)
base_name = None
if sql_type in [PERF_AVERAGE_BULK, PERF_RAW_LARGE_FRACTION]:
            # This is an ugly hack. For certain types of metrics (PERF_RAW_LARGE_FRACTION
            # and PERF_AVERAGE_BULK), we need two counters: the metric specified and
            # a base counter to get the ratio. There is no unique schema so we generate
            # the possible candidates and look at which ones exist in the db.
candidates = (counter_name + " base",
counter_name.replace("(ms)", "base"),
counter_name.replace("Avg ", "") + " base"
)
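            # Illustration with a hypothetical counter name: for
            # 'Average Latch Wait Time (ms)' the candidates would be
            # 'Average Latch Wait Time (ms) base' and
            # 'Average Latch Wait Time base'; only whichever actually exists
            # with cntr_type PERF_LARGE_RAW_BASE is used.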
try:
cursor.execute(BASE_NAME_QUERY, candidates)
base_name = cursor.fetchone().counter_name.strip()
self.log.debug("Got base metric: %s for metric: %s", base_name, counter_name)
except Exception, e:
self.log.warning("Could not get counter_name of base for metric: %s", e)
self.close_cursor(cursor)
return sql_type, base_name
def check(self, instance):
"""
Fetch the metrics from the sys.dm_os_performance_counters table
"""
cursor = self.get_cursor(instance)
custom_tags = instance.get('tags', [])
instance_key = self._conn_key(instance)
metrics_to_collect = self.instances_metrics[instance_key]
for metric in metrics_to_collect:
try:
metric.fetch_metric(cursor, custom_tags)
except Exception, e:
self.log.warning("Could not fetch metric %s: %s" % (metric.datadog_name, e))
self.close_cursor(cursor)
def close_cursor(self, cursor):
"""
We close the cursor explicitly b/c we had proven memory leaks
We handle any exception from closing, although according to the doc:
"in adodbapi, it is NOT an error to re-close a closed cursor"
"""
try:
cursor.close()
except Exception as e:
self.log.warning("Could not close adodbapi cursor\n{0}".format(e))
class SqlServerMetric(object):
'''General class for common methods, should never be instantiated directly
'''
def __init__(self, datadog_name, sql_name, base_name,
report_function, instance, tag_by, logger):
self.datadog_name = datadog_name
self.sql_name = sql_name
self.base_name = base_name
self.report_function = report_function
self.instance = instance
self.tag_by = tag_by
self.instances = None
self.past_values = {}
self.log = logger
    def fetch_metric(self, cursor, tags):
        raise NotImplementedError
class SqlSimpleMetric(SqlServerMetric):
def fetch_metric(self, cursor, tags):
query_base = '''
select instance_name, cntr_value
from sys.dm_os_performance_counters
where counter_name = ?
'''
if self.instance == ALL_INSTANCES:
query = query_base + "and instance_name!= '_Total'"
query_content = (self.sql_name,)
else:
query = query_base + "and instance_name=?"
query_content = (self.sql_name, self.instance)
cursor.execute(query, query_content)
rows = cursor.fetchall()
for instance_name, cntr_value in rows:
metric_tags = tags
if self.instance == ALL_INSTANCES:
metric_tags = metric_tags + ['%s:%s' % (self.tag_by, instance_name.strip())]
self.report_function(self.datadog_name, cntr_value,
tags=metric_tags)
class SqlFractionMetric(SqlServerMetric):
def set_instances(self, cursor):
if self.instance == ALL_INSTANCES:
cursor.execute(INSTANCES_QUERY, (self.sql_name,))
self.instances = [row.instance_name for row in cursor.fetchall()]
else:
self.instances = [self.instance]
def fetch_metric(self, cursor, tags):
'''
Because we need to query the metrics by matching pairs, we can't query
all of them together without having to perform some matching based on
the name afterwards so instead we query instance by instance.
        We cache the list of instances so that we don't have to look it up every time
'''
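        # Note: VALUE_AND_BASE_QUERY orders by cntr_type, and the base type
        # (PERF_LARGE_RAW_BASE = 1073939712) sorts after both fraction types,
        # so rows[0] below is the value and rows[1] is its base.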
if self.instances is None:
self.set_instances(cursor)
for instance in self.instances:
cursor.execute(VALUE_AND_BASE_QUERY, (self.sql_name, self.base_name, instance))
rows = cursor.fetchall()
if len(rows) != 2:
self.log.warning("Missing counter to compute fraction for "
"metric %s instance %s, skipping", self.sql_name, instance)
continue
value = rows[0, "cntr_value"]
base = rows[1, "cntr_value"]
metric_tags = tags
if self.instance == ALL_INSTANCES:
metric_tags = metric_tags + ['%s:%s' % (self.tag_by, instance.strip())]
self.report_fraction(value, base, metric_tags)
def report_fraction(self, value, base, metric_tags):
try:
result = value / float(base)
self.report_function(self.datadog_name, result, tags=metric_tags)
except ZeroDivisionError:
self.log.debug("Base value is 0, won't report metric %s for tags %s",
self.datadog_name, metric_tags)
class SqlIncrFractionMetric(SqlFractionMetric):
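    # Sketch of the intent (illustrative numbers): PERF_AVERAGE_BULK counters
    # are cumulative, so we report the ratio of deltas between two checks.
    # E.g. if the value goes 1000 -> 1500 and the base goes 100 -> 150, the
    # reported average is (1500 - 1000) / float(150 - 100) == 10.0.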
def report_fraction(self, value, base, metric_tags):
key = "key:" + "".join(metric_tags)
if key in self.past_values:
old_value, old_base = self.past_values[key]
diff_value = value - old_value
diff_base = base - old_base
try:
result = diff_value / float(diff_base)
self.report_function(self.datadog_name, result, tags=metric_tags)
except ZeroDivisionError:
self.log.debug("Base value is 0, won't report metric %s for tags %s",
self.datadog_name, metric_tags)
self.past_values[key] = (value, base)
| bsd-3-clause |
jking6884/RESTapi | app/users/views.py | 2 | 4603 | from flask import Blueprint, request, jsonify, make_response
from app.users.models import Users, UsersSchema
from flask_restful import Api
from app.baseviews import Resource
from app.basemodels import db
from sqlalchemy.exc import SQLAlchemyError
from marshmallow import ValidationError
from werkzeug.security import generate_password_hash
users = Blueprint('users', __name__)
# http://marshmallow.readthedocs.org/en/latest/quickstart.html#declaring-schemas
# https://github.com/marshmallow-code/marshmallow-jsonapi
schema = UsersSchema(strict=True)
api = Api(users)
# Users
class CreateListUsers(Resource):
"""http://jsonapi.org/format/#fetching
A server MUST respond to a successful request to fetch an individual resource or resource collection with a 200 OK response.
A server MUST respond with 404 Not Found when processing a request to fetch a single resource that does not exist, except when the request warrants a 200 OK response with null as the primary data (as described above)
a self link as part of the top-level links object"""
def get(self):
users_query = Users.query.all()
results = schema.dump(users_query, many=True).data
return results
"""http://jsonapi.org/format/#crud
A resource can be created by sending a POST request to a URL that represents a collection of users. The request MUST include a single resource object as primary data. The resource object MUST contain at least a type member.
If a POST request did not include a Client-Generated ID and the requested resource has been created successfully, the server MUST return a 201 Created status code"""
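    # A minimal sketch of an accepted request body, inferred from the
    # attribute lookups below (all concrete values are invented):
    # {"data": {"type": "users",
    #           "attributes": {"email": "[email protected]", "password": "pw",
    #                          "name": "Jane", "active": true, "role": "user"}}}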
def post(self):
raw_dict = request.get_json(force=True)
try:
schema.validate(raw_dict)
request_dict = raw_dict['data']['attributes']
user = Users(request_dict['email'], generate_password_hash(request_dict['password']), request_dict['name'], request_dict[
'active'], request_dict['role'],)
user.add(user)
# Should not return password hash
query = Users.query.get(user.id)
results = schema.dump(query).data
return results, 201
except ValidationError as err:
resp = jsonify({"error": err.messages})
resp.status_code = 403
return resp
except SQLAlchemyError as e:
db.session.rollback()
resp = jsonify({"error": str(e)})
resp.status_code = 403
return resp
class GetUpdateDeleteUser(Resource):
"""http://jsonapi.org/format/#fetching
A server MUST respond to a successful request to fetch an individual resource or resource collection with a 200 OK response.
A server MUST respond with 404 Not Found when processing a request to fetch a single resource that does not exist, except when the request warrants a 200 OK response with null as the primary data (as described above)
a self link as part of the top-level links object"""
def get(self, id):
user_query = Users.query.get_or_404(id)
result = schema.dump(user_query).data
return result
"""http://jsonapi.org/format/#crud-updating"""
def patch(self, id):
user = Users.query.get_or_404(id)
raw_dict = request.get_json(force=True)
try:
schema.validate(raw_dict)
request_dict = raw_dict['data']['attributes']
for key, value in request_dict.items():
setattr(user, key, value)
user.update()
return self.get(id)
except ValidationError as err:
resp = jsonify({"error": err.messages})
resp.status_code = 401
return resp
except SQLAlchemyError as e:
db.session.rollback()
resp = jsonify({"error": str(e)})
resp.status_code = 401
return resp
# http://jsonapi.org/format/#crud-deleting
# A server MUST return a 204 No Content status code if a deletion request
# is successful and no content is returned.
def delete(self, id):
user = Users.query.get_or_404(id)
try:
            user.delete(user)
response = make_response()
response.status_code = 204
return response
except SQLAlchemyError as e:
db.session.rollback()
resp = jsonify({"error": str(e)})
resp.status_code = 401
return resp
api.add_resource(CreateListUsers, '.json')
api.add_resource(GetUpdateDeleteUser, '/<int:id>.json')
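# Usage sketch (the '/users' prefix is an assumption; the blueprint's URL
# prefix is configured wherever it gets registered):
#   GET  /users.json        -> list users
#   POST /users.json        -> create a user from a JSON:API document
#   GET/PATCH/DELETE /users/1.json -> fetch, update or remove one user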
| mit |
mustafat/odoo-1 | openerp/addons/base/tests/test_basecase.py | 379 | 3895 | # -*- coding: utf-8 -*-
import unittest2
from openerp.tests import common
class test_single_transaction_case(common.SingleTransactionCase):
"""
Check the whole-class transaction behavior of SingleTransactionCase.
"""
def test_00(self):
"""Create a partner."""
cr, uid = self.cr, self.uid
self.registry('res.partner').create(cr, uid, {'name': 'test_per_class_teardown_partner'})
ids = self.registry('res.partner').search(cr, uid, [('name', '=', 'test_per_class_teardown_partner')])
self.assertEqual(1, len(ids), "Test partner not found.")
def test_01(self):
"""Find the created partner."""
cr, uid = self.cr, self.uid
ids = self.registry('res.partner').search(cr, uid, [('name', '=', 'test_per_class_teardown_partner')])
self.assertEqual(1, len(ids), "Test partner not found.")
def test_20a(self):
""" Create a partner with a XML ID """
cr, uid = self.cr, self.uid
res_partner = self.registry('res.partner')
ir_model_data = self.registry('ir.model.data')
pid, _ = res_partner.name_create(cr, uid, 'Mr Blue')
ir_model_data.create(cr, uid, {'name': 'test_partner_blue',
'module': 'base',
'model': 'res.partner',
'res_id': pid})
def test_20b(self):
""" Resolve xml id with ref() and browse_ref() """
cr, uid = self.cr, self.uid
res_partner = self.registry('res.partner')
xid = 'base.test_partner_blue'
p_ref = self.ref(xid)
self.assertTrue(p_ref, "ref() should resolve xid to database ID")
partner = res_partner.browse(cr, uid, p_ref)
p_browse_ref = self.browse_ref(xid)
self.assertEqual(partner, p_browse_ref, "browse_ref() should resolve xid to browse records")
class test_transaction_case(common.TransactionCase):
"""
Check the per-method transaction behavior of TransactionCase.
"""
def test_00(self):
"""Create a partner."""
cr, uid = self.cr, self.uid
ids = self.registry('res.partner').search(cr, uid, [('name', '=', 'test_per_class_teardown_partner')])
self.assertEqual(0, len(ids), "Test partner found.")
self.registry('res.partner').create(cr, uid, {'name': 'test_per_class_teardown_partner'})
ids = self.registry('res.partner').search(cr, uid, [('name', '=', 'test_per_class_teardown_partner')])
self.assertEqual(1, len(ids), "Test partner not found.")
def test_01(self):
"""Don't find the created partner."""
cr, uid = self.cr, self.uid
ids = self.registry('res.partner').search(cr, uid, [('name', '=', 'test_per_class_teardown_partner')])
self.assertEqual(0, len(ids), "Test partner found.")
def test_20a(self):
""" Create a partner with a XML ID then resolve xml id with ref() and browse_ref() """
cr, uid = self.cr, self.uid
res_partner = self.registry('res.partner')
ir_model_data = self.registry('ir.model.data')
pid, _ = res_partner.name_create(cr, uid, 'Mr Yellow')
ir_model_data.create(cr, uid, {'name': 'test_partner_yellow',
'module': 'base',
'model': 'res.partner',
'res_id': pid})
xid = 'base.test_partner_yellow'
p_ref = self.ref(xid)
        self.assertEqual(p_ref, pid, "ref() should resolve xid to database ID")
partner = res_partner.browse(cr, uid, pid)
p_browse_ref = self.browse_ref(xid)
self.assertEqual(partner, p_browse_ref, "browse_ref() should resolve xid to browse records")
if __name__ == '__main__':
unittest2.main()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
haxoza/django | tests/serializers/models/base.py | 112 | 4314 | """
Serialization
``django.core.serializers`` provides interfaces to converting Django
``QuerySet`` objects to and from "flat" data (i.e. strings).
"""
from __future__ import unicode_literals
from decimal import Decimal
from django.db import models
from django.utils import six
from django.utils.encoding import python_2_unicode_compatible
class CategoryMetaDataManager(models.Manager):
def get_by_natural_key(self, kind, name):
return self.get(kind=kind, name=name)
@python_2_unicode_compatible
class CategoryMetaData(models.Model):
kind = models.CharField(max_length=10)
name = models.CharField(max_length=10)
value = models.CharField(max_length=10)
objects = CategoryMetaDataManager()
class Meta:
unique_together = (('kind', 'name'),)
def __str__(self):
return '[%s:%s]=%s' % (self.kind, self.name, self.value)
def natural_key(self):
return (self.kind, self.name)
@python_2_unicode_compatible
class Category(models.Model):
name = models.CharField(max_length=20)
meta_data = models.ForeignKey(CategoryMetaData, models.SET_NULL, null=True, default=None)
class Meta:
ordering = ('name',)
def __str__(self):
return self.name
@python_2_unicode_compatible
class Author(models.Model):
name = models.CharField(max_length=20)
class Meta:
ordering = ('name',)
def __str__(self):
return self.name
@python_2_unicode_compatible
class Article(models.Model):
author = models.ForeignKey(Author, models.CASCADE)
headline = models.CharField(max_length=50)
pub_date = models.DateTimeField()
categories = models.ManyToManyField(Category)
meta_data = models.ManyToManyField(CategoryMetaData)
class Meta:
ordering = ('pub_date',)
def __str__(self):
return self.headline
@python_2_unicode_compatible
class AuthorProfile(models.Model):
author = models.OneToOneField(Author, models.CASCADE, primary_key=True)
date_of_birth = models.DateField()
def __str__(self):
return "Profile of %s" % self.author
@python_2_unicode_compatible
class Actor(models.Model):
name = models.CharField(max_length=20, primary_key=True)
class Meta:
ordering = ('name',)
def __str__(self):
return self.name
@python_2_unicode_compatible
class Movie(models.Model):
actor = models.ForeignKey(Actor, models.CASCADE)
title = models.CharField(max_length=50)
price = models.DecimalField(max_digits=6, decimal_places=2, default=Decimal('0.00'))
class Meta:
ordering = ('title',)
def __str__(self):
return self.title
class Score(models.Model):
score = models.FloatField()
@python_2_unicode_compatible
class Team(object):
def __init__(self, title):
self.title = title
def __str__(self):
raise NotImplementedError("Not so simple")
def to_string(self):
return "%s" % self.title
class TeamField(models.CharField):
def __init__(self):
super(TeamField, self).__init__(max_length=100)
def get_db_prep_save(self, value, connection):
return six.text_type(value.title)
def to_python(self, value):
if isinstance(value, Team):
return value
return Team(value)
def from_db_value(self, value, expression, connection, context):
return Team(value)
def value_to_string(self, obj):
return self.value_from_object(obj).to_string()
def deconstruct(self):
name, path, args, kwargs = super(TeamField, self).deconstruct()
del kwargs['max_length']
return name, path, args, kwargs
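# Rough usage sketch for TeamField (names are illustrative): saving
# Player(name='Bob', rank=1, team=Team('Foo FC')) stores the plain string
# 'Foo FC' via get_db_prep_save(), and serializers use value_to_string(),
# which goes through Team.to_string() rather than the intentionally broken
# __str__().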
@python_2_unicode_compatible
class Player(models.Model):
name = models.CharField(max_length=50)
rank = models.IntegerField()
team = TeamField()
def __str__(self):
return '%s (%d) playing for %s' % (self.name, self.rank, self.team.to_string())
class BaseModel(models.Model):
parent_data = models.IntegerField()
class ProxyBaseModel(BaseModel):
class Meta:
proxy = True
class ProxyProxyBaseModel(ProxyBaseModel):
class Meta:
proxy = True
class ComplexModel(models.Model):
field1 = models.CharField(max_length=10)
field2 = models.CharField(max_length=10)
field3 = models.CharField(max_length=10)
| bsd-3-clause |
izonder/intellij-community | plugins/hg4idea/testData/bin/mercurial/cmdutil.py | 90 | 77434 | # cmdutil.py - help for command processing in mercurial
#
# Copyright 2005-2007 Matt Mackall <[email protected]>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
from node import hex, nullid, nullrev, short
from i18n import _
import os, sys, errno, re, tempfile
import util, scmutil, templater, patch, error, templatekw, revlog, copies
import match as matchmod
import subrepo, context, repair, graphmod, revset, phases, obsolete
import changelog
import bookmarks
import lock as lockmod
def parsealiases(cmd):
return cmd.lstrip("^").split("|")
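# For example, parsealiases("^log|history") -> ['log', 'history']; the
# leading '^' only marks a command for the short help listing.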
def findpossible(cmd, table, strict=False):
"""
Return cmd -> (aliases, command table entry)
for each matching command.
Return debug commands (or their aliases) only if no normal command matches.
"""
choice = {}
debugchoice = {}
if cmd in table:
# short-circuit exact matches, "log" alias beats "^log|history"
keys = [cmd]
else:
keys = table.keys()
for e in keys:
aliases = parsealiases(e)
found = None
if cmd in aliases:
found = cmd
elif not strict:
for a in aliases:
if a.startswith(cmd):
found = a
break
if found is not None:
if aliases[0].startswith("debug") or found.startswith("debug"):
debugchoice[found] = (aliases, table[e])
else:
choice[found] = (aliases, table[e])
if not choice and debugchoice:
choice = debugchoice
return choice
def findcmd(cmd, table, strict=True):
"""Return (aliases, command table entry) for command string."""
choice = findpossible(cmd, table, strict)
if cmd in choice:
return choice[cmd]
if len(choice) > 1:
clist = choice.keys()
clist.sort()
raise error.AmbiguousCommand(cmd, clist)
if choice:
return choice.values()[0]
raise error.UnknownCommand(cmd)
def findrepo(p):
while not os.path.isdir(os.path.join(p, ".hg")):
oldp, p = p, os.path.dirname(p)
if p == oldp:
return None
return p
def bailifchanged(repo):
if repo.dirstate.p2() != nullid:
raise util.Abort(_('outstanding uncommitted merge'))
modified, added, removed, deleted = repo.status()[:4]
if modified or added or removed or deleted:
raise util.Abort(_("outstanding uncommitted changes"))
ctx = repo[None]
for s in sorted(ctx.substate):
if ctx.sub(s).dirty():
raise util.Abort(_("uncommitted changes in subrepo %s") % s)
def logmessage(ui, opts):
""" get the log message according to -m and -l option """
message = opts.get('message')
logfile = opts.get('logfile')
if message and logfile:
raise util.Abort(_('options --message and --logfile are mutually '
'exclusive'))
if not message and logfile:
try:
if logfile == '-':
message = ui.fin.read()
else:
message = '\n'.join(util.readfile(logfile).splitlines())
except IOError, inst:
raise util.Abort(_("can't read commit message '%s': %s") %
(logfile, inst.strerror))
return message
def loglimit(opts):
"""get the log limit according to option -l/--limit"""
limit = opts.get('limit')
if limit:
try:
limit = int(limit)
except ValueError:
raise util.Abort(_('limit must be a positive integer'))
if limit <= 0:
raise util.Abort(_('limit must be positive'))
else:
limit = None
return limit
def makefilename(repo, pat, node, desc=None,
total=None, seqno=None, revwidth=None, pathname=None):
node_expander = {
'H': lambda: hex(node),
'R': lambda: str(repo.changelog.rev(node)),
'h': lambda: short(node),
        'm': lambda: re.sub(r'[^\w]', '_', str(desc))
}
expander = {
'%': lambda: '%',
'b': lambda: os.path.basename(repo.root),
}
try:
if node:
expander.update(node_expander)
if node:
expander['r'] = (lambda:
str(repo.changelog.rev(node)).zfill(revwidth or 0))
if total is not None:
expander['N'] = lambda: str(total)
if seqno is not None:
expander['n'] = lambda: str(seqno)
if total is not None and seqno is not None:
expander['n'] = lambda: str(seqno).zfill(len(str(total)))
if pathname is not None:
expander['s'] = lambda: os.path.basename(pathname)
expander['d'] = lambda: os.path.dirname(pathname) or '.'
expander['p'] = lambda: pathname
newname = []
patlen = len(pat)
i = 0
while i < patlen:
c = pat[i]
if c == '%':
i += 1
c = pat[i]
c = expander[c]()
newname.append(c)
i += 1
return ''.join(newname)
except KeyError, inst:
raise util.Abort(_("invalid format spec '%%%s' in output filename") %
inst.args[0])
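# Illustrative expansions (node and repo are hypothetical): with
# pat='hg-%h.patch' the result is 'hg-' + short(node) + '.patch'; '%r'
# zero-pads the revision number to revwidth, '%n' zero-pads seqno to the
# width of total, and '%%' yields a literal '%'.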
def makefileobj(repo, pat, node=None, desc=None, total=None,
                seqno=None, revwidth=None, mode='wb', modemap=None,
                pathname=None):
    if modemap is None:
        # avoid sharing a mutable default dict between unrelated calls
        modemap = {}
    writable = mode not in ('r', 'rb')
if not pat or pat == '-':
fp = writable and repo.ui.fout or repo.ui.fin
if util.safehasattr(fp, 'fileno'):
return os.fdopen(os.dup(fp.fileno()), mode)
else:
# if this fp can't be duped properly, return
# a dummy object that can be closed
class wrappedfileobj(object):
noop = lambda x: None
def __init__(self, f):
self.f = f
def __getattr__(self, attr):
if attr == 'close':
return self.noop
else:
return getattr(self.f, attr)
return wrappedfileobj(fp)
if util.safehasattr(pat, 'write') and writable:
return pat
if util.safehasattr(pat, 'read') and 'r' in mode:
return pat
fn = makefilename(repo, pat, node, desc, total, seqno, revwidth, pathname)
mode = modemap.get(fn, mode)
if mode == 'wb':
modemap[fn] = 'ab'
return open(fn, mode)
def openrevlog(repo, cmd, file_, opts):
"""opens the changelog, manifest, a filelog or a given revlog"""
cl = opts['changelog']
mf = opts['manifest']
msg = None
if cl and mf:
msg = _('cannot specify --changelog and --manifest at the same time')
elif cl or mf:
if file_:
msg = _('cannot specify filename with --changelog or --manifest')
elif not repo:
msg = _('cannot specify --changelog or --manifest '
'without a repository')
if msg:
raise util.Abort(msg)
r = None
if repo:
if cl:
r = repo.changelog
elif mf:
r = repo.manifest
elif file_:
filelog = repo.file(file_)
if len(filelog):
r = filelog
if not r:
if not file_:
raise error.CommandError(cmd, _('invalid arguments'))
if not os.path.isfile(file_):
raise util.Abort(_("revlog '%s' not found") % file_)
r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False),
file_[:-2] + ".i")
return r
def copy(ui, repo, pats, opts, rename=False):
# called with the repo lock held
#
# hgsep => pathname that uses "/" to separate directories
# ossep => pathname that uses os.sep to separate directories
cwd = repo.getcwd()
targets = {}
after = opts.get("after")
dryrun = opts.get("dry_run")
wctx = repo[None]
def walkpat(pat):
srcs = []
badstates = after and '?' or '?r'
m = scmutil.match(repo[None], [pat], opts, globbed=True)
for abs in repo.walk(m):
state = repo.dirstate[abs]
rel = m.rel(abs)
exact = m.exact(abs)
if state in badstates:
if exact and state == '?':
ui.warn(_('%s: not copying - file is not managed\n') % rel)
if exact and state == 'r':
ui.warn(_('%s: not copying - file has been marked for'
' remove\n') % rel)
continue
# abs: hgsep
# rel: ossep
srcs.append((abs, rel, exact))
return srcs
# abssrc: hgsep
# relsrc: ossep
# otarget: ossep
def copyfile(abssrc, relsrc, otarget, exact):
abstarget = scmutil.canonpath(repo.root, cwd, otarget)
if '/' in abstarget:
# We cannot normalize abstarget itself, this would prevent
# case only renames, like a => A.
abspath, absname = abstarget.rsplit('/', 1)
abstarget = repo.dirstate.normalize(abspath) + '/' + absname
reltarget = repo.pathto(abstarget, cwd)
target = repo.wjoin(abstarget)
src = repo.wjoin(abssrc)
state = repo.dirstate[abstarget]
scmutil.checkportable(ui, abstarget)
# check for collisions
prevsrc = targets.get(abstarget)
if prevsrc is not None:
ui.warn(_('%s: not overwriting - %s collides with %s\n') %
(reltarget, repo.pathto(abssrc, cwd),
repo.pathto(prevsrc, cwd)))
return
# check for overwrites
exists = os.path.lexists(target)
samefile = False
if exists and abssrc != abstarget:
if (repo.dirstate.normalize(abssrc) ==
repo.dirstate.normalize(abstarget)):
if not rename:
ui.warn(_("%s: can't copy - same file\n") % reltarget)
return
exists = False
samefile = True
if not after and exists or after and state in 'mn':
if not opts['force']:
ui.warn(_('%s: not overwriting - file exists\n') %
reltarget)
return
if after:
if not exists:
if rename:
ui.warn(_('%s: not recording move - %s does not exist\n') %
(relsrc, reltarget))
else:
ui.warn(_('%s: not recording copy - %s does not exist\n') %
(relsrc, reltarget))
return
elif not dryrun:
try:
if exists:
os.unlink(target)
targetdir = os.path.dirname(target) or '.'
if not os.path.isdir(targetdir):
os.makedirs(targetdir)
if samefile:
tmp = target + "~hgrename"
os.rename(src, tmp)
os.rename(tmp, target)
else:
util.copyfile(src, target)
srcexists = True
except IOError, inst:
if inst.errno == errno.ENOENT:
ui.warn(_('%s: deleted in working copy\n') % relsrc)
srcexists = False
else:
ui.warn(_('%s: cannot copy - %s\n') %
(relsrc, inst.strerror))
return True # report a failure
if ui.verbose or not exact:
if rename:
ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
else:
ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
targets[abstarget] = abssrc
# fix up dirstate
scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
dryrun=dryrun, cwd=cwd)
if rename and not dryrun:
if not after and srcexists and not samefile:
util.unlinkpath(repo.wjoin(abssrc))
wctx.forget([abssrc])
# pat: ossep
# dest ossep
# srcs: list of (hgsep, hgsep, ossep, bool)
# return: function that takes hgsep and returns ossep
def targetpathfn(pat, dest, srcs):
if os.path.isdir(pat):
abspfx = scmutil.canonpath(repo.root, cwd, pat)
abspfx = util.localpath(abspfx)
if destdirexists:
striplen = len(os.path.split(abspfx)[0])
else:
striplen = len(abspfx)
if striplen:
striplen += len(os.sep)
res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
elif destdirexists:
res = lambda p: os.path.join(dest,
os.path.basename(util.localpath(p)))
else:
res = lambda p: dest
return res
# pat: ossep
# dest ossep
# srcs: list of (hgsep, hgsep, ossep, bool)
# return: function that takes hgsep and returns ossep
def targetpathafterfn(pat, dest, srcs):
if matchmod.patkind(pat):
# a mercurial pattern
res = lambda p: os.path.join(dest,
os.path.basename(util.localpath(p)))
else:
abspfx = scmutil.canonpath(repo.root, cwd, pat)
if len(abspfx) < len(srcs[0][0]):
# A directory. Either the target path contains the last
# component of the source path or it does not.
def evalpath(striplen):
score = 0
for s in srcs:
t = os.path.join(dest, util.localpath(s[0])[striplen:])
if os.path.lexists(t):
score += 1
return score
abspfx = util.localpath(abspfx)
striplen = len(abspfx)
if striplen:
striplen += len(os.sep)
if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
score = evalpath(striplen)
striplen1 = len(os.path.split(abspfx)[0])
if striplen1:
striplen1 += len(os.sep)
if evalpath(striplen1) > score:
striplen = striplen1
res = lambda p: os.path.join(dest,
util.localpath(p)[striplen:])
else:
# a file
if destdirexists:
res = lambda p: os.path.join(dest,
os.path.basename(util.localpath(p)))
else:
res = lambda p: dest
return res
pats = scmutil.expandpats(pats)
if not pats:
raise util.Abort(_('no source or destination specified'))
if len(pats) == 1:
raise util.Abort(_('no destination specified'))
dest = pats.pop()
destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
if not destdirexists:
if len(pats) > 1 or matchmod.patkind(pats[0]):
raise util.Abort(_('with multiple sources, destination must be an '
'existing directory'))
if util.endswithsep(dest):
raise util.Abort(_('destination %s is not a directory') % dest)
tfn = targetpathfn
if after:
tfn = targetpathafterfn
copylist = []
for pat in pats:
srcs = walkpat(pat)
if not srcs:
continue
copylist.append((tfn(pat, dest, srcs), srcs))
if not copylist:
raise util.Abort(_('no files to copy'))
errors = 0
for targetpath, srcs in copylist:
for abssrc, relsrc, exact in srcs:
if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
errors += 1
if errors:
ui.warn(_('(consider using --after)\n'))
return errors != 0
def service(opts, parentfn=None, initfn=None, runfn=None, logfile=None,
runargs=None, appendpid=False):
'''Run a command as a service.'''
if opts['daemon'] and not opts['daemon_pipefds']:
# Signal child process startup with file removal
lockfd, lockpath = tempfile.mkstemp(prefix='hg-service-')
os.close(lockfd)
try:
if not runargs:
runargs = util.hgcmd() + sys.argv[1:]
runargs.append('--daemon-pipefds=%s' % lockpath)
# Don't pass --cwd to the child process, because we've already
# changed directory.
for i in xrange(1, len(runargs)):
if runargs[i].startswith('--cwd='):
del runargs[i]
break
elif runargs[i].startswith('--cwd'):
del runargs[i:i + 2]
break
def condfn():
return not os.path.exists(lockpath)
pid = util.rundetached(runargs, condfn)
if pid < 0:
raise util.Abort(_('child process failed to start'))
finally:
try:
os.unlink(lockpath)
except OSError, e:
if e.errno != errno.ENOENT:
raise
if parentfn:
return parentfn(pid)
else:
return
if initfn:
initfn()
if opts['pid_file']:
mode = appendpid and 'a' or 'w'
fp = open(opts['pid_file'], mode)
fp.write(str(os.getpid()) + '\n')
fp.close()
if opts['daemon_pipefds']:
lockpath = opts['daemon_pipefds']
try:
os.setsid()
except AttributeError:
pass
os.unlink(lockpath)
util.hidewindow()
sys.stdout.flush()
sys.stderr.flush()
nullfd = os.open(os.devnull, os.O_RDWR)
logfilefd = nullfd
if logfile:
logfilefd = os.open(logfile, os.O_RDWR | os.O_CREAT | os.O_APPEND)
os.dup2(nullfd, 0)
os.dup2(logfilefd, 1)
os.dup2(logfilefd, 2)
if nullfd not in (0, 1, 2):
os.close(nullfd)
if logfile and logfilefd not in (0, 1, 2):
os.close(logfilefd)
if runfn:
return runfn()
def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
opts=None):
'''export changesets as hg patches.'''
total = len(revs)
revwidth = max([len(str(rev)) for rev in revs])
filemode = {}
def single(rev, seqno, fp):
ctx = repo[rev]
node = ctx.node()
parents = [p.node() for p in ctx.parents() if p]
branch = ctx.branch()
if switch_parent:
parents.reverse()
prev = (parents and parents[0]) or nullid
shouldclose = False
if not fp and len(template) > 0:
desc_lines = ctx.description().rstrip().split('\n')
            desc = desc_lines[0]  # Commit always has a first line.
fp = makefileobj(repo, template, node, desc=desc, total=total,
seqno=seqno, revwidth=revwidth, mode='wb',
modemap=filemode)
if fp != template:
shouldclose = True
if fp and fp != sys.stdout and util.safehasattr(fp, 'name'):
repo.ui.note("%s\n" % fp.name)
if not fp:
write = repo.ui.write
else:
def write(s, **kw):
fp.write(s)
write("# HG changeset patch\n")
write("# User %s\n" % ctx.user())
write("# Date %d %d\n" % ctx.date())
write("# %s\n" % util.datestr(ctx.date()))
if branch and branch != 'default':
write("# Branch %s\n" % branch)
write("# Node ID %s\n" % hex(node))
write("# Parent %s\n" % hex(prev))
if len(parents) > 1:
write("# Parent %s\n" % hex(parents[1]))
write(ctx.description().rstrip())
write("\n\n")
for chunk, label in patch.diffui(repo, prev, node, opts=opts):
write(chunk, label=label)
if shouldclose:
fp.close()
for seqno, rev in enumerate(revs):
single(rev, seqno + 1, fp)
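# Usage sketch (revision numbers invented): export(repo, [5, 6],
# template='hg-%h.patch') writes one 'hg-<shorthash>.patch' file per
# revision, using makefilename() above for the name expansion.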
def diffordiffstat(ui, repo, diffopts, node1, node2, match,
changes=None, stat=False, fp=None, prefix='',
listsubrepos=False):
'''show diff or diffstat.'''
if fp is None:
write = ui.write
else:
def write(s, **kw):
fp.write(s)
if stat:
diffopts = diffopts.copy(context=0)
width = 80
if not ui.plain():
width = ui.termwidth()
chunks = patch.diff(repo, node1, node2, match, changes, diffopts,
prefix=prefix)
for chunk, label in patch.diffstatui(util.iterlines(chunks),
width=width,
git=diffopts.git):
write(chunk, label=label)
else:
for chunk, label in patch.diffui(repo, node1, node2, match,
changes, diffopts, prefix=prefix):
write(chunk, label=label)
if listsubrepos:
ctx1 = repo[node1]
ctx2 = repo[node2]
for subpath, sub in subrepo.itersubrepos(ctx1, ctx2):
tempnode2 = node2
try:
if node2 is not None:
tempnode2 = ctx2.substate[subpath][1]
except KeyError:
# A subrepo that existed in node1 was deleted between node1 and
# node2 (inclusive). Thus, ctx2's substate won't contain that
# subpath. The best we can do is to ignore it.
tempnode2 = None
submatch = matchmod.narrowmatcher(subpath, match)
sub.diff(ui, diffopts, tempnode2, submatch, changes=changes,
stat=stat, fp=fp, prefix=prefix)
class changeset_printer(object):
'''show changeset information when templating not requested.'''
def __init__(self, ui, repo, patch, diffopts, buffered):
self.ui = ui
self.repo = repo
self.buffered = buffered
self.patch = patch
self.diffopts = diffopts
self.header = {}
self.hunk = {}
self.lastheader = None
self.footer = None
def flush(self, rev):
if rev in self.header:
h = self.header[rev]
if h != self.lastheader:
self.lastheader = h
self.ui.write(h)
del self.header[rev]
if rev in self.hunk:
self.ui.write(self.hunk[rev])
del self.hunk[rev]
return 1
return 0
def close(self):
if self.footer:
self.ui.write(self.footer)
def show(self, ctx, copies=None, matchfn=None, **props):
if self.buffered:
self.ui.pushbuffer()
self._show(ctx, copies, matchfn, props)
self.hunk[ctx.rev()] = self.ui.popbuffer(labeled=True)
else:
self._show(ctx, copies, matchfn, props)
def _show(self, ctx, copies, matchfn, props):
'''show a single changeset or file revision'''
changenode = ctx.node()
rev = ctx.rev()
if self.ui.quiet:
self.ui.write("%d:%s\n" % (rev, short(changenode)),
label='log.node')
return
log = self.repo.changelog
date = util.datestr(ctx.date())
hexfunc = self.ui.debugflag and hex or short
parents = [(p, hexfunc(log.node(p)))
for p in self._meaningful_parentrevs(log, rev)]
# i18n: column positioning for "hg log"
self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)),
label='log.changeset changeset.%s' % ctx.phasestr())
branch = ctx.branch()
# don't show the default branch name
if branch != 'default':
# i18n: column positioning for "hg log"
self.ui.write(_("branch: %s\n") % branch,
label='log.branch')
for bookmark in self.repo.nodebookmarks(changenode):
# i18n: column positioning for "hg log"
self.ui.write(_("bookmark: %s\n") % bookmark,
label='log.bookmark')
for tag in self.repo.nodetags(changenode):
# i18n: column positioning for "hg log"
self.ui.write(_("tag: %s\n") % tag,
label='log.tag')
if self.ui.debugflag and ctx.phase():
# i18n: column positioning for "hg log"
self.ui.write(_("phase: %s\n") % _(ctx.phasestr()),
label='log.phase')
for parent in parents:
# i18n: column positioning for "hg log"
self.ui.write(_("parent: %d:%s\n") % parent,
label='log.parent changeset.%s' % ctx.phasestr())
if self.ui.debugflag:
mnode = ctx.manifestnode()
# i18n: column positioning for "hg log"
self.ui.write(_("manifest: %d:%s\n") %
(self.repo.manifest.rev(mnode), hex(mnode)),
label='ui.debug log.manifest')
# i18n: column positioning for "hg log"
self.ui.write(_("user: %s\n") % ctx.user(),
label='log.user')
# i18n: column positioning for "hg log"
self.ui.write(_("date: %s\n") % date,
label='log.date')
if self.ui.debugflag:
files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
for key, value in zip([# i18n: column positioning for "hg log"
_("files:"),
# i18n: column positioning for "hg log"
_("files+:"),
# i18n: column positioning for "hg log"
_("files-:")], files):
if value:
self.ui.write("%-12s %s\n" % (key, " ".join(value)),
label='ui.debug log.files')
elif ctx.files() and self.ui.verbose:
# i18n: column positioning for "hg log"
self.ui.write(_("files: %s\n") % " ".join(ctx.files()),
label='ui.note log.files')
if copies and self.ui.verbose:
copies = ['%s (%s)' % c for c in copies]
# i18n: column positioning for "hg log"
self.ui.write(_("copies: %s\n") % ' '.join(copies),
label='ui.note log.copies')
extra = ctx.extra()
if extra and self.ui.debugflag:
for key, value in sorted(extra.items()):
# i18n: column positioning for "hg log"
self.ui.write(_("extra: %s=%s\n")
% (key, value.encode('string_escape')),
label='ui.debug log.extra')
description = ctx.description().strip()
if description:
if self.ui.verbose:
self.ui.write(_("description:\n"),
label='ui.note log.description')
self.ui.write(description,
label='ui.note log.description')
self.ui.write("\n\n")
else:
# i18n: column positioning for "hg log"
self.ui.write(_("summary: %s\n") %
description.splitlines()[0],
label='log.summary')
self.ui.write("\n")
self.showpatch(changenode, matchfn)
def showpatch(self, node, matchfn):
if not matchfn:
matchfn = self.patch
if matchfn:
stat = self.diffopts.get('stat')
diff = self.diffopts.get('patch')
diffopts = patch.diffopts(self.ui, self.diffopts)
prev = self.repo.changelog.parents(node)[0]
if stat:
diffordiffstat(self.ui, self.repo, diffopts, prev, node,
match=matchfn, stat=True)
if diff:
if stat:
self.ui.write("\n")
diffordiffstat(self.ui, self.repo, diffopts, prev, node,
match=matchfn, stat=False)
self.ui.write("\n")
def _meaningful_parentrevs(self, log, rev):
"""Return list of meaningful (or all if debug) parentrevs for rev.
For merges (two non-nullrev revisions) both parents are meaningful.
Otherwise the first parent revision is considered meaningful if it
is not the preceding revision.
"""
parents = log.parentrevs(rev)
if not self.ui.debugflag and parents[1] == nullrev:
if parents[0] >= rev - 1:
parents = []
else:
parents = [parents[0]]
return parents
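    # Worked example (hypothetical linear history): rev 5 with parentrevs
    # (4, nullrev) yields [] because 4 is the preceding revision, so no
    # parent line is printed; a merge (two non-null parents) keeps both.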
class changeset_templater(changeset_printer):
'''format changeset information.'''
def __init__(self, ui, repo, patch, diffopts, mapfile, buffered):
changeset_printer.__init__(self, ui, repo, patch, diffopts, buffered)
formatnode = ui.debugflag and (lambda x: x) or (lambda x: x[:12])
defaulttempl = {
'parent': '{rev}:{node|formatnode} ',
'manifest': '{rev}:{node|formatnode}',
'file_copy': '{name} ({source})',
'extra': '{key}={value|stringescape}'
}
# filecopy is preserved for compatibility reasons
defaulttempl['filecopy'] = defaulttempl['file_copy']
self.t = templater.templater(mapfile, {'formatnode': formatnode},
cache=defaulttempl)
self.cache = {}
def use_template(self, t):
'''set template string to use'''
self.t.cache['changeset'] = t
def _meaningful_parentrevs(self, ctx):
"""Return list of meaningful (or all if debug) parentrevs for rev.
"""
parents = ctx.parents()
if len(parents) > 1:
return parents
if self.ui.debugflag:
return [parents[0], self.repo['null']]
if parents[0].rev() >= ctx.rev() - 1:
return []
return parents
def _show(self, ctx, copies, matchfn, props):
'''show a single changeset or file revision'''
showlist = templatekw.showlist
# showparents() behaviour depends on ui trace level which
# causes unexpected behaviours at templating level and makes
# it harder to extract it in a standalone function. Its
# behaviour cannot be changed so leave it here for now.
def showparents(**args):
ctx = args['ctx']
parents = [[('rev', p.rev()), ('node', p.hex())]
for p in self._meaningful_parentrevs(ctx)]
return showlist('parent', parents, **args)
props = props.copy()
props.update(templatekw.keywords)
props['parents'] = showparents
props['templ'] = self.t
props['ctx'] = ctx
props['repo'] = self.repo
props['revcache'] = {'copies': copies}
props['cache'] = self.cache
# find correct templates for current mode
tmplmodes = [
(True, None),
(self.ui.verbose, 'verbose'),
(self.ui.quiet, 'quiet'),
(self.ui.debugflag, 'debug'),
]
types = {'header': '', 'footer':'', 'changeset': 'changeset'}
for mode, postfix in tmplmodes:
for type in types:
cur = postfix and ('%s_%s' % (type, postfix)) or type
if mode and cur in self.t:
types[type] = cur
try:
# write header
if types['header']:
h = templater.stringify(self.t(types['header'], **props))
if self.buffered:
self.header[ctx.rev()] = h
else:
if self.lastheader != h:
self.lastheader = h
self.ui.write(h)
# write changeset metadata, then patch if requested
key = types['changeset']
self.ui.write(templater.stringify(self.t(key, **props)))
self.showpatch(ctx.node(), matchfn)
if types['footer']:
if not self.footer:
self.footer = templater.stringify(self.t(types['footer'],
**props))
except KeyError, inst:
msg = _("%s: no key named '%s'")
raise util.Abort(msg % (self.t.mapfile, inst.args[0]))
except SyntaxError, inst:
raise util.Abort('%s: %s' % (self.t.mapfile, inst.args[0]))
def show_changeset(ui, repo, opts, buffered=False):
"""show one changeset using template or regular display.
Display format will be the first non-empty hit of:
1. option 'template'
2. option 'style'
3. [ui] setting 'logtemplate'
4. [ui] setting 'style'
If all of these values are either the unset or the empty string,
regular display via changeset_printer() is done.
"""
# options
patch = False
if opts.get('patch') or opts.get('stat'):
patch = scmutil.matchall(repo)
tmpl = opts.get('template')
style = None
if tmpl:
tmpl = templater.parsestring(tmpl, quoted=False)
else:
style = opts.get('style')
# ui settings
if not (tmpl or style):
tmpl = ui.config('ui', 'logtemplate')
if tmpl:
try:
tmpl = templater.parsestring(tmpl)
except SyntaxError:
tmpl = templater.parsestring(tmpl, quoted=False)
else:
style = util.expandpath(ui.config('ui', 'style', ''))
if not (tmpl or style):
return changeset_printer(ui, repo, patch, opts, buffered)
mapfile = None
if style and not tmpl:
mapfile = style
if not os.path.split(mapfile)[0]:
mapname = (templater.templatepath('map-cmdline.' + mapfile)
or templater.templatepath(mapfile))
if mapname:
mapfile = mapname
try:
t = changeset_templater(ui, repo, patch, opts, mapfile, buffered)
except SyntaxError, inst:
raise util.Abort(inst.args[0])
if tmpl:
t.use_template(tmpl)
return t
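# Sketch of the precedence above: opts={'template': '{rev}\n'} wins over any
# [ui] logtemplate setting, and when neither a template nor a style resolves
# the plain changeset_printer is returned instead.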
def finddate(ui, repo, date):
"""Find the tipmost changeset that matches the given date spec"""
df = util.matchdate(date)
m = scmutil.matchall(repo)
results = {}
def prep(ctx, fns):
d = ctx.date()
if df(d[0]):
results[ctx.rev()] = d
for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
rev = ctx.rev()
if rev in results:
ui.status(_("found revision %s from %s\n") %
(rev, util.datestr(results[rev])))
return str(rev)
raise util.Abort(_("revision matching date not found"))
def increasingwindows(start, end, windowsize=8, sizelimit=512):
if start < end:
while start < end:
yield start, min(windowsize, end - start)
start += windowsize
if windowsize < sizelimit:
windowsize *= 2
else:
while start > end:
yield start, min(windowsize, start - end - 1)
start -= windowsize
if windowsize < sizelimit:
windowsize *= 2
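# Worked example: increasingwindows(0, 100) yields (0, 8), (8, 16), (24, 32)
# and finally (56, 44) -- each window starts where the previous one ended and
# the size doubles until it reaches sizelimit or the remaining range.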
def walkchangerevs(repo, match, opts, prepare):
'''Iterate over files and the revs in which they changed.
Callers most commonly need to iterate backwards over the history
in which they are interested. Doing so has awful (quadratic-looking)
performance, so we use iterators in a "windowed" way.
We walk a window of revisions in the desired order. Within the
window, we first walk forwards to gather data, then in the desired
order (usually backwards) to display it.
This function returns an iterator yielding contexts. Before
yielding each context, the iterator will first call the prepare
function on each context in the window in forward order.'''
follow = opts.get('follow') or opts.get('follow_first')
if opts.get('rev'):
revs = scmutil.revrange(repo, opts.get('rev'))
elif follow:
revs = repo.revs('reverse(:.)')
else:
revs = list(repo)
revs.reverse()
if not revs:
return []
wanted = set()
slowpath = match.anypats() or (match.files() and opts.get('removed'))
fncache = {}
change = repo.changectx
# First step is to fill wanted, the set of revisions that we want to yield.
# When it does not induce extra cost, we also fill fncache for revisions in
# wanted: a cache of filenames that were changed (ctx.files()) and that
# match the file filtering conditions.
if not slowpath and not match.files():
# No files, no patterns. Display all revs.
wanted = set(revs)
copies = []
if not slowpath and match.files():
# We only have to read through the filelog to find wanted revisions
minrev, maxrev = min(revs), max(revs)
def filerevgen(filelog, last):
"""
Only files, no patterns. Check the history of each file.
Examines filelog entries within minrev, maxrev linkrev range
Returns an iterator yielding (linkrev, parentlinkrevs, copied)
tuples in backwards order
"""
cl_count = len(repo)
revs = []
for j in xrange(0, last + 1):
linkrev = filelog.linkrev(j)
if linkrev < minrev:
continue
# only yield rev for which we have the changelog, it can
# happen while doing "hg log" during a pull or commit
if linkrev >= cl_count:
break
parentlinkrevs = []
for p in filelog.parentrevs(j):
if p != nullrev:
parentlinkrevs.append(filelog.linkrev(p))
n = filelog.node(j)
revs.append((linkrev, parentlinkrevs,
follow and filelog.renamed(n)))
return reversed(revs)
def iterfiles():
pctx = repo['.']
for filename in match.files():
if follow:
if filename not in pctx:
raise util.Abort(_('cannot follow file not in parent '
'revision: "%s"') % filename)
yield filename, pctx[filename].filenode()
else:
yield filename, None
for filename_node in copies:
yield filename_node
for file_, node in iterfiles():
filelog = repo.file(file_)
if not len(filelog):
if node is None:
# A zero count may be a directory or deleted file, so
# try to find matching entries on the slow path.
if follow:
raise util.Abort(
_('cannot follow nonexistent file: "%s"') % file_)
slowpath = True
break
else:
continue
if node is None:
last = len(filelog) - 1
else:
last = filelog.rev(node)
# keep track of all ancestors of the file
ancestors = set([filelog.linkrev(last)])
# iterate from latest to oldest revision
for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
if not follow:
if rev > maxrev:
continue
else:
# Note that last might not be the first interesting
# rev to us:
# if the file has been changed after maxrev, we'll
# have linkrev(last) > maxrev, and we still need
# to explore the file graph
if rev not in ancestors:
continue
# XXX insert 1327 fix here
if flparentlinkrevs:
ancestors.update(flparentlinkrevs)
fncache.setdefault(rev, []).append(file_)
wanted.add(rev)
if copied:
copies.append(copied)
# We decided to fall back to the slowpath because at least one
# of the paths was not a file. Check to see if at least one of them
# existed in history, otherwise simply return
if slowpath:
for path in match.files():
if path == '.' or path in repo.store:
break
else:
return []
if slowpath:
# We have to read the changelog to match filenames against
# changed files
if follow:
raise util.Abort(_('can only follow copies/renames for explicit '
'filenames'))
# The slow path checks files modified in every changeset.
for i in sorted(revs):
ctx = change(i)
matches = filter(match, ctx.files())
if matches:
fncache[i] = matches
wanted.add(i)
class followfilter(object):
def __init__(self, onlyfirst=False):
self.startrev = nullrev
self.roots = set()
self.onlyfirst = onlyfirst
def match(self, rev):
def realparents(rev):
if self.onlyfirst:
return repo.changelog.parentrevs(rev)[0:1]
else:
return filter(lambda x: x != nullrev,
repo.changelog.parentrevs(rev))
if self.startrev == nullrev:
self.startrev = rev
return True
if rev > self.startrev:
# forward: all descendants
if not self.roots:
self.roots.add(self.startrev)
for parent in realparents(rev):
if parent in self.roots:
self.roots.add(rev)
return True
else:
# backwards: all parents
if not self.roots:
self.roots.update(realparents(self.startrev))
if rev in self.roots:
self.roots.remove(rev)
self.roots.update(realparents(rev))
return True
return False
# it might be worthwhile to do this in the iterator if the rev range
# is descending and the prune args are all within that range
for rev in opts.get('prune', ()):
rev = repo[rev].rev()
ff = followfilter()
stop = min(revs[0], revs[-1])
for x in xrange(rev, stop - 1, -1):
if ff.match(x):
wanted.discard(x)
# Choose a small initial window if we will probably only visit a
# few commits.
limit = loglimit(opts)
windowsize = 8
if limit:
windowsize = min(limit, windowsize)
# Now that wanted is correctly initialized, we can iterate over the
# revision range, yielding only revisions in wanted.
def iterate():
if follow and not match.files():
ff = followfilter(onlyfirst=opts.get('follow_first'))
def want(rev):
return ff.match(rev) and rev in wanted
else:
def want(rev):
return rev in wanted
for i, window in increasingwindows(0, len(revs), windowsize):
nrevs = [rev for rev in revs[i:i + window] if want(rev)]
for rev in sorted(nrevs):
fns = fncache.get(rev)
ctx = change(rev)
if not fns:
def fns_generator():
for f in ctx.files():
if match(f):
yield f
fns = fns_generator()
prepare(ctx, fns)
for rev in nrevs:
yield change(rev)
return iterate()
def _makegraphfilematcher(repo, pats, followfirst):
# When displaying a revision with --patch --follow FILE, we have
# to know which file of the revision must be diffed. With
# --follow, we want the names of the ancestors of FILE in the
# revision, stored in "fcache". "fcache" is populated by
# reproducing the graph traversal already done by --follow revset
# and relating linkrevs to file names (which is not "correct" but
# good enough).
fcache = {}
fcacheready = [False]
pctx = repo['.']
wctx = repo[None]
def populate():
for fn in pats:
for i in ((pctx[fn],), pctx[fn].ancestors(followfirst=followfirst)):
for c in i:
fcache.setdefault(c.linkrev(), set()).add(c.path())
def filematcher(rev):
if not fcacheready[0]:
# Lazy initialization
fcacheready[0] = True
populate()
return scmutil.match(wctx, fcache.get(rev, []), default='path')
return filematcher
def _makegraphlogrevset(repo, pats, opts, revs):
"""Return (expr, filematcher) where expr is a revset string built
from log options and file patterns or None. If --stat or --patch
are not passed filematcher is None. Otherwise it is a callable
taking a revision number and returning a match objects filtering
the files to be detailed when displaying the revision.
"""
opt2revset = {
'no_merges': ('not merge()', None),
'only_merges': ('merge()', None),
'_ancestors': ('ancestors(%(val)s)', None),
'_fancestors': ('_firstancestors(%(val)s)', None),
'_descendants': ('descendants(%(val)s)', None),
'_fdescendants': ('_firstdescendants(%(val)s)', None),
'_matchfiles': ('_matchfiles(%(val)s)', None),
'date': ('date(%(val)r)', None),
'branch': ('branch(%(val)r)', ' or '),
'_patslog': ('filelog(%(val)r)', ' or '),
'_patsfollow': ('follow(%(val)r)', ' or '),
'_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
'keyword': ('keyword(%(val)r)', ' or '),
'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
'user': ('user(%(val)r)', ' or '),
}
opts = dict(opts)
# follow or not follow?
follow = opts.get('follow') or opts.get('follow_first')
followfirst = opts.get('follow_first') and 1 or 0
# --follow with FILE behaviour depends on revs...
startrev = revs[0]
followdescendants = (len(revs) > 1 and revs[0] < revs[1]) and 1 or 0
# branch and only_branch are really aliases and must be handled at
# the same time
opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
# pats/include/exclude are passed to match.match() directly in
# _matchfiles() revset but walkchangerevs() builds its matcher with
# scmutil.match(). The difference is input pats are globbed on
# platforms without shell expansion (windows).
pctx = repo[None]
match, pats = scmutil.matchandpats(pctx, pats, opts)
slowpath = match.anypats() or (match.files() and opts.get('removed'))
if not slowpath:
for f in match.files():
if follow and f not in pctx:
raise util.Abort(_('cannot follow file not in parent '
'revision: "%s"') % f)
filelog = repo.file(f)
if not len(filelog):
# A zero count may be a directory or deleted file, so
# try to find matching entries on the slow path.
if follow:
raise util.Abort(
_('cannot follow nonexistent file: "%s"') % f)
slowpath = True
# We decided to fall back to the slowpath because at least one
# of the paths was not a file. Check to see if at least one of them
# existed in history - in that case, we'll continue down the
# slowpath; otherwise, we can turn off the slowpath
if slowpath:
for path in match.files():
if path == '.' or path in repo.store:
break
else:
slowpath = False
if slowpath:
# See walkchangerevs() slow path.
#
if follow:
raise util.Abort(_('can only follow copies/renames for explicit '
'filenames'))
# pats/include/exclude cannot be represented as separate
# revset expressions as their filtering logic applies at file
# level. For instance "-I a -X a" matches a revision touching
# "a" and "b" while "file(a) and not file(b)" does
# not. Besides, filesets are evaluated against the working
# directory.
matchargs = ['r:', 'd:relpath']
for p in pats:
matchargs.append('p:' + p)
for p in opts.get('include', []):
matchargs.append('i:' + p)
for p in opts.get('exclude', []):
matchargs.append('x:' + p)
matchargs = ','.join(('%r' % p) for p in matchargs)
opts['_matchfiles'] = matchargs
else:
if follow:
fpats = ('_patsfollow', '_patsfollowfirst')
fnopats = (('_ancestors', '_fancestors'),
('_descendants', '_fdescendants'))
if pats:
# follow() revset interprets its file argument as a
# manifest entry, so use match.files(), not pats.
opts[fpats[followfirst]] = list(match.files())
else:
opts[fnopats[followdescendants][followfirst]] = str(startrev)
else:
opts['_patslog'] = list(pats)
filematcher = None
if opts.get('patch') or opts.get('stat'):
if follow:
filematcher = _makegraphfilematcher(repo, pats, followfirst)
else:
filematcher = lambda rev: match
expr = []
for op, val in opts.iteritems():
if not val:
continue
if op not in opt2revset:
continue
revop, andor = opt2revset[op]
if '%(val)' not in revop:
expr.append(revop)
else:
if not isinstance(val, list):
e = revop % {'val': val}
else:
e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
expr.append(e)
if expr:
expr = '(' + ' and '.join(expr) + ')'
else:
expr = None
return expr, filematcher
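# Illustrative translation (up to dict iteration order): opts containing
# {'no_merges': True, 'branch': ['default', 'stable']} becomes the revset
# "(not merge() and (branch('default') or branch('stable')))".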
def getgraphlogrevs(repo, pats, opts):
"""Return (revs, expr, filematcher) where revs is an iterable of
revision numbers, expr is a revset string built from log options
and file patterns or None, and used to filter 'revs'. If --stat or
--patch are not passed filematcher is None. Otherwise it is a
callable taking a revision number and returning a match objects
filtering the files to be detailed when displaying the revision.
"""
if not len(repo):
return [], None, None
limit = loglimit(opts)
# Default --rev value depends on --follow but --follow behaviour
# depends on revisions resolved from --rev...
follow = opts.get('follow') or opts.get('follow_first')
possiblyunsorted = False # whether revs might need sorting
if opts.get('rev'):
revs = scmutil.revrange(repo, opts['rev'])
# Don't sort here because _makegraphlogrevset might depend on the
# order of revs
possiblyunsorted = True
else:
if follow and len(repo) > 0:
revs = repo.revs('reverse(:.)')
else:
revs = list(repo.changelog)
revs.reverse()
if not revs:
return [], None, None
expr, filematcher = _makegraphlogrevset(repo, pats, opts, revs)
if possiblyunsorted:
revs.sort(reverse=True)
if expr:
# Revset matchers often operate faster on revisions in changelog
# order, because most filters deal with the changelog.
revs.reverse()
matcher = revset.match(repo.ui, expr)
# Revset matches can reorder revisions. "A or B" typically returns
        # the revision matching A then the revision matching B. Sort
# again to fix that.
revs = matcher(repo, revs)
revs.sort(reverse=True)
if limit is not None:
revs = revs[:limit]
return revs, expr, filematcher
def displaygraph(ui, dag, displayer, showparents, edgefn, getrenamed=None,
filematcher=None):
seen, state = [], graphmod.asciistate()
for rev, type, ctx, parents in dag:
char = 'o'
if ctx.node() in showparents:
char = '@'
elif ctx.obsolete():
char = 'x'
copies = None
if getrenamed and ctx.rev():
copies = []
for fn in ctx.files():
rename = getrenamed(fn, ctx.rev())
if rename:
copies.append((fn, rename[0]))
revmatchfn = None
if filematcher is not None:
revmatchfn = filematcher(ctx.rev())
displayer.show(ctx, copies=copies, matchfn=revmatchfn)
lines = displayer.hunk.pop(rev).split('\n')
if not lines[-1]:
del lines[-1]
displayer.flush(rev)
edges = edgefn(type, char, lines, seen, rev, parents)
for type, char, lines, coldata in edges:
graphmod.ascii(ui, state, type, char, lines, coldata)
displayer.close()
def graphlog(ui, repo, *pats, **opts):
# Parameters are identical to log command ones
revs, expr, filematcher = getgraphlogrevs(repo, pats, opts)
revdag = graphmod.dagwalker(repo, revs)
getrenamed = None
if opts.get('copies'):
endrev = None
if opts.get('rev'):
endrev = max(scmutil.revrange(repo, opts.get('rev'))) + 1
getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
displayer = show_changeset(ui, repo, opts, buffered=True)
showparents = [ctx.node() for ctx in repo[None].parents()]
displaygraph(ui, revdag, displayer, showparents,
graphmod.asciiedges, getrenamed, filematcher)
def checkunsupportedgraphflags(pats, opts):
for op in ["newest_first"]:
if op in opts and opts[op]:
raise util.Abort(_("-G/--graph option is incompatible with --%s")
% op.replace("_", "-"))
def graphrevs(repo, nodes, opts):
limit = loglimit(opts)
nodes.reverse()
if limit is not None:
nodes = nodes[:limit]
return graphmod.nodes(repo, nodes)
def add(ui, repo, match, dryrun, listsubrepos, prefix, explicitonly):
join = lambda f: os.path.join(prefix, f)
bad = []
oldbad = match.bad
match.bad = lambda x, y: bad.append(x) or oldbad(x, y)
names = []
wctx = repo[None]
cca = None
abort, warn = scmutil.checkportabilityalert(ui)
if abort or warn:
cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)
for f in repo.walk(match):
exact = match.exact(f)
if exact or not explicitonly and f not in repo.dirstate:
if cca:
cca(f)
names.append(f)
if ui.verbose or not exact:
ui.status(_('adding %s\n') % match.rel(join(f)))
for subpath in sorted(wctx.substate):
sub = wctx.sub(subpath)
try:
submatch = matchmod.narrowmatcher(subpath, match)
if listsubrepos:
bad.extend(sub.add(ui, submatch, dryrun, listsubrepos, prefix,
False))
else:
bad.extend(sub.add(ui, submatch, dryrun, listsubrepos, prefix,
True))
except error.LookupError:
ui.status(_("skipping missing subrepository: %s\n")
% join(subpath))
if not dryrun:
rejected = wctx.add(names, prefix)
bad.extend(f for f in rejected if f in match.files())
return bad
def forget(ui, repo, match, prefix, explicitonly):
join = lambda f: os.path.join(prefix, f)
bad = []
oldbad = match.bad
match.bad = lambda x, y: bad.append(x) or oldbad(x, y)
wctx = repo[None]
forgot = []
s = repo.status(match=match, clean=True)
forget = sorted(s[0] + s[1] + s[3] + s[6])
if explicitonly:
forget = [f for f in forget if match.exact(f)]
for subpath in sorted(wctx.substate):
sub = wctx.sub(subpath)
try:
submatch = matchmod.narrowmatcher(subpath, match)
subbad, subforgot = sub.forget(ui, submatch, prefix)
bad.extend([subpath + '/' + f for f in subbad])
forgot.extend([subpath + '/' + f for f in subforgot])
except error.LookupError:
ui.status(_("skipping missing subrepository: %s\n")
% join(subpath))
if not explicitonly:
for f in match.files():
if f not in repo.dirstate and not os.path.isdir(match.rel(join(f))):
if f not in forgot:
if os.path.exists(match.rel(join(f))):
ui.warn(_('not removing %s: '
'file is already untracked\n')
% match.rel(join(f)))
bad.append(f)
for f in forget:
if ui.verbose or not match.exact(f):
ui.status(_('removing %s\n') % match.rel(join(f)))
rejected = wctx.forget(forget, prefix)
bad.extend(f for f in rejected if f in match.files())
forgot.extend(forget)
return bad, forgot
def duplicatecopies(repo, rev, fromrev):
'''reproduce copies from fromrev to rev in the dirstate'''
for dst, src in copies.pathcopies(repo[fromrev], repo[rev]).iteritems():
# copies.pathcopies returns backward renames, so dst might not
# actually be in the dirstate
if repo.dirstate[dst] in "nma":
repo.dirstate.copy(src, dst)
def commit(ui, repo, commitfunc, pats, opts):
'''commit the specified files or all outstanding changes'''
date = opts.get('date')
if date:
opts['date'] = util.parsedate(date)
message = logmessage(ui, opts)
# extract addremove carefully -- this function can be called from a command
# that doesn't support addremove
if opts.get('addremove'):
scmutil.addremove(repo, pats, opts)
return commitfunc(ui, repo, message,
scmutil.match(repo[None], pats, opts), opts)
def amend(ui, repo, commitfunc, old, extra, pats, opts):
ui.note(_('amending changeset %s\n') % old)
base = old.p1()
wlock = lock = newid = None
try:
wlock = repo.wlock()
lock = repo.lock()
tr = repo.transaction('amend')
try:
# See if we got a message from -m or -l, if not, open the editor
# with the message of the changeset to amend
message = logmessage(ui, opts)
# ensure logfile does not conflict with later enforcement of the
# message. potential logfile content has been processed by
# `logmessage` anyway.
opts.pop('logfile')
# First, do a regular commit to record all changes in the working
# directory (if there are any)
ui.callhooks = False
currentbookmark = repo._bookmarkcurrent
try:
repo._bookmarkcurrent = None
opts['message'] = 'temporary amend commit for %s' % old
node = commit(ui, repo, commitfunc, pats, opts)
finally:
repo._bookmarkcurrent = currentbookmark
ui.callhooks = True
ctx = repo[node]
# Participating changesets:
#
# node/ctx o - new (intermediate) commit that contains changes
# | from working dir to go into amending commit
# | (or a workingctx if there were no changes)
# |
# old o - changeset to amend
# |
# base o - parent of amending changeset
# Update extra dict from amended commit (e.g. to preserve graft
# source)
extra.update(old.extra())
# Also update it from the intermediate commit or from the wctx
extra.update(ctx.extra())
if len(old.parents()) > 1:
# ctx.files() isn't reliable for merges, so fall back to the
# slower repo.status() method
files = set([fn for st in repo.status(base, old)[:3]
for fn in st])
else:
files = set(old.files())
# Second, we use either the commit we just did, or if there were no
# changes the parent of the working directory as the version of the
# files in the final amend commit
if node:
ui.note(_('copying changeset %s to %s\n') % (ctx, base))
user = ctx.user()
date = ctx.date()
# Recompute copies (avoid recording a -> b -> a)
copied = copies.pathcopies(base, ctx)
# Prune files which were reverted by the updates: if old
# introduced file X and our intermediate commit, node,
# renamed that file, then those two files are the same and
# we can discard X from our list of files. Likewise if X
# was deleted, it's no longer relevant
files.update(ctx.files())
def samefile(f):
if f in ctx.manifest():
a = ctx.filectx(f)
if f in base.manifest():
b = base.filectx(f)
return (not a.cmp(b)
and a.flags() == b.flags())
else:
return False
else:
return f not in base.manifest()
files = [f for f in files if not samefile(f)]
def filectxfn(repo, ctx_, path):
try:
fctx = ctx[path]
flags = fctx.flags()
mctx = context.memfilectx(fctx.path(), fctx.data(),
islink='l' in flags,
isexec='x' in flags,
copied=copied.get(path))
return mctx
except KeyError:
raise IOError
else:
ui.note(_('copying changeset %s to %s\n') % (old, base))
# Use version of files as in the old cset
def filectxfn(repo, ctx_, path):
try:
return old.filectx(path)
except KeyError:
raise IOError
user = opts.get('user') or old.user()
date = opts.get('date') or old.date()
editmsg = False
if not message:
editmsg = True
message = old.description()
pureextra = extra.copy()
extra['amend_source'] = old.hex()
new = context.memctx(repo,
parents=[base.node(), old.p2().node()],
text=message,
files=files,
filectxfn=filectxfn,
user=user,
date=date,
extra=extra)
if editmsg:
new._text = commitforceeditor(repo, new, [])
newdesc = changelog.stripdesc(new.description())
if ((not node)
and newdesc == old.description()
and user == old.user()
and date == old.date()
and pureextra == old.extra()):
# nothing changed. continuing here would create a new node
# anyway because of the amend_source noise.
#
                # This is not what we expect from amend.
return old.node()
ph = repo.ui.config('phases', 'new-commit', phases.draft)
try:
repo.ui.setconfig('phases', 'new-commit', old.phase())
newid = repo.commitctx(new)
finally:
repo.ui.setconfig('phases', 'new-commit', ph)
if newid != old.node():
# Reroute the working copy parent to the new changeset
repo.setparents(newid, nullid)
# Move bookmarks from old parent to amend commit
bms = repo.nodebookmarks(old.node())
if bms:
marks = repo._bookmarks
for bm in bms:
marks[bm] = newid
marks.write()
            # commit the whole amend process
if obsolete._enabled and newid != old.node():
# mark the new changeset as successor of the rewritten one
new = repo[newid]
obs = [(old, (new,))]
if node:
obs.append((ctx, ()))
obsolete.createmarkers(repo, obs)
tr.close()
finally:
tr.release()
if (not obsolete._enabled) and newid != old.node():
# Strip the intermediate commit (if there was one) and the amended
# commit
if node:
ui.note(_('stripping intermediate changeset %s\n') % ctx)
ui.note(_('stripping amended changeset %s\n') % old)
repair.strip(ui, repo, old.node(), topic='amend-backup')
finally:
if newid is None:
repo.dirstate.invalidate()
lockmod.release(lock, wlock)
return newid
def commiteditor(repo, ctx, subs):
if ctx.description():
return ctx.description()
return commitforceeditor(repo, ctx, subs)
def commitforceeditor(repo, ctx, subs):
edittext = []
modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
if ctx.description():
edittext.append(ctx.description())
edittext.append("")
edittext.append("") # Empty line between message and comments.
edittext.append(_("HG: Enter commit message."
" Lines beginning with 'HG:' are removed."))
edittext.append(_("HG: Leave message empty to abort commit."))
edittext.append("HG: --")
edittext.append(_("HG: user: %s") % ctx.user())
if ctx.p2():
edittext.append(_("HG: branch merge"))
if ctx.branch():
edittext.append(_("HG: branch '%s'") % ctx.branch())
if bookmarks.iscurrent(repo):
edittext.append(_("HG: bookmark '%s'") % repo._bookmarkcurrent)
edittext.extend([_("HG: subrepo %s") % s for s in subs])
edittext.extend([_("HG: added %s") % f for f in added])
edittext.extend([_("HG: changed %s") % f for f in modified])
edittext.extend([_("HG: removed %s") % f for f in removed])
if not added and not modified and not removed:
edittext.append(_("HG: no files changed"))
edittext.append("")
# run editor in the repository root
olddir = os.getcwd()
os.chdir(repo.root)
text = repo.ui.edit("\n".join(edittext), ctx.user())
text = re.sub("(?m)^HG:.*(\n|$)", "", text)
os.chdir(olddir)
if not text.strip():
raise util.Abort(_("empty commit message"))
return text
def commitstatus(repo, node, branch, bheads=None, opts={}):
ctx = repo[node]
parents = ctx.parents()
if (not opts.get('amend') and bheads and node not in bheads and not
[x for x in parents if x.node() in bheads and x.branch() == branch]):
repo.ui.status(_('created new head\n'))
# The message is not printed for initial roots. For the other
# changesets, it is printed in the following situations:
#
# Par column: for the 2 parents with ...
# N: null or no parent
# B: parent is on another named branch
# C: parent is a regular non head changeset
# H: parent was a branch head of the current branch
# Msg column: whether we print "created new head" message
# In the following, it is assumed that there already exists some
# initial branch heads of the current branch, otherwise nothing is
# printed anyway.
#
# Par Msg Comment
# N N y additional topo root
#
# B N y additional branch root
# C N y additional topo head
# H N n usual case
#
# B B y weird additional branch root
# C B y branch merge
# H B n merge with named branch
#
# C C y additional head from merge
# C H n merge with a head
#
# H H n head merge: head count decreases
if not opts.get('close_branch'):
for r in parents:
if r.closesbranch() and r.branch() == branch:
repo.ui.status(_('reopening closed branch head %d\n') % r)
if repo.ui.debugflag:
repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
elif repo.ui.verbose:
repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
def revert(ui, repo, ctx, parents, *pats, **opts):
parent, p2 = parents
node = ctx.node()
mf = ctx.manifest()
if node == parent:
pmf = mf
else:
pmf = None
# need all matching names in dirstate and manifest of target rev,
# so have to walk both. do not print errors if files exist in one
# but not other.
names = {}
wlock = repo.wlock()
try:
# walk dirstate.
m = scmutil.match(repo[None], pats, opts)
m.bad = lambda x, y: False
for abs in repo.walk(m):
names[abs] = m.rel(abs), m.exact(abs)
# walk target manifest.
def badfn(path, msg):
if path in names:
return
if path in ctx.substate:
return
path_ = path + '/'
for f in names:
if f.startswith(path_):
return
ui.warn("%s: %s\n" % (m.rel(path), msg))
m = scmutil.match(ctx, pats, opts)
m.bad = badfn
for abs in ctx.walk(m):
if abs not in names:
names[abs] = m.rel(abs), m.exact(abs)
# get the list of subrepos that must be reverted
targetsubs = sorted(s for s in ctx.substate if m(s))
m = scmutil.matchfiles(repo, names)
changes = repo.status(match=m)[:4]
modified, added, removed, deleted = map(set, changes)
# if f is a rename, also revert the source
cwd = repo.getcwd()
for f in added:
src = repo.dirstate.copied(f)
if src and src not in names and repo.dirstate[src] == 'r':
removed.add(src)
names[src] = (repo.pathto(src, cwd), True)
def removeforget(abs):
if repo.dirstate[abs] == 'a':
return _('forgetting %s\n')
return _('removing %s\n')
revert = ([], _('reverting %s\n'))
add = ([], _('adding %s\n'))
remove = ([], removeforget)
undelete = ([], _('undeleting %s\n'))
disptable = (
# dispatch table:
# file state
# action if in target manifest
# action if not in target manifest
# make backup if in target manifest
# make backup if not in target manifest
(modified, revert, remove, True, True),
(added, revert, remove, True, False),
(removed, undelete, None, False, False),
(deleted, revert, remove, False, False),
)
for abs, (rel, exact) in sorted(names.items()):
mfentry = mf.get(abs)
target = repo.wjoin(abs)
def handle(xlist, dobackup):
xlist[0].append(abs)
if (dobackup and not opts.get('no_backup') and
os.path.lexists(target)):
bakname = "%s.orig" % rel
ui.note(_('saving current version of %s as %s\n') %
(rel, bakname))
if not opts.get('dry_run'):
util.rename(target, bakname)
if ui.verbose or not exact:
msg = xlist[1]
if not isinstance(msg, basestring):
msg = msg(abs)
ui.status(msg % rel)
for table, hitlist, misslist, backuphit, backupmiss in disptable:
if abs not in table:
continue
# file has changed in dirstate
if mfentry:
handle(hitlist, backuphit)
elif misslist is not None:
handle(misslist, backupmiss)
break
else:
if abs not in repo.dirstate:
if mfentry:
handle(add, True)
elif exact:
ui.warn(_('file not managed: %s\n') % rel)
continue
# file has not changed in dirstate
if node == parent:
if exact:
ui.warn(_('no changes needed to %s\n') % rel)
continue
if pmf is None:
# only need parent manifest in this unlikely case,
# so do not read by default
pmf = repo[parent].manifest()
if abs in pmf and mfentry:
# if version of file is same in parent and target
# manifests, do nothing
if (pmf[abs] != mfentry or
pmf.flags(abs) != mf.flags(abs)):
handle(revert, False)
else:
handle(remove, False)
if not opts.get('dry_run'):
def checkout(f):
fc = ctx[f]
repo.wwrite(f, fc.data(), fc.flags())
audit_path = scmutil.pathauditor(repo.root)
for f in remove[0]:
if repo.dirstate[f] == 'a':
repo.dirstate.drop(f)
continue
audit_path(f)
try:
util.unlinkpath(repo.wjoin(f))
except OSError:
pass
repo.dirstate.remove(f)
normal = None
if node == parent:
# We're reverting to our parent. If possible, we'd like status
# to report the file as clean. We have to use normallookup for
# merges to avoid losing information about merged/dirty files.
if p2 != nullid:
normal = repo.dirstate.normallookup
else:
normal = repo.dirstate.normal
for f in revert[0]:
checkout(f)
if normal:
normal(f)
for f in add[0]:
checkout(f)
repo.dirstate.add(f)
normal = repo.dirstate.normallookup
if node == parent and p2 == nullid:
normal = repo.dirstate.normal
for f in undelete[0]:
checkout(f)
normal(f)
copied = copies.pathcopies(repo[parent], ctx)
for f in add[0] + undelete[0] + revert[0]:
if f in copied:
repo.dirstate.copy(copied[f], f)
if targetsubs:
# Revert the subrepos on the revert list
for sub in targetsubs:
ctx.sub(sub).revert(ui, ctx.substate[sub], *pats, **opts)
finally:
wlock.release()
def command(table):
'''returns a function object bound to table which can be used as
a decorator for populating table as a command table'''
def cmd(name, options=(), synopsis=None):
def decorator(func):
if synopsis:
table[name] = func, list(options), synopsis
else:
table[name] = func, list(options)
return func
return decorator
return cmd
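# Hedged usage sketch (added for illustration; the command name, option and
# function below are hypothetical, but the decorator protocol matches the
# code above):
#
#     cmdtable = {}
#     cmd = command(cmdtable)
#
#     @cmd('hello', [('g', 'greeting', 'Hello', _('greeting to use'))],
#          'hg hello [-g TEXT]')
#     def hello(ui, repo, **opts):
#         ui.write('%s from %s\n' % (opts['greeting'], repo.root))
#
# After decoration, cmdtable['hello'] == (hello, [option tuple], synopsis),
# which is the (func, options, synopsis) shape the dispatcher expects.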
| apache-2.0 |
matthiasdiener/spack | var/spack/repos/builtin/packages/r-xlsx/package.py | 5 | 1669 | ##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class RXlsx(RPackage):
"""Provide R functions to read/write/format Excel 2007 and Excel
97/2000/XP/2003 file formats."""
homepage = "http://code.google.com/p/rexcel/"
url = "https://cran.rstudio.com/src/contrib/xlsx_0.5.7.tar.gz"
version('0.5.7', '36b1b16f29c54b6089b1dae923180dd5')
depends_on('r-rjava', type=('build', 'run'))
depends_on('r-xlsxjars', type=('build', 'run'))
| lgpl-2.1 |
neurodata/ndstore | test/params.py | 2 | 1086 | # Copyright 2014 NeuroData (http://neurodata.io)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ndlib.ndtype import *
class Params:
"""Arguments Class"""
def __init__ (self):
self.token = 'unittest'
self.project = 'unittest'
self.time = [0, 100]
self.window = [0, 500]
self.voxel = [4.0, 4.0, 3.0]
self.channel_type = TIMESERIES
self.datatype = UINT8
self.resolution = 0
self.channels = ['TIME1', 'TIME2']
self.num_objects = 1
self.args = None
self.annoid = 0
self.field = None
self.value = None
self.anntype = 1
| apache-2.0 |
unnikrishnankgs/va | venv/lib/python3.5/site-packages/pip/_vendor/requests/packages/urllib3/connection.py | 511 | 11617 | from __future__ import absolute_import
import datetime
import logging
import os
import sys
import socket
from socket import error as SocketError, timeout as SocketTimeout
import warnings
from .packages import six
try: # Python 3
from http.client import HTTPConnection as _HTTPConnection
from http.client import HTTPException # noqa: unused in this module
except ImportError:
from httplib import HTTPConnection as _HTTPConnection
from httplib import HTTPException # noqa: unused in this module
try: # Compiled with SSL?
import ssl
BaseSSLError = ssl.SSLError
except (ImportError, AttributeError): # Platform-specific: No SSL.
ssl = None
class BaseSSLError(BaseException):
pass
try: # Python 3:
# Not a no-op, we're adding this to the namespace so it can be imported.
ConnectionError = ConnectionError
except NameError: # Python 2:
class ConnectionError(Exception):
pass
from .exceptions import (
NewConnectionError,
ConnectTimeoutError,
SubjectAltNameWarning,
SystemTimeWarning,
)
from .packages.ssl_match_hostname import match_hostname, CertificateError
from .util.ssl_ import (
resolve_cert_reqs,
resolve_ssl_version,
ssl_wrap_socket,
assert_fingerprint,
)
from .util import connection
from ._collections import HTTPHeaderDict
log = logging.getLogger(__name__)
port_by_scheme = {
'http': 80,
'https': 443,
}
RECENT_DATE = datetime.date(2014, 1, 1)
class DummyConnection(object):
"""Used to detect a failed ConnectionCls import."""
pass
class HTTPConnection(_HTTPConnection, object):
"""
Based on httplib.HTTPConnection but provides an extra constructor
backwards-compatibility layer between older and newer Pythons.
Additional keyword parameters are used to configure attributes of the connection.
Accepted parameters include:
- ``strict``: See the documentation on :class:`urllib3.connectionpool.HTTPConnectionPool`
- ``source_address``: Set the source address for the current connection.
.. note:: This is ignored for Python 2.6. It is only applied for 2.7 and 3.x
- ``socket_options``: Set specific options on the underlying socket. If not specified, then
defaults are loaded from ``HTTPConnection.default_socket_options`` which includes disabling
Nagle's algorithm (sets TCP_NODELAY to 1) unless the connection is behind a proxy.
For example, if you wish to enable TCP Keep Alive in addition to the defaults,
you might pass::
HTTPConnection.default_socket_options + [
(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1),
]
Or you may want to disable the defaults by passing an empty list (e.g., ``[]``).
"""
default_port = port_by_scheme['http']
#: Disable Nagle's algorithm by default.
#: ``[(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]``
default_socket_options = [(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]
#: Whether this connection verifies the host's certificate.
is_verified = False
def __init__(self, *args, **kw):
if six.PY3: # Python 3
kw.pop('strict', None)
# Pre-set source_address in case we have an older Python like 2.6.
self.source_address = kw.get('source_address')
if sys.version_info < (2, 7): # Python 2.6
# _HTTPConnection on Python 2.6 will balk at this keyword arg, but
# not newer versions. We can still use it when creating a
# connection though, so we pop it *after* we have saved it as
# self.source_address.
kw.pop('source_address', None)
#: The socket options provided by the user. If no options are
#: provided, we use the default options.
self.socket_options = kw.pop('socket_options', self.default_socket_options)
# Superclass also sets self.source_address in Python 2.7+.
_HTTPConnection.__init__(self, *args, **kw)
def _new_conn(self):
""" Establish a socket connection and set nodelay settings on it.
:return: New socket connection.
"""
extra_kw = {}
if self.source_address:
extra_kw['source_address'] = self.source_address
if self.socket_options:
extra_kw['socket_options'] = self.socket_options
try:
conn = connection.create_connection(
(self.host, self.port), self.timeout, **extra_kw)
except SocketTimeout as e:
raise ConnectTimeoutError(
self, "Connection to %s timed out. (connect timeout=%s)" %
(self.host, self.timeout))
except SocketError as e:
raise NewConnectionError(
self, "Failed to establish a new connection: %s" % e)
return conn
def _prepare_conn(self, conn):
self.sock = conn
# the _tunnel_host attribute was added in python 2.6.3 (via
# http://hg.python.org/cpython/rev/0f57b30a152f) so pythons 2.6(0-2) do
# not have them.
if getattr(self, '_tunnel_host', None):
# TODO: Fix tunnel so it doesn't depend on self.sock state.
self._tunnel()
# Mark this connection as not reusable
self.auto_open = 0
def connect(self):
conn = self._new_conn()
self._prepare_conn(conn)
def request_chunked(self, method, url, body=None, headers=None):
"""
Alternative to the common request method, which sends the
body with chunked encoding and not as one block
"""
headers = HTTPHeaderDict(headers if headers is not None else {})
skip_accept_encoding = 'accept-encoding' in headers
self.putrequest(method, url, skip_accept_encoding=skip_accept_encoding)
for header, value in headers.items():
self.putheader(header, value)
if 'transfer-encoding' not in headers:
self.putheader('Transfer-Encoding', 'chunked')
self.endheaders()
if body is not None:
stringish_types = six.string_types + (six.binary_type,)
if isinstance(body, stringish_types):
body = (body,)
for chunk in body:
if not chunk:
continue
if not isinstance(chunk, six.binary_type):
chunk = chunk.encode('utf8')
len_str = hex(len(chunk))[2:]
self.send(len_str.encode('utf-8'))
self.send(b'\r\n')
self.send(chunk)
self.send(b'\r\n')
# After the if clause, to always have a closed body
self.send(b'0\r\n\r\n')
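# Illustrative note (added, not part of the original source): request_chunked()
# above frames each chunk as "<hex length>\r\n<payload>\r\n" and terminates
# with "0\r\n\r\n", so body=(b'ab', b'c') goes on the wire as:
#
#     2\r\nab\r\n1\r\nc\r\n0\r\n\r\n
#
# A hedged usage sketch with a hypothetical host:
#
#     conn = HTTPConnection('example.invalid', 80)
#     conn.request_chunked('POST', '/upload', body=(b'ab', b'c'))
#     response = conn.getresponse()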
class HTTPSConnection(HTTPConnection):
default_port = port_by_scheme['https']
def __init__(self, host, port=None, key_file=None, cert_file=None,
strict=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, **kw):
HTTPConnection.__init__(self, host, port, strict=strict,
timeout=timeout, **kw)
self.key_file = key_file
self.cert_file = cert_file
# Required property for Google AppEngine 1.9.0 which otherwise causes
# HTTPS requests to go out as HTTP. (See Issue #356)
self._protocol = 'https'
def connect(self):
conn = self._new_conn()
self._prepare_conn(conn)
self.sock = ssl.wrap_socket(conn, self.key_file, self.cert_file)
class VerifiedHTTPSConnection(HTTPSConnection):
"""
Based on httplib.HTTPSConnection but wraps the socket with
SSL certification.
"""
cert_reqs = None
ca_certs = None
ca_cert_dir = None
ssl_version = None
assert_fingerprint = None
def set_cert(self, key_file=None, cert_file=None,
cert_reqs=None, ca_certs=None,
assert_hostname=None, assert_fingerprint=None,
ca_cert_dir=None):
if (ca_certs or ca_cert_dir) and cert_reqs is None:
cert_reqs = 'CERT_REQUIRED'
self.key_file = key_file
self.cert_file = cert_file
self.cert_reqs = cert_reqs
self.assert_hostname = assert_hostname
self.assert_fingerprint = assert_fingerprint
self.ca_certs = ca_certs and os.path.expanduser(ca_certs)
self.ca_cert_dir = ca_cert_dir and os.path.expanduser(ca_cert_dir)
def connect(self):
# Add certificate verification
conn = self._new_conn()
resolved_cert_reqs = resolve_cert_reqs(self.cert_reqs)
resolved_ssl_version = resolve_ssl_version(self.ssl_version)
hostname = self.host
if getattr(self, '_tunnel_host', None):
# _tunnel_host was added in Python 2.6.3
# (See: http://hg.python.org/cpython/rev/0f57b30a152f)
self.sock = conn
# Calls self._set_hostport(), so self.host is
# self._tunnel_host below.
self._tunnel()
# Mark this connection as not reusable
self.auto_open = 0
# Override the host with the one we're requesting data from.
hostname = self._tunnel_host
is_time_off = datetime.date.today() < RECENT_DATE
if is_time_off:
warnings.warn((
'System time is way off (before {0}). This will probably '
'lead to SSL verification errors').format(RECENT_DATE),
SystemTimeWarning
)
# Wrap socket using verification with the root certs in
# trusted_root_certs
self.sock = ssl_wrap_socket(conn, self.key_file, self.cert_file,
cert_reqs=resolved_cert_reqs,
ca_certs=self.ca_certs,
ca_cert_dir=self.ca_cert_dir,
server_hostname=hostname,
ssl_version=resolved_ssl_version)
if self.assert_fingerprint:
assert_fingerprint(self.sock.getpeercert(binary_form=True),
self.assert_fingerprint)
elif resolved_cert_reqs != ssl.CERT_NONE \
and self.assert_hostname is not False:
cert = self.sock.getpeercert()
if not cert.get('subjectAltName', ()):
warnings.warn((
'Certificate for {0} has no `subjectAltName`, falling back to check for a '
'`commonName` for now. This feature is being removed by major browsers and '
'deprecated by RFC 2818. (See https://github.com/shazow/urllib3/issues/497 '
'for details.)'.format(hostname)),
SubjectAltNameWarning
)
_match_hostname(cert, self.assert_hostname or hostname)
self.is_verified = (resolved_cert_reqs == ssl.CERT_REQUIRED or
self.assert_fingerprint is not None)
def _match_hostname(cert, asserted_hostname):
try:
match_hostname(cert, asserted_hostname)
except CertificateError as e:
log.error(
'Certificate did not match expected hostname: %s. '
'Certificate: %s', asserted_hostname, cert
)
# Add cert to exception and reraise so client code can inspect
# the cert when catching the exception, if they want to
e._peer_cert = cert
raise
if ssl:
# Make a copy for testing.
UnverifiedHTTPSConnection = HTTPSConnection
HTTPSConnection = VerifiedHTTPSConnection
else:
HTTPSConnection = DummyConnection
| bsd-2-clause |
Edu-Glez/Bank_sentiment_analysis | env/lib/python3.6/site-packages/setuptools/command/install_egg_info.py | 412 | 2203 | from distutils import log, dir_util
import os
from setuptools import Command
from setuptools import namespaces
from setuptools.archive_util import unpack_archive
import pkg_resources
class install_egg_info(namespaces.Installer, Command):
"""Install an .egg-info directory for the package"""
description = "Install an .egg-info directory for the package"
user_options = [
('install-dir=', 'd', "directory to install to"),
]
def initialize_options(self):
self.install_dir = None
def finalize_options(self):
self.set_undefined_options('install_lib',
('install_dir', 'install_dir'))
ei_cmd = self.get_finalized_command("egg_info")
basename = pkg_resources.Distribution(
None, None, ei_cmd.egg_name, ei_cmd.egg_version
).egg_name() + '.egg-info'
self.source = ei_cmd.egg_info
self.target = os.path.join(self.install_dir, basename)
self.outputs = []
def run(self):
self.run_command('egg_info')
if os.path.isdir(self.target) and not os.path.islink(self.target):
dir_util.remove_tree(self.target, dry_run=self.dry_run)
elif os.path.exists(self.target):
self.execute(os.unlink, (self.target,), "Removing " + self.target)
if not self.dry_run:
pkg_resources.ensure_directory(self.target)
self.execute(
self.copytree, (), "Copying %s to %s" % (self.source, self.target)
)
self.install_namespaces()
def get_outputs(self):
return self.outputs
def copytree(self):
# Copy the .egg-info tree to site-packages
def skimmer(src, dst):
# filter out source-control directories; note that 'src' is always
# a '/'-separated path, regardless of platform. 'dst' is a
# platform-specific path.
for skip in '.svn/', 'CVS/':
if src.startswith(skip) or '/' + skip in src:
return None
self.outputs.append(dst)
log.debug("Copying %s to %s", src, dst)
return dst
unpack_archive(self.source, self.target, skimmer)
| apache-2.0 |
SiCKRAGETV/SickRage | sickrage/providers/torrent/torrentz.py | 2 | 3890 | # Author: echel0n <[email protected]>
# URL: https://sickrage.ca
# Git: https://git.sickrage.ca/SiCKRAGE/sickrage
#
# This file is part of SickRage.
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
import re
import sickrage
from sickrage.core.caches.tv_cache import TVCache
from sickrage.core.helpers import bs4_parser, convert_size
from sickrage.providers import TorrentProvider
class TORRENTZProvider(TorrentProvider):
def __init__(self):
super(TORRENTZProvider, self).__init__("Torrentz", 'https://torrentz2.eu', False)
self.confirmed = True
self.minseed = None
self.minleech = None
self.urls.update({
'verified': '{base_url}/feed_verified'.format(**self.urls),
'feed': '{base_url}/feed'.format(**self.urls)
})
self.cache = TVCache(self, min_time=15)
@staticmethod
def _split_description(description):
match = re.findall(r'[0-9]+', description)
return int(match[0]) * 1024 ** 2, int(match[1]), int(match[2])
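    # Illustrative note (added; the exact feed wording is an assumption):
    # given a description such as "Size: 512 MB Seeds: 10 Peers: 4",
    # _split_description() returns (512 * 1024 ** 2, 10, 4), i.e. size in
    # bytes followed by seeder and leecher counts -- it simply takes the
    # first three integers found in the text.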
def search(self, search_strings, age=0, ep_obj=None, **kwargs):
results = []
for mode in search_strings:
sickrage.app.log.debug('Search Mode: {}'.format(mode))
for search_string in search_strings[mode]:
search_url = self.urls['feed']
if mode != 'RSS':
sickrage.app.log.debug('Search string: {}'.format(search_string))
try:
data = self.session.get(search_url, params={'f': search_string}).text
results += self.parse(data, mode)
except Exception:
sickrage.app.log.debug('No data returned from provider')
return results
def parse(self, data, mode, **kwargs):
"""
Parse search results from data
:param data: response data
:param mode: search mode
:return: search results
"""
results = []
if not data.startswith('<?xml'):
sickrage.app.log.info('Expected xml but got something else, is your mirror failing?')
return results
with bs4_parser(data) as parser:
for item in parser('item'):
try:
if item.category and 'tv' not in item.category.get_text(strip=True).lower():
continue
title = item.title.get_text(strip=True)
t_hash = item.guid.get_text(strip=True).rsplit('/', 1)[-1]
if not all([title, t_hash]):
continue
download_url = "magnet:?xt=urn:btih:" + t_hash + "&dn=" + title
torrent_size, seeders, leechers = self._split_description(item.find('description').text)
size = convert_size(torrent_size, -1)
results += [
{'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers}
]
if mode != 'RSS':
sickrage.app.log.debug("Found result: {}".format(title))
except Exception:
sickrage.app.log.error("Failed parsing provider.")
return results
| gpl-3.0 |
kreatorkodi/repository.torrentbr | plugin.video.yatp/site-packages/hachoir_metadata/image.py | 73 | 10800 | from hachoir_metadata.metadata import (registerExtractor,
Metadata, RootMetadata, MultipleMetadata)
from hachoir_parser.image import (
BmpFile, IcoFile, PcxFile, GifFile, PngFile, TiffFile,
XcfFile, TargaFile, WMF_File, PsdFile)
from hachoir_parser.image.png import getBitsPerPixel as pngBitsPerPixel
from hachoir_parser.image.xcf import XcfProperty
from hachoir_core.i18n import _
from hachoir_metadata.safe import fault_tolerant
def computeComprRate(meta, compr_size):
"""
Compute image compression rate. Skip size of color palette, focus on
image pixels. Original size is width x height x bpp. Compressed size
is an argument (in bits).
Set "compr_data" with a string like "1.52x".
"""
if not meta.has("width") \
or not meta.has("height") \
or not meta.has("bits_per_pixel"):
return
if not compr_size:
return
orig_size = meta.get('width') * meta.get('height') * meta.get('bits_per_pixel')
meta.compr_rate = float(orig_size) / compr_size
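# Worked example (added for clarity; the numbers are hypothetical): for a
# 640x480 image at 24 bits per pixel, orig_size = 640 * 480 * 24 = 7372800
# bits. If the stored pixel data occupies 3686400 bits, compr_rate becomes
# 7372800 / 3686400 = 2.0, later rendered as "2.0x".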
class BmpMetadata(RootMetadata):
def extract(self, image):
if "header" not in image:
return
hdr = image["header"]
self.width = hdr["width"].value
self.height = hdr["height"].value
bpp = hdr["bpp"].value
if bpp:
if bpp <= 8 and "used_colors" in hdr:
self.nb_colors = hdr["used_colors"].value
self.bits_per_pixel = bpp
self.compression = hdr["compression"].display
self.format_version = u"Microsoft Bitmap version %s" % hdr.getFormatVersion()
self.width_dpi = hdr["horizontal_dpi"].value
self.height_dpi = hdr["vertical_dpi"].value
if "pixels" in image:
computeComprRate(self, image["pixels"].size)
class TiffMetadata(RootMetadata):
key_to_attr = {
"img_width": "width",
"img_height": "width",
# TODO: Enable that (need link to value)
# "description": "comment",
# "doc_name": "title",
# "orientation": "image_orientation",
}
def extract(self, tiff):
if "ifd" in tiff:
self.useIFD(tiff["ifd"])
def useIFD(self, ifd):
for field in ifd:
try:
attrname = self.key_to_attr[field.name]
except KeyError:
continue
if "value" not in field:
continue
value = field["value"].value
setattr(self, attrname, value)
class IcoMetadata(MultipleMetadata):
color_to_bpp = {
2: 1,
16: 4,
256: 8
}
def extract(self, icon):
for index, header in enumerate(icon.array("icon_header")):
image = Metadata(self)
# Read size and colors from header
image.width = header["width"].value
image.height = header["height"].value
bpp = header["bpp"].value
nb_colors = header["nb_color"].value
if nb_colors != 0:
image.nb_colors = nb_colors
if bpp == 0 and nb_colors in self.color_to_bpp:
bpp = self.color_to_bpp[nb_colors]
elif bpp == 0:
bpp = 8
image.bits_per_pixel = bpp
image.setHeader(_("Icon #%u (%sx%s)")
% (1+index, image.get("width", "?"), image.get("height", "?")))
# Read compression from data (if available)
key = "icon_data[%u]/header/codec" % index
if key in icon:
image.compression = icon[key].display
key = "icon_data[%u]/pixels" % index
if key in icon:
computeComprRate(image, icon[key].size)
# Store new image
self.addGroup("image[%u]" % index, image)
class PcxMetadata(RootMetadata):
@fault_tolerant
def extract(self, pcx):
self.width = 1 + pcx["xmax"].value
self.height = 1 + pcx["ymax"].value
self.width_dpi = pcx["horiz_dpi"].value
self.height_dpi = pcx["vert_dpi"].value
self.bits_per_pixel = pcx["bpp"].value
if 1 <= pcx["bpp"].value <= 8:
self.nb_colors = 2 ** pcx["bpp"].value
self.compression = _("Run-length encoding (RLE)")
self.format_version = "PCX: %s" % pcx["version"].display
if "image_data" in pcx:
computeComprRate(self, pcx["image_data"].size)
class XcfMetadata(RootMetadata):
# Map image type to bits/pixel
TYPE_TO_BPP = {0: 24, 1: 8, 2: 8}
def extract(self, xcf):
self.width = xcf["width"].value
self.height = xcf["height"].value
try:
self.bits_per_pixel = self.TYPE_TO_BPP[ xcf["type"].value ]
except KeyError:
pass
self.format_version = xcf["type"].display
self.readProperties(xcf)
@fault_tolerant
def processProperty(self, prop):
type = prop["type"].value
if type == XcfProperty.PROP_PARASITES:
for field in prop["data"]:
if "name" not in field or "data" not in field:
continue
if field["name"].value == "gimp-comment":
self.comment = field["data"].value
elif type == XcfProperty.PROP_COMPRESSION:
self.compression = prop["data/compression"].display
elif type == XcfProperty.PROP_RESOLUTION:
self.width_dpi = int(prop["data/xres"].value)
self.height_dpi = int(prop["data/yres"].value)
def readProperties(self, xcf):
for prop in xcf.array("property"):
self.processProperty(prop)
class PngMetadata(RootMetadata):
TEXT_TO_ATTR = {
"software": "producer",
}
def extract(self, png):
if "header" in png:
self.useHeader(png["header"])
if "time" in png:
self.useTime(png["time"])
if "physical" in png:
self.usePhysical(png["physical"])
for comment in png.array("text"):
if "text" not in comment:
continue
keyword = comment["keyword"].value
text = comment["text"].value
try:
key = self.TEXT_TO_ATTR[keyword.lower()]
setattr(self, key, text)
except KeyError:
if keyword.lower() != "comment":
self.comment = "%s=%s" % (keyword, text)
else:
self.comment = text
compr_size = sum( data.size for data in png.array("data") )
computeComprRate(self, compr_size)
@fault_tolerant
def useTime(self, field):
self.creation_date = field.value
@fault_tolerant
def usePhysical(self, field):
self.width_dpi = field["pixel_per_unit_x"].value
self.height_dpi = field["pixel_per_unit_y"].value
@fault_tolerant
def useHeader(self, header):
self.width = header["width"].value
self.height = header["height"].value
# Read number of colors and pixel format
if "/palette/size" in header:
nb_colors = header["/palette/size"].value // 3
else:
nb_colors = None
if not header["has_palette"].value:
if header["has_alpha"].value:
self.pixel_format = _("RGBA")
else:
self.pixel_format = _("RGB")
elif "/transparency" in header:
self.pixel_format = _("Color index with transparency")
if nb_colors:
nb_colors -= 1
else:
self.pixel_format = _("Color index")
self.bits_per_pixel = pngBitsPerPixel(header)
if nb_colors:
self.nb_colors = nb_colors
# Read compression, timestamp, etc.
self.compression = header["compression"].display
class GifMetadata(RootMetadata):
def extract(self, gif):
self.useScreen(gif["/screen"])
if self.has("bits_per_pixel"):
self.nb_colors = (1 << self.get('bits_per_pixel'))
self.compression = _("LZW")
self.format_version = "GIF version %s" % gif["version"].value
for comments in gif.array("comments"):
for comment in gif.array(comments.name + "/comment"):
self.comment = comment.value
if "graphic_ctl/has_transp" in gif and gif["graphic_ctl/has_transp"].value:
self.pixel_format = _("Color index with transparency")
else:
self.pixel_format = _("Color index")
@fault_tolerant
def useScreen(self, screen):
self.width = screen["width"].value
self.height = screen["height"].value
self.bits_per_pixel = (1 + screen["size_global_map"].value)
class TargaMetadata(RootMetadata):
def extract(self, tga):
self.width = tga["width"].value
self.height = tga["height"].value
self.bits_per_pixel = tga["bpp"].value
if tga["nb_color"].value:
self.nb_colors = tga["nb_color"].value
self.compression = tga["codec"].display
if "pixels" in tga:
computeComprRate(self, tga["pixels"].size)
class WmfMetadata(RootMetadata):
def extract(self, wmf):
if wmf.isAPM():
if "amf_header/rect" in wmf:
rect = wmf["amf_header/rect"]
self.width = (rect["right"].value - rect["left"].value)
self.height = (rect["bottom"].value - rect["top"].value)
self.bits_per_pixel = 24
elif wmf.isEMF():
emf = wmf["emf_header"]
if "description" in emf:
desc = emf["description"].value
if "\0" in desc:
self.producer, self.title = desc.split("\0", 1)
else:
self.producer = desc
if emf["nb_colors"].value:
self.nb_colors = emf["nb_colors"].value
self.bits_per_pixel = 8
else:
self.bits_per_pixel = 24
self.width = emf["width_px"].value
self.height = emf["height_px"].value
class PsdMetadata(RootMetadata):
@fault_tolerant
def extract(self, psd):
self.width = psd["width"].value
self.height = psd["height"].value
self.bits_per_pixel = psd["depth"].value * psd["nb_channels"].value
self.pixel_format = psd["color_mode"].display
self.compression = psd["compression"].display
registerExtractor(IcoFile, IcoMetadata)
registerExtractor(GifFile, GifMetadata)
registerExtractor(XcfFile, XcfMetadata)
registerExtractor(TargaFile, TargaMetadata)
registerExtractor(PcxFile, PcxMetadata)
registerExtractor(BmpFile, BmpMetadata)
registerExtractor(PngFile, PngMetadata)
registerExtractor(TiffFile, TiffMetadata)
registerExtractor(WMF_File, WmfMetadata)
registerExtractor(PsdFile, PsdMetadata)
| gpl-2.0 |
michaelkourlas/gini | frontend/src/gbuilder/Core/Interfaceable.py | 1 | 10393 | """A device that can have interfaces"""
from Device import *
from Attachable import *
class Interfaceable(Attachable):
def __init__(self):
"""
Create a device that can have interfaces.
"""
Attachable.__init__(self)
self.adjacentRouterList = []
self.adjacentSubnetList = []
self.con_int = {} # the connection and interface pair
def generateToolTip(self):
"""
Add IP address(es) to the tool tip for easier lookup.
"""
tooltip = self.getName()
for interface in self.getInterfaces():
tooltip += "\n\nTarget: " + interface[QtCore.QString("target")].getName() + "\n"
tooltip += "IP: " + interface[QtCore.QString("ipv4")]
self.setToolTip(tooltip)
def addInterface(self, node):
"""
Add an interface to the list of interfaces with node as target.
"""
for interface in self.interfaces:
if interface[QtCore.QString("target")] == node:
return
self.interfaces.append({
QtCore.QString("target"):node,
QtCore.QString("ipv4"):QtCore.QString(""),
QtCore.QString("mac"):QtCore.QString(""),
QtCore.QString("routing"):[]})
def removeInterface(self, node):
"""
Remove the interface from the list of interfaces where node is the target.
"""
interface = None
for interface in self.interfaces:
if interface[QtCore.QString("target")] == node:
break
interface = None
if interface:
self.interfaces.remove(interface)
def getInterfaces(self):
"""
Return the list of interfaces.
"""
return self.interfaces
def getInterface(self, node=None, subnet=None):
"""
Return an interface from the list of interfaces specified by node or subnet.
"""
if not node and not subnet:
return self.interfaces[0]
elif subnet:
for interface in self.interfaces:
if interface[QtCore.QString("subnet")] == subnet:
return interface
else:
for interface in self.interfaces:
if interface[QtCore.QString("target")] == node:
return interface
def getInterfaceProperty(self, propName, node=None, subnet=None, index=0):
"""
Return an interface property specified by node or subnet.
"""
if not node and not subnet:
return self.interfaces[index][QtCore.QString(propName)]
interface = self.getInterface(node, subnet)
if interface:
return interface[QtCore.QString(propName)]
def setInterfaceProperty(self, prop, value, node=None, subnet=None, index=0):
"""
Set an interface property specified by node or subnet.
"""
if not node and not subnet:
self.interfaces[index][QtCore.QString(prop)] = QtCore.QString(value)
else:
interface = self.getInterface(node, subnet)
if not interface:
return
interface[QtCore.QString(prop)] = QtCore.QString(value)
if prop == "ipv4":
self.generateToolTip()
def getTable(self, node=None):
"""
Return the route table from the interface specified by node.
"""
return self.getInterfaceProperty("routing", node)
def getEntry(self, subnet, target):
"""
Return an entry from the route table specified by subnet and target.
"""
table = self.getInterfaceProperty("routing", target)
for entry in table:
if entry[QtCore.QString("subnet")] == subnet:
return entry
def getEntryProperty(self, prop, subnet, target):
"""
Return a property from the entry specified by subnet and target.
"""
entry = self.getEntry(subnet, target)
return entry[QtCore.QString(prop)]
def setEntryProperty(self, prop, value, subnet, target):
"""
Set a property from the entry specified by subnet and target.
"""
entry = self.getEntry(subnet, target)
entry[QtCore.QString(prop)] = value
def addEntry(self, mask, gateway, subnet, target):
"""
Add an entry to the table specified by subnet and target.
"""
entry = {QtCore.QString("netmask"):mask, QtCore.QString("gw"):gateway, QtCore.QString("subnet"):subnet}
table = self.getTable(target)
table.append(entry)
def removeEntry(self, entry, target):
"""
Remove an entry from the table specified by subnet and target.
"""
table = self.getTable(target)
table.remove(entry)
def addAdjacentRouter(self, router, interface):
"""
Add a router to the list of adjacent ones for route computations.
"""
self.adjacentRouterList.append([router, interface])
def getAdjacentRouters(self):
"""
Return the list of adjacent routers.
"""
return self.adjacentRouterList
def addAdjacentSubnet(self, subnet):
"""
Add a subnet to the list of adjacent ones for route computations.
"""
self.adjacentSubnetList.append(subnet)
def getAdjacentSubnets(self):
"""
Return the list of adjacent subnets.
"""
return self.adjacentSubnetList
def emptyAdjacentLists(self):
"""
Clear the list of adjacent routers and subnets.
"""
self.adjacentRouterList = []
self.adjacentSubnetList = []
def emptyRouteTable(self):
"""
Clear the route table.
"""
for interface in self.interfaces:
interface[QtCore.QString("routing")] = []
def hasSubnet(self, subnet):
"""
Check if the specified subnet is in the adjacent list.
"""
for sub in self.adjacentSubnetList:
if sub == subnet:
return True
return False
def searchSubnet(self, subnet):
"""
Search the specified subnet in the whole network.
"""
        routerList = self.adjacentRouterList[:]
        # Save all found routers in the list, so that we don't visit a router twice
        foundList = []
        for r in routerList:
            foundList.append(r[0])
while len(routerList) > 0:
theOne = routerList.pop(0)
if theOne[0].hasSubnet(subnet):
return (theOne[0], theOne[1])
else:
# Add its adjacent router list to the list
for router, interface in theOne[0].getAdjacentRouters():
# Check if the router is already visited or is in the to be visited list
if not router in foundList:
newOne = [router, theOne[1]]
routerList.append(newOne)
foundList.append(router)
return (None, None)
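    # Illustrative note (added): searchSubnet() is a breadth-first search over
    # the router adjacency graph. foundList doubles as the visited set so each
    # router is expanded at most once, and the interface carried along in
    # theOne[1] is always the local interface on *this* device that leads
    # toward the matching subnet.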
def addRoutingEntry(self, subnet):
"""
Add an entry to the route table.
"""
if not self.hasSubnet(subnet):
device, interface = self.searchSubnet(subnet)
if interface:
target = interface[QtCore.QString("target")]
if interface[QtCore.QString("subnet")] == subnet \
and self.device_type == "UML" or self.device_type == "REALM":
self.addEntry(interface[QtCore.QString("mask")],
"",
" ",
target)
elif interface[QtCore.QString("subnet")] == subnet \
and self.device_type == "REALM":
self.addEntry(interface[QtCore.QString("mask")],
"",
" ",
target)
else:
if target.device_type == "Switch":
# interfaceable = target.getTarget(self)
# gateway = interfaceable.getInterface(target)[QtCore.QString("ipv4")]
gateway = target.getGateway()
else:
gateway = target.getInterface(self)[QtCore.QString("ipv4")]
self.addEntry(interface[QtCore.QString("mask")],
gateway,
subnet,
target)
else:
if self.device_type == "Router":
interface = self.getInterface(None, subnet)
self.addEntry(interface[QtCore.QString("mask")],
"0.0.0.0",
subnet,
interface[QtCore.QString("target")])
def toString(self):
"""
Reimplemented to provide route information.
"""
devInfo = Device.toString(self)
interfaceInfo = ""
for interface in self.interfaces:
if interface.has_key(QtCore.QString("target")):
interfaceInfo += "\t\tinterface:" + interface[QtCore.QString("target")].getName() + "\n"
else:
interfaceInfo += "\t\twireless interface:\n"
for prop, value in interface.iteritems():
if prop == "target":
pass
elif prop == "routing":
for route in value:
interfaceInfo += "\t\t\t\troute:" + route[QtCore.QString("subnet")] + "\n"
for pr, val in route.iteritems():
if pr != "subnet":
interfaceInfo += "\t\t\t\t\t" + pr + ":" + val + "\n"
else:
interfaceInfo += "\t\t\t" + prop + ":" + value + "\n"
return devInfo + interfaceInfo
| mit |
hryamzik/ansible | lib/ansible/modules/cloud/google/gce.py | 43 | 27529 | #!/usr/bin/python
# Copyright 2013 Google Inc.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: gce
version_added: "1.4"
short_description: create or terminate GCE instances
description:
- Creates or terminates Google Compute Engine (GCE) instances. See
U(https://cloud.google.com/compute) for an overview.
Full install/configuration instructions for the gce* modules can
be found in the comments of ansible/test/gce_tests.py.
options:
image:
description:
- image string to use for the instance (default will follow latest
stable debian image)
default: "debian-8"
image_family:
description:
- image family from which to select the image. The most recent
non-deprecated image in the family will be used.
version_added: "2.4"
external_projects:
description:
- A list of other projects (accessible with the provisioning credentials)
to be searched for the image.
version_added: "2.4"
instance_names:
description:
- a comma-separated list of instance names to create or destroy
machine_type:
description:
- machine type to use for the instance, use 'n1-standard-1' by default
default: "n1-standard-1"
metadata:
description:
- a hash/dictionary of custom data for the instance;
'{"key":"value", ...}'
service_account_email:
version_added: "1.5.1"
description:
- service account email
service_account_permissions:
version_added: "2.0"
description:
- service account permissions (see
U(https://cloud.google.com/sdk/gcloud/reference/compute/instances/create),
--scopes section for detailed information)
choices: [
"bigquery", "cloud-platform", "compute-ro", "compute-rw",
"useraccounts-ro", "useraccounts-rw", "datastore", "logging-write",
"monitoring", "sql-admin", "storage-full", "storage-ro",
"storage-rw", "taskqueue", "userinfo-email"
]
pem_file:
version_added: "1.5.1"
description:
- path to the pem file associated with the service account email
This option is deprecated. Use 'credentials_file'.
credentials_file:
version_added: "2.1.0"
description:
- path to the JSON file associated with the service account email
project_id:
version_added: "1.5.1"
description:
- your GCE project ID
name:
description:
- either a name of a single instance or when used with 'num_instances',
the base name of a cluster of nodes
aliases: ['base_name']
num_instances:
description:
- can be used with 'name', specifies
the number of nodes to provision using 'name'
as a base name
version_added: "2.3"
network:
description:
- name of the network, 'default' will be used if not specified
default: "default"
subnetwork:
description:
- name of the subnetwork in which the instance should be created
version_added: "2.2"
persistent_boot_disk:
description:
- if set, create the instance with a persistent boot disk
type: bool
default: 'no'
disks:
description:
- a list of persistent disks to attach to the instance; a string value
gives the name of the disk; alternatively, a dictionary value can
define 'name' and 'mode' ('READ_ONLY' or 'READ_WRITE'). The first entry
will be the boot disk (which must be READ_WRITE).
version_added: "1.7"
state:
description:
- desired state of the resource
default: "present"
choices: ["active", "present", "absent", "deleted", "started", "stopped", "terminated"]
tags:
description:
- a comma-separated list of tags to associate with the instance
zone:
description:
- the GCE zone to use. The list of available zones is at U(https://cloud.google.com/compute/docs/regions-zones/regions-zones#available).
required: true
default: "us-central1-a"
ip_forward:
version_added: "1.9"
description:
- set to C(yes) if the instance can forward ip packets (useful for
gateways)
type: bool
default: 'no'
external_ip:
version_added: "1.9"
description:
- type of external ip, ephemeral by default; alternatively, a fixed gce ip or ip name can be given. Specify 'none' if no external ip is desired.
default: "ephemeral"
disk_auto_delete:
version_added: "1.9"
description:
- if set boot disk will be removed after instance destruction
type: bool
default: 'yes'
preemptible:
version_added: "2.1"
description:
- if set to C(yes), instances will be preemptible and time-limited.
(requires libcloud >= 0.20.0)
type: bool
default: 'no'
disk_size:
description:
- The size of the boot disk created for this instance (in GB)
default: 10
version_added: "2.3"
requirements:
- "python >= 2.6"
- "apache-libcloud >= 0.13.3, >= 0.17.0 if using JSON credentials,
>= 0.20.0 if using preemptible option"
notes:
- Either I(instance_names) or I(name) is required.
- JSON credentials strongly preferred.
author: "Eric Johnson (@erjohnso) <[email protected]>, Tom Melendez (@supertom) <[email protected]>"
'''
EXAMPLES = '''
# Basic provisioning example. Create a single Debian 8 instance in the
# us-central1-a Zone of the n1-standard-1 machine type.
# Create multiple instances by specifying multiple names, separated by
# commas in the instance_names field
# (e.g. my-test-instance1,my-test-instance2)
- gce:
instance_names: my-test-instance1
zone: us-central1-a
machine_type: n1-standard-1
image: debian-8
state: present
service_account_email: "[email protected]"
credentials_file: "/path/to/your-key.json"
project_id: "your-project-name"
disk_size: 32
# Create a single instance of an image from the "my-base-image" image family
# in the us-central1-a Zone of the n1-standard-1 machine type.
# This image family is in the "my-other-project" GCP project.
- gce:
instance_names: my-test-instance1
zone: us-central1-a
machine_type: n1-standard-1
image_family: my-base-image
external_projects:
- my-other-project
state: present
service_account_email: "[email protected]"
credentials_file: "/path/to/your-key.json"
project_id: "your-project-name"
disk_size: 32
# Create a single Debian 8 instance in the us-central1-a Zone
# Use existing disks, custom network/subnetwork, set service account permissions
# add tags and metadata.
- gce:
instance_names: my-test-instance
zone: us-central1-a
machine_type: n1-standard-1
state: present
metadata: '{"db":"postgres", "group":"qa", "id":500}'
tags:
- http-server
- my-other-tag
disks:
- name: disk-2
mode: READ_WRITE
- name: disk-3
mode: READ_ONLY
disk_auto_delete: false
network: foobar-network
subnetwork: foobar-subnetwork-1
preemptible: true
ip_forward: true
service_account_permissions:
- storage-full
- taskqueue
- bigquery
- https://www.googleapis.com/auth/ndev.clouddns.readwrite
service_account_email: "[email protected]"
credentials_file: "/path/to/your-key.json"
project_id: "your-project-name"
---
# Example Playbook
- name: Compute Engine Instance Examples
hosts: localhost
vars:
service_account_email: "[email protected]"
credentials_file: "/path/to/your-key.json"
project_id: "your-project-name"
tasks:
- name: create multiple instances
# Basic provisioning example. Create multiple Debian 8 instances in the
# us-central1-a Zone of n1-standard-1 machine type.
gce:
instance_names: test1,test2,test3
zone: us-central1-a
machine_type: n1-standard-1
image: debian-8
state: present
service_account_email: "{{ service_account_email }}"
credentials_file: "{{ credentials_file }}"
project_id: "{{ project_id }}"
metadata : '{ "startup-script" : "apt-get update" }'
register: gce
- name: Save host data
add_host:
hostname: "{{ item.public_ip }}"
groupname: gce_instances_ips
with_items: "{{ gce.instance_data }}"
- name: Wait for SSH for instances
wait_for:
delay: 1
host: "{{ item.public_ip }}"
port: 22
state: started
timeout: 30
with_items: "{{ gce.instance_data }}"
- name: Configure Hosts
hosts: gce_instances_ips
become: yes
become_method: sudo
roles:
- my-role-one
- my-role-two
tags:
- config
- name: delete test-instances
# Basic termination of instance.
gce:
service_account_email: "{{ service_account_email }}"
credentials_file: "{{ credentials_file }}"
project_id: "{{ project_id }}"
instance_names: "{{ gce.instance_names }}"
zone: us-central1-a
state: absent
tags:
- delete
'''
import socket
import logging
try:
from ast import literal_eval
HAS_PYTHON26 = True
except ImportError:
HAS_PYTHON26 = False
try:
import libcloud
from libcloud.compute.types import Provider
from libcloud.compute.providers import get_driver
from libcloud.common.google import GoogleBaseError, QuotaExceededError, \
ResourceExistsError, ResourceInUseError, ResourceNotFoundError
from libcloud.compute.drivers.gce import GCEAddress
_ = Provider.GCE
HAS_LIBCLOUD = True
except ImportError:
HAS_LIBCLOUD = False
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.gce import gce_connect, unexpected_error_msg
from ansible.module_utils.gcp import get_valid_location
from ansible.module_utils.six.moves import reduce
def get_instance_info(inst):
"""Retrieves instance information from an instance object and returns it
as a dictionary.
"""
metadata = {}
if 'metadata' in inst.extra and 'items' in inst.extra['metadata']:
for md in inst.extra['metadata']['items']:
metadata[md['key']] = md['value']
    try:
        netname = inst.extra['networkInterfaces'][0]['network'].split('/')[-1]
    except Exception:
        netname = None
    try:
        subnetname = inst.extra['networkInterfaces'][0]['subnetwork'].split('/')[-1]
    except Exception:
        subnetname = None
if 'disks' in inst.extra:
disk_names = [disk_info['source'].split('/')[-1]
for disk_info
in sorted(inst.extra['disks'],
key=lambda disk_info: disk_info['index'])]
else:
disk_names = []
if len(inst.public_ips) == 0:
public_ip = None
else:
public_ip = inst.public_ips[0]
return ({
'image': inst.image is not None and inst.image.split('/')[-1] or None,
'disks': disk_names,
'machine_type': inst.size,
'metadata': metadata,
'name': inst.name,
'network': netname,
'subnetwork': subnetname,
'private_ip': inst.private_ips[0],
'public_ip': public_ip,
'status': ('status' in inst.extra) and inst.extra['status'] or None,
'tags': ('tags' in inst.extra) and inst.extra['tags'] or [],
'zone': ('zone' in inst.extra) and inst.extra['zone'].name or None,
})
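# Hedged sketch of the dictionary returned above (added; all values are
# hypothetical):
#
#     {'image': 'debian-8', 'disks': ['inst-1'], 'machine_type': 'n1-standard-1',
#      'metadata': {'db': 'postgres'}, 'name': 'inst-1', 'network': 'default',
#      'subnetwork': None, 'private_ip': '10.240.0.2',
#      'public_ip': '104.198.0.1', 'status': 'RUNNING',
#      'tags': ['http-server'], 'zone': 'us-central1-a'}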
def create_instances(module, gce, instance_names, number, lc_zone):
"""Creates new instances. Attributes other than instance_names are picked
up from 'module'
module : AnsibleModule object
gce: authenticated GCE libcloud driver
instance_names: python list of instance names to create
number: number of instances to create
lc_zone: GCEZone object
    Returns:
        A tuple (changed, instance_json_data, instance_names) where
        instance_json_data is a list of dictionaries with information
        about the instances that were launched.
"""
image = module.params.get('image')
image_family = module.params.get('image_family')
external_projects = module.params.get('external_projects')
machine_type = module.params.get('machine_type')
metadata = module.params.get('metadata')
network = module.params.get('network')
subnetwork = module.params.get('subnetwork')
persistent_boot_disk = module.params.get('persistent_boot_disk')
disks = module.params.get('disks')
tags = module.params.get('tags')
ip_forward = module.params.get('ip_forward')
external_ip = module.params.get('external_ip')
disk_auto_delete = module.params.get('disk_auto_delete')
preemptible = module.params.get('preemptible')
disk_size = module.params.get('disk_size')
service_account_permissions = module.params.get('service_account_permissions')
if external_ip == "none":
instance_external_ip = None
elif external_ip != "ephemeral":
instance_external_ip = external_ip
try:
# check if instance_external_ip is an ip or a name
try:
socket.inet_aton(instance_external_ip)
instance_external_ip = GCEAddress(id='unknown', name='unknown', address=instance_external_ip, region='unknown', driver=gce)
except socket.error:
instance_external_ip = gce.ex_get_address(instance_external_ip)
except GoogleBaseError as e:
module.fail_json(msg='Unexpected error attempting to get a static ip %s, error: %s' % (external_ip, e.value))
else:
instance_external_ip = external_ip
new_instances = []
changed = False
lc_disks = []
disk_modes = []
for i, disk in enumerate(disks or []):
if isinstance(disk, dict):
lc_disks.append(gce.ex_get_volume(disk['name'], lc_zone))
disk_modes.append(disk['mode'])
else:
lc_disks.append(gce.ex_get_volume(disk, lc_zone))
# boot disk is implicitly READ_WRITE
disk_modes.append('READ_ONLY' if i > 0 else 'READ_WRITE')
lc_network = gce.ex_get_network(network)
lc_machine_type = gce.ex_get_size(machine_type, lc_zone)
# Try to convert the user's metadata value into the format expected
# by GCE. First try to ensure user has proper quoting of a
# dictionary-like syntax using 'literal_eval', then convert the python
# dict into a python list of 'key' / 'value' dicts. Should end up
# with:
# [ {'key': key1, 'value': value1}, {'key': key2, 'value': value2}, ...]
if metadata:
if isinstance(metadata, dict):
md = metadata
else:
try:
md = literal_eval(str(metadata))
if not isinstance(md, dict):
raise ValueError('metadata must be a dict')
except ValueError as e:
module.fail_json(msg='bad metadata: %s' % str(e))
            except SyntaxError:
                module.fail_json(msg='bad metadata syntax')
if hasattr(libcloud, '__version__') and libcloud.__version__ < '0.15':
items = []
for k, v in md.items():
items.append({"key": k, "value": v})
metadata = {'items': items}
else:
metadata = md
lc_image = LazyDiskImage(module, gce, image, lc_disks, family=image_family, projects=external_projects)
ex_sa_perms = []
bad_perms = []
if service_account_permissions:
for perm in service_account_permissions:
if perm not in gce.SA_SCOPES_MAP and not perm.startswith('https://www.googleapis.com/auth'):
bad_perms.append(perm)
if len(bad_perms) > 0:
module.fail_json(msg='bad permissions: %s' % str(bad_perms))
ex_sa_perms.append({'email': "default"})
ex_sa_perms[0]['scopes'] = service_account_permissions
# These variables all have default values but check just in case
if not lc_network or not lc_machine_type or not lc_zone:
module.fail_json(msg='Missing required create instance variable',
changed=False)
gce_args = dict(
location=lc_zone,
ex_network=network, ex_tags=tags, ex_metadata=metadata,
ex_can_ip_forward=ip_forward,
external_ip=instance_external_ip, ex_disk_auto_delete=disk_auto_delete,
ex_service_accounts=ex_sa_perms
)
if preemptible is not None:
gce_args['ex_preemptible'] = preemptible
if subnetwork is not None:
gce_args['ex_subnetwork'] = subnetwork
if isinstance(instance_names, str) and not number:
instance_names = [instance_names]
if isinstance(instance_names, str) and number:
instance_responses = gce.ex_create_multiple_nodes(instance_names, lc_machine_type,
lc_image(), number, **gce_args)
for resp in instance_responses:
n = resp
if isinstance(resp, libcloud.compute.drivers.gce.GCEFailedNode):
try:
n = gce.ex_get_node(n.name, lc_zone)
except ResourceNotFoundError:
pass
else:
# Assure that at least one node has been created to set changed=True
changed = True
new_instances.append(n)
else:
for instance in instance_names:
pd = None
if lc_disks:
pd = lc_disks[0]
elif persistent_boot_disk:
try:
pd = gce.ex_get_volume("%s" % instance, lc_zone)
except ResourceNotFoundError:
pd = gce.create_volume(disk_size, "%s" % instance, image=lc_image())
gce_args['ex_boot_disk'] = pd
inst = None
try:
inst = gce.ex_get_node(instance, lc_zone)
except ResourceNotFoundError:
inst = gce.create_node(
instance, lc_machine_type, lc_image(), **gce_args
)
changed = True
except GoogleBaseError as e:
module.fail_json(msg='Unexpected error attempting to create ' +
'instance %s, error: %s' % (instance, e.value))
if inst:
new_instances.append(inst)
for inst in new_instances:
for i, lc_disk in enumerate(lc_disks):
# Check whether the disk is already attached
if (len(inst.extra['disks']) > i):
attached_disk = inst.extra['disks'][i]
if attached_disk['source'] != lc_disk.extra['selfLink']:
module.fail_json(
msg=("Disk at index %d does not match: requested=%s found=%s" % (
i, lc_disk.extra['selfLink'], attached_disk['source'])))
elif attached_disk['mode'] != disk_modes[i]:
module.fail_json(
msg=("Disk at index %d is in the wrong mode: requested=%s found=%s" % (
i, disk_modes[i], attached_disk['mode'])))
else:
continue
gce.attach_volume(inst, lc_disk, ex_mode=disk_modes[i])
# Work around libcloud bug: attached volumes don't get added
# to the instance metadata. get_instance_info() only cares about
# source and index.
if len(inst.extra['disks']) != i + 1:
inst.extra['disks'].append(
{'source': lc_disk.extra['selfLink'], 'index': i})
instance_names = []
instance_json_data = []
for inst in new_instances:
d = get_instance_info(inst)
instance_names.append(d['name'])
instance_json_data.append(d)
return (changed, instance_json_data, instance_names)
def change_instance_state(module, gce, instance_names, number, zone, state):
"""Changes the state of a list of instances. For example,
change from started to stopped, or started to absent.
module: Ansible module object
gce: authenticated GCE connection object
instance_names: a list of instance names to terminate
zone: GCEZone object where the instances reside prior to termination
state: 'state' parameter passed into module as argument
Returns a dictionary of instance names that were changed.
"""
changed = False
nodes = []
state_instance_names = []
if isinstance(instance_names, str) and number:
node_names = ['%s-%03d' % (instance_names, i) for i in range(number)]
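        # e.g. instance_names='web' (hypothetical) with number=3 yields
        # node_names == ['web-000', 'web-001', 'web-002']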
elif isinstance(instance_names, str) and not number:
node_names = [instance_names]
else:
node_names = instance_names
for name in node_names:
inst = None
try:
inst = gce.ex_get_node(name, zone)
except ResourceNotFoundError:
state_instance_names.append(name)
except Exception as e:
module.fail_json(msg=unexpected_error_msg(e), changed=False)
else:
nodes.append(inst)
state_instance_names.append(name)
if state in ['absent', 'deleted'] and number:
changed_nodes = gce.ex_destroy_multiple_nodes(nodes) or [False]
changed = reduce(lambda x, y: x or y, changed_nodes)
else:
for node in nodes:
if state in ['absent', 'deleted']:
gce.destroy_node(node)
changed = True
elif state == 'started' and node.state == libcloud.compute.types.NodeState.STOPPED:
gce.ex_start_node(node)
changed = True
elif state in ['stopped', 'terminated'] and node.state == libcloud.compute.types.NodeState.RUNNING:
gce.ex_stop_node(node)
changed = True
return (changed, state_instance_names)
def main():
module = AnsibleModule(
argument_spec=dict(
image=dict(default='debian-8'),
image_family=dict(),
external_projects=dict(type='list'),
instance_names=dict(),
machine_type=dict(default='n1-standard-1'),
metadata=dict(),
name=dict(aliases=['base_name']),
num_instances=dict(type='int'),
network=dict(default='default'),
subnetwork=dict(),
persistent_boot_disk=dict(type='bool', default=False),
disks=dict(type='list'),
state=dict(choices=['active', 'present', 'absent', 'deleted',
'started', 'stopped', 'terminated'],
default='present'),
tags=dict(type='list'),
zone=dict(default='us-central1-a'),
service_account_email=dict(),
service_account_permissions=dict(type='list'),
pem_file=dict(type='path'),
credentials_file=dict(type='path'),
project_id=dict(),
ip_forward=dict(type='bool', default=False),
external_ip=dict(default='ephemeral'),
disk_auto_delete=dict(type='bool', default=True),
disk_size=dict(type='int', default=10),
preemptible=dict(type='bool', default=None),
),
mutually_exclusive=[('instance_names', 'name')]
)
if not HAS_PYTHON26:
module.fail_json(msg="GCE module requires python's 'ast' module, python v2.6+")
if not HAS_LIBCLOUD:
module.fail_json(msg='libcloud with GCE support (0.17.0+) required for this module')
gce = gce_connect(module)
image = module.params.get('image')
image_family = module.params.get('image_family')
external_projects = module.params.get('external_projects')
instance_names = module.params.get('instance_names')
name = module.params.get('name')
number = module.params.get('num_instances')
subnetwork = module.params.get('subnetwork')
state = module.params.get('state')
zone = module.params.get('zone')
preemptible = module.params.get('preemptible')
changed = False
inames = None
if isinstance(instance_names, list):
inames = instance_names
elif isinstance(instance_names, str):
inames = instance_names.split(',')
if name:
inames = name
if not inames:
module.fail_json(msg='Must specify a "name" or "instance_names"',
changed=False)
if not zone:
module.fail_json(msg='Must specify a "zone"', changed=False)
lc_zone = get_valid_location(module, gce, zone)
if preemptible is not None and hasattr(libcloud, '__version__') and libcloud.__version__ < '0.20':
module.fail_json(msg="Apache Libcloud 0.20.0+ is required to use 'preemptible' option",
changed=False)
if subnetwork is not None and not hasattr(gce, 'ex_get_subnetwork'):
module.fail_json(msg="Apache Libcloud 1.0.0+ is required to use 'subnetwork' option",
changed=False)
json_output = {'zone': zone}
if state in ['absent', 'deleted', 'started', 'stopped', 'terminated']:
json_output['state'] = state
(changed, state_instance_names) = change_instance_state(
module, gce, inames, number, lc_zone, state)
# based on what user specified, return the same variable, although
# value could be different if an instance could not be destroyed
if instance_names or name and number:
json_output['instance_names'] = state_instance_names
elif name:
json_output['name'] = name
elif state in ['active', 'present']:
json_output['state'] = 'present'
(changed, instance_data, instance_name_list) = create_instances(
module, gce, inames, number, lc_zone)
json_output['instance_data'] = instance_data
if instance_names:
json_output['instance_names'] = instance_name_list
elif name:
json_output['name'] = name
json_output['changed'] = changed
module.exit_json(**json_output)
class LazyDiskImage:
"""
Object for lazy instantiation of disk image
gce.ex_get_image is a very expensive call, so we want to avoid calling it as much as possible.
"""
def __init__(self, module, gce, name, has_pd, family=None, projects=None):
self.image = None
self.was_called = False
self.gce = gce
self.name = name
self.has_pd = has_pd
self.module = module
self.family = family
self.projects = projects
def __call__(self):
if not self.was_called:
self.was_called = True
if not self.has_pd:
if self.family:
self.image = self.gce.ex_get_image_from_family(self.family, ex_project_list=self.projects)
else:
self.image = self.gce.ex_get_image(self.name, ex_project_list=self.projects)
if not self.image:
self.module.fail_json(msg='image or disks missing for create instance', changed=False)
return self.image
if __name__ == '__main__':
main()
| gpl-3.0 |
ramcn/demo2 | crowdsourcing/models.py | 5 | 14284 | from django.contrib.auth.models import User
from django.db import models
from django.utils import timezone
class RegistrationModel(models.Model):
user = models.OneToOneField(User)
activation_key = models.CharField(max_length=40)
created_timestamp = models.DateTimeField(auto_now_add=True, auto_now=False)
last_updated = models.DateTimeField(auto_now_add=False, auto_now=True)
class PasswordResetModel(models.Model):
user = models.OneToOneField(User)
reset_key = models.CharField(max_length=40)
created_timestamp = models.DateTimeField(auto_now_add=True, auto_now=False)
last_updated = models.DateTimeField(auto_now_add=False, auto_now=True)
class Region(models.Model):
name = models.CharField(max_length=64, error_messages={'required': 'Please specify the region!', })
code = models.CharField(max_length=16, error_messages={'required': 'Please specify the region code!', })
created_timestamp = models.DateTimeField(auto_now_add=True, auto_now=False)
last_updated = models.DateTimeField(auto_now_add=False, auto_now=True)
class Country(models.Model):
name = models.CharField(max_length=64, error_messages={'required': 'Please specify the country!', })
code = models.CharField(max_length=8, error_messages={'required': 'Please specify the country code!', })
region = models.ForeignKey(Region)
created_timestamp = models.DateTimeField(auto_now_add=True, auto_now=False)
last_updated = models.DateTimeField(auto_now_add=False, auto_now=True)
class City(models.Model):
name = models.CharField(max_length=64, error_messages={'required': 'Please specify the city!', })
country = models.ForeignKey(Country)
created_timestamp = models.DateTimeField(auto_now_add=True, auto_now=False)
last_updated = models.DateTimeField(auto_now_add=False, auto_now=True)
class Address(models.Model):
street = models.CharField(max_length=128, error_messages={'required': 'Please specify the street name!', })
country = models.ForeignKey(Country)
city = models.ForeignKey(City)
created_timestamp = models.DateTimeField(auto_now_add=True, auto_now=False)
last_updated = models.DateTimeField(auto_now_add=False, auto_now=True)
class Role(models.Model):
name = models.CharField(max_length=32, unique=True, error_messages={'required': 'Please specify the role name!', 'unique': 'The role %(value)r already exists. Please provide another name!'})
is_active = models.BooleanField(default=True)
deleted = models.BooleanField(default=False)
created_timestamp = models.DateTimeField(auto_now_add=True, auto_now=False)
last_updated = models.DateTimeField(auto_now_add=False, auto_now=True)
class Language(models.Model):
name = models.CharField(max_length=64, error_messages={'required': 'Please specify the language!'})
iso_code = models.CharField(max_length=8)
created_timestamp = models.DateTimeField(auto_now_add=True, auto_now=False)
last_updated = models.DateTimeField(auto_now_add=False, auto_now=True)
class UserProfile(models.Model):
user = models.OneToOneField(User)
gender_choices = (('M', 'Male'),('F', 'Female'))
gender = models.CharField(max_length=1, choices=gender_choices)
address = models.ForeignKey(Address, null=True)
birthday = models.DateField(null=True, error_messages={'invalid': "Please enter a correct date format"})
nationality = models.ManyToManyField(Country, through='UserCountry')
verified = models.BooleanField(default=False)
picture = models.BinaryField(null=True)
friends = models.ManyToManyField('self', through='Friendship',
symmetrical=False)
roles = models.ManyToManyField(Role, through='UserRole')
deleted = models.BooleanField(default=False)
languages = models.ManyToManyField(Language, through='UserLanguage')
created_timestamp = models.DateTimeField(auto_now_add=True, auto_now=False)
last_updated = models.DateTimeField(auto_now_add=False, auto_now=True)
class UserCountry(models.Model):
country = models.ForeignKey(Country)
user = models.ForeignKey(UserProfile)
created_timestamp = models.DateTimeField(auto_now_add=True, auto_now=False)
last_updated = models.DateTimeField(auto_now_add=False, auto_now=True)
class Skill(models.Model):
name = models.CharField(max_length=128, error_messages={'required': "Please enter the skill name!"})
description = models.CharField(max_length=512, error_messages={'required': "Please enter the skill description!"})
verified = models.BooleanField(default=False)
parent = models.ForeignKey('self', null=True)
deleted = models.BooleanField(default=False)
created_timestamp = models.DateTimeField(auto_now_add=True, auto_now=False)
last_updated = models.DateTimeField(auto_now_add=False, auto_now=True)
class Worker(models.Model):
profile = models.OneToOneField(UserProfile)
skills = models.ManyToManyField(Skill, through='WorkerSkill')
class WorkerSkill(models.Model):
worker = models.ForeignKey(Worker)
skill = models.ForeignKey(Skill)
level = models.IntegerField(null=True)
verified = models.BooleanField(default=False)
created_timestamp = models.DateTimeField(auto_now_add=True, auto_now=False)
last_updated = models.DateTimeField(auto_now_add=False, auto_now=True)
class Requester(models.Model):
profile = models.OneToOneField(UserProfile)
class UserRole(models.Model):
user_profile = models.ForeignKey(UserProfile)
role = models.ForeignKey(Role)
created_timestamp = models.DateTimeField(auto_now_add=True, auto_now=False)
last_updated = models.DateTimeField(auto_now_add=False, auto_now=True)
class Friendship(models.Model):
user_source = models.ForeignKey(UserProfile, related_name='user_source')
user_target = models.ForeignKey(UserProfile, related_name='user_target')
deleted = models.BooleanField(default=False)
created_timestamp = models.DateTimeField(auto_now_add=True, auto_now=False)
last_updated = models.DateTimeField(auto_now_add=False, auto_now=True)
class Category(models.Model):
name = models.CharField(max_length=128, error_messages={'required': "Please enter the category name!"})
parent = models.ForeignKey('self', null=True)
deleted = models.BooleanField(default=False)
created_timestamp = models.DateTimeField(auto_now_add=True, auto_now=False)
last_updated = models.DateTimeField(auto_now_add=False, auto_now=True)
class Project(models.Model):
name = models.CharField(max_length=128, error_messages={'required': "Please enter the project name!"})
collaborators = models.ManyToManyField(Requester, through='ProjectRequester')
deadline = models.DateTimeField(auto_now_add=True, auto_now=False)
keywords = models.TextField()
deleted = models.BooleanField(default=False)
categories = models.ManyToManyField(Category, through='ProjectCategory')
created_timestamp = models.DateTimeField(auto_now_add=True, auto_now=False)
last_updated = models.DateTimeField(auto_now_add=False, auto_now=True)
class ProjectRequester(models.Model):
"""
Tracks the list of requesters that collaborate on a specific project
"""
requester = models.ForeignKey(Requester)
project = models.ForeignKey(Project)
created_timestamp = models.DateTimeField(auto_now_add=True, auto_now=False)
last_updated = models.DateTimeField(auto_now_add=False, auto_now=True)
class Module(models.Model):
"""
    A group of tasks of the same kind.
    Fields:
    - repetition: the number of times a task needs to be performed
"""
name = models.CharField(max_length=128, error_messages={'required': "Please enter the module name!"})
description = models.TextField(error_messages={'required': "Please enter the module description!"})
owner = models.ForeignKey(Requester)
project = models.ForeignKey(Project)
categories = models.ManyToManyField(Category, through='ModuleCategory')
keywords = models.TextField()
#TODO: To be refined
statuses = ((1, "Created"),
(2, 'In Progress'),
(3, 'In Review'),
(4, 'Finished')
)
status = models.IntegerField(choices=statuses, default=1)
price = models.FloatField()
repetition = models.IntegerField()
module_timeout = models.IntegerField()
deleted = models.BooleanField(default=False)
created_timestamp = models.DateTimeField(auto_now_add=True, auto_now=False)
last_updated = models.DateTimeField(auto_now_add=False, auto_now=True)
class ModuleCategory(models.Model):
module = models.ForeignKey(Module)
category = models.ForeignKey(Category)
created_timestamp = models.DateTimeField(auto_now_add=True, auto_now=False)
last_updated = models.DateTimeField(auto_now_add=False, auto_now=True)
class ProjectCategory(models.Model):
project = models.ForeignKey(Project)
category = models.ForeignKey(Category)
created_timestamp = models.DateTimeField(auto_now_add=True, auto_now=False)
last_updated = models.DateTimeField(auto_now_add=False, auto_now=True)
class Template(models.Model):
name = models.CharField(max_length=128, error_messages={'required': "Please enter the template name!"})
owner = models.ForeignKey(Requester)
source_html = models.TextField()
deleted = models.BooleanField(default=False)
created_timestamp = models.DateTimeField(auto_now_add=True, auto_now=False)
last_updated = models.DateTimeField(auto_now_add=False, auto_now=True)
class TemplateItem(models.Model):
name = models.CharField(max_length=128, error_messages={'required': "Please enter the name of the template item!"})
template = models.ForeignKey(Template)
deleted = models.BooleanField(default=False)
created_timestamp = models.DateTimeField(auto_now_add=True, auto_now=False)
last_updated = models.DateTimeField(auto_now_add=False, auto_now=True)
class TemplateItemProperties(models.Model):
template_item = models.ForeignKey(TemplateItem)
attribute = models.CharField(max_length=128)
operator = models.CharField(max_length=128)
value1 = models.CharField(max_length=128)
value2 = models.CharField(max_length=128)
created_timestamp = models.DateTimeField(auto_now_add=True, auto_now=False)
last_updated = models.DateTimeField(auto_now_add=False, auto_now=True)
class Task(models.Model):
module = models.ForeignKey(Module)
#TODO: To be refined
statuses = ((1, "Created"),
(2, 'Accepted'),
(3, 'Reviewed'),
(4, 'Finished')
)
status = models.IntegerField(choices=statuses, default=1)
deleted = models.BooleanField(default=False)
created_timestamp = models.DateTimeField(auto_now_add=True, auto_now=False)
last_updated = models.DateTimeField(auto_now_add=False, auto_now=True)
class TaskWorker(models.Model):
task = models.ForeignKey(Task)
worker = models.ForeignKey(Worker)
created_timestamp = models.DateTimeField(auto_now_add=True, auto_now=False)
last_updated = models.DateTimeField(auto_now_add=False, auto_now=True)
class TaskWorkerResult(models.Model):
task_worker = models.ForeignKey(TaskWorker)
template_item = models.ForeignKey(TemplateItem)
#TODO: To be refined
statuses = ((1, "Created"),
(2, 'Accepted'),
(3, 'Reviewed'),
(4, 'Finished')
)
status = models.IntegerField(choices=statuses, default=1)
created_timestamp = models.DateTimeField(auto_now_add=True, auto_now=False)
last_updated = models.DateTimeField(auto_now_add=False, auto_now=True)
class WorkerModuleApplication(models.Model):
worker = models.ForeignKey(Worker)
module = models.ForeignKey(Module)
#TODO: To be refined
statuses = ((1, "Created"),
(2, 'Accepted'),
(3, 'Rejected')
)
status = models.IntegerField(choices=statuses, default=1)
created_timestamp = models.DateTimeField(auto_now_add=True, auto_now=False)
last_updated = models.DateTimeField(auto_now_add=False, auto_now=True)
class ActivityLog(models.Model):
"""
Track all user's activities: Create, Update and Delete
"""
activity = models.CharField(max_length=512)
author = models.ForeignKey(User)
created_timestamp = models.DateTimeField(auto_now_add=False, auto_now=True)
class Qualification(models.Model):
module = models.ForeignKey(Module)
#TODO: To be refined
types = ((1, "Strict"),
(2, 'Flexible'))
type = models.IntegerField(choices=types, default=1)
created_timestamp = models.DateTimeField(auto_now_add=True, auto_now=False)
last_updated = models.DateTimeField(auto_now_add=False, auto_now=True)
class QualificationItem(models.Model):
qualification = models.ForeignKey(Qualification)
attribute = models.CharField(max_length=128)
operator = models.CharField(max_length=128)
value1 = models.CharField(max_length=128)
value2 = models.CharField(max_length=128)
created_timestamp = models.DateTimeField(auto_now_add=True, auto_now=False)
last_updated = models.DateTimeField(auto_now_add=False, auto_now=True)
class UserLanguage(models.Model):
language = models.ForeignKey(Language)
user = models.ForeignKey(UserProfile)
last_updated = models.DateTimeField(auto_now_add=False, auto_now=True)
class Currency(models.Model):
name = models.CharField(max_length=32)
iso_code = models.CharField(max_length=8)
last_updated = models.DateTimeField(auto_now_add=False, auto_now=True)
class UserPreferences(models.Model):
user = models.OneToOneField(User)
language = models.ForeignKey(Language)
currency = models.ForeignKey(Currency)
login_alerts = models.SmallIntegerField(default=0)
last_updated = models.DateTimeField(auto_now_add=False, auto_now=True)
class RequesterRanking(models.Model):
requester_name = models.CharField(max_length=128)
requester_payRank = models.FloatField()
requester_fairRank = models.FloatField()
requester_speedRank = models.FloatField()
requester_communicationRank = models.FloatField()
requester_numberofReviews = models.IntegerField(default=0)
| mit |
stackforge/solum | solum/api/controllers/v1/app.py | 2 | 6442 | # Copyright 2015 - Rackspace US, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
import re
from oslo_config import cfg
from oslo_log import log as logging
import pecan
from pecan import rest
import wsmeext.pecan as wsme_pecan
from solum.api.controllers.v1.datamodel import app
from solum.api.controllers.v1 import workflow
from solum.api.handlers import app_handler
from solum.common import exception
from solum.common import request
from solum.common import yamlutils
from solum import objects
LOG = logging.getLogger(__name__)
class AppController(rest.RestController):
"""Manages operations on a single app."""
def __init__(self, app_id):
super(AppController, self).__init__()
self._id = app_id
@exception.wrap_wsme_pecan_controller_exception
@pecan.expose()
def _lookup(self, resource, *remainder):
if remainder and not remainder[-1]:
remainder = remainder[:-1]
if resource == 'workflows':
return workflow.WorkflowsController(self._id), remainder
@exception.wrap_wsme_pecan_controller_exception
@wsme_pecan.wsexpose(app.App)
def get(self):
"""Return this app."""
request.check_request_for_https()
handler = app_handler.AppHandler(pecan.request.security_context)
app_model = handler.get(self._id)
host_url = pecan.request.application_url.rstrip('/')
app_model = app.App.from_db_model(app_model, host_url)
return app_model
@exception.wrap_wsme_pecan_controller_exception
@wsme_pecan.wsexpose(app.App, body=app.App, status_code=200)
def patch(self, data):
"""Modify this app."""
request.check_request_for_https()
handler = app_handler.AppHandler(pecan.request.security_context)
handler.get(self._id)
if not data:
raise exception.BadRequest(reason="No body detected")
updated_app = handler.patch(self._id, data)
host_url = pecan.request.application_url.rstrip('/')
updated_app = app.App.from_db_model(updated_app, host_url)
return updated_app
@exception.wrap_pecan_controller_exception
@wsme_pecan.wsexpose(status_code=202)
def delete(self):
"""Delete this app."""
handler = app_handler.AppHandler(pecan.request.security_context)
handler.delete(self._id)
class AppsController(rest.RestController):
"""Manages operations on the apps collection."""
def _validate(self, app_data):
# check max apps created for given tenant
handler = app_handler.AppHandler(pecan.request.security_context)
if len(handler.get_all()) >= cfg.CONF.api.max_apps_per_tenant:
msg = "Cannot create application as maximum allowed limit reached."
raise exception.ResourceLimitExceeded(reason=msg)
if not app_data.languagepack:
raise exception.BadRequest(reason="Languagepack not specified.")
if not app_data.name:
raise exception.BadRequest(reason='App name cannot be empty.')
msg = ("Application name must be 1-100 characters long, only contain "
"a-z,0-9,-,_ and start with an alphabet character.")
# check if app name contains any invalid characters
if not app_data.name or not app_data.name[0].isalpha():
raise exception.BadRequest(reason=msg)
try:
re.match(r'^([a-z0-9-_]{1,100})$', app_data.name).group(0)
except AttributeError:
raise exception.BadRequest(reason=msg)
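        # e.g. 'my-app_1' (hypothetical) passes both checks above, while
        # '1app' fails the leading-letter check and 'App' fails the regex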
msg = "Application description must be less than 255 characters."
if app_data.description and len(app_data.description) > 255:
raise exception.BadRequest(reason=msg)
# check if languagepack exists or not
if str(app_data.languagepack).lower() != "false":
try:
objects.registry.Image.get_lp_by_name_or_uuid(
pecan.request.security_context,
app_data.languagepack,
include_operators_lp=True)
except exception.ResourceNotFound:
raise exception.ObjectNotFound(name="Languagepack",
id=app_data.languagepack)
@pecan.expose()
def _lookup(self, app_id, *remainder):
if remainder and not remainder[-1]:
remainder = remainder[:-1]
return AppController(app_id), remainder
@exception.wrap_pecan_controller_exception
@wsme_pecan.wsexpose(app.App, body=app.App, status_code=201)
def post(self, data):
"""Create a new app."""
request.check_request_for_https()
if not data:
raise exception.BadRequest(reason='No data.')
self._validate(data)
handler = app_handler.AppHandler(pecan.request.security_context)
app_data = data.as_dict(app.App)
try:
raw_content = yamlutils.load(pecan.request.body)
except ValueError:
try:
raw_content = json.loads(pecan.request.body)
except ValueError as exp:
LOG.exception(exp)
raise exception.BadRequest(reason='Invalid app data.')
app_data['raw_content'] = json.dumps(raw_content)
new_app = handler.create(app_data)
host_url = pecan.request.application_url.rstrip('/')
created_app = app.App.from_db_model(new_app, host_url)
return created_app
@exception.wrap_wsme_pecan_controller_exception
@wsme_pecan.wsexpose([app.App])
def get_all(self):
"""Return all apps, based on the query provided."""
request.check_request_for_https()
handler = app_handler.AppHandler(pecan.request.security_context)
host_url = pecan.request.application_url.rstrip('/')
all_apps = [app.App.from_db_model(obj, host_url)
for obj in handler.get_all()]
return all_apps
| apache-2.0 |
wwj718/murp-edx | lms/djangoapps/courseware/tests/test_grades.py | 33 | 5102 | """
Test grade calculation.
"""
from django.http import Http404
from django.test.utils import override_settings
from mock import patch
from courseware.tests.modulestore_config import TEST_DATA_MIXED_MODULESTORE
from student.tests.factories import UserFactory
from xmodule.modulestore.tests.factories import CourseFactory
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from opaque_keys.edx.locations import SlashSeparatedCourseKey
from courseware.grades import grade, iterate_grades_for
def _grade_with_errors(student, request, course, keep_raw_scores=False):
"""This fake grade method will throw exceptions for student3 and
student4, but allow any other students to go through normal grading.
It's meant to simulate when something goes really wrong while trying to
grade a particular student, so we can test that we won't kill the entire
course grading run.
"""
if student.username in ['student3', 'student4']:
raise Exception("I don't like {}".format(student.username))
return grade(student, request, course, keep_raw_scores=keep_raw_scores)
@override_settings(MODULESTORE=TEST_DATA_MIXED_MODULESTORE)
class TestGradeIteration(ModuleStoreTestCase):
"""
Test iteration through student gradesets.
"""
COURSE_NUM = "1000"
COURSE_NAME = "grading_test_course"
def setUp(self):
"""
Create a course and a handful of users to assign grades
"""
self.course = CourseFactory.create(
display_name=self.COURSE_NAME,
number=self.COURSE_NUM
)
self.students = [
UserFactory.create(username='student1'),
UserFactory.create(username='student2'),
UserFactory.create(username='student3'),
UserFactory.create(username='student4'),
UserFactory.create(username='student5'),
]
def test_empty_student_list(self):
"""If we don't pass in any students, it should return a zero-length
iterator, but it shouldn't error."""
gradeset_results = list(iterate_grades_for(self.course.id, []))
self.assertEqual(gradeset_results, [])
def test_nonexistent_course(self):
"""If the course we want to get grades for does not exist, a `Http404`
should be raised. This is a horrible crossing of abstraction boundaries
and should be fixed, but for now we're just testing the behavior. :-("""
with self.assertRaises(Http404):
gradeset_results = iterate_grades_for(SlashSeparatedCourseKey("I", "dont", "exist"), [])
gradeset_results.next()
def test_all_empty_grades(self):
"""No students have grade entries"""
all_gradesets, all_errors = self._gradesets_and_errors_for(self.course.id, self.students)
self.assertEqual(len(all_errors), 0)
for gradeset in all_gradesets.values():
self.assertIsNone(gradeset['grade'])
self.assertEqual(gradeset['percent'], 0.0)
@patch('courseware.grades.grade', _grade_with_errors)
def test_grading_exception(self):
"""Test that we correctly capture exception messages that bubble up from
grading. Note that we only see errors at this level if the grading
process for this student fails entirely due to an unexpected event --
having errors in the problem sets will not trigger this.
We patch the grade() method with our own, which will generate the errors
for student3 and student4.
"""
all_gradesets, all_errors = self._gradesets_and_errors_for(self.course.id, self.students)
student1, student2, student3, student4, student5 = self.students
self.assertEqual(
all_errors,
{
student3: "I don't like student3",
student4: "I don't like student4"
}
)
# But we should still have five gradesets
self.assertEqual(len(all_gradesets), 5)
# Even though two will simply be empty
self.assertFalse(all_gradesets[student3])
self.assertFalse(all_gradesets[student4])
# The rest will have grade information in them
self.assertTrue(all_gradesets[student1])
self.assertTrue(all_gradesets[student2])
self.assertTrue(all_gradesets[student5])
################################# Helpers #################################
def _gradesets_and_errors_for(self, course_id, students):
"""Simple helper method to iterate through student grades and give us
two dictionaries -- one that has all students and their respective
gradesets, and one that has only students that could not be graded and
their respective error messages."""
students_to_gradesets = {}
students_to_errors = {}
for student, gradeset, err_msg in iterate_grades_for(course_id, students):
students_to_gradesets[student] = gradeset
if err_msg:
students_to_errors[student] = err_msg
return students_to_gradesets, students_to_errors
| agpl-3.0 |
ajdawson/colormaps | setup.py | 1 | 2163 | """Build and install the colormaps package."""
# Copyright (c) 2012 Andrew Dawson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from distutils.core import setup
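# Pull __version__ out of the package without importing it: exec'ing the
# matching line binds __version__ at module level for use in setup() below.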
for line in open('lib/colormaps/__init__.py').readlines():
if (line.startswith('__version__')):
exec(line.strip())
package_data = {'colormaps': ['palette/*.txt', 'palette/ncl/*.txt',
'palette/brewer/diverging/*.txt',
'palette/brewer/qualitative/*.txt',
'palette/brewer/sequential/*.txt']}
if __name__ == '__main__':
setup(
name='colormaps',
version=__version__,
description='Easily generate colormaps for matplotlib',
author='Andrew Dawson',
author_email='[email protected]',
url='http://github.com/ajdawson/colormaps',
long_description="""
colormaps can generate colormaps of varying lengths from sets of
base colors. It is designed to allow total control of colormaps
in matplotlib.
""",
packages=['colormaps'],
package_dir={'': 'lib'},
package_data=package_data,)
| mit |
lammps/lammps | tools/python/neb_combine.py | 51 | 2170 | #!/usr/bin/env python
# make new dump file by combining snapshots from multiple NEB replica dumps
# Syntax: neb_combine.py -switch arg(s) -switch arg(s) ...
# -o outfile = new dump file
# each snapshot has NEB atoms from all replicas
# -r dump1 dump2 ... = replica dump files of NEB atoms
# can be in any order
# -b dumpfile = background atoms (optional)
# first snapshot in this file used as static non-NEB atoms
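# Example invocation (hypothetical file names):
#   neb_combine.py -o combined.dump -r dump.neb.1 dump.neb.2 dump.neb.3 -b dump.initial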
import sys,os
path = os.environ["LAMMPS_PYTHON_TOOLS"]
sys.path.append(path)
from dump import dump
# parse args
outfile = ""
backfile = ""
rfiles = []
argv = sys.argv
iarg = 1
narg = len(argv)
while iarg < narg:
if argv[iarg] == "-o":
outfile = argv[iarg+1]
iarg += 2
elif argv[iarg] == "-b":
backfile = argv[iarg+1]
iarg += 2
elif argv[iarg] == "-r":
ilast = iarg + 1
while ilast < narg and argv[ilast][0] != '-': ilast += 1
rfiles = argv[iarg+1:ilast]
iarg = ilast
else: break
if iarg < narg or not outfile or not rfiles:
print "Syntax: neb_combine.py -o outfile -b backfile -r dump1 dump2 ..."
sys.exit()
if os.path.exists(outfile): os.remove(outfile)
# ntotal = total atoms in each snapshot
# reset IDs of atoms in each NEB dump file
ntotal = 0
d = []
for file in rfiles:
one = dump(file)
nnew = one.snaps[0].nselect
idvec = range(ntotal+1,ntotal+nnew+1)
one.setv("id",idvec)
ntotal += nnew
d.append(one)
# nback = additional atoms in each snapshot
# reset IDs of atoms in background file
if backfile:
back = dump(backfile)
t = back.time()
back.tselect.one(t[0])
nback = back.snaps[0].nselect
idvec = range(ntotal+1,ntotal+nback+1)
back.setv("id",idvec)
else: nback = 0
ntotal += nback
# write out each snapshot
# natoms = ntotal, by overwriting nselect
# add background atoms if requested
times = d[0].time()
for time in times:
d[0].tselect.one(time)
i = d[0].findtime(time)
hold = d[0].snaps[i].nselect
d[0].snaps[i].nselect = ntotal
d[0].write(outfile,1,1)
d[0].snaps[i].nselect = hold
for one in d[1:]:
one.tselect.one(time)
one.write(outfile,0,1)
if backfile: back.write(outfile,0,1)
| gpl-2.0 |
stefan-balke/librosa | tests/test_display.py | 2 | 9298 | #!/usr/bin/env python
# -*- encoding: utf-8 -*-
# CREATED:2015-02-14 22:51:01 by Brian McFee <[email protected]>
'''Unit tests for display module'''
import warnings
# Disable cache
import os
try:
os.environ.pop('LIBROSA_CACHE_DIR')
except KeyError:
pass
import matplotlib
matplotlib.use('Agg')
matplotlib.rcParams.update(matplotlib.rcParamsDefault)
import matplotlib.style
matplotlib.style.use('seaborn-ticks')
import matplotlib.pyplot as plt
import librosa
import librosa.display
import numpy as np
from nose.tools import nottest, raises, eq_
from mpl_ic import image_comparison
warnings.resetwarnings()
warnings.simplefilter('always')
@nottest
def get_spec(y, sr):
C = np.abs(librosa.cqt(y, sr=sr))
return librosa.stft(y), C, sr
__EXAMPLE_FILE = 'data/test1_22050.wav'
y, sr = librosa.load(__EXAMPLE_FILE)
S, C, sr = get_spec(y, sr)
S_abs = np.abs(S)
S_signed = np.abs(S) - np.median(np.abs(S))
S_bin = S_signed > 0
tempo, beats = librosa.beat.beat_track(y=y, sr=sr, trim=False)
beats = librosa.util.fix_frames(beats, x_max=C.shape[1])
beat_t = librosa.frames_to_time(beats, sr=sr)
Csync = librosa.util.sync(C, beats, aggregate=np.median)
@image_comparison(baseline_images=['complex'], extensions=['png'])
def test_complex_input():
plt.figure()
librosa.display.specshow(S)
@image_comparison(baseline_images=['abs'], extensions=['png'])
def test_abs_input():
plt.figure()
librosa.display.specshow(S_abs)
@image_comparison(baseline_images=['cqt_note'], extensions=['png'])
def test_cqt_note():
plt.figure()
librosa.display.specshow(C, y_axis='cqt_note')
@image_comparison(baseline_images=['cqt_hz'], extensions=['png'])
def test_cqt_hz():
plt.figure()
librosa.display.specshow(C, y_axis='cqt_hz')
@image_comparison(baseline_images=['tempo'], extensions=['png'])
def test_tempo():
T = librosa.feature.tempogram(y=y, sr=sr)
plt.figure()
librosa.display.specshow(T, y_axis='tempo', cmap='magma')
@image_comparison(baseline_images=['tonnetz'], extensions=['png'])
def test_tonnetz():
plt.figure()
chroma = librosa.feature.chroma_cqt(C=C)
ton = librosa.feature.tonnetz(chroma=chroma)
librosa.display.specshow(ton, y_axis='tonnetz')
@image_comparison(baseline_images=['chroma'], extensions=['png'])
def test_chroma():
plt.figure()
plt.subplot(3, 1, 1)
chr1 = librosa.feature.chroma_stft(S=S_abs**2, sr=sr)
librosa.display.specshow(chr1, y_axis='chroma')
plt.subplot(3, 1, 2)
chr2 = librosa.feature.chroma_stft(S=S_abs**2, sr=sr, n_chroma=2*12)
librosa.display.specshow(chr2, y_axis='chroma', bins_per_octave=2*12)
plt.subplot(3, 1, 3)
chr3 = librosa.feature.chroma_stft(S=S_abs**2, sr=sr, n_chroma=3*12)
librosa.display.specshow(chr3, y_axis='chroma', bins_per_octave=3*12)
@image_comparison(baseline_images=['double_chroma'], extensions=['png'])
def test_double_chroma():
plt.figure()
chr1 = librosa.feature.chroma_stft(S=S_abs**2, sr=sr)
chr1 = np.vstack((chr1, chr1))
librosa.display.specshow(chr1, y_axis='chroma', bins_per_octave=12)
@image_comparison(baseline_images=['x_mel'], extensions=['png'])
def test_x_mel():
plt.figure()
M = librosa.feature.melspectrogram(S=S_abs**2)
librosa.display.specshow(M.T, x_axis='mel')
@image_comparison(baseline_images=['y_mel'], extensions=['png'])
def test_y_mel():
plt.figure()
M = librosa.feature.melspectrogram(S=S_abs**2)
librosa.display.specshow(M, y_axis='mel')
@image_comparison(baseline_images=['y_mel_bounded'], extensions=['png'])
def test_y_mel_bounded():
plt.figure()
fmin, fmax = 110, 880
M = librosa.feature.melspectrogram(S=S_abs**2, fmin=fmin, fmax=fmax)
librosa.display.specshow(M, y_axis='mel', fmin=fmin, fmax=fmax)
@image_comparison(baseline_images=['x_none_y_linear'], extensions=['png'])
def test_xaxis_none_yaxis_linear():
plt.figure()
plt.subplot(3, 1, 1)
librosa.display.specshow(S_abs, y_axis='linear')
plt.subplot(3, 1, 2)
librosa.display.specshow(S_signed, y_axis='linear')
plt.subplot(3, 1, 3)
librosa.display.specshow(S_bin, y_axis='linear')
@image_comparison(baseline_images=['x_none_y_log'], extensions=['png'])
def test_xaxis_none_yaxis_log():
plt.figure()
plt.subplot(3, 1, 1)
librosa.display.specshow(S_abs, y_axis='log')
plt.subplot(3, 1, 2)
librosa.display.specshow(S_signed, y_axis='log')
plt.subplot(3, 1, 3)
librosa.display.specshow(S_bin, y_axis='log')
@image_comparison(baseline_images=['x_linear_y_none'], extensions=['png'])
def test_xaxis_linear_yaxis_none():
plt.figure()
plt.subplot(3, 1, 1)
librosa.display.specshow(S_abs.T, x_axis='linear')
plt.subplot(3, 1, 2)
librosa.display.specshow(S_signed.T, x_axis='linear')
plt.subplot(3, 1, 3)
librosa.display.specshow(S_bin.T, x_axis='linear')
@image_comparison(baseline_images=['x_log_y_none'], extensions=['png'])
def test_xaxis_log_yaxis_none():
plt.figure()
plt.subplot(3, 1, 1)
librosa.display.specshow(S_abs.T, x_axis='log')
plt.subplot(3, 1, 2)
librosa.display.specshow(S_signed.T, x_axis='log')
plt.subplot(3, 1, 3)
librosa.display.specshow(S_bin.T, x_axis='log')
@image_comparison(baseline_images=['x_time_y_none'], extensions=['png'])
def test_xaxis_time_yaxis_none():
plt.figure()
librosa.display.specshow(S_abs, x_axis='time')
@image_comparison(baseline_images=['x_none_y_time'], extensions=['png'])
def test_xaxis_none_yaxis_time():
plt.figure()
librosa.display.specshow(S_abs.T, y_axis='time')
@image_comparison(baseline_images=['x_frames_y_none'], extensions=['png'])
def test_xaxis_frames_yaxis_none():
plt.figure()
librosa.display.specshow(S_abs, x_axis='frames')
@image_comparison(baseline_images=['x_none_y_frames'], extensions=['png'])
def test_xaxis_none_yaxis_frames():
plt.figure()
librosa.display.specshow(S_abs.T, y_axis='frames')
@image_comparison(baseline_images=['x_lag_y_none'], extensions=['png'])
def test_xaxis_lag_yaxis_none():
plt.figure()
librosa.display.specshow(S_abs, x_axis='lag')
@image_comparison(baseline_images=['x_none_y_lag'], extensions=['png'])
def test_xaxis_time_yaxis_lag():
plt.figure()
librosa.display.specshow(S_abs.T, y_axis='lag')
@image_comparison(baseline_images=['time_scales_auto'], extensions=['png'])
def test_time_scales_auto():
# sr = 22050, hop_length = 512, S.shape[1] = 198
# 197 * 512 / 22050 ~= 4.6s
plt.figure()
plt.subplot(4, 1, 1)
# sr * 10 -> ms
librosa.display.specshow(S_abs, sr=10 * sr, x_axis='time')
plt.subplot(4, 1, 2)
# sr -> s
librosa.display.specshow(S_abs, sr=sr, x_axis='time')
plt.subplot(4, 1, 3)
# sr / 20 -> m
librosa.display.specshow(S_abs, sr=sr // 20, x_axis='time')
plt.subplot(4, 1, 4)
# sr / (60 * 20) -> h
librosa.display.specshow(S_abs, sr=sr // (60 * 20), x_axis='time')
plt.tight_layout()
@image_comparison(baseline_images=['waveplot_mono'], extensions=['png'])
def test_waveplot_mono():
plt.figure()
plt.subplot(3, 1, 1)
librosa.display.waveplot(y, sr=sr, max_points=None, x_axis='off')
plt.subplot(3, 1, 2)
librosa.display.waveplot(y, sr=sr, x_axis='off')
plt.subplot(3, 1, 3)
librosa.display.waveplot(y, sr=sr, x_axis='time')
@image_comparison(baseline_images=['waveplot_stereo'], extensions=['png'])
def test_waveplot_stereo():
ys = np.vstack([y[np.newaxis, :], 2 * y[np.newaxis, :]])
plt.figure()
librosa.display.waveplot(ys, sr=sr)
@raises(librosa.ParameterError)
def test_unknown_wavaxis():
plt.figure()
librosa.display.waveplot(y, sr=sr, x_axis='something not in the axis map')
@raises(librosa.ParameterError)
def test_waveplot_bad_maxsr():
plt.figure()
librosa.display.waveplot(y, sr=sr, max_sr=0)
@raises(librosa.ParameterError)
def test_waveplot_bad_maxploints():
plt.figure()
librosa.display.waveplot(y, sr=sr, max_points=0)
def test_unknown_axis():
@raises(librosa.ParameterError)
def __test(axis):
kwargs = dict()
kwargs.setdefault(axis, 'something not in the axis map')
plt.figure()
librosa.display.specshow(S_abs, **kwargs)
yield __test, 'x_axis'
yield __test, 'y_axis'
def test_cmap_robust():
def __test(data):
cmap1 = librosa.display.cmap(data, robust=False)
cmap2 = librosa.display.cmap(data, robust=True)
assert type(cmap1) is type(cmap2)
if isinstance(cmap1, matplotlib.colors.ListedColormap):
assert np.allclose(cmap1.colors, cmap2.colors)
elif isinstance(cmap1, matplotlib.colors.LinearSegmentedColormap):
eq_(cmap1.name, cmap2.name)
else:
eq_(cmap1, cmap2)
# Inputs here are constructed to not need robust sign estimation
for D in [1.0 + S_abs, -(1.0 + S_abs), S_signed, S_bin]:
yield __test, D
@image_comparison(baseline_images=['coords'], extensions=['png'])
def test_coords():
plt.figure()
librosa.display.specshow(Csync, x_coords=beat_t, x_axis='time', y_axis='cqt_note')
@raises(librosa.ParameterError)
def test_bad_coords():
librosa.display.specshow(S_abs, x_coords=np.arange(S.shape[1] // 2))
| isc |
hbhzwj/imalse | core/configure/Anomaly.py | 2 | 10446 | #!/usr/bin/env python
### -- [2012-03-04 14:56:03] FlowRate and FlowSize anomaly parameter has be changed as
### -- ratio instead of absolute value
##-- [2012-04-08 22:31:20] Add GenAnomalyDot
##-- [2012-04-09 18:31:22] refactoring the whole file
## -- [2012-04-10 01:14:07] FLOW_RATE can work
##-- [2012-04-10 17:16:27] add _infect_modulator, make anomaly more general
# import numpy as np
import sys
sys.path.append("..")
import settings
from util import Load
from mod_util import choose_ip_addr
import cPickle as pickle
# from numpy import cumsum, diff
def cumsum(it):
total = 0
for x in it:
total += x
yield total
def diff(x):
res = []
for i in xrange(len(x)-1):
res.append(x[i+1]-x[i])
return res
def get_pos(l, v):
"""index of largest element in l that is less than v"""
for i in xrange(len(l)):
if l[i] < v :
continue
return i - 1
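# e.g. get_pos([0, 20, 40, 50], 35) -> 1, the index of 20,
# the largest element that is still less than 35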
def insert_break_pt(b, dur, num):
"""b is a break point that will break dur, for example,
if b = 35, and dur = (20, 20, 10), num = (1, 2, 1)the result will be
(20, 15, 5, 10), the new num will be (1, 2, 2, 1)"""
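    # Worked example (the values from the docstring above; the third return
    # value is the index just past the inserted break point):
    #   insert_break_pt(35, (20, 20, 10), (1, 2, 1))
    #   -> ([20, 15, 5, 10], [1, 2, 2, 1], 2)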
t = [0] + list(cumsum( dur ))
nt = copy.deepcopy(t)
new_num = list(copy.deepcopy(num))
i = get_pos(t, b)
if i is None:
raise Exception('[insert_break_pt], maybe you have insert an anomaly in an unsuitable time? ')
elif i == -1 or i == len(t) - 1:
return dur, num, i+1;
else:
nt.insert(i+1, b)
new_num.insert(i+1, num[i])
new_dur = list(diff(nt))
return new_dur, new_num, i+1
class BadConfigError(Exception):
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
import copy
class Anomaly(object):
"""basis class for anomaly. Its subclass will provide run() method
ano_desc:
**T**: start, end time for the anomaly
**change**: a dictionary specify how the attributes of the existing modules are
changed. the value is a string, if the first char is '=', it means change the attribute
to the value behind '='. If the first char is '+', it means add the attribute by
the value behind '+'. Likewise if the first char is 'x', it means multiply the attribute by
the value behind 'x'.
for example:
change = {'flow_size_mean':'x2', 'flow_arrival_rate':'=6', 'flow_size_var=+3'},
means: change the flow_size_mean to two times of the orginal value, change flow_arrival_rate
to be 6 and add the flow_size_var by 3.
"""
def __init__(self, ano_desc):
self.ano_desc = ano_desc
self.ano_node = None
def get_profile_with_ano(self, mod_start, mod_profile, ano_t):
"""in fs, one modulator can only one behaviour
toe simulate the change of behaviour of the modulator,
the abnormal haviour will be generator by new modulators."""
start, end = ano_t
start -= mod_start
end -= mod_start
d, n, i1 = insert_break_pt(start, mod_profile[0], mod_profile[1])
d, n, i2 = insert_break_pt(end, d, n)
normal_profile_1 = ( tuple(d[:i1]), tuple(n[:i1]) )
abnormal_profile = ( tuple(d[i1:i2]), tuple(n[i1:i2]) )
normal_profile_2 = ( tuple(d[i2:]), tuple(n[i2:]) )
return normal_profile_1, abnormal_profile, normal_profile_2
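        # Worked example (hypothetical numbers): mod_start=0,
        # mod_profile=((100,), (1,)) and ano_t=(30, 60) split into
        # ((30,), (1,)), ((30,), (1,)) and ((40,), (1,))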
# def cut_profile(profile, status):
# """cut into three pieces"""
def _infect_modulator(self, ano_t, m_id, mod):
ano_node = self.ano_node
generator = ano_node.generator
mod_start = eval(mod['start'])
mod_profile = mod['profile']
np1, ap, np2 = self.get_profile_with_ano(mod_start, mod_profile, ano_t)
s_id = mod['generator'] # get id for source generator
ano_node.add_modulator(start=str(mod_start), profile=np1, generator = [generator[s_id]])
start, end = ano_t
# st = mod_start + float(np.sum(np1[0]))
st = mod_start + float(sum(np1[0]))
assert(st == start)
self.new_generator = generator[s_id].get_new_gen(self.ano_desc['change'])
ano_node.add_modulator(start=str(start),
profile=ap,
generator = [ self.new_generator ])
# export para to help to export ano flo
self._export_ano_flow_para(self.new_generator)
# st = mod_start + float(np.sum(np1[0])) + float(np.sum(ap[0]))
st = mod_start + float(sum(np1[0])) + float(sum(ap[0]))
assert(st == end)
ano_node.add_modulator(start=str(end), profile=np2, generator=[ generator[s_id] ])
# delete original modulator
del ano_node.modulator[m_id]
del ano_node.generator[s_id]
def _export_ano_flow_para(self, new_generator):
"""export para to help to export ano flows"""
ano_flow_para = copy.deepcopy(new_generator.para)
ano_flow_para['ano_type'] = self.ano_desc['anoType']
pickle.dump(ano_flow_para, open(settings.EXPORT_ABNORMAL_FLOW_PARA_FILE, 'w')) # For export abnormal flows
def run(self, net):
"""inject itself into the network"""
self.ano_node = net.node_list[self.ano_desc['ano_node_seq']]
ano_t = self.ano_desc['T']
m_back = copy.deepcopy(self.ano_node.modulator)
for m_id, mod in m_back.iteritems(): # infect each modulator, change attribute by ratio
self._infect_modulator(ano_t, m_id, mod)
class AddModulatorAnomaly(Anomaly):
"""instead of changing parameters of existing modulators, simply add new modulators
ano_desc:
- **dst_nodes**: the destination node of the modulators, will add one modulator
for each dst_nodes
- **gen_desc**: the descriptor for the generator of the modulator
- **T**: a two element list or tuple, the start, end time for the anomaly.
"""
def run(self, net):
self.ano_node = net.node_list[self.ano_desc['ano_node_seq']]
self.net = net
self._config_traffic()
def _config_traffic(self):
"""add modulator to each srv"""
nn = len(self.net.node_list)
srv_node_list = [self.net.node_list[i] for i in xrange(nn) if i in self.ano_desc['dst_nodes'] ]
start, end = self.ano_desc['T']
for srv_node in srv_node_list:
gen_desc = Load(self.ano_desc['gen_desc'])
gen_desc['ipsrc'] = choose_ip_addr(self.ano_node.ipdests).rsplit('/')[0]
gen_desc['ipdst'] = choose_ip_addr(srv_node.ipdests).rsplit('/')[0]
self.ano_node.add_modulator(start=str(start),
profile='((%d,),(1,))' %(end-start),
generator=[get_generator(gen_desc)] )
from Edge import NEdge
from Node import NNode
from Generator import get_generator
class AtypicalUserAnomaly(Anomaly):
"""anomaly of atypical user. an atypical user joins to the network during some time.
Atypical user refer those user has large IP distance with users in the network."""
    ATIP = None  # Atypical IP set; IPs are selected from this set to add nodes with atypical IPs
    idx = 0  # An indicator separating the IPs that have already been selected from those that have not
NAME = 'AtypicaUser'
def __init__(self, ano_desc):
"""link_to is a list of variables representing the connection to all other nodes
        * link_to[i] == 1 means there is a link from the atypical node to node i.
        * link_to[i] == -1 means there is a link from node i to the atypical node.
"""
self.ano_desc = ano_desc
Anomaly.__init__(self, ano_desc)
        if self.ATIP is None:
self.ATIP = ano_desc['ATIP']
self.net = None
self.ano_node = None
def _change_topology(self):
link_to = self.ano_desc['link_to']
link_attr = self.ano_desc['link_attr']
for i in xrange(len(link_to)):
if link_to[i] == 1:
edge = NEdge(self.ano_node, self.net.node_list[i], link_attr )
elif link_to[i] == -1:
                edge = NEdge(self.net.node_list[i], self.ano_node, link_attr)
else:
raise ValueError('unknown link_to value')
self.net.add_edge(edge)
def _config_traffic(self):
nn = len(self.net.node_list)
srv_node_list = [self.net.node_list[i] for i in xrange(nn) if i in self.net.net_desc['srv_list'] ]
start, end = self.ano_desc['T']
for srv_node in srv_node_list:
gen_desc = Load(self.ano_desc['gen_desc'])
gen_desc['ipsrc'] = choose_ip_addr(self.ano_node.ipdests).rsplit('/')[0]
gen_desc['ipdst'] = choose_ip_addr(srv_node.ipdests).rsplit('/')[0]
self.ano_node.add_modulator(start=str(start),
profile='((%d,),(1,))' %(end-start),
generator=get_generator(gen_desc) )
def _get_ano_node(self):
ipdest = [ self.ATIP[self.idx] ]
self.idx += 1
nn = len(self.net.node_list) # Add by J.W
self.ano_node = NNode(ipdest, nn)
self._config_traffic()
def _export_ip_addr(self):
fid = open(settings.EXPORT_ABNORMAL_FLOW_PARA_FILE, 'w')
fid.write( ' '.join([str(i) for i in self.ano_node.ipdests]) )
fid.close()
def _export_ano_flow_para(self):
"""export para to help to export ano flows"""
self._export_ip_addr()
def run(self, net):
        '''Add a node for the atypical user to the network.
        The IP address for the atypical user is selected from settings.atypical_ip_file.'''
self.net = net
self._get_ano_node()
net.add_node(self.ano_node)
self._change_topology()
self._export_ano_flow_para()
class TargetOneServer(Anomaly):
"""Only change the behaviour in one server
ano_desc should have id **srv_id** of that sever node"""
def run(self, net):
self.ano_node = net.node_list[self.ano_desc['ano_node_seq']]
ano_t = self.ano_desc['T']
srv_id = self.ano_desc['srv_id']
srv_ip_addr = net.node_list[srv_id].ipdests
m_back = copy.deepcopy(self.ano_node.modulator)
for m_id, mod in m_back.iteritems(): # For each modulator
s_id = mod['generator'] # get id for source generator
if self.ano_node.generator[s_id]['ipdst'] not in srv_ip_addr:
continue
self._infect_modulator(ano_t, m_id, mod)
| gpl-3.0 |
solitone/sdsreader | sdsreader.py | 1 | 1549 | import arrow, logging, time
from pmuploader import PmDataUploader
from sdserror import SdsError, SdsNoPacketError
from sdssensor import SdsSensor
LOGLEVEL = logging.INFO
PORT = '/dev/serial0'
# Time delta between measurements, in seconds:
SAMPLING_PERIOD = 15
URL = "http://dusty.pythonanywhere.com/pm/save/"
def loop():
while True:
try:
measurement = pmSensor.getMeasurement()
measurement['time'] = arrow.now()
uploader.sendMeasurement(measurement)
time.sleep(pmSensor.samplingPeriod)
except SdsError as e:
logging.error("SDS error: %s %s", type(e), e.args)
if __name__ == "__main__":
logging.basicConfig(format = '%(asctime)s [%(levelname)s] %(message)s',
level = LOGLEVEL)
print("Starting reading SDS PM sensor on port", PORT)
try:
pmSensor = SdsSensor(PORT, SAMPLING_PERIOD)
pmSensor.setId()
print("Sensor ID:", pmSensor.id)
print("Sampling period:", pmSensor.samplingPeriod, "s")
uploader = PmDataUploader(URL)
loop()
except SdsNoPacketError as e:
print(e.message)
print("---> SDS to RasPi connection:", "\n",
" - RED wire <-> pin 4", "\n",
" - BLACK wire <-> pin 6", "\n",
" - YELLOW wire <-> pin 8", "\n",
" - BLUE wire <-> pin 10")
except Exception as e:
logging.error("%s %s", type(e), e.args)
raise
| gpl-3.0 |
oopy/micropython | tests/float/complex1.py | 2 | 2018 | # test basic complex number functionality
# constructor
print(complex(1))
print(complex(1.2))
print(complex(1.2j))
print(complex("1"))
print(complex("1.2"))
print(complex("1.2j"))
print(complex(1, 2))
print(complex(1j, 2j))
# unary ops
print(bool(1j))
print(+(1j))
print(-(1 + 2j))
# binary ops
print(1j + False)
print(1j + True)
print(1j + 2)
print(1j + 2j)
print(1j - 2)
print(1j - 2j)
print(1j * 2)
print(1j * 2j)
print(1j / 2)
print((1j / 2j).real)
print(1j / (1 + 2j))
ans = 0j ** 0; print("%.5g %.5g" % (ans.real, ans.imag))
ans = 0j ** 1; print("%.5g %.5g" % (ans.real, ans.imag))
ans = 0j ** 0j; print("%.5g %.5g" % (ans.real, ans.imag))
ans = 1j ** 2.5; print("%.5g %.5g" % (ans.real, ans.imag))
ans = 1j ** 2.5j; print("%.5g %.5g" % (ans.real, ans.imag))
# comparison
print(1j == 1)
print(1j == 1j)
# comparison of nan is special
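# (nan compares unequal to everything, including itself)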
nan = float('nan') * 1j
print(nan == 1j)
print(nan == nan)
# builtin abs
print(abs(1j))
print("%.5g" % abs(1j + 2))
# builtin hash
print(hash(1 + 0j))
print(type(hash(1j)))
# float on lhs should delegate to complex
print(1.2 + 3j)
# check printing of inf/nan
print(float('nan') * 1j)
print(float('inf') * (1 + 1j))
print(float('-inf') * (1 + 1j))
# can't assign to attributes
try:
(1j).imag = 0
except AttributeError:
print('AttributeError')
# can't convert rhs to complex
try:
1j + []
except TypeError:
print("TypeError")
# unsupported unary op
try:
~(1j)
except TypeError:
print("TypeError")
# unsupported binary op
try:
1j // 2
except TypeError:
print("TypeError")
# unsupported binary op
try:
1j < 2j
except TypeError:
print("TypeError")
#small int on LHS, complex on RHS, unsupported op
try:
print(1 | 1j)
except TypeError:
print('TypeError')
# zero division
try:
1j / 0
except ZeroDivisionError:
print("ZeroDivisionError")
# zero division via power
try:
0j ** -1
except ZeroDivisionError:
print("ZeroDivisionError")
try:
0j ** 1j
except ZeroDivisionError:
print("ZeroDivisionError")
| mit |
dmlc/tvm | tests/python/frontend/mxnet/model_zoo/inception_v3.py | 5 | 11306 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Inception V3, suitable for images of around 299 x 299
Reference:
Szegedy, Christian, et al. "Rethinking the Inception Architecture for Computer Vision." arXiv preprint arXiv:1512.00567 (2015).
Adopted from https://github.com/apache/incubator-mxnet/blob/
master/example/image-classification/symbols/inception-v3.py
"""
import mxnet as mx
import numpy as np
def Conv(data, num_filter, kernel=(1, 1), stride=(1, 1), pad=(0, 0), name=None, suffix=""):
conv = mx.sym.Convolution(
data=data,
num_filter=num_filter,
kernel=kernel,
stride=stride,
pad=pad,
no_bias=True,
name="%s%s_conv2d" % (name, suffix),
)
bn = mx.sym.BatchNorm(data=conv, eps=2e-5, name="%s%s_batchnorm" % (name, suffix))
act = mx.sym.Activation(data=bn, act_type="relu", name="%s%s_relu" % (name, suffix))
return act
def Inception7A(
data, num_1x1, num_3x3_red, num_3x3_1, num_3x3_2, num_5x5_red, num_5x5, pool, proj, name
):
tower_1x1 = Conv(data, num_1x1, name=("%s_conv" % name))
tower_5x5 = Conv(data, num_5x5_red, name=("%s_tower" % name), suffix="_conv")
tower_5x5 = Conv(
tower_5x5, num_5x5, kernel=(5, 5), pad=(2, 2), name=("%s_tower" % name), suffix="_conv_1"
)
tower_3x3 = Conv(data, num_3x3_red, name=("%s_tower_1" % name), suffix="_conv")
tower_3x3 = Conv(
tower_3x3,
num_3x3_1,
kernel=(3, 3),
pad=(1, 1),
name=("%s_tower_1" % name),
suffix="_conv_1",
)
tower_3x3 = Conv(
tower_3x3,
num_3x3_2,
kernel=(3, 3),
pad=(1, 1),
name=("%s_tower_1" % name),
suffix="_conv_2",
)
pooling = mx.sym.Pooling(
data=data,
kernel=(3, 3),
stride=(1, 1),
pad=(1, 1),
pool_type=pool,
name=("%s_pool_%s_pool" % (pool, name)),
)
cproj = Conv(pooling, proj, name=("%s_tower_2" % name), suffix="_conv")
concat = mx.sym.Concat(
*[tower_1x1, tower_5x5, tower_3x3, cproj], name="ch_concat_%s_chconcat" % name
)
return concat
# First Downsample
def Inception7B(data, num_3x3, num_d3x3_red, num_d3x3_1, num_d3x3_2, pool, name):
tower_3x3 = Conv(
data, num_3x3, kernel=(3, 3), pad=(0, 0), stride=(2, 2), name=("%s_conv" % name)
)
tower_d3x3 = Conv(data, num_d3x3_red, name=("%s_tower" % name), suffix="_conv")
tower_d3x3 = Conv(
tower_d3x3,
num_d3x3_1,
kernel=(3, 3),
pad=(1, 1),
stride=(1, 1),
name=("%s_tower" % name),
suffix="_conv_1",
)
tower_d3x3 = Conv(
tower_d3x3,
num_d3x3_2,
kernel=(3, 3),
pad=(0, 0),
stride=(2, 2),
name=("%s_tower" % name),
suffix="_conv_2",
)
pooling = mx.sym.Pooling(
data=data,
kernel=(3, 3),
stride=(2, 2),
pad=(0, 0),
pool_type="max",
name=("max_pool_%s_pool" % name),
)
concat = mx.sym.Concat(*[tower_3x3, tower_d3x3, pooling], name="ch_concat_%s_chconcat" % name)
return concat
def Inception7C(
data,
num_1x1,
num_d7_red,
num_d7_1,
num_d7_2,
num_q7_red,
num_q7_1,
num_q7_2,
num_q7_3,
num_q7_4,
pool,
proj,
name,
):
tower_1x1 = Conv(data=data, num_filter=num_1x1, kernel=(1, 1), name=("%s_conv" % name))
tower_d7 = Conv(data=data, num_filter=num_d7_red, name=("%s_tower" % name), suffix="_conv")
tower_d7 = Conv(
data=tower_d7,
num_filter=num_d7_1,
kernel=(1, 7),
pad=(0, 3),
name=("%s_tower" % name),
suffix="_conv_1",
)
tower_d7 = Conv(
data=tower_d7,
num_filter=num_d7_2,
kernel=(7, 1),
pad=(3, 0),
name=("%s_tower" % name),
suffix="_conv_2",
)
tower_q7 = Conv(data=data, num_filter=num_q7_red, name=("%s_tower_1" % name), suffix="_conv")
tower_q7 = Conv(
data=tower_q7,
num_filter=num_q7_1,
kernel=(7, 1),
pad=(3, 0),
name=("%s_tower_1" % name),
suffix="_conv_1",
)
tower_q7 = Conv(
data=tower_q7,
num_filter=num_q7_2,
kernel=(1, 7),
pad=(0, 3),
name=("%s_tower_1" % name),
suffix="_conv_2",
)
tower_q7 = Conv(
data=tower_q7,
num_filter=num_q7_3,
kernel=(7, 1),
pad=(3, 0),
name=("%s_tower_1" % name),
suffix="_conv_3",
)
tower_q7 = Conv(
data=tower_q7,
num_filter=num_q7_4,
kernel=(1, 7),
pad=(0, 3),
name=("%s_tower_1" % name),
suffix="_conv_4",
)
pooling = mx.sym.Pooling(
data=data,
kernel=(3, 3),
stride=(1, 1),
pad=(1, 1),
pool_type=pool,
name=("%s_pool_%s_pool" % (pool, name)),
)
cproj = Conv(
data=pooling, num_filter=proj, kernel=(1, 1), name=("%s_tower_2" % name), suffix="_conv"
)
# concat
concat = mx.sym.Concat(
*[tower_1x1, tower_d7, tower_q7, cproj], name="ch_concat_%s_chconcat" % name
)
return concat
def Inception7D(
data, num_3x3_red, num_3x3, num_d7_3x3_red, num_d7_1, num_d7_2, num_d7_3x3, pool, name
):
tower_3x3 = Conv(data=data, num_filter=num_3x3_red, name=("%s_tower" % name), suffix="_conv")
tower_3x3 = Conv(
data=tower_3x3,
num_filter=num_3x3,
kernel=(3, 3),
pad=(0, 0),
stride=(2, 2),
name=("%s_tower" % name),
suffix="_conv_1",
)
tower_d7_3x3 = Conv(
data=data, num_filter=num_d7_3x3_red, name=("%s_tower_1" % name), suffix="_conv"
)
tower_d7_3x3 = Conv(
data=tower_d7_3x3,
num_filter=num_d7_1,
kernel=(1, 7),
pad=(0, 3),
name=("%s_tower_1" % name),
suffix="_conv_1",
)
tower_d7_3x3 = Conv(
data=tower_d7_3x3,
num_filter=num_d7_2,
kernel=(7, 1),
pad=(3, 0),
name=("%s_tower_1" % name),
suffix="_conv_2",
)
tower_d7_3x3 = Conv(
data=tower_d7_3x3,
num_filter=num_d7_3x3,
kernel=(3, 3),
stride=(2, 2),
name=("%s_tower_1" % name),
suffix="_conv_3",
)
pooling = mx.sym.Pooling(
data=data,
kernel=(3, 3),
stride=(2, 2),
pool_type=pool,
name=("%s_pool_%s_pool" % (pool, name)),
)
# concat
concat = mx.sym.Concat(*[tower_3x3, tower_d7_3x3, pooling], name="ch_concat_%s_chconcat" % name)
return concat
def Inception7E(
data,
num_1x1,
num_d3_red,
num_d3_1,
num_d3_2,
num_3x3_d3_red,
num_3x3,
num_3x3_d3_1,
num_3x3_d3_2,
pool,
proj,
name,
):
tower_1x1 = Conv(data=data, num_filter=num_1x1, kernel=(1, 1), name=("%s_conv" % name))
tower_d3 = Conv(data=data, num_filter=num_d3_red, name=("%s_tower" % name), suffix="_conv")
tower_d3_a = Conv(
data=tower_d3,
num_filter=num_d3_1,
kernel=(1, 3),
pad=(0, 1),
name=("%s_tower" % name),
suffix="_mixed_conv",
)
tower_d3_b = Conv(
data=tower_d3,
num_filter=num_d3_2,
kernel=(3, 1),
pad=(1, 0),
name=("%s_tower" % name),
suffix="_mixed_conv_1",
)
tower_3x3_d3 = Conv(
data=data, num_filter=num_3x3_d3_red, name=("%s_tower_1" % name), suffix="_conv"
)
tower_3x3_d3 = Conv(
data=tower_3x3_d3,
num_filter=num_3x3,
kernel=(3, 3),
pad=(1, 1),
name=("%s_tower_1" % name),
suffix="_conv_1",
)
tower_3x3_d3_a = Conv(
data=tower_3x3_d3,
num_filter=num_3x3_d3_1,
kernel=(1, 3),
pad=(0, 1),
name=("%s_tower_1" % name),
suffix="_mixed_conv",
)
tower_3x3_d3_b = Conv(
data=tower_3x3_d3,
num_filter=num_3x3_d3_2,
kernel=(3, 1),
pad=(1, 0),
name=("%s_tower_1" % name),
suffix="_mixed_conv_1",
)
pooling = mx.sym.Pooling(
data=data,
kernel=(3, 3),
stride=(1, 1),
pad=(1, 1),
pool_type=pool,
name=("%s_pool_%s_pool" % (pool, name)),
)
cproj = Conv(
data=pooling, num_filter=proj, kernel=(1, 1), name=("%s_tower_2" % name), suffix="_conv"
)
# concat
concat = mx.sym.Concat(
*[tower_1x1, tower_d3_a, tower_d3_b, tower_3x3_d3_a, tower_3x3_d3_b, cproj],
name="ch_concat_%s_chconcat" % name,
)
return concat
def get_symbol(num_classes=1000, **kwargs):
data = mx.sym.Variable(name="data")
# stage 1
conv = Conv(data, 32, kernel=(3, 3), stride=(2, 2), name="conv")
conv_1 = Conv(conv, 32, kernel=(3, 3), name="conv_1")
conv_2 = Conv(conv_1, 64, kernel=(3, 3), pad=(1, 1), name="conv_2")
pool = mx.sym.Pooling(data=conv_2, kernel=(3, 3), stride=(2, 2), pool_type="max", name="pool")
# stage 2
conv_3 = Conv(pool, 80, kernel=(1, 1), name="conv_3")
conv_4 = Conv(conv_3, 192, kernel=(3, 3), name="conv_4")
pool1 = mx.sym.Pooling(data=conv_4, kernel=(3, 3), stride=(2, 2), pool_type="max", name="pool1")
# # stage 3
in3a = Inception7A(pool1, 64, 64, 96, 96, 48, 64, "avg", 32, "mixed")
in3b = Inception7A(in3a, 64, 64, 96, 96, 48, 64, "avg", 64, "mixed_1")
in3c = Inception7A(in3b, 64, 64, 96, 96, 48, 64, "avg", 64, "mixed_2")
in3d = Inception7B(in3c, 384, 64, 96, 96, "max", "mixed_3")
# stage 4
in4a = Inception7C(in3d, 192, 128, 128, 192, 128, 128, 128, 128, 192, "avg", 192, "mixed_4")
in4b = Inception7C(in4a, 192, 160, 160, 192, 160, 160, 160, 160, 192, "avg", 192, "mixed_5")
in4c = Inception7C(in4b, 192, 160, 160, 192, 160, 160, 160, 160, 192, "avg", 192, "mixed_6")
in4d = Inception7C(in4c, 192, 192, 192, 192, 192, 192, 192, 192, 192, "avg", 192, "mixed_7")
in4e = Inception7D(in4d, 192, 320, 192, 192, 192, 192, "max", "mixed_8")
# stage 5
in5a = Inception7E(in4e, 320, 384, 384, 384, 448, 384, 384, 384, "avg", 192, "mixed_9")
in5b = Inception7E(in5a, 320, 384, 384, 384, 448, 384, 384, 384, "max", 192, "mixed_10")
# pool
pool = mx.sym.Pooling(
data=in5b, kernel=(8, 8), stride=(1, 1), pool_type="avg", name="global_pool"
)
flatten = mx.sym.Flatten(data=pool, name="flatten")
fc1 = mx.sym.FullyConnected(data=flatten, num_hidden=num_classes, name="fc1", flatten=False)
softmax = mx.sym.SoftmaxOutput(data=fc1, name="softmax")
return softmax
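# --- Usage sketch (editorial addition, not part of the upstream file) ---
# Quick shape check of the assembled network; assumes a working `mxnet`
# install and the 299 x 299 input size mentioned in the module docstring.
if __name__ == "__main__":
    sym = get_symbol(num_classes=1000)
    # Infer shapes for a batch of one 3-channel 299 x 299 image.
    _, out_shapes, _ = sym.infer_shape(data=(1, 3, 299, 299))
    print("softmax output shape:", out_shapes[0])  # expected: (1, 1000)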
| apache-2.0 |
devendermishrajio/nova | nova/scheduler/filters/availability_zone_filter.py | 1 | 2359 | # Copyright (c) 2011-2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from oslo_log import log as logging
from nova.scheduler import filters
from nova.scheduler.filters import utils
LOG = logging.getLogger(__name__)
CONF = cfg.CONF
CONF.import_opt('default_availability_zone', 'nova.availability_zones')
class AvailabilityZoneFilter(filters.BaseHostFilter):
"""Filters Hosts by availability zone.
Works with aggregate metadata availability zones, using the key
'availability_zone'
Note: in theory a compute node can be part of multiple availability_zones
"""
# Availability zones do not change within a request
run_filter_once_per_request = True
@filters.compat_legacy_props
def host_passes(self, host_state, filter_properties):
spec = filter_properties.get('request_spec', {})
props = spec.get('instance_properties', {})
availability_zone = props.get('availability_zone')
if not availability_zone:
return True
metadata = utils.aggregate_metadata_get_by_host(
host_state, key='availability_zone')
if 'availability_zone' in metadata:
hosts_passes = availability_zone in metadata['availability_zone']
host_az = metadata['availability_zone']
else:
hosts_passes = availability_zone == CONF.default_availability_zone
host_az = CONF.default_availability_zone
if not hosts_passes:
LOG.debug("Availability Zone '%(az)s' requested. "
"%(host_state)s has AZs: %(host_az)s",
{'host_state': host_state,
'az': availability_zone,
'host_az': host_az})
return hosts_passes
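# Illustrative sketch (editorial addition): the filter only reads
# request_spec -> instance_properties -> availability_zone, so a minimal
# filter_properties payload looks like this (host_state is assumed to come
# from the scheduler's HostManager):
#
#   filter_properties = {
#       'request_spec': {
#           'instance_properties': {'availability_zone': 'nova'},
#       },
#   }
#   AvailabilityZoneFilter().host_passes(host_state, filter_properties)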
| apache-2.0 |
downpoured/lnzscript | lnzeditor/app/pyversion/python_extension_src/scite225_mainline/scintilla/test/XiteWin.py | 5 | 16993 | # -*- coding: utf-8 -*-
from __future__ import with_statement
import os, sys, unittest
import ctypes
from ctypes import wintypes
from ctypes import c_int, c_ulong, c_char_p, c_wchar_p, c_ushort
user32=ctypes.windll.user32
gdi32=ctypes.windll.gdi32
kernel32=ctypes.windll.kernel32
from MessageNumbers import msgs, sgsm
import XiteMenu
scintillaDirectory = ".."
scintillaIncludeDirectory = os.path.join(scintillaDirectory, "include")
sys.path.append(scintillaIncludeDirectory)
import Face
scintillaBinDirectory = os.path.join(scintillaDirectory, "bin")
os.environ['PATH'] = os.environ['PATH'] + ";" + scintillaBinDirectory
#print(os.environ['PATH'])
WFUNC = ctypes.WINFUNCTYPE(c_int, c_int, c_int, c_int, c_int)
WS_CHILD = 0x40000000
WS_CLIPCHILDREN = 0x2000000
WS_OVERLAPPEDWINDOW = 0xcf0000
WS_VISIBLE = 0x10000000
WS_HSCROLL = 0x100000
WS_VSCROLL = 0x200000
WA_INACTIVE = 0
MF_POPUP = 16
MF_SEPARATOR = 0x800
IDYES = 6
OFN_HIDEREADONLY = 4
MB_OK = 0
MB_YESNOCANCEL = 3
MF_CHECKED = 8
MF_UNCHECKED = 0
SW_SHOW = 5
PM_REMOVE = 1
VK_SHIFT = 16
VK_CONTROL = 17
VK_MENU = 18
class OPENFILENAME(ctypes.Structure):
_fields_ = (("lStructSize", c_int),
("hwndOwner", c_int),
("hInstance", c_int),
("lpstrFilter", c_wchar_p),
("lpstrCustomFilter", c_char_p),
("nMaxCustFilter", c_int),
("nFilterIndex", c_int),
("lpstrFile", c_wchar_p),
("nMaxFile", c_int),
("lpstrFileTitle", c_wchar_p),
("nMaxFileTitle", c_int),
("lpstrInitialDir", c_wchar_p),
("lpstrTitle", c_wchar_p),
("flags", c_int),
("nFileOffset", c_ushort),
("nFileExtension", c_ushort),
("lpstrDefExt", c_char_p),
("lCustData", c_int),
("lpfnHook", c_char_p),
("lpTemplateName", c_char_p),
("pvReserved", c_char_p),
("dwReserved", c_int),
("flagsEx", c_int))
def __init__(self, win, title):
ctypes.Structure.__init__(self)
self.lStructSize = ctypes.sizeof(OPENFILENAME)
self.nMaxFile = 1024
self.hwndOwner = win
self.lpstrTitle = title
        self.flags = OFN_HIDEREADONLY  # ctypes field is "flags"; "Flags" would silently set a plain attribute
trace = False
#~ trace = True
def WindowSize(w):
rc = ctypes.wintypes.RECT()
user32.GetClientRect(w, ctypes.byref(rc))
return rc.right - rc.left, rc.bottom - rc.top
def IsKeyDown(key):
return (user32.GetKeyState(key) & 0x8000) != 0
def KeyTranslate(w):
tr = { 9: "Tab", 0xD:"Enter", 0x1B: "Esc" }
if w in tr:
return tr[w]
elif ord("A") <= w <= ord("Z"):
return chr(w)
elif 0x70 <= w <= 0x7b:
return "F" + str(w-0x70+1)
else:
return "Unknown_" + hex(w)
class WNDCLASS(ctypes.Structure):
_fields_= (\
('style', c_int),
('lpfnWndProc', WFUNC),
('cls_extra', c_int),
('wnd_extra', c_int),
('hInst', c_int),
('hIcon', c_int),
('hCursor', c_int),
('hbrBackground', c_int),
('menu_name', c_wchar_p),
('lpzClassName', c_wchar_p),
)
class XTEXTRANGE(ctypes.Structure):
_fields_= (\
('cpMin', c_int),
('cpMax', c_int),
('lpstrText', c_char_p),
)
class TEXTRANGE(ctypes.Structure):
_fields_= (\
('cpMin', c_int),
('cpMax', c_int),
('lpstrText', ctypes.POINTER(ctypes.c_char)),
)
class FINDTEXT(ctypes.Structure):
_fields_= (\
('cpMin', c_int),
('cpMax', c_int),
('lpstrText', c_char_p),
('cpMinText', c_int),
('cpMaxText', c_int),
)
hinst = ctypes.windll.kernel32.GetModuleHandleW(0)
def RegisterClass(name, func, background = 0):
# register a window class for toplevel windows.
wc = WNDCLASS()
wc.style = 0
wc.lpfnWndProc = func
wc.cls_extra = 0
wc.wnd_extra = 0
wc.hInst = hinst
wc.hIcon = 0
wc.hCursor = 0
wc.hbrBackground = background
wc.menu_name = 0
wc.lpzClassName = name
user32.RegisterClassW(ctypes.byref(wc))
class SciCall:
def __init__(self, fn, ptr, msg):
self._fn = fn
self._ptr = ptr
self._msg = msg
def __call__(self, w=0, l=0):
return self._fn(self._ptr, self._msg, w, l)
class Scintilla:
def __init__(self, face, hwndParent, hinstance):
self.__dict__["face"] = face
self.__dict__["used"] = set()
self.__dict__["all"] = set()
# The k member is for accessing constants as a dictionary
self.__dict__["k"] = {}
for f in face.features:
self.all.add(f)
if face.features[f]["FeatureType"] == "val":
self.k[f] = int(self.face.features[f]["Value"], 0)
elif face.features[f]["FeatureType"] == "evt":
self.k["SCN_"+f] = int(self.face.features[f]["Value"], 0)
# Get the function first as that also loads the DLL
self.__dict__["_scifn"] = ctypes.windll.SciLexer.Scintilla_DirectFunction
self.__dict__["_hwnd"] = user32.CreateWindowExW(0,
"Scintilla", "Source",
WS_CHILD | WS_VSCROLL | WS_HSCROLL | WS_CLIPCHILDREN,
0, 0, 100, 100, hwndParent, 0, hinstance, 0)
self.__dict__["_sciptr"] = user32.SendMessageW(self._hwnd,
int(self.face.features["GetDirectPointer"]["Value"], 0), 0,0)
user32.ShowWindow(self._hwnd, SW_SHOW)
def __getattr__(self, name):
if name in self.face.features:
self.used.add(name)
feature = self.face.features[name]
value = int(feature["Value"], 0)
#~ print("Feature", name, feature)
if feature["FeatureType"] == "val":
self.__dict__[name] = value
return value
else:
return SciCall(self._scifn, self._sciptr, value)
elif ("Get" + name) in self.face.features:
self.used.add("Get" + name)
feature = self.face.features["Get" + name]
value = int(feature["Value"], 0)
if feature["FeatureType"] == "get" and \
not name.startswith("Get") and \
not feature["Param1Type"] and \
not feature["Param2Type"] and \
feature["ReturnType"] in ["bool", "int", "position"]:
#~ print("property", feature)
return self._scifn(self._sciptr, value, 0, 0)
elif name.startswith("SCN_") and name in self.k:
self.used.add(name)
feature = self.face.features[name[4:]]
value = int(feature["Value"], 0)
#~ print("Feature", name, feature)
if feature["FeatureType"] == "val":
return value
raise AttributeError(name)
def __setattr__(self, name, val):
if ("Set" + name) in self.face.features:
self.used.add("Set" + name)
feature = self.face.features["Set" + name]
value = int(feature["Value"], 0)
#~ print("setproperty", feature)
if feature["FeatureType"] == "set" and not name.startswith("Set"):
if feature["Param1Type"] in ["bool", "int", "position"]:
return self._scifn(self._sciptr, value, val, 0)
elif feature["Param2Type"] in ["string"]:
return self._scifn(self._sciptr, value, 0, val)
raise AttributeError(name)
raise AttributeError(name)
def getvalue(self, name):
if name in self.face.features:
feature = self.face.features[name]
if feature["FeatureType"] != "evt":
try:
return int(feature["Value"], 0)
except ValueError:
return -1
return -1
def ByteRange(self, start, end):
tr = TEXTRANGE()
tr.cpMin = start
tr.cpMax = end
length = end - start
tr.lpstrText = ctypes.create_string_buffer(length + 1)
self.GetTextRange(0, ctypes.byref(tr))
text = tr.lpstrText[:length]
text += b"\0" * (length - len(text))
return text
def StyledTextRange(self, start, end):
tr = TEXTRANGE()
tr.cpMin = start
tr.cpMax = end
length = 2 * (end - start)
tr.lpstrText = ctypes.create_string_buffer(length + 2)
self.GetStyledText(0, ctypes.byref(tr))
styledText = tr.lpstrText[:length]
styledText += b"\0" * (length - len(styledText))
return styledText
def FindBytes(self, start, end, s, flags):
ft = FINDTEXT()
ft.cpMin = start
ft.cpMax = end
ft.lpstrText = s
ft.cpMinText = 0
ft.cpMaxText = 0
pos = self.FindText(flags, ctypes.byref(ft))
#~ print(start, end, ft.cpMinText, ft.cpMaxText)
return pos
def Contents(self):
return self.ByteRange(0, self.Length)
def SizeTo(self, width, height):
user32.SetWindowPos(self._hwnd, 0, 0, 0, width, height, 0)
def FocusOn(self):
user32.SetFocus(self._hwnd)
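# Annotation (editorial addition): the __getattr__/__setattr__ hooks above
# turn Scintilla.iface features into plain attribute access, for example:
#   ed.AddText(5, b"hello")  # "fun" feature -> direct message call
#   length = ed.Length       # "get" feature GetLength read as a property
#   ed.ReadOnly = 1          # "set" feature SetReadOnly written as a property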
class XiteWin():
def __init__(self, test=""):
self.face = Face.Face()
self.face.ReadFromFile(os.path.join(scintillaIncludeDirectory, "Scintilla.iface"))
self.titleDirty = True
self.fullPath = ""
self.test = test
self.appName = "xite"
self.cmds = {}
self.windowName = "XiteWindow"
self.wfunc = WFUNC(self.WndProc)
RegisterClass(self.windowName, self.wfunc)
user32.CreateWindowExW(0, self.windowName, self.appName, \
WS_VISIBLE | WS_OVERLAPPEDWINDOW | WS_CLIPCHILDREN, \
0, 0, 500, 700, 0, 0, hinst, 0)
args = sys.argv[1:]
self.SetMenus()
if args:
self.GrabFile(args[0])
self.ed.FocusOn()
self.ed.GotoPos(self.ed.Length)
print(self.test)
if self.test:
for k in self.cmds:
if self.cmds[k] == "Test":
user32.PostMessageW(self.win, msgs["WM_COMMAND"], k, 0)
def OnSize(self):
width, height = WindowSize(self.win)
self.ed.SizeTo(width, height)
user32.InvalidateRect(self.win, 0, 0)
def OnCreate(self, hwnd):
self.win = hwnd
self.ed = Scintilla(self.face, hwnd, hinst)
self.ed.FocusOn()
def Invalidate(self):
user32.InvalidateRect(self.win, 0, 0)
def WndProc(self, h, m, w, l):
ms = sgsm.get(m, "XXX")
if trace:
print("%s %s %s %s" % (hex(h)[2:],ms,w,l))
if ms == "WM_CLOSE":
user32.PostQuitMessage(0)
elif ms == "WM_CREATE":
self.OnCreate(h)
return 0
elif ms == "WM_SIZE":
# Work out size
if w != 1:
self.OnSize()
return 0
elif ms == "WM_COMMAND":
cmdCode = w & 0xffff
if cmdCode in self.cmds:
self.Command(self.cmds[cmdCode])
return 0
elif ms == "WM_ACTIVATE":
if w != WA_INACTIVE:
self.ed.FocusOn()
return 0
else:
return user32.DefWindowProcW(h, m, w, l)
return 0
def Command(self, name):
name = name.replace(" ", "")
method = "Cmd" + name
cmd = None
try:
cmd = getattr(self, method)
except AttributeError:
return
if cmd:
cmd()
def KeyDown(self, w, prefix = ""):
keyName = prefix
if IsKeyDown(VK_CONTROL):
keyName += "<control>"
if IsKeyDown(VK_SHIFT):
keyName += "<shift>"
keyName += KeyTranslate(w)
if trace:
print("Key:", keyName)
if keyName in self.keys:
method = "Cmd" + self.keys[keyName]
getattr(self, method)()
return True
#~ print("UKey:", keyName)
return False
def Accelerator(self, msg):
ms = sgsm.get(msg.message, "XXX")
if ms == "WM_KEYDOWN":
return self.KeyDown(msg.wParam)
elif ms == "WM_SYSKEYDOWN":
return self.KeyDown(msg.wParam, "<alt>")
return False
def AppLoop(self):
msg = ctypes.wintypes.MSG()
lpmsg = ctypes.byref(msg)
while user32.GetMessageW(lpmsg, 0, 0, 0):
if trace and msg.message != msgs["WM_TIMER"]:
print('mm', hex(msg.hWnd)[2:],sgsm.get(msg.message, "XXX"))
if not self.Accelerator(msg):
user32.TranslateMessage(lpmsg)
user32.DispatchMessageW(lpmsg)
def DoEvents(self):
msg = ctypes.wintypes.MSG()
lpmsg = ctypes.byref(msg)
cont = True
while cont:
cont = user32.PeekMessageW(lpmsg, 0, 0, 0, PM_REMOVE)
if cont:
if not self.Accelerator(msg):
user32.TranslateMessage(lpmsg)
user32.DispatchMessageW(lpmsg)
def SetTitle(self, changePath):
if changePath or self.titleDirty != self.ed.Modify:
self.titleDirty = self.ed.Modify
self.title = self.fullPath
if self.titleDirty:
self.title += " * "
else:
self.title += " - "
self.title += self.appName
if self.win:
user32.SetWindowTextW(self.win, self.title)
def Open(self):
ofx = OPENFILENAME(self.win, "Open File")
opath = "\0" * 1024
ofx.lpstrFile = opath
filters = ["Python (.py;.pyw)|*.py;*.pyw|All|*.*"]
filterText = "\0".join([f.replace("|", "\0") for f in filters])+"\0\0"
ofx.lpstrFilter = filterText
if ctypes.windll.comdlg32.GetOpenFileNameW(ctypes.byref(ofx)):
absPath = opath.replace("\0", "")
self.GrabFile(absPath)
self.ed.FocusOn()
self.ed.LexerLanguage = "python"
self.ed.Lexer = self.ed.SCLEX_PYTHON
self.ed.SetKeyWords(0, b"class def else for from if import print return while")
for style in [k for k in self.ed.k if k.startswith("SCE_P_")]:
self.ed.StyleSetFont(self.ed.k[style], b"Verdana")
if "COMMENT" in style:
self.ed.StyleSetFore(self.ed.k[style], 127 * 256)
self.ed.StyleSetFont(self.ed.k[style], b"Comic Sans MS")
elif "OPERATOR" in style:
self.ed.StyleSetBold(self.ed.k[style], 1)
self.ed.StyleSetFore(self.ed.k[style], 127 * 256 * 256)
elif "WORD" in style:
self.ed.StyleSetItalic(self.ed.k[style], 255)
self.ed.StyleSetFore(self.ed.k[style], 255 * 256 * 256)
elif "TRIPLE" in style:
self.ed.StyleSetFore(self.ed.k[style], 0xA0A0)
elif "STRING" in style or "CHARACTER" in style:
self.ed.StyleSetFore(self.ed.k[style], 0xA000A0)
else:
self.ed.StyleSetFore(self.ed.k[style], 0)
def SaveAs(self):
ofx = OPENFILENAME(self.win, "Save File")
opath = "\0" * 1024
ofx.lpstrFile = opath
if ctypes.windll.comdlg32.GetSaveFileNameW(ctypes.byref(ofx)):
self.fullPath = opath.replace("\0", "")
self.Save()
self.SetTitle(1)
self.ed.FocusOn()
def SetMenus(self):
ui = XiteMenu.MenuStructure
self.cmds = {}
self.keys = {}
cmdId = 0
self.menuBar = user32.CreateMenu()
for name, contents in ui:
cmdId += 1
menu = user32.CreateMenu()
for item in contents:
text, key = item
cmdText = text.replace("&", "")
cmdText = cmdText.replace("...", "")
cmdText = cmdText.replace(" ", "")
cmdId += 1
if key:
keyText = key.replace("<control>", "Ctrl+")
keyText = keyText.replace("<shift>", "Shift+")
text += "\t" + keyText
if text == "-":
user32.AppendMenuW(menu, MF_SEPARATOR, cmdId, text)
else:
user32.AppendMenuW(menu, 0, cmdId, text)
self.cmds[cmdId] = cmdText
self.keys[key] = cmdText
#~ print(cmdId, item)
user32.AppendMenuW(self.menuBar, MF_POPUP, menu, name)
user32.SetMenu(self.win, self.menuBar)
self.CheckMenuItem("Wrap", True)
user32.ShowWindow(self.win, SW_SHOW)
def CheckMenuItem(self, name, val):
#~ print(name, val)
if self.cmds:
for k,v in self.cmds.items():
if v == name:
#~ print(name, k)
user32.CheckMenuItem(user32.GetMenu(self.win), \
k, [MF_UNCHECKED, MF_CHECKED][val])
def Exit(self):
sys.exit(0)
def DisplayMessage(self, msg, ask):
return IDYES == user32.MessageBoxW(self.win, \
msg, self.appName, [MB_OK, MB_YESNOCANCEL][ask])
def NewDocument(self):
self.ed.ClearAll()
self.ed.EmptyUndoBuffer()
self.ed.SetSavePoint()
def SaveIfUnsure(self):
if self.ed.Modify:
msg = "Save changes to \"" + self.fullPath + "\"?"
print(msg)
decision = self.DisplayMessage(msg, True)
if decision:
self.CmdSave()
return decision
return True
def New(self):
if self.SaveIfUnsure():
self.fullPath = ""
self.overrideMode = None
self.NewDocument()
self.SetTitle(1)
self.Invalidate()
def CheckMenus(self):
pass
def MoveSelection(self, caret, anchor=-1):
if anchor == -1:
anchor = caret
self.ed.SetSelectionStart(caret)
self.ed.SetSelectionEnd(anchor)
self.ed.ScrollCaret()
self.Invalidate()
def GrabFile(self, name):
self.fullPath = name
self.overrideMode = None
self.NewDocument()
fsr = open(name, "rb")
data = fsr.read()
fsr.close()
self.ed.AddText(len(data), data)
self.ed.EmptyUndoBuffer()
self.MoveSelection(0)
self.SetTitle(1)
def Save(self):
fos = open(self.fullPath, "wb")
blockSize = 1024
length = self.ed.Length
i = 0
while i < length:
grabSize = length - i
if grabSize > blockSize:
grabSize = blockSize
#~ print(i, grabSize, length)
data = self.ed.ByteRange(i, i + grabSize)
fos.write(data)
i += grabSize
fos.close()
self.ed.SetSavePoint()
self.SetTitle(0)
# Command handlers are called by menu actions
def CmdNew(self):
self.New()
def CmdOpen(self):
self.Open()
def CmdSave(self):
if (self.fullPath == None) or (len(self.fullPath) == 0):
self.SaveAs()
else:
self.Save()
def CmdSaveAs(self):
self.SaveAs()
def CmdTest(self):
runner = unittest.TextTestRunner()
if self.test:
tests = unittest.defaultTestLoader.loadTestsFromName(self.test)
else:
tests = unittest.defaultTestLoader.loadTestsFromName("simpleTests")
results = runner.run(tests)
#~ print(results)
if self.test:
user32.PostQuitMessage(0)
def CmdExercised(self):
print()
unused = sorted(self.ed.all.difference(self.ed.used))
print("Unused", len(unused))
print()
print("\n".join(unused))
print()
print("Used", len(self.ed.used))
print()
print("\n".join(sorted(self.ed.used)))
def Uncalled(self):
print()
unused = sorted(self.ed.all.difference(self.ed.used))
uu = {}
for u in unused:
v = self.ed.getvalue(u)
if v > 2000:
uu[v] = u
#~ for x in sorted(uu.keys())[150:]:
return uu
def CmdExit(self):
self.Exit()
def CmdUndo(self):
self.ed.Undo()
def CmdRedo(self):
self.ed.Redo()
def CmdCut(self):
self.ed.Cut()
def CmdCopy(self):
self.ed.Copy()
def CmdPaste(self):
self.ed.Paste()
def CmdDelete(self):
self.ed.Clear()
xiteFrame = None
def main(test):
global xiteFrame
xiteFrame = XiteWin(test)
xiteFrame.AppLoop()
#~ xiteFrame.CmdExercised()
return xiteFrame.Uncalled()
| gpl-3.0 |
kytvi2p/tahoe-lafs | setuptools-0.6c16dev4.egg/setuptools/command/alias.py | 7 | 2449 | import distutils, os
from setuptools import Command
from distutils.util import convert_path
from distutils import log
from distutils.errors import *
from setuptools.command.setopt import edit_config, option_base, config_file
def shquote(arg):
"""Quote an argument for later parsing by shlex.split()"""
for c in '"', "'", "\\", "#":
if c in arg: return repr(arg)
if arg.split()!=[arg]:
return repr(arg)
return arg
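# Examples (editorial annotation): plain tokens pass through unchanged,
# while anything shlex.split() would mangle gets repr()-quoted:
#   shquote("build")   -> build       (returned as-is)
#   shquote("a b")     -> 'a b'       (repr()-quoted: contains whitespace)
#   shquote('say "x"') -> 'say "x"'   (repr()-quoted: contains a quote)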
class alias(option_base):
"""Define a shortcut that invokes one or more commands"""
description = "define a shortcut to invoke one or more commands"
command_consumes_arguments = True
user_options = [
('remove', 'r', 'remove (unset) the alias'),
] + option_base.user_options
boolean_options = option_base.boolean_options + ['remove']
def initialize_options(self):
option_base.initialize_options(self)
self.args = None
self.remove = None
def finalize_options(self):
option_base.finalize_options(self)
if self.remove and len(self.args)!=1:
raise DistutilsOptionError(
"Must specify exactly one argument (the alias name) when "
"using --remove"
)
def run(self):
aliases = self.distribution.get_option_dict('aliases')
if not self.args:
print "Command Aliases"
print "---------------"
for alias in aliases:
print "setup.py alias", format_alias(alias, aliases)
return
elif len(self.args)==1:
alias, = self.args
if self.remove:
command = None
elif alias in aliases:
print "setup.py alias", format_alias(alias, aliases)
return
else:
print "No alias definition found for %r" % alias
return
else:
alias = self.args[0]
command = ' '.join(map(shquote,self.args[1:]))
edit_config(self.filename, {'aliases': {alias:command}}, self.dry_run)
def format_alias(name, aliases):
source, command = aliases[name]
if source == config_file('global'):
source = '--global-config '
elif source == config_file('user'):
source = '--user-config '
elif source == config_file('local'):
source = ''
else:
source = '--filename=%r' % source
return source+name+' '+command
| gpl-2.0 |
Jaironlanda/yowsup | yowsup/layers/protocol_groups/protocolentities/iq_groups_leave_success.py | 39 | 1424 | from yowsup.structs import ProtocolTreeNode
from yowsup.layers.protocol_iq.protocolentities import ResultIqProtocolEntity
class SuccessLeaveGroupsIqProtocolEntity(ResultIqProtocolEntity):
'''
<iq type="result" from="g.us" id="{{ID}}">
<leave>
<group id="{{GROUP_JID}}"></group>
</leave>
</iq>
'''
def __init__(self, _id, groupId):
super(SuccessLeaveGroupsIqProtocolEntity, self).\
__init__(_from="g.us", _id=_id)
self.setProps(groupId)
def setProps(self, groupId):
self.groupId = groupId
def __str__(self):
out = super(SuccessLeaveGroupsIqProtocolEntity, self).__str__()
out += "Group Id: %s\n" % self.groupId
return out
def toProtocolTreeNode(self):
node = super(SuccessLeaveGroupsIqProtocolEntity, self).\
toProtocolTreeNode()
leaveNode = ProtocolTreeNode(
"leave", {}, [ProtocolTreeNode("group", {"id": self.groupId})]
)
node.addChild(leaveNode)
return node
@staticmethod
def fromProtocolTreeNode(node):
entity = super(SuccessLeaveGroupsIqProtocolEntity, SuccessLeaveGroupsIqProtocolEntity).fromProtocolTreeNode(node)
entity.__class__ = SuccessLeaveGroupsIqProtocolEntity
entity.setProps(
node.getChild("leave").getChild("group").getAttributeValue("id")
)
return entity
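# Usage sketch (editorial addition; the group jid below is a made-up
# placeholder): entities round-trip through their tree representation.
#
#   entity = SuccessLeaveGroupsIqProtocolEntity("iq_1", "123456789@g.us")
#   node = entity.toProtocolTreeNode()
#   same = SuccessLeaveGroupsIqProtocolEntity.fromProtocolTreeNode(node)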
| gpl-3.0 |
DOAJ/doaj | portality/models/editors.py | 1 | 2710 | from portality.dao import DomainObject
from portality.models import Account
class EditorGroup(DomainObject):
__type__ = "editor_group"
@classmethod
def group_exists_by_name(cls, name):
q = EditorGroupQuery(name)
res = cls.query(q=q.query())
ids = [hit.get("_source", {}).get("id") for hit in res.get("hits", {}).get("hits", []) if "_source" in hit]
if len(ids) == 0:
return None
if len(ids) > 0:
return ids[0]
@classmethod
def groups_by_editor(cls, editor):
q = EditorGroupMemberQuery(editor=editor)
_iter = cls.iterate(q.query(), page_size=100)
return _iter
@classmethod
def groups_by_associate(cls, associate):
q = EditorGroupMemberQuery(associate=associate)
_iter = cls.iterate(q.query(), page_size=100)
return _iter
@property
def name(self):
return self.data.get("name")
def set_name(self, val):
self.data["name"] = val
@property
def editor(self):
return self.data.get("editor")
def set_editor(self, val):
self.data["editor"] = val
def get_editor_account(self):
return Account.pull(self.editor)
@property
def associates(self):
return self.data.get("associates", [])
def set_associates(self, val):
if not isinstance(val, list):
val = [val]
self.data["associates"] = val
def add_associate(self, val):
if "associates" not in self.data:
self.data["associates"] = []
self.data["associates"].append(val)
def get_associate_accounts(self):
accs = []
for a in self.associates:
acc = Account.pull(a)
accs.append(acc)
return accs
def is_member(self, account_name):
""" Determine if an account is a member of this Editor Group """
all_eds = self.associates + [self.editor]
return account_name in all_eds
class EditorGroupQuery(object):
def __init__(self, name):
self.name = name
def query(self):
q = {"query": {"term": {"name.exact": self.name}}}
return q
class EditorGroupMemberQuery(object):
def __init__(self, editor=None, associate=None):
self.editor = editor
self.associate = associate
def query(self):
q = {"query": {"bool": {"should": []}}}
if self.editor is not None:
et = {"term": {"editor.exact": self.editor}}
q["query"]["bool"]["should"].append(et)
if self.associate is not None:
at = {"term": {"associates.exact": self.associate}}
q["query"]["bool"]["should"].append(at)
return q
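# Illustrative sketch (editorial addition): both query builders emit plain
# Elasticsearch query dicts, e.g.
#
#   EditorGroupMemberQuery(editor="ed1", associate="a1").query()
#   -> {"query": {"bool": {"should": [
#          {"term": {"editor.exact": "ed1"}},
#          {"term": {"associates.exact": "a1"}}]}}}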
| apache-2.0 |
p0deje/selenium | py/selenium/webdriver/firefox/service.py | 7 | 2439 | # Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from subprocess import PIPE
from selenium.webdriver.common import service
class Service(service.Service):
"""Object that manages the starting and stopping of the
GeckoDriver."""
def __init__(
self, executable_path, firefox_binary=None, port=0, service_args=None,
log_path="geckodriver.log", env=None):
"""Creates a new instance of the GeckoDriver remote service proxy.
GeckoDriver provides a HTTP interface speaking the W3C WebDriver
protocol to Marionette.
:param executable_path: Path to the GeckoDriver binary.
:param firefox_binary: Optional path to the Firefox binary.
:param port: Run the remote service on a specified port.
Defaults to 0.
:param service_args: Optional list of arguments to pass to the
GeckoDriver binary.
:param log_path: Optional path for the GeckoDriver to log to.
Defaults to _geckodriver.log_ in the current working directory.
        :param env: Optional dictionary of environment variables to pass
            to the service process.
"""
        log_file = open(log_path, "a+") if log_path else None
service.Service.__init__(
self, executable_path, port=port, log_file=log_file, env=env)
self.firefox_binary = firefox_binary
self.service_args = service_args or []
def command_line_args(self):
if self.firefox_binary:
return ["-b", self.firefox_binary, "--webdriver-port", "%d" % self.port]
return ["--webdriver-port", "%d" % self.port]
def send_remote_shutdown_command(self):
pass
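# Usage sketch (editorial addition; binary path and port are placeholders):
#
#   service = Service('/usr/local/bin/geckodriver', port=4444)
#   service.start()
#   try:
#       ...  # point a remote WebDriver session at service.service_url
#   finally:
#       service.stop()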
| apache-2.0 |
naves-thiago/mbed-midi | workspace_tools/export/iar.py | 8 | 6321 | """
mbed SDK
Copyright (c) 2011-2013 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from workspace_tools.export.exporters import Exporter
import re
import os
class IAREmbeddedWorkbench(Exporter):
"""
Exporter class for IAR Systems.
"""
NAME = 'IAR'
TOOLCHAIN = 'IAR'
TARGETS = [
'LPC1768',
'LPC1347',
'LPC11U24',
'LPC11U35_401',
'LPC11U35_501',
#Removed LPCCAPPUCCINO linker file and startup file missing
#'LPCCAPPUCCINO',
'LPC1114',
'LPC1549',
'LPC812',
'LPC4088',
'LPC4088_DM',
'LPC824',
'UBLOX_C027',
'ARCH_PRO',
'K20D50M',
'KL05Z',
'KL25Z',
'KL46Z',
'K22F',
'K64F',
'NUCLEO_F030R8',
'NUCLEO_F070RB',
'NUCLEO_F072RB',
'NUCLEO_F091RC',
'NUCLEO_F103RB',
'NUCLEO_F302R8',
'NUCLEO_F303RE',
'NUCLEO_F334R8',
'NUCLEO_F401RE',
'NUCLEO_F411RE',
'NUCLEO_L053R8',
'NUCLEO_L073RZ',
'NUCLEO_L152RE',
'DISCO_L053C8',
'DISCO_F334C8',
'DISCO_F746NG',
#'STM32F407', Fails to build same for GCC
'MAXWSNENV',
'MAX32600MBED',
'MTS_MDOT_F405RG',
'MTS_MDOT_F411RE',
'MTS_DRAGONFLY_F411RE',
'NRF51822',
'NRF51_DK',
'NRF51_DONGLE',
'DELTA_DFCM_NNN40',
'SEEED_TINY_BLE',
'HRM1017',
'ARCH_BLE',
'MOTE_L152RC',
]
def generate(self):
"""
Generates the project files
"""
sources = []
sources += self.resources.c_sources
sources += self.resources.cpp_sources
sources += self.resources.s_sources
iar_files = IarFolder("", "", [])
for source in sources:
iar_files.insert_file(source)
ctx = {
'name': self.program_name,
'include_paths': self.resources.inc_dirs,
'linker_script': self.resources.linker_script,
'object_files': self.resources.objects,
'libraries': self.resources.libraries,
'symbols': self.get_symbols(),
'source_files': iar_files.__str__(),
'binary_files': self.resources.bin_files,
}
self.gen_file('iar_%s.ewp.tmpl' % self.target.lower(), ctx, '%s.ewp' % self.program_name)
self.gen_file('iar.eww.tmpl', ctx, '%s.eww' % self.program_name)
self.gen_file('iar_%s.ewd.tmpl' % self.target.lower(), ctx, '%s.ewd' % self.program_name)
class IarFolder():
"""
This is a recursive folder object.
To present the folder structure in the IDE as it is presented on the disk.
This can be used for uvision as well if you replace the __str__ method.
Example:
files: ./main.cpp, ./apis/I2C.h, ./mbed/common/I2C.cpp
in the project this would look like:
main.cpp
common/I2C.cpp
input:
folder_level : folder path to current folder
folder_name : name of current folder
source_files : list of source_files (all must be in same directory)
"""
def __init__(self, folder_level, folder_name, source_files):
self.folder_level = folder_level
self.folder_name = folder_name
self.source_files = source_files
self.sub_folders = {}
def __str__(self):
"""
        converts the folder structure to IAR project format.
"""
group_start = ""
group_end = ""
if self.folder_name != "":
group_start = "<group>\n<name>%s</name>\n" %(self.folder_name)
group_end = "</group>\n"
str_content = group_start
#Add files in current folder
if self.source_files:
for src in self.source_files:
str_content += "<file>\n<name>$PROJ_DIR$/%s</name>\n</file>\n" % src
#Add sub folders
if self.sub_folders:
for folder_name in self.sub_folders.iterkeys():
str_content += self.sub_folders[folder_name].__str__()
str_content += group_end
return str_content
def insert_file(self, source_input):
"""
Inserts a source file into the folder tree
"""
if self.source_files:
#All source_files in a IarFolder must be in same directory.
dir_sources = IarFolder.get_directory(self.source_files[0])
#Check if sources are already at their deepest level.
if not self.folder_level == dir_sources:
_reg_exp = r"^" + re.escape(self.folder_level) + r"[/\\]?([^/\\]+)"
folder_name = re.match(_reg_exp, dir_sources).group(1)
self.sub_folders[folder_name] = IarFolder(os.path.join(self.folder_level, folder_name), folder_name, self.source_files)
self.source_files = []
dir_input = IarFolder.get_directory(source_input)
if dir_input == self.folder_level:
self.source_files.append(source_input)
else:
_reg_exp = r"^" + re.escape(self.folder_level) + r"[/\\]?([^/\\]+)"
folder_name = re.match(_reg_exp, dir_input).group(1)
if self.sub_folders.has_key(folder_name):
self.sub_folders[folder_name].insert_file(source_input)
else:
if self.folder_level == "":
#Top level exception
self.sub_folders[folder_name] = IarFolder(folder_name, folder_name, [source_input])
else:
self.sub_folders[folder_name] = IarFolder(os.path.join(self.folder_level, folder_name), folder_name, [source_input])
@staticmethod
def get_directory(file_path):
"""
Returns the directory of the file
"""
return os.path.dirname(file_path)
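# Usage sketch (editorial addition), mirroring the example in the class
# docstring; runs under the same Python 2 interpreter as the module:
if __name__ == "__main__":
    root = IarFolder("", "", [])
    for src in ["main.cpp", "apis/I2C.h", "mbed/common/I2C.cpp"]:
        root.insert_file(src)
    print(root)  # emits the nested <group>/<file> project fragment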
| apache-2.0 |
keysona/python | burness/0002/save_to_mysql.py | 40 | 1463 | #-*- coding: utf-8-*-
import mysql.connector
from mysql.connector import errorcode
config = {
'user': 'root',
'password': 'root',
'host': '127.0.0.1',
'database': 'test',
'raise_on_warnings': True,
}
class save_keys_to_mysql:
def __init__(self,path):
self.path=path
print(self.path)
def __conn(self,**conf):
        try:
            conn=mysql.connector.connect(**conf)
            print(conn)
        except mysql.connector.Error as err:
            if err.errno == errorcode.ER_ACCESS_DENIED_ERROR:
                print("something is wrong with your user name or password")
            elif err.errno == errorcode.ER_BAD_DB_ERROR:
                print("database does not exist")
            else:
                print(err)
            raise  # re-raise: otherwise `return conn` would hit an unbound name
        return conn
#print(self.conn)
def save_to_mysql(self,**conf):
conn=self.__conn(**conf)
path=self.path
cursor=conn.cursor()
cursor.execute('drop table if exists act_keys')
cursor.execute('create table act_keys (id int(8) primary key, act_keys varchar(50))')
row=0
        with open(path,'r') as f:  # use the path given to the constructor, not a hardcoded file
for line in f.readlines():
#row_no='0000'+str(row)
act_keys=line.rstrip()
cursor.execute('insert into act_keys (id, act_keys) values (%s, %s)',[row,act_keys])
row+=1
conn.commit()
cursor.close()
conn.close()
def see_all(self,**conf):
conn=self.__conn(**conf)
cursor=conn.cursor()
cursor.execute('select * from act_keys')
values=cursor.fetchall()
print(values)
cursor.close()
conn.close()
test=save_keys_to_mysql('keys_text.txt')
test.save_to_mysql(**config)
test.see_all(**config)
| mit |
40223226/2015cd_midterm2 | static/Brython3.1.1-20150328-091302/Lib/_sysconfigdata.py | 731 | 18167 | build_time_vars={'HAVE_SYS_WAIT_H': 1, 'HAVE_UTIL_H': 0, 'HAVE_SYMLINKAT': 1, 'HAVE_LIBSENDFILE': 0, 'SRCDIRS': 'Parser Grammar Objects Python Modules Mac', 'SIZEOF_OFF_T': 8, 'BASECFLAGS': '-Wno-unused-result', 'HAVE_UTIME_H': 1, 'EXTRAMACHDEPPATH': '', 'HAVE_SYS_TIME_H': 1, 'CFLAGSFORSHARED': '-fPIC', 'HAVE_HYPOT': 1, 'PGSRCS': '\\', 'HAVE_LIBUTIL_H': 0, 'HAVE_COMPUTED_GOTOS': 1, 'HAVE_LUTIMES': 1, 'HAVE_MAKEDEV': 1, 'HAVE_REALPATH': 1, 'HAVE_LINUX_TIPC_H': 1, 'MULTIARCH': 'i386-linux-gnu', 'HAVE_GETWD': 1, 'HAVE_GCC_ASM_FOR_X64': 0, 'HAVE_INET_PTON': 1, 'HAVE_GETHOSTBYNAME_R_6_ARG': 1, 'SIZEOF__BOOL': 1, 'HAVE_ZLIB_COPY': 1, 'ASDLGEN': 'python3.3 ../Parser/asdl_c.py', 'GRAMMAR_INPUT': '../Grammar/Grammar', 'HOST_GNU_TYPE': 'i686-pc-linux-gnu', 'HAVE_SCHED_RR_GET_INTERVAL': 1, 'HAVE_BLUETOOTH_H': 0, 'HAVE_MKFIFO': 1, 'TIMEMODULE_LIB': 0, 'LIBM': '-lm', 'PGENOBJS': '\\ \\', 'PYTHONFRAMEWORK': '', 'GETPGRP_HAVE_ARG': 0, 'HAVE_MMAP': 1, 'SHLIB_SUFFIX': '.so', 'SIZEOF_FLOAT': 4, 'HAVE_RENAMEAT': 1, 'HAVE_LANGINFO_H': 1, 'HAVE_STDLIB_H': 1, 'PY_CORE_CFLAGS': '-Wno-unused-result -DNDEBUG -g -fwrapv -O2 -Wall -Wstrict-prototypes -g -fstack-protector --param=ssp-buffer-size=4 -Wformat -Werror=format-security -I. -IInclude -I../Include -D_FORTIFY_SOURCE=2 -fPIC -DPy_BUILD_CORE', 'HAVE_BROKEN_PIPE_BUF': 0, 'HAVE_CONFSTR': 1, 'HAVE_SIGTIMEDWAIT': 1, 'HAVE_FTELLO': 1, 'READELF': 'readelf', 'HAVE_SIGALTSTACK': 1, 'TESTTIMEOUT': 3600, 'PYTHONPATH': ':plat-i386-linux-gnu', 'SIZEOF_WCHAR_T': 4, 'LIBOBJS': '', 'HAVE_SYSCONF': 1, 'MAKESETUP': '../Modules/makesetup', 'HAVE_UTIMENSAT': 1, 'HAVE_FCHOWNAT': 1, 'HAVE_WORKING_TZSET': 1, 'HAVE_FINITE': 1, 'HAVE_ASINH': 1, 'HAVE_SETEUID': 1, 'CONFIGFILES': 'configure configure.ac acconfig.h pyconfig.h.in Makefile.pre.in', 'HAVE_SETGROUPS': 1, 'PARSER_OBJS': '\\ Parser/myreadline.o Parser/parsetok.o Parser/tokenizer.o', 'HAVE_MBRTOWC': 1, 'SIZEOF_INT': 4, 'HAVE_STDARG_PROTOTYPES': 1, 'TM_IN_SYS_TIME': 0, 'HAVE_SYS_TIMES_H': 1, 'HAVE_LCHOWN': 1, 'HAVE_SSIZE_T': 1, 'HAVE_PAUSE': 1, 'SYSLIBS': '-lm', 'POSIX_SEMAPHORES_NOT_ENABLED': 0, 'HAVE_DEVICE_MACROS': 1, 'BLDSHARED': 'i686-linux-gnu-gcc -pthread -shared -Wl,-O1 -Wl,-Bsymbolic-functions -Wl,-Bsymbolic-functions -Wl,-z,relro -Wno-unused-result -DNDEBUG -g -fwrapv -O2 -Wall -Wstrict-prototypes -g -fstack-protector --param=ssp-buffer-size=4 -Wformat -Werror=format-security ', 'LIBSUBDIRS': 'tkinter tkinter/test tkinter/test/test_tkinter \\', 'HAVE_SYS_UN_H': 1, 'HAVE_SYS_STAT_H': 1, 'VPATH': '..', 'INCLDIRSTOMAKE': '/usr/include /usr/include /usr/include/python3.3m /usr/include/python3.3m', 'HAVE_BROKEN_SEM_GETVALUE': 0, 'HAVE_TIMEGM': 1, 'PACKAGE_VERSION': 0, 'MAJOR_IN_SYSMACROS': 0, 'HAVE_ATANH': 1, 'HAVE_GAI_STRERROR': 1, 'HAVE_SYS_POLL_H': 1, 'SIZEOF_PTHREAD_T': 4, 'SIZEOF_FPOS_T': 16, 'HAVE_CTERMID': 1, 'HAVE_TMPFILE': 1, 'HAVE_SETUID': 1, 'CXX': 'i686-linux-gnu-g++ -pthread', 'srcdir': '..', 'HAVE_UINT32_T': 1, 'HAVE_ADDRINFO': 1, 'HAVE_GETSPENT': 1, 'SIZEOF_DOUBLE': 8, 'HAVE_INT32_T': 1, 'LIBRARY_OBJS_OMIT_FROZEN': '\\', 'HAVE_FUTIMES': 1, 'CONFINCLUDEPY': '/usr/include/python3.3m', 'HAVE_RL_COMPLETION_APPEND_CHARACTER': 1, 'LIBFFI_INCLUDEDIR': '', 'HAVE_SETGID': 1, 'HAVE_UINT64_T': 1, 'EXEMODE': 755, 'UNIVERSALSDK': '', 'HAVE_LIBDL': 1, 'HAVE_GETNAMEINFO': 1, 'HAVE_STDINT_H': 1, 'COREPYTHONPATH': ':plat-i386-linux-gnu', 'HAVE_SOCKADDR_STORAGE': 1, 'HAVE_WAITID': 1, 'EXTRAPLATDIR': '@EXTRAPLATDIR@', 
'HAVE_ACCEPT4': 1, 'RUNSHARED': 'LD_LIBRARY_PATH=/build/buildd/python3.3-3.3.1/build-shared:', 'EXE': '', 'HAVE_SIGACTION': 1, 'HAVE_CHOWN': 1, 'HAVE_GETLOGIN': 1, 'HAVE_TZNAME': 0, 'PACKAGE_NAME': 0, 'HAVE_GETPGID': 1, 'HAVE_GLIBC_MEMMOVE_BUG': 0, 'BUILD_GNU_TYPE': 'i686-pc-linux-gnu', 'HAVE_LINUX_CAN_H': 1, 'DYNLOADFILE': 'dynload_shlib.o', 'HAVE_PWRITE': 1, 'BUILDEXE': '', 'HAVE_OPENPTY': 1, 'HAVE_LOCKF': 1, 'HAVE_COPYSIGN': 1, 'HAVE_PREAD': 1, 'HAVE_DLOPEN': 1, 'HAVE_SYS_KERN_CONTROL_H': 0, 'PY_FORMAT_LONG_LONG': '"ll"', 'HAVE_TCSETPGRP': 1, 'HAVE_SETSID': 1, 'HAVE_STRUCT_STAT_ST_BIRTHTIME': 0, 'HAVE_STRING_H': 1, 'LDLIBRARY': 'libpython3.3m.so', 'INSTALL_SCRIPT': '/usr/bin/install -c', 'HAVE_SYS_XATTR_H': 1, 'HAVE_CURSES_IS_TERM_RESIZED': 1, 'HAVE_TMPNAM_R': 1, 'STRICT_SYSV_CURSES': "/* Don't use ncurses extensions */", 'WANT_SIGFPE_HANDLER': 1, 'HAVE_INT64_T': 1, 'HAVE_STAT_TV_NSEC': 1, 'HAVE_SYS_MKDEV_H': 0, 'HAVE_BROKEN_POLL': 0, 'HAVE_IF_NAMEINDEX': 1, 'HAVE_GETPWENT': 1, 'PSRCS': '\\', 'RANLIB': 'ranlib', 'HAVE_WCSCOLL': 1, 'WITH_NEXT_FRAMEWORK': 0, 'ASDLGEN_FILES': '../Parser/asdl.py ../Parser/asdl_c.py', 'HAVE_RL_PRE_INPUT_HOOK': 1, 'PACKAGE_URL': 0, 'SHLIB_EXT': 0, 'HAVE_SYS_LOADAVG_H': 0, 'HAVE_LIBIEEE': 0, 'HAVE_SEM_OPEN': 1, 'HAVE_TERM_H': 1, 'IO_OBJS': '\\', 'IO_H': 'Modules/_io/_iomodule.h', 'HAVE_STATVFS': 1, 'VERSION': '3.3', 'HAVE_GETC_UNLOCKED': 1, 'MACHDEPS': 'plat-i386-linux-gnu @EXTRAPLATDIR@', 'SUBDIRSTOO': 'Include Lib Misc', 'HAVE_SETREUID': 1, 'HAVE_ERFC': 1, 'HAVE_SETRESUID': 1, 'LINKFORSHARED': '-Xlinker -export-dynamic -Wl,-O1 -Wl,-Bsymbolic-functions', 'HAVE_SYS_TYPES_H': 1, 'HAVE_GETPAGESIZE': 1, 'HAVE_SETEGID': 1, 'HAVE_PTY_H': 1, 'HAVE_STRUCT_STAT_ST_FLAGS': 0, 'HAVE_WCHAR_H': 1, 'HAVE_FSEEKO': 1, 'Py_ENABLE_SHARED': 1, 'HAVE_SIGRELSE': 1, 'HAVE_PTHREAD_INIT': 0, 'FILEMODE': 644, 'HAVE_SYS_RESOURCE_H': 1, 'HAVE_READLINKAT': 1, 'PYLONG_BITS_IN_DIGIT': 0, 'LINKCC': 'i686-linux-gnu-gcc -pthread', 'HAVE_SETLOCALE': 1, 'HAVE_CHROOT': 1, 'HAVE_OPENAT': 1, 'HAVE_FEXECVE': 1, 'LDCXXSHARED': 'i686-linux-gnu-g++ -pthread -shared -Wl,-O1 -Wl,-Bsymbolic-functions', 'DIST': 'README ChangeLog configure configure.ac acconfig.h pyconfig.h.in Makefile.pre.in Include Lib Misc Ext-dummy', 'HAVE_MKNOD': 1, 'PY_LDFLAGS': '-Wl,-Bsymbolic-functions -Wl,-z,relro', 'HAVE_BROKEN_MBSTOWCS': 0, 'LIBRARY_OBJS': '\\', 'HAVE_LOG1P': 1, 'SIZEOF_VOID_P': 4, 'HAVE_FCHOWN': 1, 'PYTHONFRAMEWORKPREFIX': '', 'HAVE_LIBDLD': 0, 'HAVE_TGAMMA': 1, 'HAVE_ERRNO_H': 1, 'HAVE_IO_H': 0, 'OTHER_LIBTOOL_OPT': '', 'HAVE_POLL_H': 1, 'PY_CPPFLAGS': '-I. 
-IInclude -I../Include -D_FORTIFY_SOURCE=2', 'XMLLIBSUBDIRS': 'xml xml/dom xml/etree xml/parsers xml/sax', 'GRAMMAR_H': 'Include/graminit.h', 'TANH_PRESERVES_ZERO_SIGN': 1, 'HAVE_GETLOADAVG': 1, 'UNICODE_DEPS': '\\ \\', 'HAVE_GETCWD': 1, 'MANDIR': '/usr/share/man', 'MACHDESTLIB': '/usr/lib/python3.3', 'GRAMMAR_C': 'Python/graminit.c', 'PGOBJS': '\\', 'HAVE_DEV_PTMX': 1, 'HAVE_UINTPTR_T': 1, 'HAVE_SCHED_SETAFFINITY': 1, 'PURIFY': '', 'HAVE_DECL_ISINF': 1, 'HAVE_RL_CALLBACK': 1, 'HAVE_WRITEV': 1, 'HAVE_GETHOSTBYNAME_R_5_ARG': 0, 'HAVE_SYS_AUDIOIO_H': 0, 'EXT_SUFFIX': '.cpython-33m.so', 'SIZEOF_LONG_LONG': 8, 'DLINCLDIR': '.', 'HAVE_PATHCONF': 1, 'HAVE_UNLINKAT': 1, 'MKDIR_P': '/bin/mkdir -p', 'HAVE_ALTZONE': 0, 'SCRIPTDIR': '/usr/lib', 'OPCODETARGETGEN_FILES': '\\', 'HAVE_GETSPNAM': 1, 'HAVE_SYS_TERMIO_H': 0, 'HAVE_ATTRIBUTE_FORMAT_PARSETUPLE': 0, 'HAVE_PTHREAD_H': 1, 'Py_DEBUG': 0, 'HAVE_STRUCT_STAT_ST_BLOCKS': 1, 'X87_DOUBLE_ROUNDING': 1, 'SIZEOF_TIME_T': 4, 'HAVE_DYNAMIC_LOADING': 1, 'HAVE_DIRECT_H': 0, 'SRC_GDB_HOOKS': '../Tools/gdb/libpython.py', 'HAVE_GETADDRINFO': 1, 'HAVE_BROKEN_NICE': 0, 'HAVE_DIRENT_H': 1, 'HAVE_WCSXFRM': 1, 'HAVE_RL_COMPLETION_DISPLAY_MATCHES_HOOK': 1, 'HAVE_FSTATVFS': 1, 'PYTHON': 'python', 'HAVE_OSX105_SDK': 0, 'BINDIR': '/usr/bin', 'TESTPYTHON': 'LD_LIBRARY_PATH=/build/buildd/python3.3-3.3.1/build-shared: ./python', 'ARFLAGS': 'rc', 'PLATDIR': 'plat-i386-linux-gnu', 'HAVE_ASM_TYPES_H': 1, 'PY3LIBRARY': 'libpython3.so', 'HAVE_PLOCK': 0, 'FLOCK_NEEDS_LIBBSD': 0, 'WITH_TSC': 0, 'HAVE_LIBREADLINE': 1, 'MACHDEP': 'linux', 'HAVE_SELECT': 1, 'LDFLAGS': '-Wl,-Bsymbolic-functions -Wl,-z,relro', 'HAVE_HSTRERROR': 1, 'SOABI': 'cpython-33m', 'HAVE_GETTIMEOFDAY': 1, 'HAVE_LIBRESOLV': 0, 'HAVE_UNSETENV': 1, 'HAVE_TM_ZONE': 1, 'HAVE_GETPGRP': 1, 'HAVE_FLOCK': 1, 'HAVE_SYS_BSDTTY_H': 0, 'SUBDIRS': '', 'PYTHONFRAMEWORKINSTALLDIR': '', 'PACKAGE_BUGREPORT': 0, 'HAVE_CLOCK': 1, 'HAVE_GETPEERNAME': 1, 'SIZEOF_PID_T': 4, 'HAVE_CONIO_H': 0, 'HAVE_FSTATAT': 1, 'HAVE_NETPACKET_PACKET_H': 1, 'HAVE_WAIT3': 1, 'DESTPATH': '', 'HAVE_STAT_TV_NSEC2': 0, 'HAVE_GETRESGID': 1, 'HAVE_UCS4_TCL': 0, 'SIGNED_RIGHT_SHIFT_ZERO_FILLS': 0, 'HAVE_TIMES': 1, 'HAVE_UNAME': 1, 'HAVE_ERF': 1, 'SIZEOF_SHORT': 2, 'HAVE_NCURSES_H': 1, 'HAVE_SYS_SENDFILE_H': 1, 'HAVE_CTERMID_R': 0, 'HAVE_TMPNAM': 1, 'prefix': '/usr', 'HAVE_NICE': 1, 'WITH_THREAD': 1, 'LN': 'ln', 'TESTRUNNER': 'LD_LIBRARY_PATH=/build/buildd/python3.3-3.3.1/build-shared: ./python ../Tools/scripts/run_tests.py', 'HAVE_SIGINTERRUPT': 1, 'HAVE_SETPGID': 1, 'RETSIGTYPE': 'void', 'HAVE_SCHED_GET_PRIORITY_MAX': 1, 'HAVE_SYS_SYS_DOMAIN_H': 0, 'HAVE_SYS_DIR_H': 0, 'HAVE__GETPTY': 0, 'HAVE_BLUETOOTH_BLUETOOTH_H': 1, 'HAVE_BIND_TEXTDOMAIN_CODESET': 1, 'HAVE_POLL': 1, 'PYTHON_OBJS': '\\', 'HAVE_WAITPID': 1, 'USE_INLINE': 1, 'HAVE_FUTIMENS': 1, 'USE_COMPUTED_GOTOS': 1, 'MAINCC': 'i686-linux-gnu-gcc -pthread', 'HAVE_SOCKETPAIR': 1, 'HAVE_PROCESS_H': 0, 'HAVE_SETVBUF': 1, 'HAVE_FDOPENDIR': 1, 'CONFINCLUDEDIR': '/usr/include', 'BINLIBDEST': '/usr/lib/python3.3', 'HAVE_SYS_IOCTL_H': 1, 'HAVE_SYSEXITS_H': 1, 'LDLAST': '', 'HAVE_SYS_FILE_H': 1, 'HAVE_RL_COMPLETION_SUPPRESS_APPEND': 1, 'HAVE_RL_COMPLETION_MATCHES': 1, 'HAVE_TCGETPGRP': 1, 'SIZEOF_SIZE_T': 4, 'HAVE_EPOLL_CREATE1': 1, 'HAVE_SYS_SELECT_H': 1, 'HAVE_CLOCK_GETTIME': 1, 'CFLAGS': '-Wno-unused-result -DNDEBUG -g -fwrapv -O2 -Wall -Wstrict-prototypes -g -fstack-protector --param=ssp-buffer-size=4 -Wformat -Werror=format-security ', 'HAVE_SNPRINTF': 1, 'BLDLIBRARY': '-lpython3.3m', 'PARSER_HEADERS': '\\', 
'SO': '.so', 'LIBRARY': 'libpython3.3m.a', 'HAVE_FPATHCONF': 1, 'HAVE_TERMIOS_H': 1, 'HAVE_BROKEN_PTHREAD_SIGMASK': 0, 'AST_H': 'Include/Python-ast.h', 'HAVE_GCC_UINT128_T': 0, 'HAVE_ACOSH': 1, 'MODOBJS': 'Modules/_threadmodule.o Modules/signalmodule.o Modules/arraymodule.o Modules/mathmodule.o Modules/_math.o Modules/_struct.o Modules/timemodule.o Modules/_randommodule.o Modules/atexitmodule.o Modules/_elementtree.o Modules/_pickle.o Modules/_datetimemodule.o Modules/_bisectmodule.o Modules/_heapqmodule.o Modules/unicodedata.o Modules/fcntlmodule.o Modules/spwdmodule.o Modules/grpmodule.o Modules/selectmodule.o Modules/socketmodule.o Modules/_posixsubprocess.o Modules/md5module.o Modules/sha1module.o Modules/sha256module.o Modules/sha512module.o Modules/syslogmodule.o Modules/binascii.o Modules/zlibmodule.o Modules/pyexpat.o Modules/posixmodule.o Modules/errnomodule.o Modules/pwdmodule.o Modules/_sre.o Modules/_codecsmodule.o Modules/_weakref.o Modules/_functoolsmodule.o Modules/operator.o Modules/_collectionsmodule.o Modules/itertoolsmodule.o Modules/_localemodule.o Modules/_iomodule.o Modules/iobase.o Modules/fileio.o Modules/bytesio.o Modules/bufferedio.o Modules/textio.o Modules/stringio.o Modules/zipimport.o Modules/faulthandler.o Modules/symtablemodule.o Modules/xxsubtype.o', 'AST_C': 'Python/Python-ast.c', 'HAVE_SYS_NDIR_H': 0, 'DESTDIRS': '/usr /usr/lib /usr/lib/python3.3 /usr/lib/python3.3/lib-dynload', 'HAVE_SIGNAL_H': 1, 'PACKAGE_TARNAME': 0, 'HAVE_GETPRIORITY': 1, 'INCLUDEDIR': '/usr/include', 'HAVE_INTTYPES_H': 1, 'SIGNAL_OBJS': '', 'HAVE_READV': 1, 'HAVE_SETHOSTNAME': 1, 'MODLIBS': '-lrt -lexpat -L/usr/lib -lz -lexpat', 'CC': 'i686-linux-gnu-gcc -pthread', 'HAVE_LCHMOD': 0, 'SIZEOF_UINTPTR_T': 4, 'LIBPC': '/usr/lib/i386-linux-gnu/pkgconfig', 'BYTESTR_DEPS': '\\', 'HAVE_MKDIRAT': 1, 'LIBPL': '/usr/lib/python3.3/config-3.3m-i386-linux-gnu', 'HAVE_SHADOW_H': 1, 'HAVE_SYS_EVENT_H': 0, 'INSTALL': '/usr/bin/install -c', 'HAVE_GCC_ASM_FOR_X87': 1, 'HAVE_BROKEN_UNSETENV': 0, 'BASECPPFLAGS': '', 'DOUBLE_IS_BIG_ENDIAN_IEEE754': 0, 'HAVE_STRUCT_STAT_ST_RDEV': 1, 'HAVE_SEM_UNLINK': 1, 'BUILDPYTHON': 'python', 'HAVE_RL_CATCH_SIGNAL': 1, 'HAVE_DECL_TZNAME': 0, 'RESSRCDIR': 'Mac/Resources/framework', 'HAVE_PTHREAD_SIGMASK': 1, 'HAVE_UTIMES': 1, 'DISTDIRS': 'Include Lib Misc Ext-dummy', 'HAVE_FDATASYNC': 1, 'HAVE_USABLE_WCHAR_T': 0, 'PY_FORMAT_SIZE_T': '"z"', 'HAVE_SCHED_SETSCHEDULER': 1, 'VA_LIST_IS_ARRAY': 0, 'HAVE_LINUX_NETLINK_H': 1, 'HAVE_SETREGID': 1, 'HAVE_STROPTS_H': 1, 'LDVERSION': '3.3m', 'abs_builddir': '/build/buildd/python3.3-3.3.1/build-shared', 'SITEPATH': '', 'HAVE_GETHOSTBYNAME': 0, 'HAVE_SIGPENDING': 1, 'HAVE_KQUEUE': 0, 'HAVE_SYNC': 1, 'HAVE_GETSID': 1, 'HAVE_ROUND': 1, 'HAVE_STRFTIME': 1, 'AST_H_DIR': 'Include', 'HAVE_PIPE2': 1, 'AST_C_DIR': 'Python', 'TESTPYTHONOPTS': '', 'HAVE_DEV_PTC': 0, 'GETTIMEOFDAY_NO_TZ': 0, 'HAVE_NET_IF_H': 1, 'HAVE_SENDFILE': 1, 'HAVE_SETPGRP': 1, 'HAVE_SEM_GETVALUE': 1, 'CONFIGURE_LDFLAGS': '-Wl,-Bsymbolic-functions -Wl,-z,relro', 'DLLLIBRARY': '', 'PYTHON_FOR_BUILD': './python -E', 'SETPGRP_HAVE_ARG': 0, 'HAVE_INET_ATON': 1, 'INSTALL_SHARED': '/usr/bin/install -c -m 555', 'WITH_DOC_STRINGS': 1, 'OPCODETARGETS_H': '\\', 'HAVE_INITGROUPS': 1, 'HAVE_LINKAT': 1, 'BASEMODLIBS': '', 'SGI_ABI': '', 'HAVE_SCHED_SETPARAM': 1, 'OPT': '-DNDEBUG -g -fwrapv -O2 -Wall -Wstrict-prototypes', 'HAVE_POSIX_FADVISE': 1, 'datarootdir': '/usr/share', 'HAVE_MEMRCHR': 1, 'HGTAG': '', 'HAVE_MEMMOVE': 1, 'HAVE_GETRESUID': 1, 'DOUBLE_IS_ARM_MIXED_ENDIAN_IEEE754': 0, 
'HAVE_LSTAT': 1, 'AR': 'ar', 'HAVE_WAIT4': 1, 'HAVE_SYS_MODEM_H': 0, 'INSTSONAME': 'libpython3.3m.so.1.0', 'HAVE_SYS_STATVFS_H': 1, 'HAVE_LGAMMA': 1, 'HAVE_PROTOTYPES': 1, 'HAVE_SYS_UIO_H': 1, 'MAJOR_IN_MKDEV': 0, 'QUICKTESTOPTS': '-x test_subprocess test_io test_lib2to3 \\', 'HAVE_SYS_DEVPOLL_H': 0, 'HAVE_CHFLAGS': 0, 'HAVE_FSYNC': 1, 'HAVE_FCHMOD': 1, 'INCLUDEPY': '/usr/include/python3.3m', 'HAVE_SEM_TIMEDWAIT': 1, 'LDLIBRARYDIR': '', 'HAVE_STRUCT_TM_TM_ZONE': 1, 'HAVE_CURSES_H': 1, 'TIME_WITH_SYS_TIME': 1, 'HAVE_DUP2': 1, 'ENABLE_IPV6': 1, 'WITH_VALGRIND': 0, 'HAVE_SETITIMER': 1, 'THREADOBJ': 'Python/thread.o', 'LOCALMODLIBS': '-lrt -lexpat -L/usr/lib -lz -lexpat', 'HAVE_MEMORY_H': 1, 'HAVE_GETITIMER': 1, 'HAVE_C99_BOOL': 1, 'INSTALL_DATA': '/usr/bin/install -c -m 644', 'PGEN': 'Parser/pgen', 'HAVE_GRP_H': 1, 'HAVE_WCSFTIME': 1, 'AIX_GENUINE_CPLUSPLUS': 0, 'HAVE_LIBINTL_H': 1, 'SHELL': '/bin/sh', 'HAVE_UNISTD_H': 1, 'EXTRATESTOPTS': '', 'HAVE_EXECV': 1, 'HAVE_FSEEK64': 0, 'MVWDELCH_IS_EXPRESSION': 1, 'DESTSHARED': '/usr/lib/python3.3/lib-dynload', 'OPCODETARGETGEN': '\\', 'LIBDEST': '/usr/lib/python3.3', 'CCSHARED': '-fPIC', 'HAVE_EXPM1': 1, 'HAVE_DLFCN_H': 1, 'exec_prefix': '/usr', 'HAVE_READLINK': 1, 'WINDOW_HAS_FLAGS': 1, 'HAVE_FTELL64': 0, 'HAVE_STRLCPY': 0, 'MACOSX_DEPLOYMENT_TARGET': '', 'HAVE_SYS_SYSCALL_H': 1, 'DESTLIB': '/usr/lib/python3.3', 'LDSHARED': 'i686-linux-gnu-gcc -pthread -shared -Wl,-O1 -Wl,-Bsymbolic-functions -Wl,-Bsymbolic-functions -Wl,-z,relro -Wno-unused-result -DNDEBUG -g -fwrapv -O2 -Wall -Wstrict-prototypes -g -fstack-protector --param=ssp-buffer-size=4 -Wformat -Werror=format-security ', 'HGVERSION': '', 'PYTHON_HEADERS': '\\', 'HAVE_STRINGS_H': 1, 'DOUBLE_IS_LITTLE_ENDIAN_IEEE754': 1, 'HAVE_POSIX_FALLOCATE': 1, 'HAVE_DIRFD': 1, 'HAVE_LOG2': 1, 'HAVE_GETPID': 1, 'HAVE_ALARM': 1, 'MACHDEP_OBJS': '', 'HAVE_SPAWN_H': 1, 'HAVE_FORK': 1, 'HAVE_SETRESGID': 1, 'HAVE_FCHMODAT': 1, 'HAVE_CLOCK_GETRES': 1, 'MACHDEPPATH': ':plat-i386-linux-gnu', 'STDC_HEADERS': 1, 'HAVE_SETPRIORITY': 1, 'LIBC': '', 'HAVE_SYS_EPOLL_H': 1, 'HAVE_SYS_UTSNAME_H': 1, 'HAVE_PUTENV': 1, 'HAVE_CURSES_RESIZE_TERM': 1, 'HAVE_FUTIMESAT': 1, 'WITH_DYLD': 0, 'INSTALL_PROGRAM': '/usr/bin/install -c', 'LIBS': '-lpthread -ldl -lutil', 'HAVE_TRUNCATE': 1, 'TESTOPTS': '', 'PROFILE_TASK': '../Tools/pybench/pybench.py -n 2 --with-gc --with-syscheck', 'HAVE_CURSES_RESIZETERM': 1, 'ABIFLAGS': 'm', 'HAVE_GETGROUPLIST': 1, 'OBJECT_OBJS': '\\', 'HAVE_MKNODAT': 1, 'HAVE_ST_BLOCKS': 1, 'HAVE_STRUCT_STAT_ST_GEN': 0, 'SYS_SELECT_WITH_SYS_TIME': 1, 'SHLIBS': '-lpthread -ldl -lutil', 'HAVE_GETGROUPS': 1, 'MODULE_OBJS': '\\', 'PYTHONFRAMEWORKDIR': 'no-framework', 'HAVE_FCNTL_H': 1, 'HAVE_LINK': 1, 'HAVE_SIGWAIT': 1, 'HAVE_GAMMA': 1, 'HAVE_SYS_LOCK_H': 0, 'HAVE_FORKPTY': 1, 'HAVE_SOCKADDR_SA_LEN': 0, 'HAVE_TEMPNAM': 1, 'HAVE_STRUCT_STAT_ST_BLKSIZE': 1, 'HAVE_MKFIFOAT': 1, 'HAVE_SIGWAITINFO': 1, 'HAVE_FTIME': 1, 'HAVE_EPOLL': 1, 'HAVE_SYS_SOCKET_H': 1, 'HAVE_LARGEFILE_SUPPORT': 1, 'CONFIGURE_CFLAGS': '-g -fstack-protector --param=ssp-buffer-size=4 -Wformat -Werror=format-security', 'HAVE_PTHREAD_DESTRUCTOR': 0, 'CONFIGURE_CPPFLAGS': '-D_FORTIFY_SOURCE=2', 'HAVE_SYMLINK': 1, 'HAVE_LONG_LONG': 1, 'HAVE_IEEEFP_H': 0, 'LIBDIR': '/usr/lib', 'HAVE_PTHREAD_KILL': 1, 'TESTPATH': '', 'HAVE_STRDUP': 1, 'POBJS': '\\', 'NO_AS_NEEDED': '-Wl,--no-as-needed', 'HAVE_LONG_DOUBLE': 1, 'HGBRANCH': '', 'DISTFILES': 'README ChangeLog configure configure.ac acconfig.h pyconfig.h.in Makefile.pre.in', 'PTHREAD_SYSTEM_SCHED_SUPPORTED': 1, 
'HAVE_FACCESSAT': 1, 'AST_ASDL': '../Parser/Python.asdl', 'CPPFLAGS': '-I. -IInclude -I../Include -D_FORTIFY_SOURCE=2', 'HAVE_MKTIME': 1, 'HAVE_NDIR_H': 0, 'PY_CFLAGS': '-Wno-unused-result -DNDEBUG -g -fwrapv -O2 -Wall -Wstrict-prototypes -g -fstack-protector --param=ssp-buffer-size=4 -Wformat -Werror=format-security ', 'LIBOBJDIR': 'Python/', 'HAVE_LINUX_CAN_RAW_H': 1, 'HAVE_GETHOSTBYNAME_R_3_ARG': 0, 'PACKAGE_STRING': 0, 'GNULD': 'yes', 'LOG1P_DROPS_ZERO_SIGN': 0, 'HAVE_FTRUNCATE': 1, 'WITH_LIBINTL': 0, 'HAVE_MREMAP': 1, 'HAVE_DECL_ISNAN': 1, 'HAVE_KILLPG': 1, 'SIZEOF_LONG': 4, 'HAVE_DECL_ISFINITE': 1, 'HAVE_IPA_PURE_CONST_BUG': 0, 'WITH_PYMALLOC': 1, 'abs_srcdir': '/build/buildd/python3.3-3.3.1/build-shared/..', 'HAVE_FCHDIR': 1, 'HAVE_BROKEN_POSIX_SEMAPHORES': 0, 'AC_APPLE_UNIVERSAL_BUILD': 0, 'PGENSRCS': '\\ \\', 'DIRMODE': 755, 'HAVE_GETHOSTBYNAME_R': 1, 'HAVE_LCHFLAGS': 0, 'HAVE_SYS_PARAM_H': 1, 'SIZEOF_LONG_DOUBLE': 12, 'CONFIG_ARGS': "'--enable-shared' '--prefix=/usr' '--enable-ipv6' '--enable-loadable-sqlite-extensions' '--with-dbmliborder=bdb:gdbm' '--with-computed-gotos' '--with-system-expat' '--with-system-ffi' '--with-fpectl' 'CC=i686-linux-gnu-gcc' 'CFLAGS=-g -fstack-protector --param=ssp-buffer-size=4 -Wformat -Werror=format-security ' 'LDFLAGS=-Wl,-Bsymbolic-functions -Wl,-z,relro' 'CPPFLAGS=-D_FORTIFY_SOURCE=2'", 'HAVE_SCHED_H': 1, 'HAVE_KILL': 1}
| gpl-3.0 |
uclaros/QGIS | tests/src/python/test_qgslayoutunitscombobox.py | 45 | 2465 | # -*- coding: utf-8 -*-
"""QGIS Unit tests for QgsLayoutUnitsComboBox
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = 'Nyall Dawson'
__date__ = '18/07/2017'
__copyright__ = 'Copyright 2017, The QGIS Project'
import qgis # NOQA
from qgis.core import QgsUnitTypes, QgsLayoutMeasurementConverter
from qgis.gui import QgsLayoutUnitsComboBox
from qgis.PyQt.QtWidgets import QDoubleSpinBox
from qgis.PyQt.QtTest import QSignalSpy
from qgis.testing import start_app, unittest
start_app()
class TestQgsLayoutUnitsComboBox(unittest.TestCase):
def testGettersSetters(self):
""" test widget getters/setters """
w = qgis.gui.QgsLayoutUnitsComboBox()
w.setUnit(QgsUnitTypes.LayoutPixels)
self.assertEqual(w.unit(), QgsUnitTypes.LayoutPixels)
def test_ChangedSignals(self):
""" test that signals are correctly emitted when setting unit"""
w = qgis.gui.QgsLayoutUnitsComboBox()
spy = QSignalSpy(w.changed)
w.setUnit(QgsUnitTypes.LayoutPixels)
self.assertEqual(len(spy), 1)
self.assertEqual(spy[0][0], QgsUnitTypes.LayoutPixels)
def testLinkedWidgets(self):
""" test linking spin boxes to combobox"""
w = qgis.gui.QgsLayoutUnitsComboBox()
self.assertFalse(w.converter())
c = QgsLayoutMeasurementConverter()
w.setConverter(c)
self.assertEqual(w.converter(), c)
spin = QDoubleSpinBox()
spin.setMaximum(1000000)
spin.setValue(100)
w.setUnit(QgsUnitTypes.LayoutCentimeters)
w.linkToWidget(spin)
w.setUnit(QgsUnitTypes.LayoutMeters)
self.assertAlmostEqual(spin.value(), 1.0, 2)
w.setUnit(QgsUnitTypes.LayoutMillimeters)
self.assertAlmostEqual(spin.value(), 1000.0, 2)
spin2 = QDoubleSpinBox()
spin2.setValue(50)
spin2.setMaximum(1000000)
w.linkToWidget(spin2)
w.setUnit(QgsUnitTypes.LayoutCentimeters)
self.assertAlmostEqual(spin.value(), 100.0, 2)
self.assertAlmostEqual(spin2.value(), 5.0, 2)
# no crash!
del spin
w.setUnit(QgsUnitTypes.LayoutMeters)
self.assertAlmostEqual(spin2.value(), 0.05, 2)
if __name__ == '__main__':
unittest.main()
| gpl-2.0 |
proxysh/Safejumper-for-Mac | buildlinux/env32/local/lib/python2.7/sre_compile.py | 28 | 16357 | #
# Secret Labs' Regular Expression Engine
#
# convert template to internal format
#
# Copyright (c) 1997-2001 by Secret Labs AB. All rights reserved.
#
# See the sre.py file for information on usage and redistribution.
#
"""Internal support module for sre"""
import _sre, sys
import sre_parse
from sre_constants import *
assert _sre.MAGIC == MAGIC, "SRE module mismatch"
if _sre.CODESIZE == 2:
MAXCODE = 65535
else:
MAXCODE = 0xFFFFFFFFL
def _identityfunction(x):
return x
_LITERAL_CODES = set([LITERAL, NOT_LITERAL])
_REPEATING_CODES = set([REPEAT, MIN_REPEAT, MAX_REPEAT])
_SUCCESS_CODES = set([SUCCESS, FAILURE])
_ASSERT_CODES = set([ASSERT, ASSERT_NOT])
def _compile(code, pattern, flags):
# internal: compile a (sub)pattern
emit = code.append
_len = len
LITERAL_CODES = _LITERAL_CODES
REPEATING_CODES = _REPEATING_CODES
SUCCESS_CODES = _SUCCESS_CODES
ASSERT_CODES = _ASSERT_CODES
for op, av in pattern:
if op in LITERAL_CODES:
if flags & SRE_FLAG_IGNORECASE:
emit(OPCODES[OP_IGNORE[op]])
emit(_sre.getlower(av, flags))
else:
emit(OPCODES[op])
emit(av)
elif op is IN:
if flags & SRE_FLAG_IGNORECASE:
emit(OPCODES[OP_IGNORE[op]])
def fixup(literal, flags=flags):
return _sre.getlower(literal, flags)
else:
emit(OPCODES[op])
fixup = _identityfunction
skip = _len(code); emit(0)
_compile_charset(av, flags, code, fixup)
code[skip] = _len(code) - skip
elif op is ANY:
if flags & SRE_FLAG_DOTALL:
emit(OPCODES[ANY_ALL])
else:
emit(OPCODES[ANY])
elif op in REPEATING_CODES:
if flags & SRE_FLAG_TEMPLATE:
raise error, "internal: unsupported template operator"
emit(OPCODES[REPEAT])
skip = _len(code); emit(0)
emit(av[0])
emit(av[1])
_compile(code, av[2], flags)
emit(OPCODES[SUCCESS])
code[skip] = _len(code) - skip
elif _simple(av) and op is not REPEAT:
if op is MAX_REPEAT:
emit(OPCODES[REPEAT_ONE])
else:
emit(OPCODES[MIN_REPEAT_ONE])
skip = _len(code); emit(0)
emit(av[0])
emit(av[1])
_compile(code, av[2], flags)
emit(OPCODES[SUCCESS])
code[skip] = _len(code) - skip
else:
emit(OPCODES[REPEAT])
skip = _len(code); emit(0)
emit(av[0])
emit(av[1])
_compile(code, av[2], flags)
code[skip] = _len(code) - skip
if op is MAX_REPEAT:
emit(OPCODES[MAX_UNTIL])
else:
emit(OPCODES[MIN_UNTIL])
elif op is SUBPATTERN:
if av[0]:
emit(OPCODES[MARK])
emit((av[0]-1)*2)
# _compile_info(code, av[1], flags)
_compile(code, av[1], flags)
if av[0]:
emit(OPCODES[MARK])
emit((av[0]-1)*2+1)
elif op in SUCCESS_CODES:
emit(OPCODES[op])
elif op in ASSERT_CODES:
emit(OPCODES[op])
skip = _len(code); emit(0)
if av[0] >= 0:
emit(0) # look ahead
else:
lo, hi = av[1].getwidth()
if lo != hi:
raise error, "look-behind requires fixed-width pattern"
emit(lo) # look behind
_compile(code, av[1], flags)
emit(OPCODES[SUCCESS])
code[skip] = _len(code) - skip
elif op is CALL:
emit(OPCODES[op])
skip = _len(code); emit(0)
_compile(code, av, flags)
emit(OPCODES[SUCCESS])
code[skip] = _len(code) - skip
elif op is AT:
emit(OPCODES[op])
if flags & SRE_FLAG_MULTILINE:
av = AT_MULTILINE.get(av, av)
if flags & SRE_FLAG_LOCALE:
av = AT_LOCALE.get(av, av)
elif flags & SRE_FLAG_UNICODE:
av = AT_UNICODE.get(av, av)
emit(ATCODES[av])
elif op is BRANCH:
emit(OPCODES[op])
tail = []
tailappend = tail.append
for av in av[1]:
skip = _len(code); emit(0)
# _compile_info(code, av, flags)
_compile(code, av, flags)
emit(OPCODES[JUMP])
tailappend(_len(code)); emit(0)
code[skip] = _len(code) - skip
emit(0) # end of branch
for tail in tail:
code[tail] = _len(code) - tail
elif op is CATEGORY:
emit(OPCODES[op])
if flags & SRE_FLAG_LOCALE:
av = CH_LOCALE[av]
elif flags & SRE_FLAG_UNICODE:
av = CH_UNICODE[av]
emit(CHCODES[av])
elif op is GROUPREF:
if flags & SRE_FLAG_IGNORECASE:
emit(OPCODES[OP_IGNORE[op]])
else:
emit(OPCODES[op])
emit(av-1)
elif op is GROUPREF_EXISTS:
emit(OPCODES[op])
emit(av[0]-1)
skipyes = _len(code); emit(0)
_compile(code, av[1], flags)
if av[2]:
emit(OPCODES[JUMP])
skipno = _len(code); emit(0)
code[skipyes] = _len(code) - skipyes + 1
_compile(code, av[2], flags)
code[skipno] = _len(code) - skipno
else:
code[skipyes] = _len(code) - skipyes + 1
else:
raise ValueError, ("unsupported operand type", op)
def _compile_charset(charset, flags, code, fixup=None):
# compile charset subprogram
emit = code.append
if fixup is None:
fixup = _identityfunction
for op, av in _optimize_charset(charset, fixup):
emit(OPCODES[op])
if op is NEGATE:
pass
elif op is LITERAL:
emit(fixup(av))
elif op is RANGE:
emit(fixup(av[0]))
emit(fixup(av[1]))
elif op is CHARSET:
code.extend(av)
elif op is BIGCHARSET:
code.extend(av)
elif op is CATEGORY:
if flags & SRE_FLAG_LOCALE:
emit(CHCODES[CH_LOCALE[av]])
elif flags & SRE_FLAG_UNICODE:
emit(CHCODES[CH_UNICODE[av]])
else:
emit(CHCODES[av])
else:
raise error, "internal: unsupported set operator"
emit(OPCODES[FAILURE])
def _optimize_charset(charset, fixup):
# internal: optimize character set
out = []
outappend = out.append
charmap = [0]*256
try:
for op, av in charset:
if op is NEGATE:
outappend((op, av))
elif op is LITERAL:
charmap[fixup(av)] = 1
elif op is RANGE:
for i in range(fixup(av[0]), fixup(av[1])+1):
charmap[i] = 1
elif op is CATEGORY:
# XXX: could append to charmap tail
return charset # cannot compress
except IndexError:
# character set contains unicode characters
return _optimize_unicode(charset, fixup)
# compress character map
i = p = n = 0
runs = []
runsappend = runs.append
for c in charmap:
if c:
if n == 0:
p = i
n = n + 1
elif n:
runsappend((p, n))
n = 0
i = i + 1
if n:
runsappend((p, n))
if len(runs) <= 2:
# use literal/range
for p, n in runs:
if n == 1:
outappend((LITERAL, p))
else:
outappend((RANGE, (p, p+n-1)))
if len(out) < len(charset):
return out
else:
# use bitmap
data = _mk_bitmap(charmap)
outappend((CHARSET, data))
return out
return charset
def _mk_bitmap(bits):
data = []
dataappend = data.append
if _sre.CODESIZE == 2:
start = (1, 0)
else:
start = (1L, 0L)
m, v = start
for c in bits:
if c:
v = v + m
m = m + m
if m > MAXCODE:
dataappend(v)
m, v = start
return data
# To represent a big charset, first a bitmap of all characters in the
# set is constructed. Then, this bitmap is sliced into chunks of 256
# characters, duplicate chunks are eliminated, and each chunk is
# given a number. In the compiled expression, the charset is
# represented by a 32-bit word sequence, consisting of one word for
# the number of different chunks, a sequence of 256 bytes (64 words)
# of chunk numbers indexed by their original chunk position, and a
# sequence of 256-bit chunks (8 words each).
# Compression is normally good: in a typical charset, large ranges of
# Unicode will be either completely excluded (e.g. if only cyrillic
# letters are to be matched), or completely included (e.g. if large
# subranges of Kanji match). These ranges will be represented by
# chunks of all one-bits or all zero-bits.
# Matching can be also done efficiently: the more significant byte of
# the Unicode character is an index into the chunk number, and the
# less significant byte is a bit index in the chunk (just like the
# CHARSET matching).
# In UCS-4 mode, the BIGCHARSET opcode still supports only subsets
# of the basic multilingual plane; an efficient representation
# for all of Unicode has not yet been developed. This means,
# in particular, that negated charsets cannot be represented as
# bigcharsets.
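# Illustrative sketch (not part of the original module, added for
# clarity): the helper below merely restates the layout described in the
# comment above, computing how many code words a BIGCHARSET occupies for
# a given code size in bytes (2 or 4) and number of distinct chunks.
def _bigcharset_size_example(codesize, nchunks):
    header_words = 1                      # one word for the chunk count
    mapping_words = 256 // codesize       # 256 chunk-number bytes, packed
    chunk_words = 256 // (8 * codesize)   # one 256-bit bitmap per chunk
    return header_words + mapping_words + nchunks * chunk_words
# e.g. with 4-byte codes, _bigcharset_size_example(4, 2) == 1 + 64 + 16,
# matching the "64 words" mapping and "8 words each" chunks quoted above.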
def _optimize_unicode(charset, fixup):
try:
import array
except ImportError:
return charset
charmap = [0]*65536
negate = 0
try:
for op, av in charset:
if op is NEGATE:
negate = 1
elif op is LITERAL:
charmap[fixup(av)] = 1
elif op is RANGE:
for i in xrange(fixup(av[0]), fixup(av[1])+1):
charmap[i] = 1
elif op is CATEGORY:
# XXX: could expand category
return charset # cannot compress
except IndexError:
# non-BMP characters
return charset
if negate:
if sys.maxunicode != 65535:
# XXX: negation does not work with big charsets
return charset
for i in xrange(65536):
charmap[i] = not charmap[i]
comps = {}
mapping = [0]*256
block = 0
data = []
for i in xrange(256):
chunk = tuple(charmap[i*256:(i+1)*256])
new = comps.setdefault(chunk, block)
mapping[i] = new
if new == block:
block = block + 1
data = data + _mk_bitmap(chunk)
header = [block]
if _sre.CODESIZE == 2:
code = 'H'
else:
code = 'I'
# Convert block indices to byte array of 256 bytes
mapping = array.array('B', mapping).tostring()
# Convert byte array to word array
mapping = array.array(code, mapping)
assert mapping.itemsize == _sre.CODESIZE
header = header + mapping.tolist()
data[0:0] = header
return [(BIGCHARSET, data)]
def _simple(av):
# check if av is a "simple" operator
lo, hi = av[2].getwidth()
return lo == hi == 1 and av[2][0][0] != SUBPATTERN
def _compile_info(code, pattern, flags):
# internal: compile an info block. in the current version,
# this contains min/max pattern width, and an optional literal
# prefix or a character map
lo, hi = pattern.getwidth()
if lo == 0:
return # not worth it
# look for a literal prefix
prefix = []
prefixappend = prefix.append
prefix_skip = 0
charset = [] # not used
charsetappend = charset.append
if not (flags & SRE_FLAG_IGNORECASE):
# look for literal prefix
for op, av in pattern.data:
if op is LITERAL:
if len(prefix) == prefix_skip:
prefix_skip = prefix_skip + 1
prefixappend(av)
elif op is SUBPATTERN and len(av[1]) == 1:
op, av = av[1][0]
if op is LITERAL:
prefixappend(av)
else:
break
else:
break
# if no prefix, look for charset prefix
if not prefix and pattern.data:
op, av = pattern.data[0]
if op is SUBPATTERN and av[1]:
op, av = av[1][0]
if op is LITERAL:
charsetappend((op, av))
elif op is BRANCH:
c = []
cappend = c.append
for p in av[1]:
if not p:
break
op, av = p[0]
if op is LITERAL:
cappend((op, av))
else:
break
else:
charset = c
elif op is BRANCH:
c = []
cappend = c.append
for p in av[1]:
if not p:
break
op, av = p[0]
if op is LITERAL:
cappend((op, av))
else:
break
else:
charset = c
elif op is IN:
charset = av
## if prefix:
## print "*** PREFIX", prefix, prefix_skip
## if charset:
## print "*** CHARSET", charset
# add an info block
emit = code.append
emit(OPCODES[INFO])
skip = len(code); emit(0)
# literal flag
mask = 0
if prefix:
mask = SRE_INFO_PREFIX
if len(prefix) == prefix_skip == len(pattern.data):
mask = mask + SRE_INFO_LITERAL
elif charset:
mask = mask + SRE_INFO_CHARSET
emit(mask)
# pattern length
if lo < MAXCODE:
emit(lo)
else:
emit(MAXCODE)
prefix = prefix[:MAXCODE]
if hi < MAXCODE:
emit(hi)
else:
emit(0)
# add literal prefix
if prefix:
emit(len(prefix)) # length
emit(prefix_skip) # skip
code.extend(prefix)
# generate overlap table
table = [-1] + ([0]*len(prefix))
for i in xrange(len(prefix)):
table[i+1] = table[i]+1
while table[i+1] > 0 and prefix[i] != prefix[table[i+1]-1]:
table[i+1] = table[table[i+1]-1]+1
code.extend(table[1:]) # don't store first entry
elif charset:
_compile_charset(charset, flags, code)
code[skip] = len(code) - skip
try:
unicode
except NameError:
STRING_TYPES = (type(""),)
else:
STRING_TYPES = (type(""), type(unicode("")))
def isstring(obj):
for tp in STRING_TYPES:
if isinstance(obj, tp):
return 1
return 0
def _code(p, flags):
flags = p.pattern.flags | flags
code = []
# compile info block
_compile_info(code, p, flags)
# compile the pattern
_compile(code, p.data, flags)
code.append(OPCODES[SUCCESS])
return code
def compile(p, flags=0):
# internal: convert pattern list to internal format
if isstring(p):
pattern = p
p = sre_parse.parse(p, flags)
else:
pattern = None
code = _code(p, flags)
# print code
# XXX: <fl> get rid of this limitation!
if p.pattern.groups > 100:
raise AssertionError(
"sorry, but this version only supports 100 named groups"
)
# map in either direction
groupindex = p.pattern.groupdict
indexgroup = [None] * p.pattern.groups
for k, i in groupindex.items():
indexgroup[i] = k
return _sre.compile(
pattern, flags | p.pattern.flags, code,
p.pattern.groups-1,
groupindex, indexgroup
)
| gpl-2.0 |
abagh0703/RetailTrail | flask/lib/python2.7/site-packages/requests/packages/urllib3/util/response.py | 199 | 2167 | from __future__ import absolute_import
from ..packages.six.moves import http_client as httplib
from ..exceptions import HeaderParsingError
def is_fp_closed(obj):
"""
Checks whether a given file-like object is closed.
:param obj:
The file-like object to check.
"""
try:
# Check via the official file-like-object way.
return obj.closed
except AttributeError:
pass
try:
# Check if the object is a container for another file-like object that
# gets released on exhaustion (e.g. HTTPResponse).
return obj.fp is None
except AttributeError:
pass
raise ValueError("Unable to determine whether fp is closed.")
def assert_header_parsing(headers):
"""
Asserts whether all headers have been successfully parsed.
Extracts encountered errors from the result of parsing headers.
Only works on Python 3.
:param headers: Headers to verify.
:type headers: `httplib.HTTPMessage`.
:raises urllib3.exceptions.HeaderParsingError:
If parsing errors are found.
"""
# This will fail silently if we pass in the wrong kind of parameter.
# To make debugging easier add an explicit check.
if not isinstance(headers, httplib.HTTPMessage):
raise TypeError('expected httplib.Message, got {0}.'.format(
type(headers)))
defects = getattr(headers, 'defects', None)
get_payload = getattr(headers, 'get_payload', None)
unparsed_data = None
if get_payload: # Platform-specific: Python 3.
unparsed_data = get_payload()
if defects or unparsed_data:
raise HeaderParsingError(defects=defects, unparsed_data=unparsed_data)
def is_response_to_head(response):
"""
    Checks whether the request that produced a response was a HEAD request.
    Handles the quirks of AppEngine.
    :param response:
    :type response: :class:`httplib.HTTPResponse`
"""
# FIXME: Can we do this somehow without accessing private httplib _method?
method = response._method
if isinstance(method, int): # Platform-specific: Appengine
return method == 3
return method.upper() == 'HEAD'
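# Illustrative sketch (not part of the original module): a minimal caller
# for assert_header_parsing(). On Python 3, ``httplib`` above resolves to
# ``http.client``, whose parse_headers() produces the HTTPMessage this
# module expects; the socket file object passed in is hypothetical.
def _check_parsed_headers(fp):
    headers = httplib.parse_headers(fp)
    try:
        assert_header_parsing(headers)
    except HeaderParsingError as exc:
        # The exception carries any parsing defects and unparsed data.
        return exc
    return None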
| mit |
sriki18/scipy | scipy/sparse/linalg/_onenormest.py | 96 | 15138 | """Sparse block 1-norm estimator.
"""
from __future__ import division, print_function, absolute_import
import numpy as np
from scipy.sparse.linalg import aslinearoperator
__all__ = ['onenormest']
def onenormest(A, t=2, itmax=5, compute_v=False, compute_w=False):
"""
Compute a lower bound of the 1-norm of a sparse matrix.
Parameters
----------
A : ndarray or other linear operator
A linear operator that can be transposed and that can
produce matrix products.
t : int, optional
A positive parameter controlling the tradeoff between
accuracy versus time and memory usage.
Larger values take longer and use more memory
but give more accurate output.
itmax : int, optional
Use at most this many iterations.
compute_v : bool, optional
Request a norm-maximizing linear operator input vector if True.
compute_w : bool, optional
Request a norm-maximizing linear operator output vector if True.
Returns
-------
est : float
An underestimate of the 1-norm of the sparse matrix.
v : ndarray, optional
The vector such that ||Av||_1 == est*||v||_1.
It can be thought of as an input to the linear operator
that gives an output with particularly large norm.
w : ndarray, optional
The vector Av which has relatively large 1-norm.
It can be thought of as an output of the linear operator
that is relatively large in norm compared to the input.
Notes
-----
This is algorithm 2.4 of [1].
In [2] it is described as follows.
"This algorithm typically requires the evaluation of
about 4t matrix-vector products and almost invariably
produces a norm estimate (which is, in fact, a lower
bound on the norm) correct to within a factor 3."
.. versionadded:: 0.13.0
References
----------
.. [1] Nicholas J. Higham and Francoise Tisseur (2000),
"A Block Algorithm for Matrix 1-Norm Estimation,
with an Application to 1-Norm Pseudospectra."
SIAM J. Matrix Anal. Appl. Vol. 21, No. 4, pp. 1185-1201.
.. [2] Awad H. Al-Mohy and Nicholas J. Higham (2009),
"A new scaling and squaring algorithm for the matrix exponential."
SIAM J. Matrix Anal. Appl. Vol. 31, No. 3, pp. 970-989.
"""
# Check the input.
A = aslinearoperator(A)
if A.shape[0] != A.shape[1]:
raise ValueError('expected the operator to act like a square matrix')
# If the operator size is small compared to t,
# then it is easier to compute the exact norm.
# Otherwise estimate the norm.
n = A.shape[1]
if t >= n:
A_explicit = np.asarray(aslinearoperator(A).matmat(np.identity(n)))
if A_explicit.shape != (n, n):
raise Exception('internal error: ',
'unexpected shape ' + str(A_explicit.shape))
col_abs_sums = abs(A_explicit).sum(axis=0)
if col_abs_sums.shape != (n, ):
raise Exception('internal error: ',
'unexpected shape ' + str(col_abs_sums.shape))
argmax_j = np.argmax(col_abs_sums)
v = elementary_vector(n, argmax_j)
w = A_explicit[:, argmax_j]
est = col_abs_sums[argmax_j]
else:
est, v, w, nmults, nresamples = _onenormest_core(A, A.H, t, itmax)
# Report the norm estimate along with some certificates of the estimate.
if compute_v or compute_w:
result = (est,)
if compute_v:
result += (v,)
if compute_w:
result += (w,)
return result
else:
return est
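# Illustrative usage sketch (not part of the original module), doctest
# style; the matrix is arbitrary and its exact 1-norm is 9:
#
#     >>> import numpy as np
#     >>> from scipy.sparse import csr_matrix
#     >>> from scipy.sparse.linalg import onenormest
#     >>> A = csr_matrix([[1., 0., 0.], [5., 8., 2.], [0., -1., 0.]])
#     >>> onenormest(A)
#     9.0
#     >>> abs(A).sum(axis=0).max()  # exact 1-norm, for comparison
#     9.0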
def _blocked_elementwise(func):
"""
Decorator for an elementwise function, to apply it blockwise along
first dimension, to avoid excessive memory usage in temporaries.
"""
block_size = 2**20
def wrapper(x):
if x.shape[0] < block_size:
return func(x)
else:
y0 = func(x[:block_size])
y = np.zeros((x.shape[0],) + y0.shape[1:], dtype=y0.dtype)
y[:block_size] = y0
del y0
for j in range(block_size, x.shape[0], block_size):
y[j:j+block_size] = func(x[j:j+block_size])
return y
return wrapper
@_blocked_elementwise
def sign_round_up(X):
"""
This should do the right thing for both real and complex matrices.
From Higham and Tisseur:
"Everything in this section remains valid for complex matrices
provided that sign(A) is redefined as the matrix (aij / |aij|)
(and sign(0) = 1) transposes are replaced by conjugate transposes."
"""
Y = X.copy()
Y[Y == 0] = 1
Y /= np.abs(Y)
return Y
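# Illustrative doctest-style sketch (not part of the original module):
# zeros map to 1 and every other entry to its sign, so
#
#     >>> import numpy as np
#     >>> sign_round_up(np.array([-2.5, 0.0, 3.0]))
#     array([-1.,  1.,  1.])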
@_blocked_elementwise
def _max_abs_axis1(X):
return np.max(np.abs(X), axis=1)
def _sum_abs_axis0(X):
block_size = 2**20
r = None
for j in range(0, X.shape[0], block_size):
y = np.sum(np.abs(X[j:j+block_size]), axis=0)
if r is None:
r = y
else:
r += y
return r
def elementary_vector(n, i):
v = np.zeros(n, dtype=float)
v[i] = 1
return v
def vectors_are_parallel(v, w):
# Columns are considered parallel when they are equal or negative.
# Entries are required to be in {-1, 1},
# which guarantees that the magnitudes of the vectors are identical.
if v.ndim != 1 or v.shape != w.shape:
raise ValueError('expected conformant vectors with entries in {-1,1}')
n = v.shape[0]
return np.dot(v, w) == n
def every_col_of_X_is_parallel_to_a_col_of_Y(X, Y):
for v in X.T:
if not any(vectors_are_parallel(v, w) for w in Y.T):
return False
return True
def column_needs_resampling(i, X, Y=None):
# column i of X needs resampling if either
# it is parallel to a previous column of X or
# it is parallel to a column of Y
n, t = X.shape
v = X[:, i]
if any(vectors_are_parallel(v, X[:, j]) for j in range(i)):
return True
if Y is not None:
if any(vectors_are_parallel(v, w) for w in Y.T):
return True
return False
def resample_column(i, X):
X[:, i] = np.random.randint(0, 2, size=X.shape[0])*2 - 1
def less_than_or_close(a, b):
return np.allclose(a, b) or (a < b)
def _algorithm_2_2(A, AT, t):
"""
This is Algorithm 2.2.
Parameters
----------
A : ndarray or other linear operator
A linear operator that can produce matrix products.
AT : ndarray or other linear operator
The transpose of A.
t : int, optional
A positive parameter controlling the tradeoff between
accuracy versus time and memory usage.
Returns
-------
g : sequence
A non-negative decreasing vector
such that g[j] is a lower bound for the 1-norm
of the column of A of jth largest 1-norm.
The first entry of this vector is therefore a lower bound
on the 1-norm of the linear operator A.
This sequence has length t.
ind : sequence
The ith entry of ind is the index of the column A whose 1-norm
is given by g[i].
This sequence of indices has length t, and its entries are
chosen from range(n), possibly with repetition,
where n is the order of the operator A.
Notes
-----
This algorithm is mainly for testing.
It uses the 'ind' array in a way that is similar to
its usage in algorithm 2.4. This algorithm 2.2 may be easier to test,
so it gives a chance of uncovering bugs related to indexing
which could have propagated less noticeably to algorithm 2.4.
"""
A_linear_operator = aslinearoperator(A)
AT_linear_operator = aslinearoperator(AT)
n = A_linear_operator.shape[0]
# Initialize the X block with columns of unit 1-norm.
X = np.ones((n, t))
if t > 1:
X[:, 1:] = np.random.randint(0, 2, size=(n, t-1))*2 - 1
X /= float(n)
# Iteratively improve the lower bounds.
# Track extra things, to assert invariants for debugging.
g_prev = None
h_prev = None
k = 1
ind = range(t)
while True:
Y = np.asarray(A_linear_operator.matmat(X))
g = _sum_abs_axis0(Y)
best_j = np.argmax(g)
g.sort()
g = g[::-1]
S = sign_round_up(Y)
Z = np.asarray(AT_linear_operator.matmat(S))
h = _max_abs_axis1(Z)
# If this algorithm runs for fewer than two iterations,
# then its return values do not have the properties indicated
# in the description of the algorithm.
# In particular, the entries of g are not 1-norms of any
# column of A until the second iteration.
# Therefore we will require the algorithm to run for at least
# two iterations, even though this requirement is not stated
# in the description of the algorithm.
if k >= 2:
if less_than_or_close(max(h), np.dot(Z[:, best_j], X[:, best_j])):
break
ind = np.argsort(h)[::-1][:t]
h = h[ind]
for j in range(t):
X[:, j] = elementary_vector(n, ind[j])
# Check invariant (2.2).
if k >= 2:
if not less_than_or_close(g_prev[0], h_prev[0]):
raise Exception('invariant (2.2) is violated')
if not less_than_or_close(h_prev[0], g[0]):
raise Exception('invariant (2.2) is violated')
# Check invariant (2.3).
if k >= 3:
for j in range(t):
if not less_than_or_close(g[j], g_prev[j]):
raise Exception('invariant (2.3) is violated')
# Update for the next iteration.
g_prev = g
h_prev = h
k += 1
# Return the lower bounds and the corresponding column indices.
return g, ind
def _onenormest_core(A, AT, t, itmax):
"""
Compute a lower bound of the 1-norm of a sparse matrix.
Parameters
----------
A : ndarray or other linear operator
A linear operator that can produce matrix products.
AT : ndarray or other linear operator
The transpose of A.
t : int, optional
A positive parameter controlling the tradeoff between
accuracy versus time and memory usage.
itmax : int, optional
Use at most this many iterations.
Returns
-------
est : float
An underestimate of the 1-norm of the sparse matrix.
v : ndarray, optional
The vector such that ||Av||_1 == est*||v||_1.
It can be thought of as an input to the linear operator
that gives an output with particularly large norm.
w : ndarray, optional
The vector Av which has relatively large 1-norm.
It can be thought of as an output of the linear operator
that is relatively large in norm compared to the input.
nmults : int, optional
The number of matrix products that were computed.
nresamples : int, optional
The number of times a parallel column was observed,
necessitating a re-randomization of the column.
Notes
-----
This is algorithm 2.4.
"""
# This function is a more or less direct translation
# of Algorithm 2.4 from the Higham and Tisseur (2000) paper.
A_linear_operator = aslinearoperator(A)
AT_linear_operator = aslinearoperator(AT)
if itmax < 2:
raise ValueError('at least two iterations are required')
if t < 1:
raise ValueError('at least one column is required')
n = A.shape[0]
if t >= n:
raise ValueError('t should be smaller than the order of A')
# Track the number of big*small matrix multiplications
# and the number of resamplings.
nmults = 0
nresamples = 0
# "We now explain our choice of starting matrix. We take the first
# column of X to be the vector of 1s [...] This has the advantage that
# for a matrix with nonnegative elements the algorithm converges
# with an exact estimate on the second iteration, and such matrices
# arise in applications [...]"
X = np.ones((n, t), dtype=float)
# "The remaining columns are chosen as rand{-1,1},
# with a check for and correction of parallel columns,
# exactly as for S in the body of the algorithm."
if t > 1:
for i in range(1, t):
# These are technically initial samples, not resamples,
# so the resampling count is not incremented.
resample_column(i, X)
for i in range(t):
while column_needs_resampling(i, X):
resample_column(i, X)
nresamples += 1
# "Choose starting matrix X with columns of unit 1-norm."
X /= float(n)
# "indices of used unit vectors e_j"
ind_hist = np.zeros(0, dtype=np.intp)
est_old = 0
S = np.zeros((n, t), dtype=float)
k = 1
ind = None
while True:
Y = np.asarray(A_linear_operator.matmat(X))
nmults += 1
mags = _sum_abs_axis0(Y)
est = np.max(mags)
best_j = np.argmax(mags)
if est > est_old or k == 2:
if k >= 2:
ind_best = ind[best_j]
w = Y[:, best_j]
# (1)
if k >= 2 and est <= est_old:
est = est_old
break
est_old = est
S_old = S
if k > itmax:
break
S = sign_round_up(Y)
del Y
# (2)
if every_col_of_X_is_parallel_to_a_col_of_Y(S, S_old):
break
if t > 1:
# "Ensure that no column of S is parallel to another column of S
# or to a column of S_old by replacing columns of S by rand{-1,1}."
for i in range(t):
while column_needs_resampling(i, S, S_old):
resample_column(i, S)
nresamples += 1
del S_old
# (3)
Z = np.asarray(AT_linear_operator.matmat(S))
nmults += 1
h = _max_abs_axis1(Z)
del Z
# (4)
if k >= 2 and max(h) == h[ind_best]:
break
# "Sort h so that h_first >= ... >= h_last
# and re-order ind correspondingly."
#
# Later on, we will need at most t+len(ind_hist) largest
# entries, so drop the rest
ind = np.argsort(h)[::-1][:t+len(ind_hist)].copy()
del h
if t > 1:
# (5)
# Break if the most promising t vectors have been visited already.
if np.in1d(ind[:t], ind_hist).all():
break
# Put the most promising unvisited vectors at the front of the list
# and put the visited vectors at the end of the list.
# Preserve the order of the indices induced by the ordering of h.
seen = np.in1d(ind, ind_hist)
ind = np.concatenate((ind[~seen], ind[seen]))
for j in range(t):
X[:, j] = elementary_vector(n, ind[j])
new_ind = ind[:t][~np.in1d(ind[:t], ind_hist)]
ind_hist = np.concatenate((ind_hist, new_ind))
k += 1
v = elementary_vector(n, ind_best)
return est, v, w, nmults, nresamples
| bsd-3-clause |
fighterCui/L4ReFiascoOC | l4/pkg/python/contrib/Demo/tkinter/matt/dialog-box.py | 47 | 2440 | from Tkinter import *
from Dialog import Dialog
# this shows how to create a new window with a button in it
# that can create new windows
class Test(Frame):
def printit(self):
print "hi"
def makeWindow(self):
"""Create a top-level dialog with some buttons.
This uses the Dialog class, which is a wrapper around the Tcl/Tk
tk_dialog script. The function returns 0 if the user clicks 'yes'
or 1 if the user clicks 'no'.
"""
# the parameters to this call are as follows:
d = Dialog(
self, ## name of a toplevel window
title="fred the dialog box",## title on the window
text="click on a choice", ## message to appear in window
bitmap="info", ## bitmap (if any) to appear;
## if none, use ""
# legal values here are:
# string what it looks like
# ----------------------------------------------
# error a circle with a slash through it
# grey25 grey square
# grey50 darker grey square
# hourglass use for "wait.."
# info a large, lower case "i"
# questhead a human head with a "?" in it
# question a large "?"
# warning a large "!"
# @fname X bitmap where fname is the path to the file
#
default=0, # the index of the default button choice.
# hitting return selects this
strings=("yes", "no"))
# values of the 'strings' key are the labels for the
# buttons that appear left to right in the dialog box
return d.num
def createWidgets(self):
self.QUIT = Button(self, text='QUIT', foreground='red',
command=self.quit)
self.QUIT.pack(side=LEFT, fill=BOTH)
# a hello button
self.hi_there = Button(self, text='Make a New Window',
command=self.makeWindow)
self.hi_there.pack(side=LEFT)
def __init__(self, master=None):
Frame.__init__(self, master)
Pack.config(self)
self.windownum = 0
self.createWidgets()
test = Test()
test.mainloop()
| gpl-2.0 |
JacobJacob/pyew | pymsasid/inst.py | 16 | 5413 | from operand import O_NONE, P_none
import syn_intel as intel
#hack MK
from syn_intel import intel_operand_syntax
#from syn_att import *
operator_list_invalid = [ 'invalid']
operator_list_call = ['syscall',
'call',
'vmcall',
'vmmcall']
operator_list_ret = ['sysret',
'iretw',
'iretd',
'iretq',
'ret',
'retf']
operator_list_jmp = ['jmp']
operator_list_jcc = ['jo',
'jno',
'jb',
'jae',
'jz',
'jnz',
'jbe',
'ja',
'js',
'jns',
'jp',
'jnp',
'jl',
'jge',
'jle',
'jg',
'jcxz',
'jecxz',
'jrcxz',
'loopnz',
'loope',
'loop']
operator_list_hlt = ['hlt']
class itab_entry:
def __init__(self,
operator = None,
op1 = O_NONE, op2 = O_NONE, op3 = O_NONE,
pfx = 0):
self.operator = operator
self.operand = [op1, op2, op3]
self.prefix = pfx
ie_invalid = itab_entry('invalid', O_NONE, O_NONE, O_NONE, P_none)
ie_pause = itab_entry('pause', O_NONE, O_NONE, O_NONE, P_none)
ie_nop = itab_entry('nop', O_NONE, O_NONE, O_NONE, P_none)
class Prefix:
def __init__(self):
self.rex = 0
self.seg = ''
self.opr = 0
self.adr = 0
self.lock = 0
self.rep = 0
self.repe = 0
self.repne = 0
self.insn = 0
def clear(self):
self.seg = ''
self.opr = 0
self.adr = 0
self.lock = 0
self.repne = 0
self.rep = 0
self.repe = 0
self.rex = 0
self.insn = 0
class Ptr:
def __init__(self, off = 0, seg = 0):
self.off = off
self.seg = seg
class Operand:
def __init__(self):
self.seg = None
self.type = None
self.size = 0
self.lval = 0
self.base = None
self.index = None
self.offset = 0
self.scale = 0
self.cast = 0
self.pc = 0
self.value = None
self.ref = None
def clear(self):
self.__init__()
def __str__(self):
        return intel_operand_syntax(self)
def __repr__(self):
return self.__str__()
class Inst:
def __init__(self, myInput, add = 0, mode = 16, syntax = intel.intel_syntax):
self.input = myInput
self.dis_mode = mode
self.size = 0
self.add = add
self.pc = 0
self.syntax = syntax
self.my_syntax = None
self.itab_entry = ie_invalid
self.operator = 'invalid'
self.operand = []
self.pfx = Prefix()
self.opr_mode = 0
self.adr_mode = 0
self.branch_dist = None
def clear(self):
self.pfx.clear()
self.itab_entry = ie_invalid
self.operator = self.itab_entry.operator
for op in self.operand:
op.clear()
def __str__(self):
if(self.my_syntax == None):
self.my_syntax = self.syntax(self) # wtf ?
return self.my_syntax
def __repr__(self):
return str(self)
def set_pc(self, pc):
self.pc = pc
for op in self.operand:
op.pc = pc
def branch(self):
if(self.operator in operator_list_invalid
or self.operator in operator_list_ret
or self.operator in operator_list_hlt):
return []
elif self.operator in operator_list_jmp:
return [self.target_add()]
elif self.operator in operator_list_call or self.operator in operator_list_jcc:
return [self.next_add(), self.target_add()]
return [self.next_add()]
def next_add(self):
return long(self.pc)
def target_add(self):
if(self.operand[0].type == 'OP_JIMM'
or self.operand[0].type == 'OP_IMM'):
ret = self.add + self.size + self.operand[0].lval
elif self.operand[0].type == 'OP_PTR':
ret = ((self.operand[0].lval.seg << 4)
+ self.operand[0].lval.off)
elif self.operand[0].type == 'OP_MEM':
self.input.seek(self.operand[0].lval)
            ret = long(self.input.hook.base_address + self.input.read(self.operand[0].size))
else:
ret = str(self.operand[0])
if(type(ret) == str):
return ret
return long(ret)
def flow_label(self):
if self.operator in operator_list_invalid:
return 'invd'
elif self.operator in operator_list_call:
return 'call'
elif self.operator in operator_list_jmp:
return 'jmp'
elif self.operator in operator_list_jcc:
return 'jcc'
elif self.operator in operator_list_ret:
return 'ret'
elif self.operator in operator_list_hlt:
return 'hlt'
else:
return 'seq'
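# Illustrative sketch (not part of the original module): a traversal
# driver built on Inst.branch(). The `disassemble` callable is
# hypothetical -- anything that returns a decoded Inst for a given
# address will do.
def _walk_control_flow(disassemble, entry):
    seen = set()
    work = [entry]
    while work:
        add = work.pop()
        if add in seen:
            continue
        seen.add(add)
        inst = disassemble(add)
        for target in inst.branch():
            # branch() yields [] for ret/hlt/invalid, the target for jmp,
            # fall-through plus target for call/jcc, and the fall-through
            # address otherwise; unresolved targets come back as strings.
            if not isinstance(target, str):
                work.append(target)
    return seen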
| gpl-2.0 |
arpitparmar5739/youtube-dl | youtube_dl/extractor/pornovoisines.py | 113 | 3388 | # coding: utf-8
from __future__ import unicode_literals
import re
import random
from .common import InfoExtractor
from ..utils import (
int_or_none,
float_or_none,
unified_strdate,
)
class PornoVoisinesIE(InfoExtractor):
_VALID_URL = r'http://(?:www\.)?pornovoisines\.com/showvideo/(?P<id>\d+)/(?P<display_id>[^/]+)'
_VIDEO_URL_TEMPLATE = 'http://stream%d.pornovoisines.com' \
'/static/media/video/transcoded/%s-640x360-1000-trscded.mp4'
_SERVER_NUMBERS = (1, 2)
_TEST = {
'url': 'http://www.pornovoisines.com/showvideo/1285/recherche-appartement/',
'md5': '5ac670803bc12e9e7f9f662ce64cf1d1',
'info_dict': {
'id': '1285',
'display_id': 'recherche-appartement',
'ext': 'mp4',
'title': 'Recherche appartement',
'description': 'md5:819ea0b785e2a04667a1a01cdc89594e',
'thumbnail': 're:^https?://.*\.jpg$',
'upload_date': '20140925',
'duration': 120,
'view_count': int,
'average_rating': float,
'categories': ['Débutantes', 'Scénario', 'Sodomie'],
'age_limit': 18,
}
}
@classmethod
def build_video_url(cls, num):
return cls._VIDEO_URL_TEMPLATE % (random.choice(cls._SERVER_NUMBERS), num)
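    # Illustrative note (not part of the original extractor): for the
    # video id '1285' used in _TEST above, build_video_url yields e.g.
    # http://stream1.pornovoisines.com/static/media/video/transcoded/1285-640x360-1000-trscded.mp4
    # with the stream host drawn at random from _SERVER_NUMBERS.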
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
video_id = mobj.group('id')
display_id = mobj.group('display_id')
webpage = self._download_webpage(url, video_id)
video_url = self.build_video_url(video_id)
title = self._html_search_regex(
r'<h1>(.+?)</h1>', webpage, 'title', flags=re.DOTALL)
description = self._html_search_regex(
r'<article id="descriptif">(.+?)</article>',
webpage, "description", fatal=False, flags=re.DOTALL)
thumbnail = self._search_regex(
r'<div id="mediaspace%s">\s*<img src="/?([^"]+)"' % video_id,
webpage, 'thumbnail', fatal=False)
if thumbnail:
thumbnail = 'http://www.pornovoisines.com/%s' % thumbnail
upload_date = unified_strdate(self._search_regex(
r'Publié le ([\d-]+)', webpage, 'upload date', fatal=False))
duration = int_or_none(self._search_regex(
'Durée (\d+)', webpage, 'duration', fatal=False))
view_count = int_or_none(self._search_regex(
r'(\d+) vues', webpage, 'view count', fatal=False))
average_rating = self._search_regex(
r'Note\s*:\s*(\d+(?:,\d+)?)', webpage, 'average rating', fatal=False)
if average_rating:
average_rating = float_or_none(average_rating.replace(',', '.'))
categories = self._html_search_meta(
'keywords', webpage, 'categories', fatal=False)
if categories:
categories = [category.strip() for category in categories.split(',')]
return {
'id': video_id,
'display_id': display_id,
'url': video_url,
'title': title,
'description': description,
'thumbnail': thumbnail,
'upload_date': upload_date,
'duration': duration,
'view_count': view_count,
'average_rating': average_rating,
'categories': categories,
'age_limit': 18,
}
| unlicense |
ansible/ansible | lib/ansible/modules/import_role.py | 15 | 3445 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
DOCUMENTATION = r'''
---
author: Ansible Core Team (@ansible)
module: import_role
short_description: Import a role into a play
description:
- Much like the C(roles:) keyword, this task loads a role, but it allows you to control when the role tasks run in
between other tasks of the play.
- Most keywords, loops and conditionals will only be applied to the imported tasks, not to this statement itself. If
you want the opposite behavior, use M(ansible.builtin.include_role) instead.
- Does not work in handlers.
version_added: '2.4'
options:
name:
description:
- The name of the role to be executed.
type: str
required: true
tasks_from:
description:
- File to load from a role's C(tasks/) directory.
type: str
default: main
vars_from:
description:
- File to load from a role's C(vars/) directory.
type: str
default: main
defaults_from:
description:
- File to load from a role's C(defaults/) directory.
type: str
default: main
allow_duplicates:
description:
- Overrides the role's metadata setting to allow using a role more than once with the same parameters.
type: bool
default: yes
handlers_from:
description:
- File to load from a role's C(handlers/) directory.
type: str
default: main
version_added: '2.8'
rolespec_validate:
description:
- Perform role argument spec validation if an argument spec is defined.
type: bool
default: yes
version_added: '2.11'
extends_documentation_fragment:
- action_common_attributes
attributes:
async:
support: none
become:
support: none
bypass_host_loop:
support: partial
conditional:
support: none
connection:
support: none
delegation:
support: none
loops:
support: none
tags:
support: none
until:
support: none
notes:
- Handlers are made available to the whole play.
- Since Ansible 2.7 variables defined in C(vars) and C(defaults) for the role are exposed to the play at playbook parsing time.
Due to this, these variables will be accessible to roles and tasks executed before the location of the
M(ansible.builtin.import_role) task.
- Unlike M(ansible.builtin.include_role) variable exposure is not configurable, and will always be exposed.
seealso:
- module: ansible.builtin.import_playbook
- module: ansible.builtin.import_tasks
- module: ansible.builtin.include_role
- module: ansible.builtin.include_tasks
- ref: playbooks_reuse_includes
description: More information related to including and importing playbooks, roles and tasks.
'''
EXAMPLES = r'''
- hosts: all
tasks:
- import_role:
name: myrole
- name: Run tasks/other.yaml instead of 'main'
import_role:
name: myrole
tasks_from: other
- name: Pass variables to role
import_role:
name: myrole
vars:
rolevar1: value from task
- name: Apply condition to each task in role
import_role:
name: myrole
when: not idontwanttorun
'''
RETURN = r'''
# This module does not return anything except tasks to execute.
'''
| gpl-3.0 |
anmolonruby/mongo-connector | mongo_connector/connector.py | 1 | 41715 | # Copyright 2013-2014 MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Discovers the mongo cluster and starts the connector.
"""
import json
import logging
import logging.handlers
import os
import pymongo
import re
import shutil
import ssl
import sys
import threading
import time
from mongo_connector import config, constants, errors, util
from mongo_connector.locking_dict import LockingDict
from mongo_connector.oplog_manager import OplogThread
from mongo_connector.doc_managers import doc_manager_simulator as simulator
from mongo_connector.doc_managers.doc_manager_base import DocManagerBase
from mongo_connector.command_helper import CommandHelper
from mongo_connector.util import log_fatal_exceptions
from pymongo import MongoClient
LOG = logging.getLogger(__name__)
_SSL_POLICY_MAP = {
'ignored': ssl.CERT_NONE,
'optional': ssl.CERT_OPTIONAL,
'required': ssl.CERT_REQUIRED
}
class Connector(threading.Thread):
"""Thread that monitors a replica set or sharded cluster.
Creates, runs, and monitors an OplogThread for each replica set found.
"""
def __init__(self, mongo_address, doc_managers=None, **kwargs):
super(Connector, self).__init__()
# can_run is set to false when we join the thread
self.can_run = True
# main address - either mongos for sharded setups or a primary otherwise
self.address = mongo_address
# List of DocManager instances
if doc_managers:
self.doc_managers = doc_managers
else:
LOG.warning('No doc managers specified, using simulator.')
self.doc_managers = (simulator.DocManager(),)
# Password for authentication
self.auth_key = kwargs.pop('auth_key', None)
# Username for authentication
self.auth_username = kwargs.pop('auth_username', None)
# The name of the file that stores the progress of the OplogThreads
self.oplog_checkpoint = kwargs.pop('oplog_checkpoint',
'oplog.timestamp')
# The set of OplogThreads created
self.shard_set = {}
# Dict of OplogThread/timestamp pairs to record progress
self.oplog_progress = LockingDict()
# Timezone awareness
self.tz_aware = kwargs.get('tz_aware', False)
# SSL keyword arguments to MongoClient.
ssl_certfile = kwargs.pop('ssl_certfile', None)
ssl_ca_certs = kwargs.pop('ssl_ca_certs', None)
ssl_keyfile = kwargs.pop('ssl_keyfile', None)
ssl_cert_reqs = kwargs.pop('ssl_cert_reqs', None)
self.ssl_kwargs = {}
if ssl_certfile:
self.ssl_kwargs['ssl_certfile'] = ssl_certfile
if ssl_ca_certs:
self.ssl_kwargs['ssl_ca_certs'] = ssl_ca_certs
if ssl_keyfile:
self.ssl_kwargs['ssl_keyfile'] = ssl_keyfile
if ssl_cert_reqs:
self.ssl_kwargs['ssl_cert_reqs'] = ssl_cert_reqs
# Save the rest of kwargs.
self.kwargs = kwargs
# Initialize and set the command helper
command_helper = CommandHelper(kwargs.get('ns_set', []),
kwargs.get('dest_mapping', {}))
for dm in self.doc_managers:
dm.command_helper = command_helper
if self.oplog_checkpoint is not None:
if not os.path.exists(self.oplog_checkpoint):
info_str = ("MongoConnector: Can't find %s, "
"attempting to create an empty progress log" %
self.oplog_checkpoint)
LOG.warning(info_str)
try:
# Create oplog progress file
open(self.oplog_checkpoint, "w").close()
except IOError as e:
LOG.critical("MongoConnector: Could not "
"create a progress log: %s" %
str(e))
sys.exit(2)
else:
if (not os.access(self.oplog_checkpoint, os.W_OK)
and not os.access(self.oplog_checkpoint, os.R_OK)):
LOG.critical("Invalid permissions on %s! Exiting" %
(self.oplog_checkpoint))
sys.exit(2)
@classmethod
def from_config(cls, config):
"""Create a new Connector instance from a Config object."""
auth_key = None
password_file = config['authentication.passwordFile']
if password_file is not None:
try:
auth_key = open(config['authentication.passwordFile']).read()
auth_key = re.sub(r'\s', '', auth_key)
except IOError:
LOG.error('Could not load password file!')
sys.exit(1)
password = config['authentication.password']
if password is not None:
auth_key = password
connector = Connector(
mongo_address=config['mainAddress'],
doc_managers=config['docManagers'],
oplog_checkpoint=config['oplogFile'],
collection_dump=(not config['noDump']),
batch_size=config['batchSize'],
continue_on_error=config['continueOnError'],
auth_username=config['authentication.adminUsername'],
auth_key=auth_key,
fields=config['fields'],
ns_set=config['namespaces.include'],
dest_mapping=config['namespaces.mapping'],
gridfs_set=config['namespaces.gridfs'],
ssl_certfile=config['ssl.sslCertfile'],
ssl_keyfile=config['ssl.sslKeyfile'],
ssl_ca_certs=config['ssl.sslCACerts'],
ssl_cert_reqs=config['ssl.sslCertificatePolicy'],
tz_aware=config['timezoneAware']
)
return connector
def join(self):
""" Joins thread, stops it from running
"""
self.can_run = False
for dm in self.doc_managers:
dm.stop()
threading.Thread.join(self)
def write_oplog_progress(self):
""" Writes oplog progress to file provided by user
"""
if self.oplog_checkpoint is None:
return None
with self.oplog_progress as oplog_prog:
oplog_dict = oplog_prog.get_dict()
items = [[name, util.bson_ts_to_long(oplog_dict[name])]
for name in oplog_dict]
if not items:
return
# write to temp file
backup_file = self.oplog_checkpoint + '.backup'
os.rename(self.oplog_checkpoint, backup_file)
# for each of the threads write to file
with open(self.oplog_checkpoint, 'w') as dest:
if len(items) == 1:
# Write 1-dimensional array, as in previous versions.
json_str = json.dumps(items[0])
else:
# Write a 2d array to support sharded clusters.
json_str = json.dumps(items)
try:
dest.write(json_str)
except IOError:
                # Basically wipe the file, then restore from the backup.
                dest.truncate()
                with open(backup_file, 'r') as backup:
                    # copyfileobj accepts the open file objects used here;
                    # shutil.copyfile would require path names instead.
                    shutil.copyfileobj(backup, dest)
os.remove(backup_file)
def read_oplog_progress(self):
"""Reads oplog progress from file provided by user.
        This method is only called once before any threads are spawned.
"""
if self.oplog_checkpoint is None:
return None
# Check for empty file
try:
if os.stat(self.oplog_checkpoint).st_size == 0:
LOG.info("MongoConnector: Empty oplog progress file.")
return None
except OSError:
return None
with open(self.oplog_checkpoint, 'r') as progress_file:
try:
data = json.load(progress_file)
except ValueError:
LOG.exception(
'Cannot read oplog progress file "%s". '
                    'It may be corrupt after Mongo Connector was shut down '
'uncleanly. You can try to recover from a backup file '
'(may be called "%s.backup") or create a new progress file '
'starting at the current moment in time by running '
'mongo-connector --no-dump <other options>. '
'You may also be trying to read an oplog progress file '
'created with the old format for sharded clusters. '
'See https://github.com/10gen-labs/mongo-connector/wiki'
'/Oplog-Progress-File for complete documentation.'
% (self.oplog_checkpoint, self.oplog_checkpoint))
return
# data format:
# [name, timestamp] = replica set
# [[name, timestamp], [name, timestamp], ...] = sharded cluster
if not isinstance(data[0], list):
data = [data]
with self.oplog_progress:
self.oplog_progress.dict = dict(
(name, util.long_to_bson_ts(timestamp))
for name, timestamp in data)
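    # Illustrative sketch (not part of the original source): the progress
    # file read here holds the plain JSON written by write_oplog_progress,
    # e.g.
    #   ["rs0", 6021954041892372481]
    # for a single replica set, or a 2d array such as
    #   [["shard0000", 6021954041892372481],
    #    ["shard0001", 6021954041892372482]]
    # for a sharded cluster; the integer timestamps are hypothetical
    # values packed by util.bson_ts_to_long.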
@log_fatal_exceptions
def run(self):
"""Discovers the mongo cluster and creates a thread for each primary.
"""
main_conn = MongoClient(
self.address, tz_aware=self.tz_aware, **self.ssl_kwargs)
if self.auth_key is not None:
main_conn['admin'].authenticate(self.auth_username, self.auth_key)
self.read_oplog_progress()
conn_type = None
try:
main_conn.admin.command("isdbgrid")
except pymongo.errors.OperationFailure:
conn_type = "REPLSET"
if conn_type == "REPLSET":
# Make sure we are connected to a replica set
is_master = main_conn.admin.command("isMaster")
if "setName" not in is_master:
LOG.error(
'No replica set at "%s"! A replica set is required '
'to run mongo-connector. Shutting down...' % self.address
)
return
# Establish a connection to the replica set as a whole
main_conn.disconnect()
main_conn = MongoClient(
self.address, replicaSet=is_master['setName'],
tz_aware=self.tz_aware, **self.ssl_kwargs)
if self.auth_key is not None:
main_conn.admin.authenticate(self.auth_username, self.auth_key)
# non sharded configuration
oplog = OplogThread(
main_conn, self.doc_managers, self.oplog_progress,
**self.kwargs)
self.shard_set[0] = oplog
LOG.info('MongoConnector: Starting connection thread %s' %
main_conn)
oplog.start()
while self.can_run:
if not self.shard_set[0].running:
LOG.error("MongoConnector: OplogThread"
" %s unexpectedly stopped! Shutting down" %
(str(self.shard_set[0])))
self.oplog_thread_join()
for dm in self.doc_managers:
dm.stop()
return
self.write_oplog_progress()
time.sleep(1)
else: # sharded cluster
while self.can_run is True:
for shard_doc in main_conn['config']['shards'].find():
shard_id = shard_doc['_id']
if shard_id in self.shard_set:
if not self.shard_set[shard_id].running:
LOG.error("MongoConnector: OplogThread "
"%s unexpectedly stopped! Shutting "
"down" %
(str(self.shard_set[shard_id])))
self.oplog_thread_join()
for dm in self.doc_managers:
dm.stop()
return
self.write_oplog_progress()
time.sleep(1)
continue
try:
repl_set, hosts = shard_doc['host'].split('/')
except ValueError:
cause = "The system only uses replica sets!"
LOG.exception("MongoConnector: %s", cause)
self.oplog_thread_join()
for dm in self.doc_managers:
dm.stop()
return
shard_conn = MongoClient(
hosts, replicaSet=repl_set, tz_aware=self.tz_aware,
**self.ssl_kwargs)
if self.auth_key is not None:
shard_conn['admin'].authenticate(self.auth_username, self.auth_key)
oplog = OplogThread(
shard_conn, self.doc_managers, self.oplog_progress,
**self.kwargs)
self.shard_set[shard_id] = oplog
msg = "Starting connection thread"
LOG.info("MongoConnector: %s %s" % (msg, shard_conn))
oplog.start()
self.oplog_thread_join()
self.write_oplog_progress()
def oplog_thread_join(self):
"""Stops all the OplogThreads
"""
LOG.info('MongoConnector: Stopping all OplogThreads')
for thread in self.shard_set.values():
thread.join()
def get_config_options():
result = []
def add_option(*args, **kwargs):
opt = config.Option(*args, **kwargs)
result.append(opt)
return opt
main_address = add_option(
config_key="mainAddress",
default="localhost:27017",
type=str)
# -m is for the main address, which is a host:port pair, ideally of the
# mongos. For non sharded clusters, it can be the primary.
main_address.add_cli(
"-m", "--main", dest="main_address", help=
"Specify the main address, which is a"
" host:port pair. For sharded clusters, this"
" should be the mongos address. For individual"
" replica sets, supply the address of the"
" primary. For example, `-m localhost:27217`"
" would be a valid argument to `-m`. Don't use"
" quotes around the address.")
oplog_file = add_option(
config_key="oplogFile",
default="oplog.timestamp",
type=str)
# -o is to specify the oplog-config file. This file is used by the system
# to store the last timestamp read on a specific oplog. This allows for
# quick recovery from failure.
oplog_file.add_cli(
"-o", "--oplog-ts", dest="oplog_file", help=
"Specify the name of the file that stores the "
"oplog progress timestamps. "
"This file is used by the system to store the last "
"timestamp read on a specific oplog. This allows "
"for quick recovery from failure. By default this "
"is `config.txt`, which starts off empty. An empty "
"file causes the system to go through all the mongo "
"oplog and sync all the documents. Whenever the "
"cluster is restarted, it is essential that the "
"oplog-timestamp config file be emptied - otherwise "
"the connector will miss some documents and behave "
"incorrectly.")
no_dump = add_option(
config_key="noDump",
default=False,
type=bool)
# --no-dump specifies whether we should read an entire collection from
# scratch if no timestamp is found in the oplog_config.
no_dump.add_cli(
"--no-dump", action="store_true", dest="no_dump", help=
"If specified, this flag will ensure that "
"mongo_connector won't read the entire contents of a "
"namespace iff --oplog-ts points to an empty file.")
batch_size = add_option(
config_key="batchSize",
default=constants.DEFAULT_BATCH_SIZE,
type=int)
# --batch-size specifies num docs to read from oplog before updating the
# --oplog-ts config file with current oplog position
batch_size.add_cli(
"--batch-size", type="int", dest="batch_size", help=
"Specify an int to update the --oplog-ts "
"config file with latest position of oplog every "
"N documents. By default, the oplog config isn't "
"updated until we've read through the entire oplog. "
"You may want more frequent updates if you are at risk "
"of falling behind the earliest timestamp in the oplog")
def apply_verbosity(option, cli_values):
if cli_values['verbose']:
option.value = 3
if option.value < 0 or option.value > 3:
raise errors.InvalidConfiguration(
"verbosity must be in the range [0, 3].")
verbosity = add_option(
config_key="verbosity",
default=0,
type=int,
apply_function=apply_verbosity)
# -v enables verbose logging
verbosity.add_cli(
"-v", "--verbose", action="store_true",
dest="verbose", help="Enables verbose logging.")
def apply_logging(option, cli_values):
if cli_values['logfile'] and cli_values['enable_syslog']:
raise errors.InvalidConfiguration(
"You cannot specify syslog and a logfile simultaneously,"
" please choose the logging method you would prefer.")
if cli_values['logfile']:
when = cli_values['logfile_when']
interval = cli_values['logfile_interval']
if (when and when.startswith('W') and
interval != constants.DEFAULT_LOGFILE_INTERVAL):
raise errors.InvalidConfiguration(
"You cannot specify a log rotation interval when rotating "
"based on a weekday (W0 - W6).")
option.value['type'] = 'file'
option.value['filename'] = cli_values['logfile']
if when:
option.value['rotationWhen'] = when
if interval:
option.value['rotationInterval'] = interval
if cli_values['logfile_backups']:
option.value['rotationBackups'] = cli_values['logfile_backups']
if cli_values['enable_syslog']:
option.value['type'] = 'syslog'
if cli_values['syslog_host']:
option.value['host'] = cli_values['syslog_host']
if cli_values['syslog_facility']:
option.value['facility'] = cli_values['syslog_facility']
default_logging = {
'type': 'file',
'filename': 'mongo-connector.log',
'rotationInterval': constants.DEFAULT_LOGFILE_INTERVAL,
'rotationBackups': constants.DEFAULT_LOGFILE_BACKUPCOUNT,
'rotationWhen': constants.DEFAULT_LOGFILE_WHEN,
'host': constants.DEFAULT_SYSLOG_HOST,
'facility': constants.DEFAULT_SYSLOG_FACILITY
}
logging = add_option(
config_key="logging",
default=default_logging,
type=dict,
apply_function=apply_logging)
# -w enables logging to a file
logging.add_cli(
"-w", "--logfile", dest="logfile", help=
"Log all output to a file rather than stream to "
"stderr. Omit to stream to stderr.")
# -s is to enable syslog logging.
logging.add_cli(
"-s", "--enable-syslog", action="store_true",
dest="enable_syslog", help=
"The syslog host, which may be an address like 'localhost:514' or, "
"on Unix/Linux, the path to a Unix domain socket such as '/dev/log'.")
# --syslog-host is to specify the syslog host.
logging.add_cli(
"--syslog-host", dest="syslog_host", help=
"Used to specify the syslog host."
" The default is 'localhost:514'")
# --syslog-facility is to specify the syslog facility.
logging.add_cli(
"--syslog-facility", dest="syslog_facility", help=
"Used to specify the syslog facility."
" The default is 'user'")
# --logfile-when specifies the type of interval of the rotating file
# (seconds, minutes, hours)
logging.add_cli("--logfile-when", action="store", dest="logfile_when",
type="string",
help="The type of interval for rotating the log file. "
"Should be one of "
"'S' (seconds), 'M' (minutes), 'H' (hours), "
"'D' (days), 'W0' - 'W6' (days of the week 0 - 6), "
"or 'midnight' (the default). See the Python documentation "
"for 'logging.handlers.TimedRotatingFileHandler' for more "
"details.")
# --logfile-interval specifies when to create a new log file
logging.add_cli("--logfile-interval", action="store",
dest="logfile_interval", type="int",
help="How frequently to rotate the log file, "
"specifically, how many units of the rotation interval "
"should pass before the rotation occurs. For example, "
"to create a new file each hour: "
" '--logfile-when=H --logfile-interval=1'. "
"Defaults to 1. You may not use this option if "
"--logfile-when is set to a weekday (W0 - W6). "
"See the Python documentation for "
"'logging.handlers.TimedRotatingFileHandler' for more "
"details. ")
# --logfile-backups specifies how many log files will be kept.
logging.add_cli("--logfile-backups", action="store",
dest="logfile_backups", type="int",
help="How many log files will be kept after rotation. "
"If set to zero, then no log files will be deleted. "
"Defaults to 7.")
def apply_authentication(option, cli_values):
if cli_values['admin_username']:
option.value['adminUsername'] = cli_values['admin_username']
if cli_values['password']:
option.value['password'] = cli_values['password']
if cli_values['password_file']:
option.value['passwordFile'] = cli_values['password_file']
if option.value.get("adminUsername"):
password = option.value.get("password")
passwordFile = option.value.get("passwordFile")
if not password and not passwordFile:
raise errors.InvalidConfiguration(
"Admin username specified without password.")
if password and passwordFile:
raise errors.InvalidConfiguration(
"Can't specify both password and password file.")
default_authentication = {
'adminUsername': None,
'password': None,
'passwordFile': None
}
authentication = add_option(
config_key="authentication",
default=default_authentication,
type=dict,
apply_function=apply_authentication)
# -a is to specify the username for authentication.
authentication.add_cli(
"-a", "--admin-username", dest="admin_username", help=
"Used to specify the username of an admin user to "
"authenticate with. To use authentication, the user "
"must specify both an admin username and a keyFile.")
# -p is to specify the password used for authentication.
authentication.add_cli(
"-p", "--password", dest="password", help=
"Used to specify the password."
" This is used by mongos to authenticate"
" connections to the shards, and in the"
" oplog threads. If authentication is not used, then"
" this field can be left empty as the default ")
    # -f is to specify a file containing the password for authentication. This
    # is used by mongos to authenticate connections to the shards, and in the
    # oplog threads.
authentication.add_cli(
"-f", "--password-file", dest="password_file", help=
"Used to store the password for authentication."
" Use this option if you wish to specify a"
" username and password but don't want to"
" type in the password. The contents of this"
" file should be the password for the admin user.")
def apply_fields(option, cli_values):
if cli_values['fields']:
option.value = cli_values['fields'].split(",")
for field in option.value:
if '.' in field:
print(
"WARNING: mongo-connector can only successfully filter "
"sub-document fields for inserts and updates, "
"not replacements. To catch all changes on "
"a sub-document field, specify the name of the "
"sub-document instead. You are seeing this "
"message because you passed the name of a nested field "
"to the 'fields' option: %s" % field)
break
fields = add_option(
config_key="fields",
default=[],
type=list,
apply_function=apply_fields)
# -i to specify the list of fields to export
fields.add_cli(
"-i", "--fields", dest="fields", help=
"Used to specify the list of fields to export. "
"Specify a field or fields to include in the export. "
"Use a comma separated list of fields to specify multiple "
"fields. The '_id', 'ns' and '_ts' fields are always "
"exported.")
def apply_namespaces(option, cli_values):
if cli_values['ns_set']:
option.value['include'] = cli_values['ns_set'].split(',')
if cli_values['gridfs_set']:
option.value['gridfs'] = cli_values['gridfs_set'].split(',')
if cli_values['dest_ns_set']:
ns_set = option.value['include']
dest_ns_set = cli_values['dest_ns_set'].split(',')
if len(ns_set) != len(dest_ns_set):
raise errors.InvalidConfiguration(
"Destination namespace set should be the"
" same length as the origin namespace set.")
option.value['mapping'] = dict(zip(ns_set, dest_ns_set))
ns_set = option.value['include']
if len(ns_set) != len(set(ns_set)):
raise errors.InvalidConfiguration(
"Namespace set should not contain any duplicates.")
dest_mapping = option.value['mapping']
if len(dest_mapping) != len(set(dest_mapping.values())):
raise errors.InvalidConfiguration(
"Destination namespaces set should not"
" contain any duplicates.")
gridfs_set = option.value['gridfs']
if len(gridfs_set) != len(set(gridfs_set)):
raise errors.InvalidConfiguration(
"GridFS set should not contain any duplicates.")
default_namespaces = {
"include": [],
"mapping": {},
"gridfs": []
}
namespaces = add_option(
config_key="namespaces",
default=default_namespaces,
type=dict,
apply_function=apply_namespaces)
# -n is to specify the namespaces we want to consider. The default
# considers all the namespaces
namespaces.add_cli(
"-n", "--namespace-set", dest="ns_set", help=
"Used to specify the namespaces we want to "
"consider. For example, if we wished to store all "
"documents from the test.test and alpha.foo "
"namespaces, we could use `-n test.test,alpha.foo`. "
"The default is to consider all the namespaces, "
"excluding the system and config databases, and "
"also ignoring the \"system.indexes\" collection in "
"any database.")
# -g is the destination namespace
namespaces.add_cli(
"-g", "--dest-namespace-set", dest="dest_ns_set", help=
"Specify a destination namespace mapping. Each "
"namespace provided in the --namespace-set option "
"will be mapped respectively according to this "
"comma-separated list. These lists must have "
"equal length. The default is to use the identity "
"mapping. This is currently only implemented "
"for mongo-to-mongo connections.")
# --gridfs-set is the set of GridFS namespaces to consider
namespaces.add_cli(
"--gridfs-set", dest="gridfs_set", help=
"Used to specify the GridFS namespaces we want to "
"consider. For example, if your metadata is stored in "
"test.fs.files and chunks are stored in test.fs.chunks, "
"you can use `--gridfs-set test.fs`.")
def apply_doc_managers(option, cli_values):
if cli_values['doc_manager'] is None:
if cli_values['target_url']:
raise errors.InvalidConfiguration(
"Cannot create a Connector with a target URL"
" but no doc manager.")
else:
if option.value is not None:
bulk_size = option.value[0].get(
'bulkSize', constants.DEFAULT_MAX_BULK)
else:
bulk_size = constants.DEFAULT_MAX_BULK
option.value = [{
'docManager': cli_values['doc_manager'],
'targetURL': cli_values['target_url'],
'uniqueKey': cli_values['unique_key'],
'autoCommitInterval': cli_values['auto_commit_interval'],
'bulkSize': bulk_size
}]
if not option.value:
return
# validate doc managers and fill in default values
for dm in option.value:
if not isinstance(dm, dict):
raise errors.InvalidConfiguration(
"Elements of docManagers must be a dict.")
if 'docManager' not in dm:
raise errors.InvalidConfiguration(
"Every element of docManagers"
" must contain 'docManager' property.")
if not dm.get('targetURL'):
dm['targetURL'] = None
if not dm.get('uniqueKey'):
dm['uniqueKey'] = constants.DEFAULT_UNIQUE_KEY
if dm.get('autoCommitInterval') is None:
dm['autoCommitInterval'] = constants.DEFAULT_COMMIT_INTERVAL
if not dm.get('args'):
dm['args'] = {}
if not dm.get('bulkSize'):
dm['bulkSize'] = constants.DEFAULT_MAX_BULK
aci = dm['autoCommitInterval']
if aci is not None and aci < 0:
raise errors.InvalidConfiguration(
"autoCommitInterval must be non-negative.")
def import_dm_by_name(name):
try:
full_name = "mongo_connector.doc_managers.%s" % name
# importlib doesn't exist in 2.6, but __import__ is everywhere
module = __import__(full_name, fromlist=(name,))
dm_impl = module.DocManager
if not issubclass(dm_impl, DocManagerBase):
raise TypeError("DocManager must inherit DocManagerBase.")
return module
            except ImportError:
                raise errors.InvalidConfiguration(
                    "Could not import %s." % full_name)
            except (AttributeError, TypeError):
                raise errors.InvalidConfiguration(
                    "No definition for DocManager found in %s." % full_name)
# instantiate the doc manager objects
dm_instances = []
for dm in option.value:
module = import_dm_by_name(dm['docManager'])
kwargs = {
'unique_key': dm['uniqueKey'],
'auto_commit_interval': dm['autoCommitInterval'],
'chunk_size': dm['bulkSize']
}
for k in dm['args']:
if k not in kwargs:
kwargs[k] = dm['args'][k]
target_url = dm['targetURL']
if target_url:
dm_instances.append(module.DocManager(target_url, **kwargs))
else:
dm_instances.append(module.DocManager(**kwargs))
option.value = dm_instances
doc_managers = add_option(
config_key="docManagers",
default=None,
type=list,
apply_function=apply_doc_managers)
# -d is to specify the doc manager file.
doc_managers.add_cli(
"-d", "--doc-manager", dest="doc_manager", help=
"Used to specify the path to each doc manager "
"file that will be used. DocManagers should be "
"specified in the same order as their respective "
"target addresses in the --target-urls option. "
"URLs are assigned to doc managers "
"respectively. Additional doc managers are "
"implied to have no target URL. Additional URLs "
"are implied to have the same doc manager type as "
"the last doc manager for which a URL was "
"specified. By default, Mongo Connector will use "
"'doc_manager_simulator.py'. It is recommended "
"that all doc manager files be kept in the "
"doc_managers folder in mongo-connector. For "
"more information about making your own doc "
"manager, see 'Writing Your Own DocManager' "
"section of the wiki")
    # -t is to specify the URL of each target system.
doc_managers.add_cli(
"-t", "--target-url",
dest="target_url", help=
"Specify the URL to each target system being "
"used. For example, if you were using Solr out of "
"the box, you could use '-t "
"http://localhost:8080/solr' with the "
"SolrDocManager to establish a proper connection. "
"URLs should be specified in the same order as "
"their respective doc managers in the "
"--doc-managers option. URLs are assigned to doc "
"managers respectively. Additional doc managers "
"are implied to have no target URL. Additional "
"URLs are implied to have the same doc manager "
"type as the last doc manager for which a URL was "
"specified. "
"Don't use quotes around addresses. ")
# -u is to specify the mongoDB field that will serve as the unique key
# for the target system,
doc_managers.add_cli(
"-u", "--unique-key", dest="unique_key", help=
"The name of the MongoDB field that will serve "
"as the unique key for the target system. "
"Note that this option does not apply "
"when targeting another MongoDB cluster. "
"Defaults to \"_id\".")
# --auto-commit-interval to specify auto commit time interval
doc_managers.add_cli(
"--auto-commit-interval", type="int",
dest="auto_commit_interval", help=
"Seconds in-between calls for the Doc Manager"
" to commit changes to the target system. A value of"
" 0 means to commit after every write operation."
" When left unset, Mongo Connector will not make"
" explicit commits. Some systems have"
" their own mechanism for adjusting a commit"
" interval, which should be preferred to this"
" option.")
continue_on_error = add_option(
config_key="continueOnError",
default=False,
type=bool)
def apply_ssl(option, cli_values):
option.value = option.value or {}
ssl_certfile = cli_values.pop('ssl_certfile')
ssl_keyfile = cli_values.pop('ssl_keyfile')
ssl_cert_reqs = cli_values.pop('ssl_cert_reqs')
ssl_ca_certs = (
cli_values.pop('ssl_ca_certs') or option.value.get('sslCACerts'))
if ssl_cert_reqs and ssl_cert_reqs != 'ignored' and not ssl_ca_certs:
raise errors.InvalidConfiguration(
'--ssl-ca-certs must be provided if the '
'--ssl-certificate-policy is not "ignored".')
option.value.setdefault('sslCertfile', ssl_certfile)
option.value.setdefault('sslCACerts', ssl_ca_certs)
option.value.setdefault('sslKeyfile', ssl_keyfile)
option.value['sslCertificatePolicy'] = _SSL_POLICY_MAP.get(
ssl_cert_reqs)
ssl = add_option(
config_key="ssl",
default={},
type=dict,
apply_function=apply_ssl)
ssl.add_cli(
'--ssl-certfile', dest='ssl_certfile',
help=('Path to a certificate identifying the local connection '
'to MongoDB.')
)
ssl.add_cli(
'--ssl-keyfile', dest='ssl_keyfile',
help=('Path to the private key for --ssl-certfile. '
'Not necessary if already included in --ssl-certfile.')
)
ssl.add_cli(
'--ssl-certificate-policy', dest='ssl_cert_reqs',
choices=('required', 'optional', 'ignored'),
help=('Policy for validating SSL certificates provided from the other '
'end of the connection. There are three possible values: '
'required = Require and validate the remote certificate. '
'optional = Validate the remote certificate only if one '
'is provided. '
'ignored = Remote SSL certificates are ignored completely.')
)
ssl.add_cli(
'--ssl-ca-certs', dest='ssl_ca_certs',
help=('Path to a concatenated set of certificate authority '
'certificates to validate the other side of the connection. ')
)
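    # --- Illustrative sketch (not part of the upstream module) -------------
    # For hypothetical CLI input
    #   --ssl-certificate-policy required --ssl-ca-certs /path/to/ca.pem
    # apply_ssl produces roughly the following; the policy string is mapped
    # through _SSL_POLICY_MAP defined elsewhere in this module, and the
    # CA path is sample data.
    _EXAMPLE_SSL_VALUE = {
        'sslCertfile': None,
        'sslKeyfile': None,
        'sslCACerts': '/path/to/ca.pem',
        'sslCertificatePolicy': _SSL_POLICY_MAP.get('required'),
    }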
# --continue-on-error to continue to upsert documents during a collection
# dump, even if the documents cannot be inserted for some reason
continue_on_error.add_cli(
"--continue-on-error", action="store_true",
dest="continue_on_error", help=
"By default, if any document fails to upsert"
" during a collection dump, the entire operation fails."
" When this flag is enabled, normally fatal errors"
" will be caught and logged, allowing the collection"
" dump to continue.\n"
"Note: Applying oplog operations to an incomplete"
" set of documents due to errors may cause undefined"
" behavior. Use this flag to dump only.")
config_file = add_option()
config_file.add_cli(
"-c", "--config-file", dest="config_file", help=
"Specify a JSON file to load configurations from. You can find"
" an example config file at mongo-connector/config.json")
tz_aware = add_option(
config_key="timezoneAware", default=False, type=bool)
tz_aware.add_cli(
"--tz-aware", dest="tz_aware", action="store_true",
help="Make all dates and times timezone-aware.")
return result
def setup_logging(conf):
root_logger = logging.getLogger()
formatter = logging.Formatter(
"%(asctime)s [%(levelname)s] %(name)s:%(lineno)d - %(message)s")
log_levels = [
logging.ERROR,
logging.WARNING,
logging.INFO,
logging.DEBUG
]
loglevel = log_levels[conf['verbosity']]
root_logger.setLevel(loglevel)
if conf['logging.type'] == 'file':
log_out = logging.handlers.TimedRotatingFileHandler(
conf['logging.filename'],
when=conf['logging.rotationWhen'],
interval=conf['logging.rotationInterval'],
backupCount=conf['logging.rotationBackups']
)
print("Logging to %s." % conf['logging.filename'])
elif conf['logging.type'] == 'syslog':
syslog_info = conf['logging.host']
if ':' in syslog_info:
log_host, log_port = syslog_info.split(':')
syslog_info = (log_host, int(log_port))
log_out = logging.handlers.SysLogHandler(
address=syslog_info,
facility=conf['logging.facility']
)
print("Logging to system log at %s" % conf['logging.host'])
elif conf['logging.type'] == 'stream':
log_out = logging.StreamHandler()
else:
print("Logging type must be one of 'stream', 'syslog', or 'file', not "
"'%s'." % conf['logging.type'])
sys.exit(1)
log_out.setLevel(loglevel)
log_out.setFormatter(formatter)
root_logger.addHandler(log_out)
return root_logger
@log_fatal_exceptions
def main():
""" Starts the mongo connector (assuming CLI)
"""
conf = config.Config(get_config_options())
conf.parse_args()
setup_logging(conf)
LOG.info('Beginning Mongo Connector')
connector = Connector.from_config(conf)
connector.start()
while True:
try:
time.sleep(3)
if not connector.is_alive():
break
except KeyboardInterrupt:
LOG.info("Caught keyboard interrupt, exiting!")
connector.join()
break
if __name__ == '__main__':
main()
| apache-2.0 |
maohongyuan/kbengine | kbe/res/scripts/common/Lib/asyncio/queues.py | 63 | 9019 | """Queues"""
__all__ = ['Queue', 'PriorityQueue', 'LifoQueue', 'JoinableQueue',
'QueueFull', 'QueueEmpty']
import collections
import heapq
from . import events
from . import futures
from . import locks
from .tasks import coroutine
class QueueEmpty(Exception):
'Exception raised by Queue.get(block=0)/get_nowait().'
pass
class QueueFull(Exception):
'Exception raised by Queue.put(block=0)/put_nowait().'
pass
class Queue:
"""A queue, useful for coordinating producer and consumer coroutines.
If maxsize is less than or equal to zero, the queue size is infinite. If it
is an integer greater than 0, then "yield from put()" will block when the
queue reaches maxsize, until an item is removed by get().
Unlike the standard library Queue, you can reliably know this Queue's size
with qsize(), since your single-threaded asyncio application won't be
interrupted between calling qsize() and doing an operation on the Queue.
"""
def __init__(self, maxsize=0, *, loop=None):
if loop is None:
self._loop = events.get_event_loop()
else:
self._loop = loop
self._maxsize = maxsize
# Futures.
self._getters = collections.deque()
# Pairs of (item, Future).
self._putters = collections.deque()
self._init(maxsize)
def _init(self, maxsize):
self._queue = collections.deque()
def _get(self):
return self._queue.popleft()
def _put(self, item):
self._queue.append(item)
def __repr__(self):
return '<{} at {:#x} {}>'.format(
type(self).__name__, id(self), self._format())
def __str__(self):
return '<{} {}>'.format(type(self).__name__, self._format())
def _format(self):
result = 'maxsize={!r}'.format(self._maxsize)
if getattr(self, '_queue', None):
result += ' _queue={!r}'.format(list(self._queue))
if self._getters:
result += ' _getters[{}]'.format(len(self._getters))
if self._putters:
result += ' _putters[{}]'.format(len(self._putters))
return result
def _consume_done_getters(self):
# Delete waiters at the head of the get() queue who've timed out.
while self._getters and self._getters[0].done():
self._getters.popleft()
def _consume_done_putters(self):
# Delete waiters at the head of the put() queue who've timed out.
while self._putters and self._putters[0][1].done():
self._putters.popleft()
def qsize(self):
"""Number of items in the queue."""
return len(self._queue)
@property
def maxsize(self):
"""Number of items allowed in the queue."""
return self._maxsize
def empty(self):
"""Return True if the queue is empty, False otherwise."""
return not self._queue
def full(self):
"""Return True if there are maxsize items in the queue.
Note: if the Queue was initialized with maxsize=0 (the default),
then full() is never True.
"""
if self._maxsize <= 0:
return False
else:
return self.qsize() >= self._maxsize
@coroutine
def put(self, item):
"""Put an item into the queue.
If you yield from put(), wait until a free slot is available
before adding item.
"""
self._consume_done_getters()
if self._getters:
assert not self._queue, (
'queue non-empty, why are getters waiting?')
getter = self._getters.popleft()
# Use _put and _get instead of passing item straight to getter, in
# case a subclass has logic that must run (e.g. JoinableQueue).
self._put(item)
getter.set_result(self._get())
elif self._maxsize > 0 and self._maxsize <= self.qsize():
waiter = futures.Future(loop=self._loop)
self._putters.append((item, waiter))
yield from waiter
else:
self._put(item)
def put_nowait(self, item):
"""Put an item into the queue without blocking.
If no free slot is immediately available, raise QueueFull.
"""
self._consume_done_getters()
if self._getters:
assert not self._queue, (
'queue non-empty, why are getters waiting?')
getter = self._getters.popleft()
# Use _put and _get instead of passing item straight to getter, in
# case a subclass has logic that must run (e.g. JoinableQueue).
self._put(item)
getter.set_result(self._get())
elif self._maxsize > 0 and self._maxsize <= self.qsize():
raise QueueFull
else:
self._put(item)
@coroutine
def get(self):
"""Remove and return an item from the queue.
        If you yield from get(), wait until an item is available.
"""
self._consume_done_putters()
if self._putters:
assert self.full(), 'queue not full, why are putters waiting?'
item, putter = self._putters.popleft()
self._put(item)
# When a getter runs and frees up a slot so this putter can
# run, we need to defer the put for a tick to ensure that
# getters and putters alternate perfectly. See
# ChannelTest.test_wait.
self._loop.call_soon(putter._set_result_unless_cancelled, None)
return self._get()
elif self.qsize():
return self._get()
else:
waiter = futures.Future(loop=self._loop)
self._getters.append(waiter)
return (yield from waiter)
def get_nowait(self):
"""Remove and return an item from the queue.
Return an item if one is immediately available, else raise QueueEmpty.
"""
self._consume_done_putters()
if self._putters:
assert self.full(), 'queue not full, why are putters waiting?'
item, putter = self._putters.popleft()
self._put(item)
# Wake putter on next tick.
putter.set_result(None)
return self._get()
elif self.qsize():
return self._get()
else:
raise QueueEmpty
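# --- Illustrative usage sketch (not part of the standard library file) -----
# A minimal round-trip on a bounded Queue; with maxsize=1, a second put()
# on the full queue would block until the get() frees the slot.
@coroutine
def _example_queue_roundtrip(loop=None):
    q = Queue(maxsize=1, loop=loop)
    yield from q.put('hello')
    item = yield from q.get()
    return item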
class PriorityQueue(Queue):
"""A subclass of Queue; retrieves entries in priority order (lowest first).
Entries are typically tuples of the form: (priority number, data).
"""
def _init(self, maxsize):
self._queue = []
def _put(self, item, heappush=heapq.heappush):
heappush(self._queue, item)
def _get(self, heappop=heapq.heappop):
return heappop(self._queue)
class LifoQueue(Queue):
"""A subclass of Queue that retrieves most recently added entries first."""
def _init(self, maxsize):
self._queue = []
def _put(self, item):
self._queue.append(item)
def _get(self):
return self._queue.pop()
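# --- Illustrative sketch (not part of the standard library file) -----------
# Demonstrates the retrieval order of the two subclasses above; sample
# items are hypothetical.
def _example_retrieval_order():
    pq = PriorityQueue()
    pq.put_nowait((2, 'second'))
    pq.put_nowait((1, 'first'))
    lq = LifoQueue()
    lq.put_nowait('older')
    lq.put_nowait('newer')
    # PriorityQueue yields the lowest entry; LifoQueue the newest.
    return pq.get_nowait(), lq.get_nowait()  # ((1, 'first'), 'newer')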
class JoinableQueue(Queue):
"""A subclass of Queue with task_done() and join() methods."""
def __init__(self, maxsize=0, *, loop=None):
super().__init__(maxsize=maxsize, loop=loop)
self._unfinished_tasks = 0
self._finished = locks.Event(loop=self._loop)
self._finished.set()
def _format(self):
result = Queue._format(self)
if self._unfinished_tasks:
result += ' tasks={}'.format(self._unfinished_tasks)
return result
def _put(self, item):
super()._put(item)
self._unfinished_tasks += 1
self._finished.clear()
def task_done(self):
"""Indicate that a formerly enqueued task is complete.
Used by queue consumers. For each get() used to fetch a task,
a subsequent call to task_done() tells the queue that the processing
on the task is complete.
If a join() is currently blocking, it will resume when all items have
been processed (meaning that a task_done() call was received for every
item that had been put() into the queue).
Raises ValueError if called more times than there were items placed in
the queue.
"""
if self._unfinished_tasks <= 0:
raise ValueError('task_done() called too many times')
self._unfinished_tasks -= 1
if self._unfinished_tasks == 0:
self._finished.set()
@coroutine
def join(self):
"""Block until all items in the queue have been gotten and processed.
The count of unfinished tasks goes up whenever an item is added to the
queue. The count goes down whenever a consumer thread calls task_done()
to indicate that the item was retrieved and all work on it is complete.
When the count of unfinished tasks drops to zero, join() unblocks.
"""
if self._unfinished_tasks > 0:
yield from self._finished.wait()
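# --- Illustrative usage sketch (not part of the standard library file) -----
# Enqueue every item, drain the queue while acknowledging each one with
# task_done(), then join() returns immediately because the unfinished
# count is back to zero.
@coroutine
def _example_join_usage(items, loop=None):
    q = JoinableQueue(loop=loop)
    for item in items:
        yield from q.put(item)
    while not q.empty():
        q.get_nowait()
        q.task_done()
    yield from q.join()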
| lgpl-3.0 |