import os
from carton.cart import Cart
from django.contrib import messages
from django.shortcuts import render, redirect
from django.template.response import TemplateResponse
from django.views.generic import View
from authentication.views import LoginRequiredMixin
from deals.models import Deal
from .models import UserShippingDetails
class CheckoutView(LoginRequiredMixin, View):
"""
Creates a checkout page.
Attributes:
template_name: name of the template that renders the view
stripe_secret_api_key: the secret API key for our stripe account
stripe_publishable_api_key: the publishable API key
"""
template_name = 'cart/checkout.html'
stripe_secret_api_key = os.getenv('STRIPE_SECRET_API_KEY')
stripe_publishable_api_key = os.getenv('STRIPE_PUBLISHABLE_API_KEY')
def get(self, request, **kwargs):
"""
Create checkout page.
Gets shopping information from cart and sends it to the payment app
in form of a dict. It then renders the checkout template which can then
be used to pay.
Args:
request: The incoming get request object
**kwargs: Any keyword arguments passed to the function
Returns:
A template rendered with the payment details context
"""
cart = Cart(request.session)
amount = cart.total
        amount_in_cents = int(amount * 100)  # convert to cents before truncating so fractional dollars are kept
title = "Total payment expected"
description = "Troupon shopping"
payment_details = {
"title": title,
"key": self.stripe_publishable_api_key,
"amount": amount_in_cents,
"description": description,
"currency": "usd",
}
request.session['payment_details'] = payment_details
context = {
"amount": amount,
"title": title,
"description": description,
"payment_details": payment_details,
}
return render(request, self.template_name, context)
class AddToCartView(LoginRequiredMixin, View):
"""
Add items to cart.
When a logged in person clicks on Add to cart on a deal, this view
adds the item to the cart.
Attributes:
LoginRequiredMixin: Ensures the user is logged in
View: Normal django view
"""
def post(self, request, **kwargs):
"""
Add item to cart.
Args:
request: The incoming post request object
**kwargs: Any keyword arguments passed to the function
Returns:
A redirect to the deals homepage
"""
dealid = request.POST.get('dealid')
deal = Deal.objects.get(id=dealid)
cart = Cart(request.session)
cart.add(deal, price=deal.price)
return redirect('/')
class AddShippingDetails(LoginRequiredMixin, View):
"""
Add shipping details of user.
When a logged in user clicks on proceed to checkout this view
gets the shipping details of the user
Attributes:
LoginRequiredMixin: Ensures the user is logged in
View: Normal django view
"""
def get(self, request):
cart = Cart(request.session)
context = {'cart': cart}
return TemplateResponse(request, 'cart/shipping.html', context)
def post(self, request, **kwargs):
"""
Add shipping details.
Args:
request: The incoming post request object
**kwargs: Any keyword arguments passed to the function
Returns:
A redirect to the checkout page
"""
user = request.user
street = request.POST.get('street')
state = request.POST.get('state')
postal = request.POST.get('postal')
telephone = request.POST.get('telephone')
        shipping = UserShippingDetails(user=user, street=street, postal=postal,
                                       state=state, telephone=telephone)
shipping.save()
cart = Cart(request.session)
context = {'cart': cart}
return TemplateResponse(request, 'cart/checkout.html', context)
class ViewCartView(LoginRequiredMixin, View):
"""
Allow user to view all the items in the cart.
A logged in user with items in the cart can see a
summary of them and their prices.
Attributes:
LoginRequiredMixin: Ensures the user is logged in
View: Normal django view
"""
def get(self, request, **kwargs):
"""
Show cart items.
Args:
request: The incoming get request object
**kwargs: Any keyword arguments passed to the function
Returns:
A template rendered with all the cart items.
"""
cart = Cart(request.session)
context = {'cart': cart}
return TemplateResponse(request, 'cart/cart.html', context)
class ClearCartView(LoginRequiredMixin, View):
"""
Clear items in cart.
When triggered, removes every item in the cart session
and leaves it empty.
Attributes:
LoginRequiredMixin: Ensures the user is logged in
View: Normal django view
"""
def get(self, request, **kwargs):
"""
Get cart from session and remove everything from it.
Args:
request: The incoming get request object
**kwargs: Any keyword arguments passed to the function
Returns:
A redirect to the deals homepage
"""
cart = Cart(request.session)
cart.clear()
return redirect('/')
class RemoveItemView(LoginRequiredMixin, View):
"""
Remove item from cart.
When triggered, removes a particular item from the cart session
based on its id.
Attributes:
LoginRequiredMixin: Ensures the user is logged in
View: Normal django view
"""
def post(self, request, **kwargs):
"""
Remove item from cart.
Args:
request: The incoming get request object
**kwargs: Any keyword arguments passed to the function
Returns:
A redirect to the deals homepage
"""
dealid = request.POST.get('dealid')
deal = Deal.objects.get(id=dealid)
cart = Cart(request.session)
cart.remove(deal)
return redirect('/')
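# Illustrative only: a minimal urls.py sketch showing how these views could be
# wired up. The route paths and names below are assumptions, not taken from
# the project, so the snippet is left commented out.
#
# from django.conf.urls import url
# from cart import views
#
# urlpatterns = [
#     url(r'^checkout/$', views.CheckoutView.as_view(), name='checkout'),
#     url(r'^add/$', views.AddToCartView.as_view(), name='add_to_cart'),
#     url(r'^shipping/$', views.AddShippingDetails.as_view(), name='shipping'),
#     url(r'^view/$', views.ViewCartView.as_view(), name='view_cart'),
#     url(r'^clear/$', views.ClearCartView.as_view(), name='clear_cart'),
#     url(r'^remove/$', views.RemoveItemView.as_view(), name='remove_item'),
# ]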
| andela/troupon | troupon/cart/views.py | Python | mit | 6,304 |
from django.views.generic import ListView
from django.http import HttpResponse
from .models import Job
from geoq.maps.models import FeatureType
from django.shortcuts import get_object_or_404
from datetime import datetime
from pytz import timezone
from webcolors import name_to_hex, normalize_hex
from xml.sax.saxutils import escape as xml_escape
class JobKML(ListView):
model = Job
def get(self, request, *args, **kwargs):
job = get_object_or_404(Job, pk=self.kwargs.get('pk'))
feature_types = FeatureType.objects.all()
aoi_count = job.total_count()
aoi_complete = job.complete_count()
aoi_work = job.in_work_count()
cookie_url_trailer = get_cookie_trailer(request)
description = 'Job #'+str(job.id)+': '+str(job.name)+'\n'+str(job.project.name)+'\n'
if aoi_count == 0:
output = '<?xml version="1.0" encoding="UTF-8"?>\n'
output += '<kml xmlns="http://www.opengis.net/kml/2.2">\n'
output += ' <Document>\n'
output += ' <name>Empty Job</name>\n'
output += ' <description>'+description+'</description>\n'
output += ' </Document>\n'
output += '</kml>\n'
            return HttpResponse(output, content_type="application/vnd.google-earth.kml+xml", status=200)
        aoi_comp_pct = int(100 * float(aoi_complete)/float(aoi_count))
aoi_work_pct = int(100 * float(aoi_work)/float(aoi_count))
aoi_tot_pct = int(100 * float(aoi_work+aoi_complete)/float(aoi_count))
doc_name = 'GeoQ C:'+str(aoi_complete)+', W:'+str(aoi_work)+', Tot:'+str(aoi_count)+' ['+str(aoi_tot_pct)+'%]'
description = description + 'Complete Cells: ' + str(aoi_complete) + ' ['+str(aoi_comp_pct)+'%], In Work: ' + str(aoi_work) + ' ['+str(aoi_work_pct)+'%], Total: ' + str(aoi_count)
output = '<?xml version="1.0" encoding="UTF-8"?>\n'
output += '<kml xmlns="http://www.opengis.net/kml/2.2">\n'
output += ' <Document>\n'
output += ' <name>'+doc_name+'</name>\n'
output += ' <description>'+description+'</description>\n'
output += ' <Style id="geoq_inwork">\n'
output += ' <LineStyle>\n'
output += ' <width>4</width>\n'
output += ' <color>7f0186cf</color>\n'
output += ' </LineStyle>\n'
output += ' <PolyStyle>\n'
output += ' <fill>0</fill>\n'
output += ' <outline>1</outline>\n'
output += ' </PolyStyle>\n'
output += ' </Style>\n'
output += ' <Style id="geoq_complete">\n'
output += ' <LineStyle>\n'
output += ' <width>3</width>\n'
output += ' <color>7f0101cf</color>\n'
output += ' </LineStyle>\n'
output += ' <PolyStyle>\n'
output += ' <fill>0</fill>\n'
output += ' <outline>1</outline>\n'
output += ' </PolyStyle>\n'
output += ' </Style>\n'
output += ' <Style id="geoq_unassigned">\n'
output += ' <LineStyle>\n'
output += ' <width>2</width>\n'
output += ' <color>7f00ff00</color>\n'
output += ' </LineStyle>\n'
output += ' <PolyStyle>\n'
output += ' <fill>0</fill>\n'
output += ' <outline>1</outline>\n'
output += ' </PolyStyle>\n'
output += ' </Style>\n'
for feature in feature_types:
output += ' <Style id="geoq_'+str(feature.id)+'">\n'
out_color = '7f0066ff'
            if feature.style is None:
output += ' </Style>\n'
continue
if 'color' in feature.style:
color = feature.style['color']
#convert to a kml-recognized color
if color[0:1] == '#' and len(color) == 4:
color = normalize_hex(color)
try:
c = name_to_hex(color)
out_color = '7f' + c[5:7] + c[3:5] + c[1:3]
except Exception:
out_color = '7f0066ff'
output += ' <PolyStyle>\n'
output += ' <color>'+out_color+'</color>\n'
output += ' <colorMode>normal</colorMode>\n'
output += ' <fill>1</fill>\n'
output += ' <outline>1</outline>\n'
output += ' </PolyStyle>\n'
if 'weight' in feature.style:
output += ' <LineStyle>\n'
output += ' <width>'+str(feature.style['weight'])+'</width>\n'
if 'color' in feature.style:
output += ' <color>'+out_color+'</color>\n'
output += ' </LineStyle>\n'
if 'iconUrl' in feature.style:
icon_url = str(feature.style['iconUrl'])
if not icon_url.startswith("http"):
icon_url = request.build_absolute_uri(icon_url)
else:
icon_url += cookie_url_trailer
output += ' <IconStyle>\n'
output += ' <Icon>\n'
output += ' <href>' + xml_escape(icon_url) + '</href>\n'
output += ' </Icon>\n'
output += ' </IconStyle>\n'
output += ' </Style>\n'
# locations = job.feature_set.all().order_by('template')
locations = job.feature_set.all()\
.extra(tables=['maps_featuretype'])\
.extra(where=['maps_featuretype.id=maps_feature.template_id'])\
.order_by('maps_featuretype.name')
last_template = ""
skip_the_first = True
template_has_started = False
for loc in locations:
template_name = str(loc.template.name)
if template_name != last_template:
if skip_the_first:
skip_the_first = False
else:
output += ' </Folder>\n'
output += ' <Folder><name>'+template_name+'</name>\n'
last_template = template_name
template_has_started = True
analyst_name = str(loc.analyst.username)
dtg = str(loc.created_at)
job_id = str(loc.job.id)
#TODO: Add links to Jobs and Projects
datetime_obj = datetime.strptime(dtg, "%Y-%m-%d %H:%M:%S.%f+00:00")
datetime_obj_utc = datetime_obj.replace(tzinfo=timezone('UTC'))
date_time = datetime_obj_utc.strftime('%Y-%m-%dT%H:%M:%SZ')
date_time_desc = datetime_obj_utc.strftime('%Y-%m-%d %H:%M:%S')
desc = 'Posted by '+analyst_name+' at '+date_time_desc+' Zulu (UTC) in Job #'+job_id
#TODO: Add more details
#TODO: Add links to linked objects
#Simplify polygons to reduce points in complex shapes
if loc.the_geom.num_coords > 0: #skip empty locations
simplegeom = loc.the_geom.simplify(0.0002)
if simplegeom.num_coords > 0:
kml = str(loc.the_geom.simplify(0.0002).kml)
else:
kml = str(loc.the_geom.kml)
if '<Polygon><outerBoundaryIs><LinearRing><coordinates>' in kml:
add_text = '<altitudeMode>clampToGround</altitudeMode>'
kml = kml.replace('<coordinates>', add_text+'<coordinates>')
kml = kml.replace('</outerBoundaryIs></Polygon><Polygon><outerBoundaryIs><LinearRing>', '')
output += ' <Placemark><name>'+template_name+'</name>\n'
output += ' <TimeStamp><when>'+date_time+'</when></TimeStamp>\n'
output += ' <description>'+desc+'</description>\n'
output += ' <styleUrl>#geoq_'+str(loc.template.id)+'</styleUrl>\n'
output += ' '+str(kml)+'\n'
output += ' </Placemark>\n'
if template_has_started:
output += ' </Folder>\n'
output += ' <Folder><name>Work Cells</name>\n'
aois = job.aois.order_by('status')
for aoi in aois:
style = 'complete'
if aoi.status == 'In work':
style = 'inwork'
if aoi.status == 'Unassigned':
style = 'unassigned'
aoi_name = "#"+str(aoi.id)+", "+str(aoi.status)+" - Priority:"+str(aoi.priority)
kml = str(aoi.polygon.simplify(0.0002).kml)
if '<Polygon><outerBoundaryIs><LinearRing><coordinates>' in kml:
add_text = '<tessellate>1</tessellate><altitudeMode>clampToGround</altitudeMode>'
kml = kml.replace('<coordinates>', add_text+'<coordinates>')
output += ' <Placemark>\n'
output += ' <name>'+aoi_name+'</name>\n'
output += ' <styleUrl>#geoq_'+style+'</styleUrl>\n'
output += ' '+kml+'\n'
output += ' </Placemark>\n'
output += ' </Folder>\n'
output += ' </Document>\n'
output += '</kml>'
return HttpResponse(output, content_type="application/vnd.google-earth.kml+xml", status=200)
def get_cookie_trailer(request):
cookies_to_look_for = ['iPlanetDirectoryPro'] #TODO: Pull this from an admin setting
cookie_url_trailer = ''
for cook in cookies_to_look_for:
cookie = request.COOKIES.get(cook, None)
if cookie:
cookie_url_trailer += cook + "=" + cookie
if cookie_url_trailer:
cookie_url_trailer = "?" + cookie_url_trailer
return cookie_url_trailer
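# For example (illustrative value): with a request cookie
# iPlanetDirectoryPro=abc123 present, get_cookie_trailer() returns
# '?iPlanetDirectoryPro=abc123'; with no matching cookie it returns ''.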
class JobKMLNetworkLink(ListView):
model = Job
def get(self, request, *args, **kwargs):
id = self.kwargs.get('pk')
job = get_object_or_404(Job, pk=id)
setting_zoom_auto = True #TODO: Pull from settings
settings_refresh_every = 90 #TODO: Pull from settings
cookie_url_trailer = get_cookie_trailer(request)
url = request.build_absolute_uri('/geoq/api/job/'+id+'.kml' + cookie_url_trailer)
aoi_count = job.total_count()
aoi_complete = job.complete_count()
aoi_work = job.in_work_count()
aoi_comp_pct = int(100 * float(aoi_complete)/float(aoi_count)) if aoi_count > 0 else 0
aoi_work_pct = int(100 * float(aoi_work)/float(aoi_count)) if aoi_count > 0 else 0
aoi_tot_pct = int(100 * float(aoi_work+aoi_complete)/float(aoi_count)) if aoi_count > 0 else 0
doc_name = 'GeoQ C:'+str(aoi_complete)+', W:'+str(aoi_work)+', Tot:'+str(aoi_count)+' ['+str(aoi_tot_pct)+'%]'
description = 'Job #'+str(job.id)+': '+str(job.name)+'\n'+str(job.project.name)+'\n'
description = description + 'Complete Cells: ' + str(aoi_complete) + ' ['+str(aoi_comp_pct)+'%], In Work: ' + str(aoi_work) + ' ['+str(aoi_work_pct)+'%], Total: ' + str(aoi_count)
output = '<?xml version="1.0" encoding="UTF-8"?>\n'
output += '<kml xmlns="http://www.opengis.net/kml/2.2">\n'
output += ' <Folder>\n'
output += ' <name>GeoQ Worked Cells</name>\n'
output += ' <visibility>1</visibility>\n'
output += ' <open>1</open>\n'
output += ' <description>Work progress from GeoQ</description>\n'
output += ' <NetworkLink id="GeoQ-'+id+'">\n'
output += ' <name>'+doc_name+'</name>\n'
output += ' <visibility>1</visibility>\n'
output += ' <open>1</open>\n'
output += ' <description>'+description+'</description>\n'
output += ' <refreshVisibility>0</refreshVisibility>\n'
if setting_zoom_auto:
output += ' <flyToView>1</flyToView>\n'
output += ' <Link>\n'
output += ' <href>'+url+'</href>\n'
if settings_refresh_every:
output += ' <refreshInterval>'+str(settings_refresh_every)+'</refreshInterval>\n' # Refresh every n seconds
output += ' <refreshMode>onInterval</refreshMode>\n'
output += ' <viewRefreshTime>5</viewRefreshTime>\n' # Also refresh after viewscreen movement
output += ' <viewRefreshMode>onStop</viewRefreshMode>\n'
output += ' </Link>\n'
output += ' </NetworkLink>\n'
output += ' </Folder>\n'
output += '</kml>'
return HttpResponse(output, content_type="application/vnd.google-earth.kml+xml", status=200)
| ngageoint/geoq | geoq/core/kml_view.py | Python | mit | 12,540 |
try:
set
except NameError:
from sets import Set as set
from django.core.paginator import Paginator, Page, InvalidPage
from django.db.models import F
from django.http import Http404
from coffin import template
from jinja2 import nodes
from jinja2.ext import Extension
from jinja2.exceptions import TemplateSyntaxError
from tcc import settings
register = template.Library()
# Most of the code below is borrowed from the django_pagination module by James Tauber and Pinax Team,
# http://pinaxproject.com/docs/dev/external/pagination/index.html
class ParentCommentPaginator(Paginator):
def page(self, number):
"Returns a Page object for the given 1-based page number."
number = self.validate_number(number)
if self.count == 0:
return Page(self.object_list, number, self)
bottom = (number - 1) * self.per_page
bottomdate = self.parentcomments[bottom].sortdate
top = bottom + self.per_page
if top + self.orphans >= self.count:
object_list = self.object_list.filter(sortdate__lte=bottomdate)
else:
topdate = self.parentcomments[bottom+self.per_page-1].sortdate
object_list = self.object_list.filter(
sortdate__range=(topdate, bottomdate))
return Page(object_list, number, self)
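    # Reading of the query above (an inference, not documented upstream):
    # pages are sliced by the sortdate of top-level (parent) comments, and
    # the range filter then pulls in every comment -- replies included --
    # whose sortdate falls inside the page's parent-comment window.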
def _get_count(self):
"Returns the total number of objects, across all pages."
if self._count is None:
try:
self.parentcomments = self.object_list.filter(
parent__isnull=True)
self._count = self.parentcomments.count()
except (AttributeError, TypeError):
# AttributeError if object_list has no count() method.
# TypeError if object_list.count() requires arguments
# (i.e. is of type list).
self._count = len(self.object_list)
return self._count
count = property(_get_count)
class AutopaginateExtension(Extension):
"""
Applies pagination to the given dataset (and saves truncated
dataset to the context variable), sets context variable with
data enough to build html for paginator
General syntax:
{% autopaginate dataset [as ctx_variable] %}
if "as" part is omitted, trying to save truncated dataset back
to the original context variable. Pagination data is saved to
the NAME_pages context variable, where NAME is original name
of the dataset or ctx_variable
"""
tags = set(['autopaginate'])
default_kwargs = {
'per_page': settings.PER_PAGE,
'orphans': settings.PAGE_ORPHANS,
'window': settings.PAGE_WINDOW,
'hashtag': '',
'prefix': '',
}
def parse(self, parser):
lineno = parser.stream.next().lineno
object_list = parser.parse_expression()
if parser.stream.skip_if('name:as'):
name = parser.stream.expect('name').value
elif hasattr(object_list, 'name'):
name = object_list.name
else:
raise TemplateSyntaxError(
"Cannot determine the name of objects " \
"you want to paginate, use 'as foobar' syntax", lineno)
        kwargs = []  # collects nodes.Keyword arguments for the _render_pages call
loops = 0
while parser.stream.current.type != 'block_end':
lineno = parser.stream.current.lineno
if loops:
parser.stream.expect('comma')
key = parser.parse_assign_target().name
if key not in self.default_kwargs.keys():
raise TemplateSyntaxError(
"Unknown keyword argument for autopaginate. "\
"Your options are: %s" % (
", ".join(self.default_kwargs.keys())
))
parser.stream.expect('assign')
value = parser.parse_expression()
kwargs.append(nodes.Keyword(key, value))
loops += 1
return [
nodes.Assign(nodes.Name(name + '_pages', 'store'),
self.call_method('_render_pages',
[object_list, nodes.Name('request', 'load')],
kwargs)
).set_lineno(lineno),
nodes.Assign(nodes.Name(name, 'store'),
nodes.Getattr(nodes.Name(name + '_pages', 'load'),
'object_list',
nodes.Impossible())
).set_lineno(lineno),
]
def _render_pages(self, objs, request, **kwargs):
mykwargs = self.default_kwargs.copy()
mykwargs.update(kwargs)
prefix = mykwargs.pop('prefix')
window = mykwargs.pop('window')
hashtag = mykwargs.pop('hashtag')
try:
paginator = ParentCommentPaginator(objs, **mykwargs)
key = 'page'
if prefix:
key = prefix + key
try:
try:
pageno = int(request.GET[key])
except (KeyError, ValueError, TypeError):
pageno = 1
page_obj = paginator.page(pageno)
except InvalidPage:
raise Http404('Invalid page requested. If DEBUG were set to ' +
'False, an HTTP 404 page would have been shown instead.')
page_range = paginator.page_range
# Calculate the record range in the current page for display.
records = {'first': 1 + (page_obj.number - 1) * paginator.per_page}
records['last'] = records['first'] + paginator.per_page - 1
if records['last'] + paginator.orphans >= paginator.count:
records['last'] = paginator.count
# First and last are simply the first *n* pages and the last *n* pages,
# where *n* is the current window size.
first = set(page_range[:window])
last = set(page_range[-window:])
# Now we look around our current page, making sure that we don't wrap
# around.
current_start = page_obj.number-1-window
if current_start < 0:
current_start = 0
current_end = page_obj.number-1+window
if current_end < 0:
current_end = 0
current = set(page_range[current_start:current_end])
pages = []
# If there's no overlap between the first set of pages and the current
# set of pages, then there's a possible need for elusion.
if len(first.intersection(current)) == 0:
first_list = list(first)
first_list.sort()
second_list = list(current)
second_list.sort()
pages.extend(first_list)
diff = second_list[0] - first_list[-1]
# If there is a gap of two, between the last page of the first
# set and the first page of the current set, then we're missing a
# page.
if diff == 2:
pages.append(second_list[0] - 1)
# If the difference is just one, then there's nothing to be done,
# as the pages need no elusion and are correct.
elif diff == 1:
pass
# Otherwise, there's a bigger gap which needs to be signaled for
# elusion, by pushing a None value to the page list.
else:
pages.append(None)
pages.extend(second_list)
else:
unioned = list(first.union(current))
unioned.sort()
pages.extend(unioned)
# If there's no overlap between the current set of pages and the last
# set of pages, then there's a possible need for elusion.
if len(current.intersection(last)) == 0:
second_list = list(last)
second_list.sort()
diff = second_list[0] - pages[-1]
# If there is a gap of two, between the last page of the current
# set and the first page of the last set, then we're missing a
# page.
if diff == 2:
pages.append(second_list[0] - 1)
# If the difference is just one, then there's nothing to be done,
# as the pages need no elusion and are correct.
elif diff == 1:
pass
# Otherwise, there's a bigger gap which needs to be signaled for
# elusion, by pushing a None value to the page list.
else:
pages.append(None)
pages.extend(second_list)
else:
differenced = list(last.difference(current))
differenced.sort()
pages.extend(differenced)
to_return = {
'pages': pages,
'records': records,
'page_obj': page_obj,
'prefix': prefix,
'object_list': page_obj.object_list,
'paginator': paginator,
'hashtag': hashtag,
'is_paginated': paginator.count > (paginator.per_page + \
paginator.orphans),
}
getvars = request.GET.copy()
if key in getvars:
del getvars[key]
if len(getvars.keys()) > 0:
to_return['getvars'] = "&%s" % getvars.urlencode()
else:
to_return['getvars'] = ''
return to_return
        except (KeyError, AttributeError):
return {}
register.tag(AutopaginateExtension)
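# Example template usage (illustrative variable names). Per parse() above,
# the first keyword follows the "as" target directly and later keywords are
# comma-separated:
#
#   {% autopaginate comments as comments per_page=20, prefix='c' %}
#   {% for comment in comments %}...{% endfor %}
#   {# pagination data is then available in the comments_pages variable #}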
| pterk/django-tcc | tcc/templatetags/autopaginator.py | Python | mit | 9,909 |
"""
Maya UI for building RBF (radial basis function) pose-driver networks.
"""
from functools import partial
from maya import OpenMaya
from maya import OpenMayaUI
from maya import cmds
from PySide import QtCore
from PySide import QtGui
from shiboken import wrapInstance
from shiboken import getCppPointer
class RbfSettings(object):
"""
Class for storing rbf network creation options.
"""
def __init__(self):
self.connectMatrix = False
self.connectRgbValues = False
self.connectAlphaValues = False
self.useAttributeAlias = False
self.visualizeFalloff = False
class RbfManager(object):
"""
Pose driver mixing contribution of various elements in n spaces.
"""
def __init__(self):
self.pluginState = self.initPlugins()
def createNetwork(self, inputRbfSettings):
if self.pluginState is False:
return
def vizualizeSigma(self):
pass
def createSigmaShader(self):
pass
def initPlugins(self):
try:
            # This class does not use PyMEL's API encapsulation, so plain
            # maya.cmds calls are sufficient here.
cmds.loadPlugin('jsRadial.mll')
        except RuntimeError:  # cmds.loadPlugin raises RuntimeError on failure
cmds.error('ERROR: jsRadial.mll not loaded.')
class RbfOptionsWidget(QtGui.QWidget):
def __init__(self, parent=None):
super(RbfOptionsWidget, self).__init__(parent)
self.setupUI()
def setupUI(self):
#create widget
self.connectMatrixCheckBox = QtGui.QCheckBox('Connect Matrix')
self.connectRgbCheckBox = QtGui.QCheckBox('Connect RGB Values from Material')
self.connectAlphaCheckBox = QtGui.QCheckBox('Connect Alpha Values from Material')
self.useAliasCheckBox = QtGui.QCheckBox('Use Aliases for Targets on RBF Node')
sphereLabel = 'Create Spheres to Visualize Falloff (most accurate for Gaussian)'
self.createSphereCheckBox = QtGui.QCheckBox(sphereLabel)
#Create layout
self.mainLayout = QtGui.QVBoxLayout()
#Set properties
self.mainLayout.setContentsMargins(5, 5, 5, 5)
for widget in [self.connectMatrixCheckBox,
self.connectRgbCheckBox,
self.connectAlphaCheckBox,
self.useAliasCheckBox,
self.createSphereCheckBox]:
#Set properties
widget.setChecked(True)
#Assign widget to layouts
self.mainLayout.addWidget(widget)
#set the main layout for this UI part
self.setLayout(self.mainLayout)
class RbfListWidget(QtGui.QWidget):
def __init__(self, parent=None):
super(RbfListWidget, self).__init__(parent)
self.setupUI()
def setupUI(self):
#create widget
self.poseListWidget = QtGui.QListView()
self.targetListWidget = QtGui.QListView()
#Create layout
self.poselistLayout = QtGui.QVBoxLayout()
#Set properties
self.poseListWidget.setMaximumHeight(20)
self.poseListWidget.setMinimumWidth(190)
self.targetListWidget.setMinimumHeight(260)
self.poselistLayout.setContentsMargins(0, 0, 0, 0)
self.poselistLayout.setSpacing(14)
#Assign widget to layouts
self.poselistLayout.addWidget(self.poseListWidget)
self.poselistLayout.addWidget(self.targetListWidget)
#set the main layout for this UI part
self.setLayout(self.poselistLayout)
class RbfDataIoWidget(QtGui.QWidget):
def __init__(self, parent=None):
super(RbfDataIoWidget, self).__init__(parent)
self.setupUI()
def setupUI(self):
#create widget
self.anchorWidget = QtGui.QWidget()
self.addPoseButton = QtGui.QPushButton('Add Pose')
self.removePoseButton = QtGui.QPushButton('Remove Pose')
self.addTargetButton= QtGui.QPushButton('Add Target')
self.removeTargetButton = QtGui.QPushButton('Remove Target')
#Create layout
self.ioLayout = QtGui.QGridLayout()
self.mainLayout = QtGui.QVBoxLayout()
#Set properties
ioWidth = 78
self.ioLayout.setContentsMargins(0, 0, 0, 0)
self.ioLayout.setColumnMinimumWidth(0, ioWidth)
self.ioLayout.setColumnMinimumWidth(1, ioWidth)
self.ioLayout.setSpacing(10)
self.mainLayout.setContentsMargins(0, 0, 0, 0)
#Assign widget to layouts
self.ioLayout.addWidget(self.removePoseButton, 0 , 0)
self.ioLayout.addWidget(self.addPoseButton, 0 , 1)
self.ioLayout.addWidget(self.removeTargetButton, 1 , 0)
self.ioLayout.addWidget(self.addTargetButton, 1 , 1)
self.mainLayout.addWidget(self.anchorWidget)
self.mainLayout.addStretch()
#set the main layout for this UI part
self.anchorWidget.setLayout(self.ioLayout)
self.setLayout(self.mainLayout)
#Connect signals
self.addPoseButton.clicked.connect(self._addPose)
self.removePoseButton.clicked.connect(self._removePose)
self.addTargetButton.clicked.connect(self._addTargets)
self.removeTargetButton.clicked.connect(self._removeTargets)
def _addPose(self):
pass
def _addTargets(self):
pass
def _removeTargets(self):
pass
def _removePose(self):
pass
class RbfHeaderWidget(QtGui.QWidget):
def __init__(self, parent=None):
super(RbfHeaderWidget, self).__init__(parent)
self.setupUI()
def setupUI(self):
#create widget
self.headerLabel = QtGui.QLabel('RBF Network Builder')
self.creditLabel = QtGui.QLabel('by James Sumner III')
self.websiteLabel = QtGui.QLabel('www.jamessumneriii.com')
#Create layout
self.headerLayout = QtGui.QVBoxLayout()
#Set properties
self.headerLabel.setStyleSheet('font-size: 16pt' )
self.creditLabel.setStyleSheet('color: rgb(140,140,140)')
self.websiteLabel.setStyleSheet('color: rgb(140,140,140); link-decoration: none;')
#Assign widget to layouts
self.headerLayout.addWidget(self.headerLabel)
self.headerLayout.addWidget(self.creditLabel)
self.headerLayout.addWidget(self.websiteLabel)
#set the main layout for this UI part
self.setLayout(self.headerLayout)
class RbfManagerTool(QtGui.QDialog):
"""
General UI used to create and maintain pose drivers.
"""
def __init__(self, parent=None):
super(RbfManagerTool, self).__init__(parent=parent)
#Parent widget under Maya main window
self.setParent(parent)
self.setWindowFlags(QtCore.Qt.Window)
self.toolName = 'RBF Tool'
self.pose = []
self.targets = []
self.setupUI()
def setupUI(self):
#cmds.undoInfo(openChunk=True) will bundle a list of commands
#which will modify the Dag or the dg hence the separation in the
#API into 2 classes MDAGModifier / MDGModifier.
#not sure about its usefulness for UI?
#create widget
self.tabWidget = QtGui.QTabWidget()
self.headerWidget = RbfHeaderWidget()
self.createTab = self._buildCreateTab()
#Create layout
self.mainLayout = QtGui.QVBoxLayout()
#Set properties
self.setWindowTitle(self.toolName)
self.mainLayout.setContentsMargins(10, 10, 10, 10)
#Assign widget to layouts
self.tabWidget.addTab(self.createTab, 'Create')
#self.tabWidget.addTab(self.editTab, 'Edit')
self.mainLayout.addWidget(self.headerWidget)
self.mainLayout.addWidget(self.tabWidget)
self.setLayout(self.mainLayout)
def _buildCreateTab(self):
#create widget
self.createTabWidget = QtGui.QWidget()
self.createTabAnchor = QtGui.QWidget()
self.ioWidget = RbfDataIoWidget()
self.poseListWidget = RbfListWidget()
self.optionsWidget = RbfOptionsWidget()
#Create layout
self.createTabLayout = QtGui.QHBoxLayout()
self.createTabOptionLayout = QtGui.QVBoxLayout()
#Set properties
self.createTabLayout.setContentsMargins(5, 5, 5, 5)
self.createTabOptionLayout.setContentsMargins(0, 0, 0, 0)
#Assign widget to layouts
self.createTabOptionLayout.addWidget(self.createTabAnchor)
self.createTabOptionLayout.addWidget(self.optionsWidget)
self.createTabLayout.addWidget(self.ioWidget)
self.createTabLayout.addWidget(self.poseListWidget)
self.createTabWidget.setLayout(self.createTabOptionLayout)
self.createTabAnchor.setLayout(self.createTabLayout)
return self.createTabWidget
def DeleteWindowInstances(mayaMainWindow):
"""
Close tool by type.
"""
checkWidget = RbfManagerTool()
#Check if window exists
for child in mayaMainWindow.children():
if not isinstance(child, QtGui.QWidget):
continue
#delete previous UI instance (isinstance was giving weird result)
if child.__class__.__name__ == checkWidget.__class__.__name__:
child.deleteLater()
child.parent = None
checkWidget = None
def Run():
mayaMainWindowPtr = OpenMayaUI.MQtUtil.mainWindow()
mayaMainWindow = wrapInstance(long(mayaMainWindowPtr), QtGui.QWidget)
DeleteWindowInstances(mayaMainWindow)
tool = RbfManagerTool(parent=mayaMainWindow)
tool.show()
return tool
| cedricB/circeCharacterWorksTools | rbfTool.py | Python | mit | 9,899 |
# nvprof --print-gpu-trace python examples/stream/thrust.py
import cupy
x = cupy.array([1, 3, 2])
expected = cupy.sort(x)  # ndarray.sort() sorts in place and returns None
cupy.cuda.Device().synchronize()
stream = cupy.cuda.stream.Stream()
with stream:
    y = cupy.sort(x)
stream.synchronize()
cupy.testing.assert_array_equal(y, expected)
stream = cupy.cuda.stream.Stream()
stream.use()
y = cupy.sort(x)
stream.synchronize()
cupy.testing.assert_array_equal(y, expected)
| cupy/cupy | examples/stream/thrust.py | Python | mit | 412 |
# -*- coding: utf-8 -*-
__author__ = 'glow'
import requests
import json
server = "http://fhir.careevolution.com/apitest/fhir"
client = requests.Session()
response = client.get(server + "/Patient?_id=23&_format=application/json+fhir")
print(response)
print(response.json())
| glow-mdsol/fhirton | servers.py | Python | mit | 278 |
from __future__ import with_statement
import sys
from xml.sax import saxutils
from keyword import kwlist as PYTHON_KWORD_LIST
is_py2 = sys.version[0] == '2'
if is_py2:
from StringIO import StringIO
else:
from io import StringIO
__all__ = ['Builder', 'Element']
__license__ = 'BSD'
__version__ = '0.2.1'
__author__ = "Jonas Galvez <http://jonasgalvez.com.br/>"
__contributors__ = ["bbolli <http://github.com/bbolli/>",
"masklinn <http://github.com/masklinn/>"]
class Builder:
def __init__(self, encoding='utf-8', indent=' '*2, version=None):
self._document = StringIO()
self._encoding = encoding
self._indent = indent
self._indentation = 0
if version is not None:
self.write('<?xml version="%s" encoding="%s"?>\n' % (
version, encoding
))
def __getattr__(self, name):
return Element(name, self)
def __getitem__(self, name):
return Element(name, self)
def __str__(self):
if is_py2:
return self._document.getvalue().encode(self._encoding).strip()
else:
return self._document.getvalue()
def __unicode__(self):
if is_py2:
return self._document.getvalue().decode(self._encoding).strip()
else:
return self._document.getvalue()
def write(self, content):
"""Write raw content to the document"""
if is_py2 and type(content) is not unicode:
content = content.decode(self._encoding)
self._document.write('%s' % content)
def write_escaped(self, content):
"""Write escaped content to the document"""
self.write(saxutils.escape(content))
def write_indented(self, content):
"""Write indented content to the document"""
self.write('%s%s\n' % (self._indent * self._indentation, content))
builder = Builder # 0.1 backward compatibility
class Element:
PYTHON_KWORD_MAP = dict([(k + '_', k) for k in PYTHON_KWORD_LIST])
def __init__(self, name, builder):
self.name = self._nameprep(name)
self.builder = builder
self.attributes = {}
def __enter__(self):
"""Add a parent element to the document"""
self.builder.write_indented('<%s%s>' % (
self.name, self._serialized_attrs()
))
self.builder._indentation += 1
return self
def __exit__(self, type, value, tb):
"""Add close tag to current parent element"""
self.builder._indentation -= 1
self.builder.write_indented('</%s>' % self.name)
def __call__(*args, **kargs):
"""Add a child element to the document"""
self = args[0]
self.attributes.update(kargs)
if len(args) > 1:
value = args[1]
if value is None:
self.builder.write_indented('<%s%s />' % (
self.name, self._serialized_attrs()
))
else:
value = saxutils.escape(value)
self.builder.write_indented('<%s%s>%s</%s>' % (
self.name, self._serialized_attrs(), value, self.name
))
return self
def _serialized_attrs(self):
"""Serialize attributes for element insertion"""
serialized = []
for attr, value in self.attributes.items():
serialized.append(' %s=%s' % (
self._nameprep(attr), saxutils.quoteattr(value)
))
return ''.join(serialized)
def _nameprep(self, name):
"""Undo keyword and colon mangling"""
name = Element.PYTHON_KWORD_MAP.get(name, name)
return name.replace('__', ':')
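if __name__ == '__main__':
    # A minimal usage sketch of the builder API above (element names are
    # illustrative): attribute access creates an Element, calling it writes
    # a child node, and using it as a context manager nests children.
    xml = Builder(version='1.0')
    with xml.feed(xmlns='http://www.w3.org/2005/Atom'):
        xml.title('Example Feed')
        xml.entry(None, id='urn:uuid:1')
    print(xml)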
| PeteCrighton/Praesence | praesence/xmlwitch.py | Python | mit | 3,819 |
# This script demonstrates that font effects specified in your pdftex.map
# are now supported in pdf usetex.
import matplotlib
import matplotlib.pyplot as plt
matplotlib.rc('text', usetex=True)
def setfont(font):
return r'\font\a %s at 14pt\a ' % font
for y, font, text in zip(range(5),
['ptmr8r', 'ptmri8r', 'ptmro8r', 'ptmr8rn', 'ptmrr8re'],
['Nimbus Roman No9 L ' + x for x in
['', 'Italics (real italics for comparison)',
'(slanted)', '(condensed)', '(extended)']]):
plt.text(0, y, setfont(font) + text)
plt.ylim(-1, 5)
plt.xlim(-0.2, 0.6)
plt.setp(plt.gca(), frame_on=False, xticks=(), yticks=())
plt.title('Usetex font effects')
plt.savefig('usetex_fonteffects.pdf')
| bundgus/python-playground | matplotlib-playground/examples/pylab_examples/usetex_fonteffects.py | Python | mit | 789 |
from typing import Optional
import gdsfactory as gf
from gdsfactory.component import Component
from gdsfactory.components.bend_euler import bend_euler
from gdsfactory.components.coupler90 import coupler90 as coupler90function
from gdsfactory.components.coupler_straight import (
coupler_straight as coupler_straight_function,
)
from gdsfactory.cross_section import strip
from gdsfactory.snap import assert_on_2nm_grid
from gdsfactory.types import ComponentFactory, CrossSectionFactory
@gf.cell
def coupler_ring(
gap: float = 0.2,
radius: float = 5.0,
length_x: float = 4.0,
coupler90: ComponentFactory = coupler90function,
bend: Optional[ComponentFactory] = None,
coupler_straight: ComponentFactory = coupler_straight_function,
cross_section: CrossSectionFactory = strip,
bend_cross_section: Optional[CrossSectionFactory] = None,
**kwargs
) -> Component:
r"""Coupler for ring.
Args:
gap: spacing between parallel coupled straight waveguides.
radius: of the bends.
length_x: length of the parallel coupled straight waveguides.
coupler90: straight coupled to a 90deg bend.
        bend: factory for the 90 degree bend.
        coupler_straight: two parallel coupled straight waveguides.
        cross_section: cross_section factory.
        bend_cross_section: optional cross_section factory for the bends.
        kwargs: cross_section settings
.. code::
2 3
| |
\ /
\ /
---=========---
1 length_x 4
"""
bend = bend or bend_euler
c = Component()
assert_on_2nm_grid(gap)
# define subcells
coupler90_component = (
coupler90(
gap=gap,
radius=radius,
bend=bend,
cross_section=cross_section,
bend_cross_section=bend_cross_section,
**kwargs
)
if callable(coupler90)
else coupler90
)
coupler_straight_component = (
coupler_straight(
gap=gap, length=length_x, cross_section=cross_section, **kwargs
)
if callable(coupler_straight)
else coupler_straight
)
# add references to subcells
cbl = c << coupler90_component
cbr = c << coupler90_component
cs = c << coupler_straight_component
# connect references
y = coupler90_component.y
cs.connect(port="o4", destination=cbr.ports["o1"])
cbl.reflect(p1=(0, y), p2=(1, y))
cbl.connect(port="o2", destination=cs.ports["o2"])
c.absorb(cbl)
c.absorb(cbr)
c.absorb(cs)
c.add_port("o1", port=cbl.ports["o3"])
c.add_port("o2", port=cbl.ports["o4"])
c.add_port("o3", port=cbr.ports["o3"])
c.add_port("o4", port=cbr.ports["o4"])
c.auto_rename_ports()
return c
if __name__ == "__main__":
c = coupler_ring(width=1, layer=(2, 0))
c.show(show_subports=True)
| gdsfactory/gdsfactory | gdsfactory/components/coupler_ring.py | Python | mit | 2,846 |
import logging
import os
import sys
import tkinter
from tkinter import ttk
sys.path.append('../..')
import cv2
from src.image.imnp import ImageNP
from src.support.tkconvert import TkConverter
from src.view.template import TkViewer
from src.view.tkfonts import TkFonts
from src.view.tkframe import TkFrame, TkLabelFrame
from src.view.ttkstyle import TTKStyle, init_css
LOGGER = logging.getLogger(__name__)
THRESHOLD_OPTION = [('Manual', 'manual'), ('Mean Adaptive', 'mean'), ('Gaussian Adaptive', 'gaussian')]
class GraphCutViewer(TkViewer):
def __init__(self):
super().__init__()
self._im_w, self._im_h = 800, 533
self._init_window(zoom=False)
self._init_style()
self._init_frame()
self._init_menu()
def _init_style(self):
init_css()
theme = 'default'
if os.name == 'posix':
theme = 'alt'
TTKStyle('H4Padding.TLabelframe', theme=theme, background='gray82')
TTKStyle('H4Padding.TLabelframe.Label', theme=theme, font=('', 16), background='gray82')
TTKStyle('H2BlackBold.TLabel', theme=theme, font=('', 24, 'bold'), background='white', foreground='black')
TTKStyle('H2RedBold.TLabel', theme=theme, font=('', 24, 'bold'), background='white', foreground='red')
self.font = TkFonts()
# init frame
def _init_frame(self):
# root
self.frame_root = TkFrame(self.root, bg='white')
self.frame_root.grid(row=0, column=0, sticky='news')
self.set_all_grid_rowconfigure(self.frame_root, 0, 1, 2)
self.set_all_grid_columnconfigure(self.frame_root, 0)
# head
self.frame_head = TkFrame(self.frame_root, bg='white')
self.frame_head.grid(row=0, column=0, sticky='news')
self.set_all_grid_rowconfigure(self.frame_head, 0)
self.set_all_grid_columnconfigure(self.frame_head, 0)
# body
self.frame_body = TkFrame(self.frame_root, bg='black')
self.frame_body.grid(row=1, column=0, sticky='news')
self.set_all_grid_columnconfigure(self.frame_body, 0, 1)
self.set_all_grid_rowconfigure(self.frame_body, 0)
# body > panel
self.frame_panel = TkFrame(self.frame_body, bg='light pink')
self.frame_panel.grid(row=0, column=0, sticky='news')
self.set_all_grid_rowconfigure(self.frame_panel, 0)
self.set_all_grid_columnconfigure(self.frame_panel, 0)
# body > display
self.frame_display = TkFrame(self.frame_body, bg='royal blue')
self.frame_display.grid(row=0, column=1, sticky='news')
self.set_all_grid_rowconfigure(self.frame_display, 0)
self.set_all_grid_columnconfigure(self.frame_display, 0)
# footer
self.frame_footer = TkFrame(self.frame_root, bg='gray82')
self.frame_footer.grid(row=2, column=0, sticky='news')
self.set_all_grid_rowconfigure(self.frame_footer, 0, 1)
self.set_all_grid_columnconfigure(self.frame_footer, 0)
# footer > panel setting
        self.frame_panel_setting = ttk.LabelFrame(self.frame_footer, text=u'Input image options: ', style='H4Padding.TLabelframe')
self.frame_panel_setting.grid(row=0, column=0, sticky='news', pady=10)
self.set_all_grid_rowconfigure(self.frame_panel_setting, 0, 1)
self.set_all_grid_columnconfigure(self.frame_panel_setting, 0)
# footer > panel setting > template option
self.frame_template_options = TkFrame(self.frame_panel_setting, bg='gray82', pady=5)
self.frame_template_options.grid(row=0, column=0, sticky='news')
# footer > panel setting > gamma
self.frame_gamma = TkFrame(self.frame_panel_setting, bg='gray82', pady=5)
self.frame_gamma.grid(row=1, column=0, sticky='news')
self.set_all_grid_rowconfigure(self.frame_gamma, 0)
self.set_all_grid_columnconfigure(self.frame_gamma, 0)
# footer > display setting
        self.frame_display_setting = ttk.LabelFrame(self.frame_footer, text=u'Output image options: ', style='H4Padding.TLabelframe')
self.frame_display_setting.grid(row=1, column=0, sticky='news', pady=10)
self.set_all_grid_rowconfigure(self.frame_display_setting, 0)
self.set_all_grid_columnconfigure(self.frame_display_setting, 0)
# footer > display setting > threshold options
self.frame_threshold_options = TkFrame(self.frame_display_setting, bg='gray82', pady=5)
self.frame_threshold_options.grid(row=0, column=0, sticky='news')
# footer > display setting > manual threshold
self.frame_manual_threshold = TkFrame(self.frame_display_setting, bg='gray82', pady=5)
self.frame_manual_threshold.grid(row=1, column=0, sticky='news')
self.set_all_grid_rowconfigure(self.frame_manual_threshold, 0)
self.set_all_grid_columnconfigure(self.frame_manual_threshold, 0)
self._init_widget_head()
self._init_widget_body()
self._init_widget_footer()
# init head widget
def _init_widget_head(self):
self.set_all_grid_rowconfigure(self.frame_head, 0, 1)
        self.label_state = ttk.Label(self.frame_head, text=u'Current mode: N/A', style='H2.TLabel')
self.label_state.grid(row=0, column=0, sticky='w')
        self.label_resize = ttk.Label(self.frame_head, text=u'Original size N/A -> Display size N/A', style='H2.TLabel')
self.label_resize.grid(row=1, column=0, sticky='w')
# init body widget
def _init_widget_body(self):
# panel
self.set_all_grid_rowconfigure(self.frame_panel, 0, 1)
self.label_panel = ttk.Label(self.frame_panel, text='Input Panel', style='H2.TLabel')
self.label_panel.grid(row=0, column=0, sticky='ns')
self.photo_panel = ImageNP.generate_checkboard((self._im_h, self._im_w), block_size=10)
self.photo_panel = TkConverter.ndarray_to_photo(self.photo_panel)
self.label_panel_image = ttk.Label(self.frame_panel, image=self.photo_panel)
self.label_panel_image.grid(row=1, column=0, sticky='ns')
# display
self.label_display = ttk.Label(self.frame_display, text='Display', style='H2.TLabel')
self.label_display.grid(row=0, column=0, columnspan=3)
self.set_all_grid_rowconfigure(self.frame_display, 0, 1, 2)
self.set_all_grid_columnconfigure(self.frame_display, 0, 1, 2)
self.photo_small = ImageNP.generate_checkboard((self._im_h//2, self._im_w//3), 10)
self.photo_small = TkConverter.ndarray_to_photo(self.photo_small)
self.photo_large = ImageNP.generate_checkboard((self._im_h, self._im_w//3), 10)
self.photo_large = TkConverter.ndarray_to_photo(self.photo_large)
self.label_fl_image = ttk.Label(self.frame_display, image=self.photo_small)
self.label_fl_image.grid(row=1, column=0)
self.label_fr_image = ttk.Label(self.frame_display, image=self.photo_small)
self.label_fr_image.grid(row=1, column=1)
self.label_bl_image = ttk.Label(self.frame_display, image=self.photo_small)
self.label_bl_image.grid(row=2, column=0)
self.label_br_image = ttk.Label(self.frame_display, image=self.photo_small)
self.label_br_image.grid(row=2, column=1)
self.label_body_image = ttk.Label(self.frame_display, image=self.photo_large)
self.label_body_image.grid(row=1, column=2, rowspan=2)
# init footer widget
def _init_widget_footer(self):
# input panel template option
        self.label_template = ttk.Label(self.frame_template_options, text=u'Filter method: ', style='H5.TLabel')
self.label_template.grid(row=0, column=0, sticky='w')
self.val_checkbtn_floodfill = tkinter.StringVar()
self.checkbtn_floodfill = ttk.Checkbutton(
self.frame_template_options,
text=u'floodfill',
variable=self.val_checkbtn_floodfill,
onvalue='on', offvalue='off',
style='H5.TCheckbutton'
)
self.checkbtn_floodfill.grid(row=0, column=1, sticky='w')
# input panel gamma
        self.label_gamma = ttk.Label(self.frame_gamma, text=u'Adjust contrast ({:.2f}): '.format(1.), style='H5.TLabel')
self.label_gamma.grid(row=0, column=0, sticky='w')
self.val_scale_gamma = tkinter.DoubleVar()
self.val_scale_gamma.set(1.0)
self.scale_gamma = ttk.Scale(self.frame_gamma,
orient=tkinter.HORIZONTAL,
length=self._im_w*2,
from_=0, to=2.5,
variable=self.val_scale_gamma,
style='Gray.Horizontal.TScale')
self.scale_gamma.state(('active', '!disabled'))
self.scale_gamma.grid(row=0, column=1, sticky='w')
# display threshold option
        self.label_threshold_options = ttk.Label(self.frame_threshold_options, text=u'Threshold options: ', style='H5.TLabel')
# self.label_threshold_options.grid(row=0, column=0, sticky='w')
self.val_threshold_option = tkinter.StringVar()
self.val_threshold_option.set(THRESHOLD_OPTION[0][-1])
self.radiobtn_threshold_options = []
for i, op in enumerate(THRESHOLD_OPTION):
text, val = op
radiobtn = ttk.Radiobutton(self.frame_threshold_options,
text=text,
variable=self.val_threshold_option,
value=val,
style='H5.TRadiobutton')
# radiobtn.grid(row=0, column=i+1, sticky='w', padx=10)
self.radiobtn_threshold_options.append(radiobtn)
# display threshold manual scale
        self.label_manual_threshold = ttk.Label(self.frame_manual_threshold, text=u'Threshold ({:.2f}): '.format(250), style='H5.TLabel')
self.label_manual_threshold.grid(row=0, column=0, sticky='w')
self.val_manual_threshold = tkinter.DoubleVar()
self.val_manual_threshold.set(250)
self.scale_manual_threshold = ttk.Scale(self.frame_manual_threshold,
orient=tkinter.HORIZONTAL,
length=self._im_w*2,
from_=1, to=254,
variable=self.val_manual_threshold,
style='Gray.Horizontal.TScale')
self.scale_manual_threshold.state(('active', '!disabled'))
self.scale_manual_threshold.grid(row=0, column=1, sticky='news', columnspan=len(THRESHOLD_OPTION))
# init menu bar
def _init_menu(self):
# root
self.menu_root = tkinter.Menu(self.root)
self.root.config(menu=self.menu_root)
# load image
self.menu_load_img = tkinter.Menu(self.menu_root)
# show menu
self.menu_root.add_cascade(label=u'File', menu=self.menu_load_img)
if __name__ == '__main__':
logging.basicConfig(
level=logging.INFO,
format='%(asctime)s %(filename)12s:L%(lineno)3s [%(levelname)8s] %(message)s',
datefmt='%Y-%m-%d %H:%M:%S',
stream=sys.stdout
)
graphcut_viewer = GraphCutViewer()
graphcut_viewer.mainloop()
| afunTW/moth-graphcut | src/view/graphcut_app.py | Python | mit | 11,346 |
from utils.api_factory import ApiFactory
api = ApiFactory.get_instance()
if __name__ == '__main__':
api.run(host='0.0.0.0', port=8000)
| istinspring/imscrape-template | api.py | Python | mit | 143 |
import json, io, re, requests
from bs4 import BeautifulSoup
from datetime import datetime
def get_datasets(url):
r = requests.get(url.format(0))
    soup = BeautifulSoup(r.text, 'html.parser')
href = soup.select('#block-system-main a')[-1]['href']
last_page = int(re.match(r'.*page=(.*)', href).group(1))
for page in range(last_page + 1):
print( '[DEBUG] page:', page )
r = requests.get(url.format(page))
        soup = BeautifulSoup(r.text, 'html.parser')
for link in soup.select('h2 a'):
yield (link['href'], link.text)
def get_metadata(url):
r = requests.get(url)
    soup = BeautifulSoup(r.text, 'html.parser')
metadata = dict()
    metadata['_url'] = url  # url is already fully formatted by the caller
metadata['_collection_date'] = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
for elem in soup.select('.datasetview_container .datasetview_row'):
for field in elem.select('.field'):
label = field.select('.field-label')[0].text[:-2]
item_list = list()
item = field.select('.field-item')
if label == 'Website':
metadata[label] = item[0].select('a')[0]['href']
elif len(item) == 0:
items = elem.select('.tag_list a')
for i in items:
item_list.append(i.text.strip())
metadata[label] = item_list
else:
metadata[label] = item[0].text.strip()
tags = set()
for elem in soup.select('.tag_list a'):
tags.add(elem.text.strip())
metadata['tags'] = list(tags)
return metadata
if __name__ == '__main__':
base_url = 'http://daten.berlin.de{}'
datasets_url = 'http://daten.berlin.de/datensaetze?page={}'
documents_url = 'http://daten.berlin.de/dokumente?page={}'
all_labels = set()
all_metadata = list()
done_datasets = set()
# iterate over all dataset urls
for d, t in get_datasets(datasets_url):
if d in done_datasets:
print('skip', d)
continue # skip datasets
m = get_metadata(base_url.format(d))
m['_type'] = 'dataset'
m['_title'] = t
all_metadata.append(m)
for k in m.keys(): all_labels.add(k)
print(json.dumps(m, sort_keys=1, ensure_ascii=False))
done_datasets.add(d)
# iterate over all document urls
for d, t in get_datasets(documents_url):
if d in done_datasets:
print('skip', d)
continue # skip datasets
m = get_metadata(base_url.format(d))
m['_type'] = 'document'
m['_title'] = t
all_metadata.append(m)
for k in m.keys(): all_labels.add(k)
print(json.dumps(m, sort_keys=1, ensure_ascii=False))
done_datasets.add(d)
# write json file
with io.open('daten-berlin_metadata.json', 'w', encoding='utf8') as json_file:
json_file.write((json.dumps(all_metadata, indent=2, sort_keys=True, ensure_ascii=False)))
# write csv
with open('daten-berlin_metadata.csv', 'wb') as csv_file:
for l in sorted(all_labels):
csv_file.write((l + ';').encode('utf8'))
csv_file.write('\n'.encode('utf8'))
for m in all_metadata:
for l in sorted(all_labels):
if l in m:
csv_file.write(str(m[l]).encode('utf8'))
csv_file.write(';'.encode('utf8'))
            csv_file.write('\n'.encode('utf8'))
| nbi-opendata/metadaten-scraper | get-metadata.py | Python | mit | 3,412 |
# models.py
import os
import socket
import datetime
import random
import re
from django import forms
from django import urls
from django.db import models
from django.db.models.signals import pre_save
from .unique_slugify import unique_slugify
from .titlecase import titlecase
from functools import reduce
def time2s(time):
""" given 's.s' or 'h:m:s.s' returns s.s """
if time:
sec = reduce(lambda x, i: x*60 + i,
list(map(float, time.split(':'))))
else:
sec = 0.0
return sec
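# Quick sanity check of time2s (illustrative values):
#   time2s('90')        -> 90.0
#   time2s('1:30')      -> 90.0   (1*60 + 30)
#   time2s('1:01:05.5') -> 3665.5 (1*60*60 + 1*60 + 5.5)
#   time2s('')          -> 0.0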
class Client(models.Model):
sequence = models.IntegerField(default=1)
active = models.BooleanField(default=True,
help_text="Turn off to hide from UI.")
name = models.CharField(max_length=135)
slug = models.CharField(max_length=135, blank=True, null=False,
help_text="dir name to store input files", )
contacts = models.CharField(max_length=300, blank=True,
help_text='emails of people putting on the event.')
description = models.TextField(blank=True)
tags = models.TextField(null=True,blank=True,)
tweet_prefix = models.CharField(max_length=30, blank=True, null=True)
bucket_id = models.CharField(max_length=30, blank=True, null=True)
category_key = models.CharField(max_length=30, blank=True, null=True,
help_text = "Category for Richard")
# video encoding assets
template_mlt = models.CharField(max_length=60, null=True,
default="template.mlt",
help_text='template to make cutlist mlt from.')
title_svg = models.CharField(max_length=60, null=True,
default="title.svg",
help_text='template for event/title/authors title slide.')
preroll = models.CharField(max_length=335, blank=True,
help_text="name of video to prepend (not implemented)")
postroll = models.CharField(max_length=335, blank=True,
help_text="name of video to postpend (not implemented)")
credits = models.CharField(max_length=30, blank=True,
default="ndv-169.png",
help_text='added to end, store in assets dir')
# remote accounts to post to
host_user = models.CharField(max_length=30, blank=True, null=True,
help_text = "depricated - do not use.")
youtube_id = models.CharField(max_length=10, blank=True, null=True,
help_text = "key to lookup user/pw/etc from pw store" )
archive_id = models.CharField(max_length=10, blank=True, null=True)
vimeo_id = models.CharField(max_length=10, blank=True, null=True)
blip_id = models.CharField(max_length=10, blank=True, null=True)
rax_id = models.CharField(max_length=10, blank=True, null=True)
richard_id = models.CharField(max_length=10, blank=True, null=True)
email_id = models.CharField(max_length=10, blank=True, null=True)
tweet_id = models.CharField(max_length=10, blank=True, null=True)
def __str__(self):
return self.name
def get_absolute_url(self):
        return urls.reverse('client', args=[self.slug])
class Meta:
ordering = ["sequence"]
class Location(models.Model):
sequence = models.IntegerField(default=1)
active = models.BooleanField( default=True,
help_text="Turn off to hide from UI.")
default = models.BooleanField(default=True,
help_text="Adds this loc to new Clients.")
name = models.CharField(max_length=135,
help_text="room name")
slug = models.CharField(max_length=135, blank=True, null=False,
help_text="dir name to store input files")
dirname = models.CharField(max_length=135, blank=True,
help_text="path to raw files. overrieds show/slug.")
channelcopy = models.CharField(max_length=2, blank=True,
help_text='audio adjustment for this room')
hours_offset = models.IntegerField(blank=True, null=True,
help_text='Adjust for bad clock setting')
description = models.TextField(blank=True)
lon = models.FloatField(null=True, blank=True )
lat = models.FloatField(null=True, blank=True )
def natural_key(self):
return self.name
def __str__(self):
return "%s" % ( self.name )
class Meta:
ordering = ["name"]
ANN_STATES=((1,'preview'),(2,'review'),(3,'approved'))
class Show(models.Model):
client = models.ForeignKey(Client)
locations = models.ManyToManyField(Location,
limit_choices_to={'active': True},
blank=True)
sequence = models.IntegerField(default=1)
active = models.BooleanField( default=True,
help_text="Turn off to hide from UI.")
name = models.CharField(max_length=135)
slug = models.CharField(max_length=135, blank=True, null=False,
help_text="dir name to store input files")
category_key = models.CharField(max_length=30, blank=True, null=True,
help_text = "Category for Richard")
youtube_playlist_id = models.CharField(max_length=50, blank=True, null=True,
help_text = "Playlist ID for YouTube")
tags = models.TextField(null=True,blank=True,)
description = models.TextField(blank=True)
conf_url = models.CharField(max_length=200, null=True, blank=True)
schedule_url = models.CharField(max_length=235, null=True, blank=True)
announcement_state = models.IntegerField(null=True, blank=True,
choices=ANN_STATES, default=ANN_STATES[1][0], )
@property
def client_name(self):
return self.client
def __str__(self):
return "%s: %s" % ( self.client_name, self.name )
@models.permalink
def get_absolute_url(self):
return ('episode_list', [self.client.slug,self.slug,])
class Meta:
ordering = ["sequence"]
class Raw_File(models.Model):
location = models.ForeignKey(Location)
show = models.ForeignKey(Show)
filename = models.CharField(max_length=135,help_text="filename.dv")
filesize = models.BigIntegerField(default=1,help_text="size in bytes")
start = models.DateTimeField(null=True, blank=True,
help_text='when recorded (should agree with file name and timestamp)')
duration = models.CharField(max_length=11, blank=True, )
end = models.DateTimeField(null=True, blank=True)
trash = models.BooleanField(default=False,
help_text="This clip is trash")
ocrtext = models.TextField(null=True,blank=True)
comment = models.TextField(blank=True)
def __next__(self):
"""
gets the next clip in the room.
"""
rfs = Raw_File.objects.filter(location=self.location,
start__gt=self.start,
).order_by('start','id')
# id__gt=self.id).order_by('start','id')
if rfs:
rf=rfs[0]
else:
rf=None
return rf
def basename(self):
# strip the extension
# good for making 1-2-3/foo.png from 1-2-3/foo.dv
raise "homey don't play that no more."
return os.path.splitext(self.filename)[0]
def base_url(self):
""" Returns the url for the file, minus the MEDIA_URL and extension """
return "%s/%s/dv/%s/%s" % (self.show.client.slug,
self.show.slug,
self.location.slug,
self.filename)
@property
def get_adjusted_start(self):
return self.start + datetime.timedelta(
hours = 0 if self.location.hours_offset is None
else self.location.hours_offset )
@property
def get_adjusted_end(self):
return self.end + datetime.timedelta(
hours = 0 if self.location.hours_offset is None
else self.location.hours_offset )
def get_start_seconds(self):
return time2s( self.start )
def get_end_seconds(self):
return time2s( self.end )
def get_seconds(self):
# return durration in seconds (float)
delta = self.end - self.start
seconds = delta.days*24*60*60 + delta.seconds
return seconds
def get_minutes(self):
# return durration in minutes (float)
return self.get_seconds()/60.0
def __str__(self):
return self.filename
@models.permalink
def get_absolute_url(self):
return ('raw_file', [self.id,])
class Meta:
ordering = ["start", "location", "filename"]
class Mark(models.Model):
show = models.ForeignKey(Show)
location = models.ForeignKey(Location)
click = models.DateTimeField(
help_text='When Cut was Clicked.')
class Meta:
ordering = ["click"]
def __str__(self):
return self.click.isoformat()
class Quality(models.Model):
level = models.IntegerField()
name = models.CharField(max_length=35)
description = models.TextField(blank=True)
def __str__(self):
return self.name
STATES=[
(0, 'borked'),
(1, 'edit'), # enter cutlist data
(2, 'encode'), # assemble raw assets into final cut
(3, 'push to queue'), # push to data center box
    (4, 'post'), # push to youtube and archive.org
(5, 'richard'), # push urls and description to PyVideo.org
    (6, 'review 1'), # staff check to see if they exist on youtube/archive
(7, 'email'), # send private url to presenter, ask for feedback,
(8, 'review 2'), # wait for presenter to say good, or timeout
(9, 'make public'), # flip private to public
(10, 'tweet'), # tell world
    (11, 'to-mirror'),
(12, 'conf'),
(13, 'done')
]
def generate_edit_key():
""" Generate a random key """
return str(random.randint(10000000,99999999))
class Episode(models.Model):
show = models.ForeignKey(Show)
location = models.ForeignKey(Location, null=True)
active = models.BooleanField(default=True,
help_text="Turn off to hide from UI.")
state = models.IntegerField(null=True, blank=True,
choices=STATES, default=STATES[1][0],
help_text="" )
locked = models.DateTimeField(null=True, blank=True,
help_text="clear this to unlock")
locked_by = models.CharField(max_length=35, blank=True,
help_text="user/process that locked." )
sequence = models.IntegerField(null=True,blank=True,
help_text="process order")
start = models.DateTimeField(blank=True, null=False,
help_text="initially scheduled time from master, adjusted to match reality")
duration = models.CharField(max_length=15,null=True,blank=True,
help_text="length in hh:mm:ss")
end = models.DateTimeField(blank=True, null=False,
help_text="(calculated if start and duration are set.)")
name = models.CharField(max_length=170,
help_text="Video Title (shows in video search results)")
slug = models.CharField(max_length=135, blank=True, null=False,
help_text="file name friendly version of name")
priority = models.IntegerField(null=True,blank=True,
help_text="lower may not get recorded")
released = models.NullBooleanField(null=True,blank=True,
help_text="has someone authorised pubication")
conf_key = models.CharField(max_length=32, blank=True,
help_text='primary key of event in conference system database.')
conf_url = models.CharField(max_length=335,blank=True,default='',
help_text="Event's details on conference site (name,desc,time,author,files,etc)")
conf_meta = models.TextField(blank=True,default='', null=True,
help_text="Data provided by API")
authors = models.TextField(null=True,blank=True,)
emails = models.TextField(null=True,blank=True,
help_text="email(s) of the presenter(s)")
twitter_id = models.CharField(max_length=135, blank=True, null=True,
help_text="Data provided by API")
reviewers = models.TextField(blank=True,
help_text="email(s) of the reviewers(s)")
language = models.CharField(max_length=20, blank=True, null=True,
help_text="Spoken languge (German, English...)")
edit_key = models.CharField(max_length=32,
blank=True,
null=True,
default=generate_edit_key,
help_text="key to allow unauthenticated users to edit this item.")
summary = models.TextField(blank=True, help_text="short", null=True)
description = models.TextField(blank=True, help_text="markdown")
tags = models.CharField(max_length=175,null=True,blank=True,)
normalise = models.CharField(max_length=5,null=True,blank=True, )
channelcopy = models.CharField(max_length=2,null=True,blank=True,
help_text='m=mono, 01=copy left to right, 10=right to left, 00=ignore.' )
license = models.CharField(max_length=20, null=True,blank=True,
default='CC BY-SA',
help_text='see http://creativecommons.org/licenses/')
hidden = models.NullBooleanField(null=True,blank=True,
        help_text='hidden (does not show up on public episode list)')
thumbnail = models.CharField(max_length=135,blank=True,
help_text="filename.png" )
host_url = models.CharField(max_length=235, null=True,blank=True,
help_text = "URL of page video is hosted")
    public_url = models.CharField(max_length=335, null=True,blank=True,
        help_text = "URL the public should use (like pvo or some aggregator)")
    archive_ogv_url = models.CharField(max_length=355, null=True,blank=True,
        help_text = "URL the public can use to download an ogv (like archive.org)")
    archive_url = models.CharField(max_length=355, null=True,blank=True,
        help_text = "not sure.. deprecated?")
    archive_mp4_url = models.CharField(max_length=355, null=True,blank=True,
        help_text = "URL the public can use to download an mp4 (like archive.org)")
    rax_mp4_url = models.CharField(max_length=355, null=True,blank=True,
        help_text = "URL the public can use to get an mp4 (like the rackspace cdn)")
twitter_url = models.CharField(max_length=135, null=True,blank=True,
help_text = "URL of tweet to email presenters for retweeting")
video_quality = models.ForeignKey(Quality,null=True,blank=True,related_name='video_quality')
audio_quality = models.ForeignKey(Quality,null=True,blank=True,related_name='audio_quality')
comment = models.TextField(blank=True, help_text="production notes")
stop = models.NullBooleanField(
help_text="Stop process.py from processing anymore")
formfield_overrides = {
models.TextField: {
'widget': forms.Textarea({'cols': 30, 'rows': 2}),
}}
class Meta:
ordering = ["sequence"]
# unique_together = [("show", "slug")]
@models.permalink
def get_absolute_url(self):
return ('episode', [self.id])
def __str__(self):
return self.name
def cuts_time(self):
        # get total time in seconds of video based on selected cuts,
        # or None if there are no clips.
cuts = Cut_List.objects.filter(episode=self, apply=True)
if not cuts:
ret = None
else:
s=0
for cut in cuts:
                s += int(cut.duration())  # duration is in seconds :p
ret = s
return ret
def get_minutes(self):
ct = self.cuts_time()
if ct is None:
# if there are no cuts, use scheduled time
delta = self.end - self.start
minutes = delta.days*60*24 + delta.seconds/60.0
else:
# use amount of video time
minutes = self.cuts_time()/60
return int(minutes)
    def add_email(self, email):
        emails = self.emails.split(',') if self.emails else []
        if email not in emails:
            emails.append(email)
            self.emails = ','.join(emails)
            self.save()
def get_authors(self):
authors = self.authors.split(',') if self.authors else []
return authors
@property
def titlecase(self):
return titlecase(self.name)
@property
def location_slug(self):
location_slug=self.location.slug
print(location_slug)
return location_slug
def approve_url(self):
url = "https://veyepar.nextdayvideo.com/main/approve/{id}/{slug}/{edit_key}/".format(id=self.id, slug=self.slug, edit_key=self.edit_key)
return url
def composed_description(self):
# build a wad of text to use as public facing description
show = self.show
client = show.client
footer = "Produced by NDV: https://youtube.com/channel/UCQ7dFBzZGlBvtU2hCecsBBg?sub_confirmation=1"
        # (show tags separate the talk from the event text)
descriptions = [self.authors,
self.public_url,
self.conf_url,
self.description,
show.tags,
show.description, client.description,
footer,
client.tags,
"{} at {}".format(
self.start.strftime("%c"),
self.location.name),
]
# remove blanks
descriptions = [d for d in descriptions if d]
        # combine with blank lines between items
description = "\n\n".join(descriptions)
# remove extra blank lines
description = re.sub( r'\n{2,}', r'\n\n', description)
# description = "<br/>\n".join(description.split('\n'))
return description
class Cut_List(models.Model):
"""
    note: this should be Cut_list_ITEM
because it is not the whole list, just one entry.
"""
raw_file = models.ForeignKey(Raw_File)
episode = models.ForeignKey(Episode)
sequence = models.IntegerField(default=1)
start = models.CharField(max_length=11, blank=True,
help_text='offset from start in HH:MM:SS.ss')
end = models.CharField(max_length=11, blank=True,
help_text='offset from start in HH:MM:SS.ss')
    apply = models.BooleanField(default=True)
comment = models.TextField(blank=True)
def get_absolute_url(self):
        return urls.reverse('episode', args=[self.episode.id])
def __str__(self):
return "%s - %s" % (self.raw_file, self.episode.name)
class Meta:
ordering = ["sequence"]
def get_start_seconds(self):
return time2s( self.start )
def get_start_wall(self):
if self.start:
return self.raw_file.start + \
datetime.timedelta(seconds=self.get_start_seconds())
else:
return self.raw_file.start
def get_end_seconds(self):
return time2s( self.end )
def get_end_wall(self):
if self.end:
return self.raw_file.start + \
datetime.timedelta(seconds=self.get_end_seconds())
else:
return self.raw_file.end
def duration(self):
# calc size of clip in secconds
# may be size of raw, but take into account trimming start/end
def to_sec(time, default=0):
# convert h:m:s to s
if time:
sec = reduce(lambda x, i: x*60 + i,
list(map(float, time.split(':'))))
else:
sec=default
return sec
start = to_sec( self.start )
end = to_sec( self.end, to_sec(self.raw_file.duration))
dur = end-start
return dur
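    # Example (illustrative, not in the original source): the nested to_sec
    # helper converts "HH:MM:SS.ss" offsets to seconds, e.g.
    # to_sec("01:02:03") == 3723.0, so a clip cut from 00:00:10 to 00:01:10
    # of its raw file has duration() == 60.0.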
def duration_hms(self):
seconds = self.duration()
hms = seconds//3600, (seconds%3600)//60, seconds%60
duration = "%02d:%02d:%02d" % hms
return duration
def base_url(self):
""" Returns the url for the file, minus the MEDIA_URL and extension """
return self.raw_file.base_url()
class State(models.Model):
sequence = models.IntegerField(default=1)
slug = models.CharField(max_length=30)
description = models.CharField(max_length=135, blank=True)
class Meta:
ordering = ["sequence"]
def __str__(self):
return self.slug
class Image_File(models.Model):
show = models.ForeignKey(Show)
location = models.ForeignKey(Location, null=True)
episodes = models.ManyToManyField(Episode, blank=True)
filename = models.CharField(max_length=135, help_text="foo.png")
text = models.TextField(blank=True, help_text="OCRed text")
def get_absolute_url(self):
# https://docs.python.org/3/library/urllib.parse.html#urllib.parse.urlencode
url = "{}?{}={}".format(
urls.reverse( 'admin:main_episode_changelist'),
"image_file__id__exact",
self.id)
return url
class Log(models.Model):
episode = models.ForeignKey(Episode)
state = models.ForeignKey(State, null=True, blank=True)
ready = models.DateTimeField()
start = models.DateTimeField(null=True, blank=True)
end = models.DateTimeField(null=True, blank=True)
user = models.CharField(max_length=50)
result = models.CharField(max_length=250)
def duration(self):
if self.start and self.end:
dur = self.end - self.start
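            # rebuild the timedelta from days and seconds to drop microseconds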
dur = datetime.timedelta(dur.days,dur.seconds)
return dur
else:
return None
@models.permalink
def get_absolute_url(self):
return ('episode', [self.episode.id])
def set_slug(sender, instance, **kwargs):
    if not instance.slug:
# instance.slug = fnify(instance.name)
return unique_slugify(instance, instance.name)
def set_end(sender, instance, **kwargs):
if instance.start:
if instance.duration:
seconds = reduce(lambda x, i: x*60 + i,
list(map(float, instance.duration.split(':'))))
instance.end = instance.start + \
datetime.timedelta(seconds=seconds)
elif instance.end:
# calc duration based on End
d = instance.end - instance.start
seconds = d.total_seconds()
hms = seconds//3600, (seconds%3600)//60, seconds%60
instance.duration = "%02d:%02d:%02d" % hms
else:
instance.end = None
else:
instance.end = None
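# e.g. a duration of "01:30:00" reduces to (1*60 + 30)*60 + 0 = 5400 seconds,
# so set_end gives end = start + 1.5 hours.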
pre_save.connect(set_slug,sender=Location)
pre_save.connect(set_slug,sender=Episode)
pre_save.connect(set_end,sender=Episode)
pre_save.connect(set_end,sender=Raw_File)
| CarlFK/veyepar | dj/main/models.py | Python | mit | 22,252 |
# -*- coding: utf-8 -*-
"""
Created on Wed Nov 25 12:14:03 2015
@author: ktritz
"""
from __future__ import print_function
from builtins import str, range
import inspect
import types
#import numpy as np
from collections.abc import MutableMapping
from .container import containerClassFactory
class Shot(MutableMapping):
# _modules = None
_logbook = None
_machine = None
def __init__(self, shot, machine):
self.shot = shot
# set class attributes if needed
cls = self.__class__
if cls._machine is None:
cls._machine = machine
# if cls._modules is None:
# cls._modules = {module: None for module in self._machine._modules}
if cls._logbook is None:
cls._logbook = self._machine._logbook
self._logbook_entries = self._logbook.get_entries(shot=self.shot)
self._efits = []
self._modules = {module: None for module in self._machine._modules}
self.xp = self._get_xp()
self.date = self._get_date()
def _get_xp(self):
# query logbook for XP, return XP (list if needed)
xplist = []
for entry in self._logbook_entries:
if entry['xp']:
xplist.append(entry['xp'])
return list(set(xplist))
def _get_date(self):
# query logbook for rundate, return rundate
if self._logbook_entries:
return self._logbook_entries[0]['rundate']
else:
return
def __getattr__(self, attr_name):
if attr_name in self._modules:
if self._modules[attr_name] is None:
self._modules[attr_name] = containerClassFactory(attr_name,
root=self._machine,
shot=self.shot,
parent=self)
return self._modules[attr_name]
else:
try:
attr = getattr(self._machine, attr_name)
except AttributeError as e:
# print('{} is not attribute of {}'.format(attr_name, self._machine._name))
raise e
if inspect.ismethod(attr):
return types.MethodType(attr.__func__, self)
else:
return attr
def __repr__(self):
return '<Shot {}>'.format(self.shot)
def __str__(self):
return 'Shot {}'.format(self.shot)
def __iter__(self):
# return iter(self._modules.values())
return iter(self._modules)
def __contains__(self, key):
return key in self._modules
def __len__(self):
return len(list(self._modules.keys()))
def __delitem__(self, item):
pass
def __getitem__(self, item):
return self._modules[item]
def __setitem__(self, item, value):
pass
def __dir__(self):
return list(self._modules.keys())
def logbook(self):
# show logbook entries
if not self._logbook_entries:
self._logbook_entries = self._logbook.get_entries(shot=self.shot)
if self._logbook_entries:
print('Logbook entries for {}'.format(self.shot))
for entry in self._logbook_entries:
print('************************************')
print(('{shot} on {rundate} in XP {xp}\n'
'{username} in topic {topic}\n\n'
'{text}').format(**entry))
print('************************************')
else:
print('No logbook entries for {}'.format(self.shot))
def get_logbook(self):
# return a list of logbook entries
if not self._logbook_entries:
self._logbook_entries = self._logbook.get_entries(shot=self.shot)
return self._logbook_entries
def check_efit(self):
        if self._efits:
return self._efits
trees = ['efit{}'.format(str(index).zfill(2)) for index in range(1, 7)]
trees.extend(['lrdfit{}'.format(str(index).zfill(2))
for index in range(1, 13)])
if self.shot == 0:
return trees
tree_exists = []
for tree in trees:
data = None
connection = self._get_connection(self.shot, tree)
            try:
                data = connection.get('\\{}::userid'.format(tree)).value
            except Exception:
                # the tree does not exist (or is unreadable) for this shot
                pass
if data and data != '*':
tree_exists.append(tree)
self._efits = tree_exists
return self._efits
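# Usage sketch (illustrative; in normal use a Shot is created by the parent
# machine container rather than constructed directly):
#
# shot = Shot(140001, machine)   # hypothetical shot number and machine object
# print(shot.xp, shot.date)
# shot.logbook()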
| drsmith48/fdp | fdp/lib/shot.py | Python | mit | 4,629 |
# -*- coding: utf-8 -*-
import unittest
from pyparsing import ParseException
from cwr.grammar.field import basic
"""
Tests for Table/List Lookup (L) fields.
"""
__author__ = 'Bernardo Martínez Garrido'
__license__ = 'MIT'
__status__ = 'Development'
class TestLookupName(unittest.TestCase):
def test_name_default(self):
"""
Tests that the default field name is correct for optional fields.
"""
field = basic.lookup(['AB1', 'CD2', 'EF3'])
self.assertEqual('Lookup Field', field.name)
def test_name_set(self):
"""
Tests that the given field name is set correctly for optional fields.
"""
name = "Field Name"
field = basic.lookup(['AB1', 'CD2', 'EF3'], name=name)
self.assertEqual(name, field.name)
def test_name_set_no_changes(self):
"""
        Tests that the field name does not change when creating a new one.
"""
field1 = basic.lookup(['AB1', 'CD2', 'EF3'], name='field1')
field2 = basic.lookup(['AB1', 'CD2', 'EF3'], name='field2')
self.assertEqual('field1', field1.name)
self.assertEqual('field2', field2.name)
class TestLookupValid(unittest.TestCase):
"""
    Tests that the lookup field accepts and parses valid values.
"""
def setUp(self):
self.lookup = basic.lookup(['AB1', 'CD2', 'EF3'])
def test_valid(self):
"""
Tests that the field accepts a valid value
"""
result = self.lookup.parseString('CD2')
self.assertEqual('CD2', result[0])
class TestLookupExceptionCompulsory(unittest.TestCase):
def setUp(self):
self.lookup = basic.lookup(['AB1', 'CD2', 'EF3'])
def test_invalid(self):
"""
Tests that an exception is thrown when parsing an invalid value
"""
self.assertRaises(ParseException, self.lookup.parseString, 'AEI')
def test_empty(self):
"""
Tests that an exception is thrown when parsing an invalid value
"""
self.assertRaises(ParseException, self.lookup.parseString, '')
def test_whitespace(self):
"""
Tests that an exception is thrown when parsing an invalid value
"""
self.assertRaises(ParseException, self.lookup.parseString, ' ')
| weso/CWR-DataApi | tests/grammar/field/test_lookup.py | Python | mit | 2,301 |
import numpy as np
from numba import njit as jit
@jit
def _kepler_equation(E, M, ecc):
return E_to_M(E, ecc) - M
@jit
def _kepler_equation_prime(E, M, ecc):
return 1 - ecc * np.cos(E)
@jit
def _kepler_equation_hyper(F, M, ecc):
return F_to_M(F, ecc) - M
@jit
def _kepler_equation_prime_hyper(F, M, ecc):
return ecc * np.cosh(F) - 1
def newton_factory(func, fprime):
@jit
def jit_newton_wrapper(x0, args=(), tol=1.48e-08, maxiter=50):
p0 = float(x0)
for _ in range(maxiter):
fval = func(p0, *args)
fder = fprime(p0, *args)
newton_step = fval / fder
p = p0 - newton_step
if abs(p - p0) < tol:
return p
p0 = p
return np.nan
return jit_newton_wrapper
_newton_elliptic = newton_factory(_kepler_equation, _kepler_equation_prime)
_newton_hyperbolic = newton_factory(
_kepler_equation_hyper, _kepler_equation_prime_hyper
)
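# Illustrative usage note (not part of the original module): the two solvers
# above are plain callables, e.g. _newton_elliptic(E0, args=(M, ecc)) returns
# the E that solves E - ecc*sin(E) = M, or NaN if Newton's method does not
# converge within maxiter iterations.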
@jit
def D_to_nu(D):
r"""True anomaly from parabolic anomaly.
Parameters
----------
D : float
        Parabolic anomaly.
Returns
-------
nu : float
True anomaly.
Notes
-----
From [1]_:
.. math::
\nu = 2 \arctan{D}
"""
return 2.0 * np.arctan(D)
@jit
def nu_to_D(nu):
r"""Parabolic anomaly from true anomaly.
Parameters
----------
nu : float
True anomaly in radians.
Returns
-------
D : float
Parabolic anomaly.
Warnings
--------
The parabolic anomaly will be continuous in (-∞, ∞)
only if the true anomaly is in (-π, π].
No validation or wrapping is performed.
Notes
-----
The treatment of the parabolic case is heterogeneous in the literature,
and that includes the use of an equivalent quantity to the eccentric anomaly:
[1]_ calls it "parabolic eccentric anomaly" D,
[2]_ also uses the letter D but calls it just "parabolic anomaly",
[3]_ uses the letter B citing indirectly [4]_
(which however calls it "parabolic time argument"),
and [5]_ does not bother to define it.
We use this definition:
.. math::
B = \tan{\frac{\nu}{2}}
References
----------
.. [1] Farnocchia, Davide, Davide Bracali Cioci, and Andrea Milani.
"Robust resolution of Kepler’s equation in all eccentricity regimes."
.. [2] Bate, Muller, White.
.. [3] Vallado, David. "Fundamentals of Astrodynamics and Applications",
2013.
.. [4] IAU VIth General Assembly, 1938.
.. [5] Battin, Richard H. "An introduction to the Mathematics and Methods
of Astrodynamics, Revised Edition", 1999.
"""
# TODO: Rename to B
return np.tan(nu / 2.0)
@jit
def nu_to_E(nu, ecc):
r"""Eccentric anomaly from true anomaly.
.. versionadded:: 0.4.0
Parameters
----------
nu : float
True anomaly in radians.
ecc : float
Eccentricity.
Returns
-------
E : float
Eccentric anomaly, between -π and π radians.
Warnings
--------
The eccentric anomaly will be between -π and π radians,
no matter the value of the true anomaly.
Notes
-----
The implementation uses the half-angle formula from [3]_:
.. math::
E = 2 \arctan \left ( \sqrt{\frac{1 - e}{1 + e}} \tan{\frac{\nu}{2}} \right)
\in (-\pi, \pi]
"""
E = 2 * np.arctan(np.sqrt((1 - ecc) / (1 + ecc)) * np.tan(nu / 2))
return E
@jit
def nu_to_F(nu, ecc):
r"""Hyperbolic anomaly from true anomaly.
Parameters
----------
nu : float
True anomaly in radians.
ecc : float
Eccentricity (>1).
Returns
-------
F : float
Hyperbolic anomaly.
Warnings
--------
The hyperbolic anomaly will be continuous in (-∞, ∞)
only if the true anomaly is in (-π, π],
which should happen anyway
because the true anomaly is limited for hyperbolic orbits.
No validation or wrapping is performed.
Notes
-----
The implementation uses the half-angle formula from [3]_:
.. math::
F = 2 \operatorname{arctanh} \left( \sqrt{\frac{e-1}{e+1}} \tan{\frac{\nu}{2}} \right)
"""
F = 2 * np.arctanh(np.sqrt((ecc - 1) / (ecc + 1)) * np.tan(nu / 2))
return F
@jit
def E_to_nu(E, ecc):
r"""True anomaly from eccentric anomaly.
.. versionadded:: 0.4.0
Parameters
----------
E : float
Eccentric anomaly in radians.
ecc : float
Eccentricity.
Returns
-------
nu : float
True anomaly, between -π and π radians.
Warnings
--------
The true anomaly will be between -π and π radians,
no matter the value of the eccentric anomaly.
Notes
-----
The implementation uses the half-angle formula from [3]_:
.. math::
\nu = 2 \arctan \left( \sqrt{\frac{1 + e}{1 - e}} \tan{\frac{E}{2}} \right)
\in (-\pi, \pi]
"""
nu = 2 * np.arctan(np.sqrt((1 + ecc) / (1 - ecc)) * np.tan(E / 2))
return nu
@jit
def F_to_nu(F, ecc):
r"""True anomaly from hyperbolic anomaly.
Parameters
----------
F : float
Hyperbolic anomaly.
ecc : float
Eccentricity (>1).
Returns
-------
nu : float
True anomaly.
Notes
-----
The implementation uses the half-angle formula from [3]_:
.. math::
\nu = 2 \arctan \left( \sqrt{\frac{e + 1}{e - 1}} \tanh{\frac{F}{2}} \right)
\in (-\pi, \pi]
"""
nu = 2 * np.arctan(np.sqrt((ecc + 1) / (ecc - 1)) * np.tanh(F / 2))
return nu
@jit
def M_to_E(M, ecc):
"""Eccentric anomaly from mean anomaly.
.. versionadded:: 0.4.0
Parameters
----------
M : float
Mean anomaly in radians.
ecc : float
Eccentricity.
Returns
-------
E : float
Eccentric anomaly.
Notes
-----
This uses a Newton iteration on the Kepler equation.
"""
if -np.pi < M < 0 or np.pi < M:
E0 = M - ecc
else:
E0 = M + ecc
E = _newton_elliptic(E0, args=(M, ecc))
return E
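# Illustrative round-trip (not in the original source): M_to_E inverts E_to_M,
# e.g. E = M_to_E(0.5, 0.1) gives back E_to_M(E, 0.1) == 0.5 to within the
# solver tolerance.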
@jit
def M_to_F(M, ecc):
"""Hyperbolic anomaly from mean anomaly.
Parameters
----------
M : float
Mean anomaly in radians.
ecc : float
Eccentricity (>1).
Returns
-------
F : float
Hyperbolic anomaly.
Notes
-----
This uses a Newton iteration on the hyperbolic Kepler equation.
"""
F0 = np.arcsinh(M / ecc)
F = _newton_hyperbolic(F0, args=(M, ecc), maxiter=100)
return F
@jit
def M_to_D(M):
"""Parabolic anomaly from mean anomaly.
Parameters
----------
M : float
Mean anomaly in radians.
Returns
-------
D : float
Parabolic anomaly.
Notes
-----
This uses the analytical solution of Barker's equation from [5]_.
"""
B = 3.0 * M / 2.0
A = (B + (1.0 + B**2) ** 0.5) ** (2.0 / 3.0)
D = 2 * A * B / (1 + A + A**2)
return D
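# Illustrative check (not in the original source): because M_to_D solves
# Barker's equation analytically, D_to_M(M_to_D(M)) recovers M for any real M
# up to floating-point rounding.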
@jit
def E_to_M(E, ecc):
r"""Mean anomaly from eccentric anomaly.
.. versionadded:: 0.4.0
Parameters
----------
E : float
Eccentric anomaly in radians.
ecc : float
Eccentricity.
Returns
-------
M : float
Mean anomaly.
Warnings
--------
The mean anomaly will be outside of (-π, π]
if the eccentric anomaly is.
No validation or wrapping is performed.
Notes
-----
The implementation uses the plain original Kepler equation:
.. math::
M = E - e \sin{E}
"""
M = E - ecc * np.sin(E)
return M
@jit
def F_to_M(F, ecc):
r"""Mean anomaly from eccentric anomaly.
Parameters
----------
F : float
Hyperbolic anomaly.
ecc : float
Eccentricity (>1).
Returns
-------
M : float
Mean anomaly.
Notes
-----
As noted in [5]_, by manipulating
the parametric equations of the hyperbola
we can derive a quantity that is equivalent
to the eccentric anomaly in the elliptic case:
.. math::
M = e \sinh{F} - F
"""
M = ecc * np.sinh(F) - F
return M
@jit
def D_to_M(D):
r"""Mean anomaly from parabolic anomaly.
Parameters
----------
D : float
Parabolic anomaly.
Returns
-------
M : float
Mean anomaly.
Notes
-----
We use this definition:
.. math::
        M = D + \frac{D^3}{3}
Notice that M < ν until ν ~ 100 degrees,
then it reaches π when ν ~ 120 degrees,
and grows without bounds after that.
Therefore, it can hardly be called an "anomaly"
since it is by no means an angle.
"""
M = D + D**3 / 3
return M
@jit
def fp_angle(nu, ecc):
r"""Returns the flight path angle.
Parameters
----------
nu : float
True anomaly in radians.
ecc : float
Eccentricity.
Returns
-------
fp_angle: float
Flight path angle
Notes
-----
From [3]_, pp. 113:
.. math::
\phi = \arctan(\frac {e \sin{\nu}}{1 + e \cos{\nu}})
"""
return np.arctan2(ecc * np.sin(nu), 1 + ecc * np.cos(nu))
| poliastro/poliastro | src/poliastro/core/angles.py | Python | mit | 9,195 |
import numpy as np
def e_greedy(estimates, epsilon):
numBandits, numArms = estimates.shape
explore = np.zeros(numBandits)
explore[np.random.random(numBandits) <= epsilon] = 1
arm = np.argmax(estimates, axis=1)
arm[explore == 1] = np.random.randint(0, numArms, np.count_nonzero(explore))
return arm
def softmax(estimates, temperature):
    temp_est = estimates.T / temperature
    exponents = np.exp(temp_est - np.max(temp_est))
    dist = exponents / np.sum(exponents, axis=0)
    # inverse-CDF sampling: one uniform draw per bandit picks the first arm
    # whose cumulative probability exceeds it
    u = np.random.random((1, dist.shape[1]))
    return (u < dist.cumsum(axis=0)).argmax(axis=0)
def pref_softmax(preferences):
    pref = preferences.T
    exponents = np.exp(pref - np.max(pref))
    dist = exponents / np.sum(exponents, axis=0)
    u = np.random.random((1, dist.shape[1]))
    return (u < dist.cumsum(axis=0)).argmax(axis=0)
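# Minimal usage sketch (added for illustration; the shapes are assumptions):
# `estimates` holds one value estimate per arm for each of several bandits,
# and each policy returns one chosen arm index per bandit.
if __name__ == '__main__':
    estimates = np.random.random((5, 10))   # 5 bandits, 10 arms each
    print(e_greedy(estimates, 0.1))         # greedy, with a 10% chance of a random arm
    print(softmax(estimates, 0.5))          # sample from the Boltzmann distribution
    print(pref_softmax(estimates))          # treat the raw values as preferences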
| clarson469/reinforcementLearning | solutions/solution_util.py | Python | mit | 819 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright 2018 acrazing <[email protected]>. All rights reserved.
# @since 2018-12-03 00:03:40
import time
from dbapi.DoubanAPI import DoubanAPI
class GroupAPI:
def __init__(self):
self.api = DoubanAPI(flush=False)
self._applied = {}
self._users = {}
def run(self):
self.api.flush()
groups = self.api.group.list_joined_groups()['results']
for group in groups:
self._applied[group['alias']] = True
self.handle_user(self.api.user_alias)
def handle_user(self, user_alias):
self.join_user_groups(user_alias)
users = self.api.people.list_contacts()['results']
for user in users:
if self._users.get(user['alias'], None) is None:
self.handle_user(user['alias'])
self._users[user['alias']] = True
time.sleep(30)
else:
print('skip user: %s' % (user['alias']))
def join_user_groups(self, user_alias):
groups = self.api.group.list_joined_groups(user_alias)['results']
for group in groups:
if self._applied.get(group['alias'], None) is None:
self.api.group.join_group(group['alias'], 'Hello ~')
self._applied[group['alias']] = True
time.sleep(30)
else:
print('skip group: %s' % (group['alias']))
if __name__ == '__main__':
group = GroupAPI()
group.run()
| acrazing/dbapi | scripts/join_group.py | Python | mit | 1,512 |
# -*- mode: python; coding: utf-8 -*-
# Copyright 2016-2017 Peter Williams <[email protected]> and collaborators
# Licensed under the MIT License
"""Various helpers for X-ray analysis that rely on CIAO tools.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
__all__ = str('''
get_region_area
count_events
compute_bgband
simple_srcflux
''').split ()
def get_region_area (env, evtpath, region):
with env.slurp (argv=['dmlist', '%s[sky=%s]' % (evtpath, region), 'subspace'], linebreak=True) as s:
for etype, payload in s:
if etype != 'stdout':
continue
if b'Region area' not in payload:
continue
return float (payload.split ()[-1])
raise Exception ('parsing of dmlist output failed')
def count_events (env, evtpath, filter):
"""TODO: this can probably be replaced with simply reading the file
ourselves!
"""
with env.slurp (argv=['dmstat', '%s%s[cols energy]' % (evtpath, filter)], linebreak=True) as s:
for etype, payload in s:
if etype != 'stdout':
continue
if b'good:' not in payload:
continue
return int (payload.split ()[-1])
    raise Exception ('parsing of dmstat output failed')
def compute_bgband (evtpath, srcreg, bkgreg, ebins, env=None):
"""Compute background information for a source in one or more energy bands.
evtpath
Path to a CIAO events file
srcreg
String specifying the source region to consider; use 'region(path.reg)' if you
have the region saved in a file.
bkgreg
String specifying the background region to consider; same format as srcreg
ebins
Iterable of 2-tuples giving low and high bounds of the energy bins to
consider, measured in eV.
env
An optional CiaoEnvironment instance; default settings are used if unspecified.
Returns a DataFrame containing at least the following columns:
elo
The low bound of this energy bin, in eV.
ehi
The high bound of this energy bin, in eV.
ewidth
The width of the bin in eV; simply `abs(ehi - elo)`.
nsrc
The number of events within the specified source region and energy range.
nbkg
The number of events within the specified background region and energy range.
nbkg_scaled
The number of background events scaled to the source area; not an integer.
nsrc_subbed
The estimated number of non-background events in the source region; simply
`nsrc - nbkg_scaled`.
log_prob_bkg
The logarithm of the probability that all counts in the source region are due
to background events.
src_sigma
The confidence of source detection in sigma inferred from log_prob_bkg.
The probability of backgrounditude is computed as:
b^s * exp (-b) / s!
where `b` is `nbkg_scaled` and `s` is `nsrc`. The confidence of source detection is
computed as:
sqrt(2) * erfcinv (prob_bkg)
where `erfcinv` is the inverse complementary error function.
"""
import numpy as np
import pandas as pd
from scipy.special import erfcinv, gammaln
if env is None:
from . import CiaoEnvironment
env = CiaoEnvironment ()
srcarea = get_region_area (env, evtpath, srcreg)
bkgarea = get_region_area (env, evtpath, bkgreg)
srccounts = [count_events (env, evtpath, '[sky=%s][energy=%d:%d]' % (srcreg, elo, ehi))
for elo, ehi in ebins]
bkgcounts = [count_events (env, evtpath, '[sky=%s][energy=%d:%d]' % (bkgreg, elo, ehi))
for elo, ehi in ebins]
df = pd.DataFrame ({
'elo': [t[0] for t in ebins],
'ehi': [t[1] for t in ebins],
'nsrc': srccounts,
'nbkg': bkgcounts
})
df['ewidth'] = np.abs (df['ehi'] - df['elo'])
df['nbkg_scaled'] = df['nbkg'] * srcarea / bkgarea
df['log_prob_bkg'] = df['nsrc'] * np.log (df['nbkg_scaled']) - df['nbkg_scaled'] - gammaln (df['nsrc'] + 1)
df['src_sigma'] = np.sqrt (2) * erfcinv (np.exp (df['log_prob_bkg']))
df['nsrc_subbed'] = df['nsrc'] - df['nbkg_scaled']
return df
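# Usage sketch (hypothetical file and region names; requires a CIAO install):
#
#   df = compute_bgband('evt2.fits', 'region(src.reg)', 'region(bkg.reg)',
#                       [(500, 2000), (2000, 7000)])
#   print(df[['elo', 'ehi', 'nsrc_subbed', 'src_sigma']])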
def _rmtree_error (func, path, excinfo):
from ...cli import warn
warn ('couldn\'t delete temporary file %s: %s (%s)', path, excinfo[0], func)
def simple_srcflux(env, infile=None, psfmethod='arfcorr', conf=0.68,
verbose=0, **kwargs):
"""Run the CIAO "srcflux" script and retrieve its results.
*infile*
The input events file; must be specified. The computation is done
in a temporary directory, so this path — and all others passed in
as arguments — **must be made absolute**.
*psfmethod* = "arfcorr"
The PSF modeling method to be used; see the "srcflux" documentation.
*conf* = 0.68
The confidence limit to detect. We default to 1 sigma, instead of
the 90% mark, which is the srcflux default.
*verbose* = 0
The level of verbosity to be used by the tool.
*kwargs*
Remaining keyword arguments are passed to the tool as command-line
keyword arguments, with values stringified.
Returns:
A :class:`pandas.DataFrame` extracted from the results table generated
by the tool. There is one row for each source analyzed; in common usage,
this means that there will be one row.
"""
from ...io import Path
import shutil, signal, tempfile
if infile is None:
raise ValueError('must specify infile')
kwargs.update(dict(
infile = infile,
psfmethod = psfmethod,
conf = conf,
verbose = verbose,
clobber = 'yes',
outroot = 'sf',
))
argv = ['srcflux'] + ['%s=%s' % t for t in kwargs.items()]
argstr = ' '.join(argv)
tempdir = None
try:
tempdir = tempfile.mkdtemp(prefix='srcflux')
proc = env.launch(argv, cwd=tempdir, shell=False)
retcode = proc.wait()
if retcode > 0:
raise RuntimeError('command "%s" failed with exit code %d' % (argstr, retcode))
elif retcode == -signal.SIGINT:
raise KeyboardInterrupt()
elif retcode < 0:
raise RuntimeError('command "%s" killed by signal %d' % (argstr, -retcode))
tables = list(Path(tempdir).glob('*.flux'))
if len(tables) != 1:
raise RuntimeError('expected exactly one flux table from srcflux; got %d' % len(tables))
return tables[0].read_fits_bintable(hdu=1)
finally:
if tempdir is not None:
shutil.rmtree(tempdir, onerror=_rmtree_error)
| pkgw/pwkit | pwkit/environments/ciao/analysis.py | Python | mit | 6,635 |
from checkpy.assertlib.basic import *
| Jelleas/CheckPy | checkpy/assertlib/__init__.py | Python | mit | 38 |
#!/usr/bin/python
import json, math, sys, string, random, subprocess, serial
from time import localtime, strftime, clock, time # for timestamping packets
import time
import hashlib #for checksum purposes
import mysql.connector # mysql database
import getpass
import urllib2
import requests
sys.path.append('/usr/lib/python2.7/dist-packages')
################################################################################
### Auxiliary functions
################################################################################
### Returns the hex value of the xor of all characters in a string.
def xor_string_hash(string):
ret = 0
for char in string:
ret ^= ord(char)
return hex(ret)[2:] ### return everything but the first two characters, "0x"
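### e.g. xor_string_hash("ab") returns "3", since 0x61 ^ 0x62 == 0x03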
### Formats numbers with suffix e.g.: ord(1) -> "1st", ord(2) -> "2nd"
def ord(n):
return str(n)+("th" if 4<=n%100<=20 else {1:"st",2:"nd",3:"rd"}.get(n%10, "th"))
################################################################################
### 0. Logfile Setup
################################################################################
### TODO: may want to remove logging when debugging is complete, or allow it to be toggled
logfile_name = 'logs/UWNet-{0}.LOG'.format(strftime("20%y-%m-%d--%H:%M:%S", localtime()))
logfile = open(logfile_name, 'w')
################################################################################
### 1. Port configuration
################################################################################
### Setup the port to be read from ( /dev/ttyUSB0 ) with timeout to enable
### recovery from packet loss.
port_ttyUSB0 = serial.Serial(port='/dev/ttyUSB0', baudrate=115200)
port_ttyUSB1 = serial.Serial(port='/dev/ttyUSB1', baudrate=115200, timeout= 25)
### For each port, enter command mode (+++A) and enable checksum ($HHCRW,MMCHK,1),
### then check for success.
port_ttyUSB0.write("+++A\r\n")
if ("MMOKY" not in port_ttyUSB0.readline()):
print "error in here"
logfile.write('CRITICAL ERROR: cannot enter command mode for ttyUSB0 ... exiting')
exit(0) ### TODO: do something better upon failure, maybe try to fix!
### TODO: set MMCHK to 1 if want checksum
port_ttyUSB0.write("$HHCRW,MMCHK,0\r\n")
if ("MMOKY" not in port_ttyUSB0.readline()):
print "Something wrong with USB 0 "
logfile.write('error in setting the checksum register for ttyUSB0')
exit(0)
port_ttyUSB1.write("+++A\r\n")
if ("MMOKY" not in port_ttyUSB1.readline()):
print "Something wrong with USB1"
logfile.write('CRITICAL ERROR: cannot enter command mode for ttyUSB1 ... exiting')
exit(0)
port_ttyUSB1.write("$HHCRW,MMCHK,0\r\n")
if ("MMOKY" not in port_ttyUSB1.readline()):
print "Something wrong here, usb 1"
logfile.write('error in setting the checksum register for ttyUSB1')
exit(0)
################################################################################
### 2. Retrieve experiments
################################################################################
### Resources:
### http://dev.mysql.com/doc/refman/5.5/en/index.html
### https://docs.python.org/2/howto/webservers.html?highlight=mysql
### http://dev.mysql.com/doc/connector-python/en/connector-python-example-cursor-select.html
### Connect to the database.
cnx = mysql.connector.connect(user= 'ruolinfan', password='pass', host='localhost', database='UWNet')
### TODO: may need to change parameters for mysql.connector.connect() depending on
### which machine we are using.
### TODO: create a standard user for the database; include script in create.sql
### The getpass library can supply the machine's user name, but the password
### is still hard-coded here; TODO: find a way to avoid hard-wiring it.
cursor = cnx.cursor()
cursor_insert = cnx.cursor()
### Retrieve rows from InputQueue table for experiments which have not been run
###retrieve_experiments = ("SELECT id, mpwr, lpwr, ppwr, mbkn, lbkn, pbkn, mmod, lmod, pmod, rptt, testData FROM InputQueue WHERE exitStatus IS NULL")
###cursor.execute(retrieve_experiments)
### Store each row in a dictionary.
someurl1 = 'http://apus.cs.ucla.edu/getParams.php'
content = urllib2.urlopen(someurl1).read()
###print content
###print content;
parsed_json = json.loads(content)
if not parsed_json['experiments']:
print "No experiment to run!"
exit(0)
rows = parsed_json['experiments'][0]['row']
###selected_rows = {
###for (id, mpwr, lpwr, ppwr, mbkn, lbkn, pbkn, mmod, lmod, pmod, rptt, testData) in cursor:
selected_rows = { 'mpwr': int(rows['mpwr']), 'lpwr': int(rows['lpwr']), 'ppwr': int(rows['ppwr']), 'mbkn':int(rows['mbkn']), 'lbkn': int(rows['lbkn']), 'pbkn': int(rows['pbkn']), 'mmod': int(rows['mmod']), 'lmod':int(rows['lmod']), 'pmod': int(rows['pmod']), 'rptt': int(rows['rptt']), 'testData': rows['testData'] }
id = rows['id']
###print id
fileid = selected_rows['testData']
fileid = fileid[5:]
someurl2 = 'http://apus.cs.ucla.edu/getFile.php?filename='+fileid
content1 = urllib2.urlopen(someurl2).read()
###print content1
### Collect results from each trial in this dictionary for insertion into Results
### table. The keys correspond to the 'id' column in the InputQueue table.
allResults = {}
exit_code = 0
################################################################################
### 3. Run each experiment on each combination of { pwr, bkn, mod }, rptt times
################################################################################
### handle each enqueued experiment
### TODO: report errors, store in database
### code NULL: exited normally
### code NOT NULL: error
### - KE: kermit configuration
### - DB: database access
### - PT: port configuration
### - etc...
### TODO: Devise error code scheme, or decide that it is unnecessary...
###for id in selected_rows:
for x in range(0, 1):
row = selected_rows
logfile.write('===== STARTING EXPERIMENT {0} =====\n\n'.format(id))
print '===== STARTING EXPERIMENT {0} =====\n'.format(id)
### Each element of the following list will be its own row in Results.
### All elements in this list will have the same experimentID.
resultsList = []
text_file = open("Output.txt", "w")
firstline = rows['id'] + '\n'
text_file.write(firstline)
secondline = str(exit_code) + '\n'
text_file.write(secondline)
for transmission_mode in range(row['lmod'], row['mmod'] + 1, row['pmod']):
if transmission_mode == 1:
bytes_per_block = 38
elif transmission_mode == 2:
bytes_per_block = 80
elif transmission_mode == 3:
bytes_per_block = 122
elif transmission_mode == 4:
bytes_per_block = 164
elif transmission_mode == 5:
bytes_per_block = 248
else:
logfile.write('ERROR: Transmit mode of {0} invalid; ranges from 1 to 5 ... skipping\n'.format(transmission_mode))
exit_code = 1
continue
logfile.write('-> transmission_mode := {0}\n'.format(transmission_mode))
print '-> transmission_mode := {0}'.format(transmission_mode)
for blocks_per_packet in range(row['lbkn'], row['mbkn'] + 1, row['pbkn']):
logfile.write('-> blocks_per_packet := {0}\n'.format(blocks_per_packet))
print '-> blocks_per_packet := {0}'.format(blocks_per_packet)
packet_length = bytes_per_block * blocks_per_packet
for transmission_power in range(row['lpwr'], row['mpwr'] + 1, row['ppwr']):
logfile.write('-> transmission_power := {0}\n'.format(transmission_power))
print '-> transmission_power := {0}'.format(transmission_power)
port_ttyUSB0.write("$HHCRW,TXPWR,{0}\r\n".format(transmission_power))
logfile.write('-> TXPWR := transmission_power\n\n')
print '-> TXPWR := transmission_power\n'
### Collect data for each trial in a dictionary, keyed by trial number.
collectionOfTrials = {}
for trial in range(row['rptt']): ### repeat the experiment!
logfile.write('\tTrial {0}\n\n'.format(trial))
print '\tTrial {0}\n'.format(trial)
### Keep track of packet loss, retransmissions, and execution time.
n_loss = 0
n_retx = 0
start_time = time.time()
### Transmit file across network.
### Get file handle for the filepath indicated by testData
with open(str(row['testData']), 'r') as read_file:
packet_to_send = read_file.read(packet_length)
packet_counter = 0
# while there is still data to send
while '' != packet_to_send:
packet_to_send_hex = packet_to_send.encode("hex")
packet_counter += 1
print "PACKET COUNTER IS {0}".format(packet_counter)
                            logfile.write('\tSending packet {0} {1} ({2} bytes) ... '.format(packet_counter, ordinal(packet_counter), len(packet_to_send)))
                            print '\tSending packet {0} {1} ({2} bytes) ... '.format(packet_counter, ordinal(packet_counter), len(packet_to_send))
### TODO: enable toggling of send mode: either in command mode, or data mode
### Write hex-encoded data to the write port, /dev/ttyUSB0.
max_len = 2500
### TODO: see if we can use packet_to_send instead:
if len(packet_to_send_hex) <= max_len:
print "Length of the packet to send is "
print len(packet_to_send_hex)
port_ttyUSB0.write("$HHTXD,0,T0M{0},0,{1}\r\n".format(transmission_mode, packet_to_send_hex))
else:
offset = 0
bytes_left = len(packet_to_send_hex)
while (bytes_left > max_len):
port_ttyUSB0.write("$HHTXD,{0}\r\n".format(packet_to_send_hex[offset:offset + max_len]))
bytes_left -= max_len
offset += max_len
port_ttyUSB0.write("$HHTXD,0,T0M{0},0,{1}\r\n".format(transmission_mode, packet_to_send_hex[offset:]))
### Check if packet was transmitted, then
### extract the data segment from the $MMRXD command.
###print "LOUIS IS COOL AT LINE 262"
read_buffer = port_ttyUSB1.readline()
###print read_buffer
###print "LOUIS IS COOLI AT LINE 265"
if len(read_buffer) == 0: # TODO: replace with timeout check
n_loss += 1
logfile.write("packet lost\n")
print "\t\tpacket lost"
else:
### will hold the data extracted from the read buffer
read_data = ''
if "$MMRXA," in read_buffer:
### if receive data in ASCII format, no need to call string.decode("hex")
### 11 to shave off "$MMRXD,#,0,", -2 to account for \r\n
read_data = read_buffer[11:len(read_buffer)-2]
logfile.write("{0} bytes transferred successfully\n".format(len(read_data)))
print "\t\t{0} bytes transferred successfully".format(len(read_data))
### Uncomment the following code block to use checksums
### * Unnecessary at this point since the process running this script
### * still has access to the data via program variables!
'''
checksum_sent = xor_string_hash(packet_to_send)
checksum_received = xor_string_hash(read_data)
if checksum_sent == checksum_received:
print("Correct File Transmission")
else:
print("Checksum indicated incorrect transmission.")
'''
if (read_data != packet_to_send):
logfile.write("\t\tCorruption detected!\n")
print "\t\t*** Corruption detected!"
elif "$MMRXD," in read_buffer:
### if receive data in HEX format, call string.decode("hex")
### 11 to shave off "$MMRXD,#,0,", -2 to account for \r\n
read_data = read_buffer[11:len(read_buffer)-2].decode("hex")
logfile.write("{0} bytes transferred\n".format(len(read_data)))
print "\t\t{0} bytes transferred".format(len(read_data))
### Uncomment the following code block to use checksums
### * Unnecessary at this point since the process running this script
### * still has access to the data via program variables!
'''
checksum_sent = xor_string_hash(packet_to_send)
checksum_received = xor_string_hash(read_data)
if checksum_sent == checksum_received:
print("Correct File Transmission")
else:
print("Checksum indicated incorrect transmission.")
'''
if (read_data != packet_to_send):
logfile.write("\t\tCorruption detected!\n")
print "\t\t*** Corruption detected!"
else:
n_loss += 1
logfile.write("packet #{0} lost\n".format(packet_counter))
print "\t\tpacket #{0} lost".format(packet_counter)
packet_to_send = read_file.read(packet_length)
### Report execution time, and add it with the other results to the list.
file_transmission_time = time.time() - start_time
collectionOfTrials[trial] = { "delay": file_transmission_time, "loss": n_loss, "retx": n_retx }
logfile.write("\n\tdelay:\t{0} seconds\n\tloss:\t{1}\n\tretx:\t{2}\n".format(file_transmission_time, n_loss, n_retx))
print "\n\tdelay:\t{0} seconds\n\tloss:\t{1}\n\tretx:\t{2}".format(file_transmission_time, n_loss, n_retx)
### Insert row into DB
dataline = rows['id'] + ' | {"bkn":' + str(blocks_per_packet) + ', "pwr": ' + str(blocks_per_packet) + ', "mod": '+str(transmission_mode) + ' | {"0": {"delay": ' + str(file_transmission_time) + ', "loss": ' + str(n_loss) + ', "retx": '+ str(n_retx) + '}}\n'
text_file.write(dataline)
logfile.write('\n')
print '\n'
### Experiment done running ! ###
### TO-DO Send Result file to the server using POST method
### text_file.close()
### requests.post('http://apus.cs.ucla.edu/updateData.php', files={'Output.txt': open('Output.txt', 'rb')})
### Send exit status to the server
r = requests.post("http://apus.cs.ucla.edu/updateParams.php", data={'exid': rows['id'], 'exit_status': exit_code})
print(r.status_code, r.reason)
print "Experiment {0} done!".format(id)
logfile.write("Experiment {0} done!\n\n".format(id))
################################################################################
### 4. Cleanup
################################################################################
cursor.close()
cnx.close()
logfile.close()
exit(0)
| cjordog/NRLWebsite | demo/uw1.py | Python | mit | 14,884 |
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "anigma1.settings")
try:
from django.core.management import execute_from_command_line
except ImportError:
# The above import may fail for some other reason. Ensure that the
# issue is really that Django is missing to avoid masking other
# exceptions on Python 2.
try:
import django
except ImportError:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
)
raise
execute_from_command_line(sys.argv)
| aniruddhasanyal/idea_pot | manage.py | Python | mit | 805 |
from kivy.uix.floatlayout import FloatLayout
from kivy.properties import NumericProperty, ObjectProperty, BoundedNumericProperty, ListProperty
from .node import Node
from math import sqrt
class HexCanvas(FloatLayout):
last_node = ObjectProperty(None, allownone=True)
grid = ObjectProperty([])
row_count = BoundedNumericProperty(11, min=0, max=11)
column_count = BoundedNumericProperty(22, min=0, max=22)
vvhelix_id = NumericProperty(0)
scaffold_path = ListProperty([])
"""docstring for NanoCanvas"""
def __init__(self, **kwargs):
#super(HexCanvas, self).__init__(**kwargs)
super().__init__(**kwargs)
self.__construct()
def __construct(self):
x_start, y_start = 30, 30
a = 60
x_offset = a / 2
y_offset = a * sqrt(3) / 2
y = y_start
for j in range(self.row_count):
row = []
if j % 2 != 0:
offset = x_offset
else:
offset = 0
x = x_start + offset
for i in range(self.column_count):
node = Node(pos=(x, y), grid_id=(j, i))
row.append(node)
self.add_widget(node)
x += a
y += y_offset
self.grid.append(row)
def clean(self):
# TODO remove vhelixes and other stuff !!!
self.last_node = None
# for row in self.grid:
# for node in row:
# del node
self.grid = []
self.vvhelix_id = 0
self.scaffold_path = []
self.__construct()
| tls-dna/k-router | app_ui/hexcanvas.py | Python | mit | 1,599 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2011-2015 Slack
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import os, sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings.develop")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv) | Slack06/yadg | manage.py | Python | mit | 1,359 |
from mnist import MNIST
import numpy as np
from thirdparty import log_mvnpdf, log_mvnpdf_diag
data = MNIST('./data/mnist')
data.load_training()
data.load_testing()
train = np.array(data.train_images)/255.0
test = np.array(data.test_images)/255.0
dataset = {i: [] for i in range(10) }
for it, x in enumerate(data.train_labels):
dataset[x].append(train[it])
mu = []
cov = []
for k in dataset:
    mu.append(np.average(np.array(dataset[k]), axis=0))
cov.append(np.cov(np.array(dataset[k]).T))
es = log_mvnpdf(train, np.array(mu), np.array(cov))
results = {i: [] for i in range(10) }
for it,e in enumerate(es):
results[np.argmax(e)].append(data.train_labels[it])
print(results)
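# Illustrative follow-up (not in the original script): `results` maps each
# predicted digit to the list of true labels it received, so training-set
# accuracy is the fraction of entries whose true label equals the prediction.
correct = sum(1 for digit, labels in results.items() for y in labels if y == digit)
print(correct / float(len(train)))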
| m87/pyEM | lda.py | Python | mit | 696 |
from allauth.socialaccount.providers.oauth2.urls import default_urlpatterns
from provider import StackExchangeProvider
urlpatterns = default_urlpatterns(StackExchangeProvider)
| HackerEarth/django-allauth | allauth/socialaccount/providers/stackexchange/urls.py | Python | mit | 178 |
from __future__ import print_function
from __future__ import unicode_literals
import time
from netmiko.ssh_connection import BaseSSHConnection
from netmiko.netmiko_globals import MAX_BUFFER
from netmiko.ssh_exception import NetMikoTimeoutException, NetMikoAuthenticationException
import paramiko
import socket
class CiscoWlcSSH(BaseSSHConnection):
def establish_connection(self, sleep_time=3, verbose=True, timeout=8, use_keys=False):
'''
Establish SSH connection to the network device
Timeout will generate a NetmikoTimeoutException
Authentication failure will generate a NetmikoAuthenticationException
WLC presents with the following on login
login as: user
(Cisco Controller)
User: user
Password:****
Manually send username/password to work around this.
'''
# Create instance of SSHClient object
self.remote_conn_pre = paramiko.SSHClient()
# Automatically add untrusted hosts (make sure appropriate for your environment)
self.remote_conn_pre.set_missing_host_key_policy(paramiko.AutoAddPolicy())
# initiate SSH connection
if verbose:
print("SSH connection established to {0}:{1}".format(self.ip, self.port))
try:
self.remote_conn_pre.connect(hostname=self.ip, port=self.port,
username=self.username, password=self.password,
look_for_keys=use_keys, allow_agent=False,
timeout=timeout)
except socket.error as e:
msg = "Connection to device timed-out: {device_type} {ip}:{port}".format(
device_type=self.device_type, ip=self.ip, port=self.port)
raise NetMikoTimeoutException(msg)
except paramiko.ssh_exception.AuthenticationException as e:
msg = "Authentication failure: unable to connect {device_type} {ip}:{port}".format(
device_type=self.device_type, ip=self.ip, port=self.port)
msg += '\n' + str(e)
raise NetMikoAuthenticationException(msg)
# Use invoke_shell to establish an 'interactive session'
self.remote_conn = self.remote_conn_pre.invoke_shell()
# Handle WLCs extra
self.remote_conn.send(self.username + '\n')
time.sleep(.2)
self.remote_conn.send(self.password + '\n')
if verbose:
print("Interactive SSH session established")
# Strip the initial router prompt
time.sleep(sleep_time)
return self.remote_conn.recv(MAX_BUFFER)
def session_preparation(self):
'''
Prepare the session after the connection has been established
Cisco WLC uses "config paging disable" to disable paging
'''
self.disable_paging(command="config paging disable\n")
self.set_base_prompt()
def cleanup(self):
'''
Reset WLC back to normal paging
'''
self.send_command("config paging enable\n")
| enzzzy/netmiko | netmiko/cisco/cisco_wlc_ssh.py | Python | mit | 3,082 |
# coding:utf-8
# Testing multithreading
import threading
import time
from utils import fn_timer
from multiprocessing.dummy import Pool
import requests
from utils import urls
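# Note: fn_timer (imported from this project's utils module) is assumed to be
# a timing decorator that prints '[finished function:<name> in <t>s]'.
# A minimal sketch of such a decorator, for reference only:
#
# from functools import wraps
# def fn_timer(fn):
#     @wraps(fn)
#     def wrapper(*args, **kwargs):
#         start = time.time()
#         result = fn(*args, **kwargs)
#         print '[finished function:{0} in {1:.2f}s]'.format(fn.__name__, time.time() - start)
#         return result
#     return wrapper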
# Time-consuming task: listen to music
def music(name):
print 'I am listening to music {0}'.format(name)
time.sleep(1)
# Time-consuming task: watch a movie
def movie(name):
print 'I am watching movie {0}'.format(name)
time.sleep(5)
# Single-threaded version: listen to 10 songs, then watch 2 movies, in order
@fn_timer
def single_thread():
for i in range(10):
music(i)
for i in range(2):
movie(i)
# Multithreaded version: listen to 10 songs and watch 2 movies concurrently
@fn_timer
def multi_thread():
    # list of worker threads
threads = []
for i in range(10):
        # create a thread; target is the task function, args is the tuple of arguments it needs
threads.append(threading.Thread(target = music,args = (i,)))
for i in range(2):
threads.append(threading.Thread(target = movie,args = (i,)))
for t in threads:
        # mark as a daemon thread
        t.setDaemon(True)
        # start the thread
t.start()
for t in threads:
t.join()
# Thread-pool version: listen to 10 songs and watch 2 movies
@fn_timer
def use_pool():
    # set the pool size to 20; if omitted, it defaults to the number of CPU cores
pool = Pool(20)
pool.map(movie,range(2))
pool.map(music,range(10))
pool.close()
pool.join()
# Application: download several web pages using a single thread
@fn_timer
def download_using_single_thread(urls):
resps = []
for url in urls:
resp = requests.get(url)
resps.append(resp)
return resps
# Application: download several web pages using multiple threads
@fn_timer
def download_using_multi_thread(urls):
threads = []
for url in urls:
threads.append(threading.Thread(target = requests.get,args = (url,)))
for t in threads:
t.setDaemon(True)
t.start()
for t in threads:
t.join()
# Application: download several web pages using a thread pool
@fn_timer
def download_using_pool(urls):
pool = Pool(20)
    # first argument is the function to call; second is an iterable supplying one argument per call
resps = pool.map(requests.get,urls)
pool.close()
pool.join()
return resps
def main():
    # test the single-threaded version
    # single_thread()
    # Output:
'''
I am listening to music 0
I am listening to music 1
I am listening to music 2
I am listening to music 3
I am listening to music 4
I am listening to music 5
I am listening to music 6
I am listening to music 7
I am listening to music 8
I am listening to music 9
I am watching movie 0
I am watching movie 1
[finished function:single_thread in 20.14s]
'''
    # test the multithreaded version
    # multi_thread()
    # Output:
'''
I am listening to music 0
I am listening to music 1
I am listening to music 2
I am listening to music 3
I am listening to music 4
I am listening to music 5
I am listening to music 6
I am listening to music 7
I am listening to music 8
I am listening to music 9
I am watching movie 0
I am watching movie 1
[finished function:multi_thread in 5.02s]
'''
    # test the thread pool
    # use_pool()
    # Output:
'''
I am listening to music 0
I am listening to music 1
I am listening to music 2
I am listening to music 3
I am listening to music 4
I am listening to music 5
I am listening to music 6
I am listening to music 7
I am listening to music 8
I am listening to music 9
I am watching movie 0
I am watching movie 1
[finished function:use_pool in 6.12s]
'''
    # 1. using a single thread
# resps = download_using_single_thread(urls)
# print len(resps)
    # Output:
'''
[finished function:download_using_single_thread in 6.18s]
20
'''
    # 2. using multiple threads
# download_using_multi_thread(urls)
    # Output:
'''
[finished function:download_using_multi_thread in 0.73s]
'''
    # 3. using a thread pool
resps = download_using_pool(urls)
print len(resps)
    # Output:
'''
[finished function:download_using_pool in 0.84s]
20
'''
if __name__ == '__main__':
main()
| dnxbjyj/python-basic | concurrence/multi_threading.py | Python | mit | 4,446 |
import numpy as np
import matplotlib as mpl
mpl.use('Agg')
import matplotlib.pyplot as plot
import matplotlib.pylab
from matplotlib.backends.backend_pdf import PdfPages
import re
def drawPlots(data,plotObj,name,yLabel,position):
drawing = plotObj.add_subplot(position,1,position)
drawing.set_ylabel(yLabel, fontsize=16)
drawing.set_xlabel("Sample", fontsize=18)
drawing.plot(data[name], label = name)
drawing.legend(loc = 'upper center', bbox_to_anchor=(0.9, 1.128))
# drawing.legend(loc = 'upper center')
def drawXtremIOCharts():
xenvData = np.genfromtxt('xenvPerfStats.csv', dtype=float, delimiter=',', names=True)
xmsData = np.genfromtxt('xmsPerfStats.csv', dtype=float, delimiter=',', names=True)
plot.ioff()
iops = plot.figure(figsize=(20,15))
iops.suptitle("IOPs", fontsize=20)
iopsInit = len(iops.axes)
bw = plot.figure(figsize=(20,15))
bw.suptitle("Bandwidth MB/s", fontsize=20)
bwInit = len(bw.axes)
latency = plot.figure(figsize=(20,15))
latency.suptitle("Latency, MicroSec.", fontsize=20)
latencyInit = len(latency.axes)
xCpu = plot.figure(figsize=(20,15))
xCpu.suptitle("X-ENV Utilization", fontsize=20)
xCpuInit = len(xCpu.axes)
for name in xmsData.dtype.names:
if re.search('iops', name):
drawPlots(xmsData,iops,name,"IOPs",iopsInit+1)
if re.search('bandwidth', name):
drawPlots(xmsData,bw,name,"Bandwidth, MB/s", bwInit+1)
if re.search('latency', name):
drawPlots(xmsData,latency,name,"Latency, MicroSec", latencyInit+1)
for name in xenvData.dtype.names:
drawPlots(xenvData,xCpu,name,"% CPU Utilization", xCpuInit+1)
pdfDoc = PdfPages('XtremPerfcharts.pdf')
pdfDoc.savefig(iops)
pdfDoc.savefig(bw)
pdfDoc.savefig(latency)
pdfDoc.savefig(xCpu)
pdfDoc.close()
plot.close(iops)
plot.close(bw)
plot.close(latency)
plot.close(xCpu)
# plot.show()
def drawVolPerfCharts(vol):
volData = np.genfromtxt('%s.csv' % (vol), dtype=float, delimiter=',', names=True)
plot.ioff()
iops = plot.figure(figsize=(20,15))
iops.suptitle("IOPs", fontsize=20)
iopsInit = len(iops.axes)
bw = plot.figure(figsize=(20,15))
bw.suptitle("Bandwidth MB/s", fontsize=20)
bwInit = len(bw.axes)
latency = plot.figure(figsize=(20,15))
latency.suptitle("Latency, MicroSec.", fontsize=20)
latencyInit = len(latency.axes)
for name in volData.dtype.names:
if re.search('iops', name):
drawPlots(volData,iops,name,"IOPs",iopsInit+1)
if re.search('bandwidth', name):
drawPlots(volData,bw,name,"Bandwidth, MB/s", bwInit+1)
if re.search('latency', name):
drawPlots(volData,latency,name,"Latency, MicroSec", latencyInit+1)
pdfDoc = PdfPages('%s.pdf' %(vol))
pdfDoc.savefig(iops)
pdfDoc.savefig(bw)
pdfDoc.savefig(latency)
pdfDoc.close()
plot.close(iops)
plot.close(bw)
plot.close(latency)
def drawEsxCharts(hostname,storageHba):
pdfDoc = PdfPages('host_%s.pdf'%(hostname))
data = np.genfromtxt('%s.csv' %(hostname), dtype=float, delimiter=',', names=True)
# print data.dtype.names
cpu = plot.figure(figsize=(20,15))
cpu.suptitle("% CPU-Utilization", fontsize=20)
cpuInit = len(cpu.axes)
memory = plot.figure(figsize=(20,15))
memory.suptitle("% Memory Usage", fontsize=20)
memoryInit = len(memory.axes)
for name in data.dtype.names:
if re.match('CPU_Utilization', name):
plotName = '% CPU Util'
drawPlots(data,cpu,name,"% CPU Util",cpuInit+1)
if re.match('Memory_Usage', name):
plotName = '% Usage'
drawPlots(data,memory,name,"% Memory Usage", memoryInit+1)
for hba in storageHba:
hba_iops = plot.figure(figsize=(20,15))
hba_iops.suptitle("%s IOPs"%(hba), fontsize=20)
hbaIopsInit = len(hba_iops.axes)
hba_bw = plot.figure(figsize=(20,15))
hba_bw.suptitle("%s Bandwidth"%(hba), fontsize=20)
hbaBwInit = len(hba_bw.axes)
hba_latency = plot.figure(figsize=(20,15))
hba_latency.suptitle("%s Latency"%(hba), fontsize=20)
hbaLatencyInit = len(hba_latency.axes)
for name in data.dtype.names:
if re.search('Storage_adapter%s'%(hba), name) and re.search('requests_per_second', name):
plotName = '%s IOPs' %(hba)
drawPlots(data,hba_iops,name,"IOPs",hbaIopsInit+1)
if re.search('Storage_adapter%s'%(hba), name) and re.search(r'_rate_average', name):
plotName = 'Bandwidth Utilization'
drawPlots(data,hba_bw,name,"Bandwidth Utilization", hbaBwInit+1)
if re.search('Storage_adapter%s'%(hba), name) and re.search(r'_latency_average', name):
plotName = 'Latency'
drawPlots(data,hba_latency,name,"Latency (msec)", hbaLatencyInit+1)
pdfDoc.savefig(hba_latency)
pdfDoc.savefig(hba_iops)
pdfDoc.savefig(hba_bw)
pdfDoc.savefig(cpu)
pdfDoc.savefig(memory)
pdfDoc.close()
plot.close(hba_iops)
plot.close(hba_bw)
plot.close(hba_latency)
plot.close(cpu)
plot.close(memory)
# plot.show()
def main():
drawXtremIOCharts()
# data = np.genfromtxt('xtremPerfStats.csv', dtype=float, delimiter=',', names=True)
# print data.dtype.names
# iops = plot.figure()
# iopsInit = len(iops.axes)
# bw = plot.figure()
# bwInit = len(bw.axes)
# latency = plot.figure()
# latencyInit = len(latency.axes)
# xCpu = plot.figure()
# xCpuInit = len(xCpu.axes)
# for name in data.dtype.names:
# if re.search('iops', name):
# drawPlots(data,iops,name,"IOPs",iopsInit+1)
# if re.search('bandwidth', name):
# drawPlots(data,bw,name,"Bandwidth, MB/s", bwInit+1)
# if re.search('latency', name):
# drawPlots(data,latency,name,"Latency, MicroSec", latencyInit+1)
# if re.search('SC', name):
# drawPlots(data,xCpu,name,"% CPU Utilization", xCpuInit+1)
# plot.show()
if __name__ == '__main__':
main() | nachiketkarmarkar/XtremPerfProbe | generatePlots.py | Python | mit | 6,181 |
import argparse
import pandas as pd
from sklearn.linear_model import LogisticRegression
from sklearn.feature_selection import RFECV
from sklearn.ensemble import RandomForestClassifier
from beveridge.models import ModelStorage
import pickle
parser = argparse.ArgumentParser(description="Create model from CSV stats data.")
parser.add_argument('file')
parser.add_argument('outfile')
args = parser.parse_args()
#Create DataFrame in Pandas
data = pd.read_csv(args.file)
#Drop team
del data['team']
#Cleanse to numeric data
data = data.apply(lambda x: pd.to_numeric(x, errors='coerce'))
#Delete any completely empty columns
data = data.dropna(axis=1, how='all')
#Delete any rows with empty values
data = data.dropna(axis=0, how='any')
#Set up some columns
data['home'] = data['home'].astype('bool')
data['win'] = data['win'].astype('bool')
#Build relative columns
data['relRebounds'] = data['rebounds'] / data['oppRebounds']
data['relDisposals'] = data['disposals'] / data['oppDisposals']
data['relKicks'] = data['kicks'] / data['oppKicks']
data['relHandballs'] = data['handballs'] / data['oppHandballs']
data['relClearances'] = data['clearances'] / data['oppClearances']
data['relHitouts'] = data['hitouts'] / data['oppHitouts']
data['relMarks'] = data['marks'] / data['oppMarks']
data['relInside50s'] = data['inside50s'] / data['oppInside50s']
data['relTackles'] = data['tackles'] / data['oppTackles']
data['relClangers'] = data['clangers'] / data['oppClangers']
data['relFrees'] = data['frees'] / data['oppFrees']
data['relContested'] = data['contested'] / data['oppContested']
data['relUncontested'] = data['uncontested'] / data['oppUncontested']
data['relContestedMarks'] = data['contestedMarks'] / data['oppContestedMarks']
data['relMarksIn50'] = data['marksIn50'] / data['oppMarksIn50']
data['relOnePercenters'] = data['onePercenters'] / data['oppOnePercenters']
data['relBounces'] = data['bounces'] / data['oppBounces']
#Try building a logistic regression model
print("Building initial logistic regression model.")
model = LogisticRegression()
#Only use the relative columns. I've tested with the absolute values and they are much less useful than relative.
trainColumns = pd.Series(['relRebounds', 'relDisposals', 'relKicks', 'relHandballs', 'relClearances', 'relHitouts', 'relMarks', 'relInside50s', 'relTackles', 'relClangers', 'relFrees', 'relContested', 'relUncontested', 'relContestedMarks', 'relMarksIn50', 'relOnePercenters', 'relBounces', 'home'])
model.fit(data[trainColumns], data['win'])
print("Training data accuracy: {:%}".format(model.score(data[trainColumns], data['win'])))
#Recursive feature selection with cross-validation
print("Running feature selection.")
fs = RFECV(model)
fs.fit(data[trainColumns], data['win'])
print("Accuracy after feature selection: {:%}".format(fs.score(data[trainColumns], data['win'])))
filteredColumns = trainColumns[fs.support_]
#Ignoring filtered columns for the random forest. Seems to produce better results
#Create a random forest model
print("Building random forest")
rf = RandomForestClassifier(n_estimators=100, min_samples_split=0.02, class_weight='balanced')
rf.fit(data[trainColumns], data['win'])
print("Random forest accuracy: {:%}".format(rf.score(data[trainColumns], data['win'])))
#Save random forest model to given filename
with open(args.outfile, 'wb') as file:
storage = ModelStorage(trainColumns, rf)
pickle.dump(storage, file) | bairdj/beveridge | src/create_model.py | Python | mit | 3,408 |
import numpy as np
from scipy.io import netcdf_file
import bz2
import os
from fnmatch import fnmatch
from numba import jit
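# binsum2D: numba-jitted scatter-add -- accumulates each data[n] into the
# output bin at row j[n], column i[n].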
@jit
def binsum2D(data, i, j, Nx, Ny):
data_binned = np.zeros((Ny,Nx), dtype=data.dtype)
N = len(data)
for n in range(N):
data_binned[j[n],i[n]] += data[n]
return data_binned
class LatLonAggregator(object):
"""A class for aggregating L2 data into a gridded dataset."""
def __init__(self, dlon=1., dlat=1., lonlim=(-180,180), latlim=(-90,90)):
self.dlon = dlon
self.dlat = dlat
self.lonmin = lonlim[0]
self.lonmax = lonlim[1]
self.latmin = latlim[0]
self.latmax = latlim[1]
# define grids
self.lon = np.arange(self.lonmin, self.lonmax, dlon)
self.lat = np.arange(self.latmin, self.latmax, dlat)
self.Nx, self.Ny = len(self.lon), len(self.lat)
self.lonc = self.lon + self.dlon/2
self.latc = self.lat + self.dlat/2
def binsum(self, data, lon, lat):
"""Bin the data into the lat-lon grid.
Returns gridded dataset."""
i = np.digitize(lon.ravel(), self.lon)
j = np.digitize(lat.ravel(), self.lat)
return binsum2D(data.ravel(), i, j, self.Nx, self.Ny)
def zeros(self, dtype=np.dtype('f4')):
return np.zeros((self.Ny, self.Nx), dtype=dtype)
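# Usage sketch (illustrative; `data`, `lon`, `lat` stand for equally-sized
# arrays of L2 sample values and their coordinates):
#
#   agg = LatLonAggregator(dlon=1., dlat=1.)
#   grid = agg.zeros()
#   grid += agg.binsum(data, lon, lat)   # (Ny, Nx) array of per-cell sums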
| rabernat/satdatatools | satdatatools/aggregator.py | Python | mit | 1,349 |
#!/usr/bin/env python3
# Copyright (c) 2014-2016 The Stardust Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Test spending coinbase transactions.
# The coinbase transaction in block N can appear in block
# N+100... so is valid in the mempool when the best block
# height is N+99.
# This test makes sure coinbase spends that will be mature
# in the next block are accepted into the memory pool,
# but less mature coinbase spends are NOT.
#
from test_framework.test_framework import StardustTestFramework
from test_framework.util import *
# Create one-input, one-output, no-fee transaction:
class MempoolSpendCoinbaseTest(StardustTestFramework):
def __init__(self):
super().__init__()
self.num_nodes = 1
self.setup_clean_chain = False
def setup_network(self):
# Just need one node for this test
args = ["-checkmempool", "-debug=mempool"]
self.nodes = []
self.nodes.append(start_node(0, self.options.tmpdir, args))
self.is_network_split = False
def run_test(self):
chain_height = self.nodes[0].getblockcount()
assert_equal(chain_height, 200)
node0_address = self.nodes[0].getnewaddress()
# Coinbase at height chain_height-100+1 ok in mempool, should
        # get mined. Coinbase at height chain_height-100+2 is
        # too immature to spend.
b = [ self.nodes[0].getblockhash(n) for n in range(101, 103) ]
coinbase_txids = [ self.nodes[0].getblock(h)['tx'][0] for h in b ]
spends_raw = [ create_tx(self.nodes[0], txid, node0_address, 49.99) for txid in coinbase_txids ]
spend_101_id = self.nodes[0].sendrawtransaction(spends_raw[0])
# coinbase at height 102 should be too immature to spend
assert_raises(JSONRPCException, self.nodes[0].sendrawtransaction, spends_raw[1])
# mempool should have just spend_101:
assert_equal(self.nodes[0].getrawmempool(), [ spend_101_id ])
# mine a block, spend_101 should get confirmed
self.nodes[0].generate(1)
assert_equal(set(self.nodes[0].getrawmempool()), set())
# ... and now height 102 can be spent:
spend_102_id = self.nodes[0].sendrawtransaction(spends_raw[1])
assert_equal(self.nodes[0].getrawmempool(), [ spend_102_id ])
if __name__ == '__main__':
MempoolSpendCoinbaseTest().main()
| ctwiz/stardust | qa/rpc-tests/mempool_spendcoinbase.py | Python | mit | 2,474 |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-03-24 16:04
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('product', '0009_auto_20170323_1823'),
]
operations = [
migrations.CreateModel(
name='Sale',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('sale_date', models.DateTimeField()),
],
),
migrations.AddField(
model_name='product',
name='product_price',
field=models.FloatField(default=0.0),
preserve_default=False,
),
migrations.AddField(
model_name='product',
name='product_qtd',
field=models.IntegerField(default=1),
preserve_default=False,
),
migrations.AddField(
model_name='sale',
name='product',
field=models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='product.Product'),
),
migrations.AddField(
model_name='sale',
name='user',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
),
]
| rodrigocnascimento/django-teste | product/migrations/0010_auto_20170324_1304.py | Python | mit | 1,504 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
from elftools.elf.elffile import ELFFile
from elftools.common.exceptions import ELFError
from elftools.elf.segments import NoteSegment
class ReadELF(object):
def __init__(self, file):
self.elffile = ELFFile(file)
def get_build(self):
for segment in self.elffile.iter_segments():
if isinstance(segment, NoteSegment):
for note in segment.iter_notes():
print note
def main():
if(len(sys.argv) < 2):
print "Missing argument"
sys.exit(1)
with open(sys.argv[1], 'rb') as file:
try:
readelf = ReadELF(file)
readelf.get_build()
except ELFError as err:
sys.stderr.write('ELF error: %s\n' % err)
sys.exit(1)
if __name__ == '__main__':
main()
| somat/samber | elf.py | Python | mit | 861 |
from urllib.parse import urlparse
from django.conf import settings
from django.core.files.storage import default_storage
from django.http import JsonResponse
from django.views.decorators.csrf import csrf_exempt
from api.applications.models import ShowApplicationSettings
from api.shows.models import upload_to_show_cover, upload_to_show_banner
def verify_origin(request):
origin = request.META.get("HTTP_ORIGIN")
return origin in settings.CORS_ORIGIN_WHITELIST
@csrf_exempt
def upload_cover(request):
if not verify_origin(request):
return JsonResponse({'success': False, 'message': "Invalid origin"})
cfg = ShowApplicationSettings.get_solo()
if not cfg.applications_open:
return JsonResponse({'success': False, 'message': "Show applications not open."})
if request.method == 'POST':
file = request.FILES['file']
filename = upload_to_show_cover(None, file.name)
default_storage.save(filename, file)
return JsonResponse({'success': True, 'filename': filename})
@csrf_exempt
def upload_banner(request):
if not verify_origin(request):
return JsonResponse({'success': False, 'message': "Invalid origin"})
cfg = ShowApplicationSettings.get_solo()
if not cfg.applications_open:
return JsonResponse({'success': False, 'message': "Show applications not open."})
if request.method == 'POST':
file = request.FILES['file']
filename = upload_to_show_banner(None, file.name)
default_storage.save(filename, file)
return JsonResponse({'success': True, 'filename': filename})
| urfonline/api | api/applications/views.py | Python | mit | 1,609 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
##############################################
# The MIT License (MIT)
# Copyright (c) 2018 Kevin Walchko
# see LICENSE for full details
##############################################
from pygecko.multiprocessing import geckopy
from pygecko.multiprocessing import GeckoSimpleProcess
from pygecko.transport.protocols import MsgPack, MsgPackCustom
import time
from math import cos, pi
def pub(**kwargs):
geckopy.init_node(**kwargs)
rate = geckopy.Rate(2)
p = geckopy.pubBinderTCP("local", "bob")
    if p is None:
print("ERROR setting up publisher")
return
cnt = 0
while not geckopy.is_shutdown():
# msg = "hi" + str(cnt)
msg = [pi, cos(pi), cos(pi/2,)]
p.publish(msg)
print("sent")
rate.sleep()
cnt += 1
def sub(**kwargs):
geckopy.init_node(**kwargs)
rate = geckopy.Rate(2)
s = geckopy.subConnectTCP("local", "bob")
    if s is None:
print("ERROR setting up subscriber")
return
cnt = 0
while not geckopy.is_shutdown():
data = s.recv_nb()
print("sub:", data)
rate.sleep()
if __name__ == '__main__':
args = {}
p = GeckoSimpleProcess()
p.start(func=pub, name='pub', kwargs=args)
s = GeckoSimpleProcess()
s.start(func=sub, name='sub', kwargs=args)
| walchko/pygecko | dev/cpp-simple/subpub.py | Python | mit | 1,370 |
from shinymud.lib.world import World
import json
world = World.get_world()
def to_bool(val):
"""Take a string representation of true or false and convert it to a boolean
value. Returns a boolean value or None, if no corresponding boolean value
exists.
"""
bool_states = {'true': True, 'false': False, '0': False, '1': True}
if not val:
return None
if isinstance(val, bool):
return val
val = str(val)
val = val.strip().lower()
return bool_states.get(val)
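# Illustrative examples (hypothetical inputs):
#   to_bool('True') -> True;  to_bool(' 0 ') -> False;  to_bool('yes') -> None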
def read_dict(val):
# val is a string like "foo=bar,name=fred"
# return {'foo':'bar', 'name':'fred'}
return dict([thing.split('=') for thing in val.split(',')])
def write_dict(val):
return ",".join('='.join([str(k),str(v)]) for k,v in val.items())
def copy_dict(val):
return dict(val.items())
def read_list(val):
if isinstance(val, list):
return val
if not val:
return []
return val.split(',')
def write_list(val):
if not val:
return None
return ','.join(map(str,val))
def copy_list(val):
return val[:]
def read_area(val):
if isinstance(val, basestring):
return world.get_area(val)
return val
def write_area(val):
if isinstance(val, basestring):
return val
return val.name
def read_merchandise(val):
return [read_dict(each) for each in val.split('<>')]
def write_merchandise(val):
lst = []
for dicts in val:
if dicts.get('keywords'):
del dicts['keywords']
lst.append(write_dict(dicts))
return '<>'.join(lst)
def read_json(val):
return json.loads(val)
def write_json(val):
return json.dumps(val)
def write_model(val):
if isinstance(val, int):
return val
return val.dbid
def read_int_dict(val):
d = {}
if val:
for a in val.split(','):
key, val = a.split('=')
d[key] = int(val)
return d
def write_int_dict(val):
s = []
if val:
for key, val in val.items():
s.append("%s=%s" % (str(key), str(val)))
return ",".join(s)
def read_damage(val):
dmg = []
if val:
for d in val.split('|'):
dmg.append(Damage(d))
return dmg
def write_damage(val):
return '|'.join([str(d) for d in val])
def read_channels(val):
d = {}
for pair in val.split(','):
k,v = pair.split('=')
d[k] = to_bool(v)
return d
def read_location(val):
#loc == 'area,id'
loc = val.split(',')
return world.get_location(loc[0], loc[1])
def write_location(val):
if val:
return '%s,%s' % (val.area.name, val.id)
return None
def read_int(val):
try:
r = int(val)
except ValueError:
r = 0
return r
def read_float(val):
try:
r = float(val)
except ValueError:
r = 0.0
return r
| shinymud/ShinyMUD | src/shinymud/models/shiny_types.py | Python | mit | 2,852 |
GPIO_HUB_RST_N = 30
GPIO_UBLOX_RST_N = 32
GPIO_UBLOX_SAFEBOOT_N = 33
GPIO_UBLOX_PWR_EN = 34
GPIO_STM_RST_N = 124
GPIO_STM_BOOT0 = 134
def gpio_init(pin, output):
try:
with open(f"/sys/class/gpio/gpio{pin}/direction", 'wb') as f:
f.write(b"out" if output else b"in")
except Exception as e:
print(f"Failed to set gpio {pin} direction: {e}")
def gpio_set(pin, high):
try:
with open(f"/sys/class/gpio/gpio{pin}/value", 'wb') as f:
f.write(b"1" if high else b"0")
except Exception as e:
print(f"Failed to set gpio {pin} value: {e}")
| vntarasov/openpilot | common/gpio.py | Python | mit | 568 |
import gi
gi.require_version('Gtk', '3.0')
from gi.repository import Gtk, Gdk
import os
from subprocess import call, Popen, PIPE, STDOUT
class GtkPassWindow(Gtk.Window):
def __init__(self):
self.search_text = ''
self.search_result_text = ''
self.get_pass_path()
self.build_gui()
self.build_data_structures()
def get_pass_path(self):
self.pass_path = os.path.expanduser('~/.password-store')
def build_gui(self):
Gtk.Window.__init__(self, title='pass')
self.set_border_width(10)
self.set_default_size(300, -1)
self.text_view = Gtk.Entry()
self.text_view.set_editable(False)
self.text_view.set_can_focus(False)
self.text_entry = Gtk.Entry()
self.text_entry.connect('key-release-event', self.on_key_release)
self.text_entry.connect('activate', self.on_activate)
self.text_entry.set_icon_from_icon_name(Gtk.EntryIconPosition.PRIMARY,
'system-search-symbolic')
self.box = Gtk.Box(orientation=Gtk.Orientation.VERTICAL, spacing=6)
self.box.pack_start(self.text_view, True, True, 0)
self.box.pack_start(self.text_entry, True, True, 0)
self.add(self.box)
self.text_entry.grab_focus()
def build_data_structures(self):
self.pass_list = []
for root, dirs, files in os.walk(self.pass_path):
for file_ in files:
file_ = os.path.join(root, file_)
if os.path.splitext(file_)[1] == '.gpg':
pass_list_item = os.path.relpath(file_, self.pass_path)
pass_list_item = os.path.splitext(pass_list_item)[0]
self.pass_list.append(pass_list_item)
def fuzzy_find(self):
env = os.environ.copy()
fzf_bin = os.path.expanduser('~/.fzf/bin')
if fzf_bin not in env['PATH']:
            env['PATH'] = '{}:{}'.format(env['PATH'], fzf_bin)
p = Popen(['fzf', '-f', self.search_text], env=env,
stdin=PIPE, stdout=PIPE, stderr=STDOUT)
fzf_in = '\n'.join(self.pass_list).encode('utf-8')
return p.communicate(fzf_in)[0].decode().strip().split('\n')
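    # fuzzy_find shells out to `fzf -f <query>` (non-interactive filter mode),
    # feeding one password-store entry per line and returning the matching lines.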
def on_key_release(self, widget, event):
if event.keyval == Gdk.KEY_Escape:
Gtk.main_quit()
self.search_text = self.text_entry.get_text().strip()
if self.search_text == '':
self.search_result_text = None
else:
search_result = self.fuzzy_find()
if search_result == []:
self.search_result_text = None
else:
self.search_result_text = search_result[0]
if self.search_result_text:
self.text_view.set_text(self.search_result_text)
else:
self.text_view.set_text('')
def on_button_release(self, widget, event):
self.copy_to_clipboard()
def on_activate(self, event):
self.copy_to_clipboard()
def copy_to_clipboard(self):
if self.search_result_text:
p = call(['pass', '-c', self.search_result_text])
self.text_entry.set_icon_from_icon_name(
Gtk.EntryIconPosition.SECONDARY,
'edit-paste-symbolic')
def main():
win = GtkPassWindow()
win.connect('delete-event', Gtk.main_quit)
win.show_all()
Gtk.main()
if __name__ == '__main__':
main()
| raghavsub/gtkpass | gtkpass/main.py | Python | mit | 3,474 |
#!/usr/bin/env python3
import re
from operator import itemgetter
ref_file = open('../../../data/RNA-seq_miR-124_miR-155_transfected_HeLa/gene_exp_miR-155_overexpression_RefSeq_Rep_isoforms.diff','r')
input_file = open('../../../result/mirage_output_rev_seed_miR-155_vs_RefSeq_NM_2015-07-30.txt','r')
output_file = open('../../../result/mirage_output_rev_seed_miR-155_vs_RefSeq_NM_2015-07-30_miR-155_overexpression.result','w')
ref_dict = {}
header = ''
for line in ref_file:
line = line.rstrip()
data = line.split("\t")
if data[0] == 'gr_id':
header = line
continue
refid = data[2]
ref_dict[refid] = line
for line in input_file:
line = line.rstrip()
data = line.split("\t")
if data[0] == 'miRNA_name_id':
print(header,line, sep="\t",end="\n",file=output_file)
continue
refid = data[1]
if refid in ref_dict:
print(ref_dict[refid],line, sep="\t",end="\n",file=output_file)
ref_file.close()
input_file.close()
output_file.close()
| Naoto-Imamachi/MIRAGE | scripts/module/preparation/AA2_add_miRNA_infor_miR-155_rev_seed.py | Python | mit | 1,046 |
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Tue Jul 11 20:47:53 2017
@author: fernando
"""
import pandas as pd
import matplotlib
import matplotlib.pyplot as plt
plt.style.use('ggplot')
df = pd.read_csv("/home/fernando/CoursePythonDS/DAT210x/Module3/Datasets/wheat.data")
print df.describe()
df[df.groove>5].asymmetry.plot.hist(alpha=0.3, normed=True)
df[df.groove<=5].asymmetry.plot.hist(alpha=0.5, normed=True)
plt.show() | FernanOrtega/DAT210x | Module3/notes/histogram_example.py | Python | mit | 446 |
import numpy as np
from tfs.core.util import run_once_for_each_obj
from tfs.core.initializer import DefaultInit
from tfs.core.loss import DefaultLoss
from tfs.core.regularizers import DefaultRegularizer
from tfs.core.monitor import DefaultMonitor
from tfs.core.optimizer import DefaultOptimizer
from tfs.core.layer import func_table,Layer
from tfs.core.elem import Component
from tfs.core.layer import ops
import pickle
import tensorflow as tf
from tensorflow.python.util.deprecation import deprecated
from tensorflow.python.client import device_lib
from sklearn import metrics
# for supporting multi-gpu:
# https://github.com/tensorflow/tensorflow/blob/r0.7/tensorflow/models/image/cifar10/cifar10_multi_gpu_train.py#L174
#
# we use shared variables on CPU and model distributed on each GPU
from tfs.network.net_struct import NetStructure
#################### Network
# decorators
def with_graph(f):
def with_graph_run(self,*args,**kwargs):
with self.graph.as_default():
return f(self,*args,**kwargs)
# this is important to make the decorator compatiable with run_once_each_obj.
with_graph_run.__name__=f.__name__
return with_graph_run
class Network(object):
__hash__=object.__hash__
def __init__(self):
self._init_graph_sess()
self._struct = NetStructure(self)
self._true_out=None
self._in = None
self._out = None
self._loss=None
self.variables = {}
self.initializer = DefaultInit(self)
self.losser = DefaultLoss(self)
self.regularizer =DefaultRegularizer(self)
self.monitor = {}
self.monitor['default']=DefaultMonitor(self)
self._optimizer = DefaultOptimizer(self)
# this must be set when define a network
self.loss_input_layer_name = None
self._regulization=None
self.grads = None
self._train_op = None
self.num_gpu = 0
self.i_step = 0
self.n_epoch = 0
self._dtype = None
def to_pickle(self):
return [
self.in_shape,
self.loss_input_layer_name,
self.optimizer.to_pickle(),
self.losser.to_pickle(),
self.regularizer.to_pickle()
]
def restore(self,objs):
inshape = objs[0]
self.loss_input_layer_name = objs[1]
self.optimizer = Component.restore(objs[2],self)
self.losser = Component.restore(objs[3],self)
self.regularizer = Component.restore(objs[4],self)
if inshape:
self.build(inshape)
def _init_graph_sess(self):
self._graph = tf.Graph()
with self.graph.as_default():
self._sess = tf.Session()
@property
def optimizer(self):
return self._optimizer
@optimizer.setter
def optimizer(self,opt):
self.grads=None
self._optimizer=opt
def add_monitor(self,name,monitor):
self.monitor[name] = monitor
@staticmethod
def available_devices():
local_device_protos = device_lib.list_local_devices()
return [x for x in local_device_protos]
def __len__(self):
return len(self.net_def)
@property
@deprecated("2017-05-01", "Use `net_def` instead.")
def layers(self):
return self._struct
@property
def nodes(self):
return self._struct
@property
def net_def(self):
return self._struct
def node_to_index(self,l):
return self.net_def.find_index(l)
def node_by_index(self,idx):
return self.net_def[idx]
@deprecated("2017-05-01", "Use `node_by_name` instead.")
def layer_by_name(self,name):
return self.net_def.by_name(name)
def node_by_name(self,name):
return self.net_def.by_name(name)
def __del__(self):
self.sess.close()
def setup(self):
'''Construct the network. '''
raise NotImplementedError('Must be implemented by the subclass.')
def setup_with_def(self,struct_def,in_shape=None):
if isinstance(struct_def,list):
struct_def = NetStructure(self,nodes=struct_def)
self._struct = struct_def.copy_to(self)
if in_shape:
self.build(in_shape)
@property
def graph(self):
return self._graph
@property
def input(self):
return self._in
@property
def output(self):
return self._out
@property
def true_output(self):
return self._true_out
@property
def sess(self):
return self._sess
def _init_in_out_size(self):
if self.num_gpu and self._in is None and self._out is None:
self._in = [None]*self.num_gpu
self._out = [None]*self.num_gpu
self._true_out = [None]*self.num_gpu
self._loss = [None]*self.num_gpu
def tf_graph_str(self):
info=[]
for n in self.graph.as_graph_def().node:
s = '%-20s@%20s'%(n.name,n.device)
if hasattr(n,'tfs_nodename'):
s=s+' --%s'%n.tfs_nodename
info.append(s)
return '\n'.join(info)
@with_graph
@run_once_for_each_obj
  def build(self,input_shape,dtype=tf.float32):
    """Build the computational graph.
    input_shape: the shape of the network input tensor.
    """
    self._dtype = dtype
if not self.num_gpu:
self._build(input_shape,dtype)
    else:
      for i in range(self.num_gpu):
        with tf.device('/gpu:%d' % i):
          with tf.name_scope('%s_%d' % ('GPU', i)) as scope:
            self._build(input_shape,dtype,i)
            tf.get_variable_scope().reuse_variables()
      # per-tower losses and gradients are computed in compute_gradients() below
self.build_variables_table()
self._initialize()
self.compute_gradients()
return self.output
def compute_gradients(self):
if self.loss is None:
return
if not self.num_gpu:
self.grads = self.optimizer.compute_gradients(self.loss,self.variables)
else:
tower_grads = []
for i in range(self.num_gpu):
with tf.device('/gpu:%d' % i):
with tf.name_scope('%s_%d' % ('GPU', i)) as scope:
tf.get_variable_scope().reuse_variables()
_loss = self.loss[i]
_grad = self.optimizer.compute_gradients(_loss,self.variables.values())
tower_grads.append(_grad)
self.grads = self.average_gradients(tower_grads)
def average_gradients(self,tower_grads):
average_grads = []
for grad_and_vars in zip(*tower_grads):
# Note that each grad_and_vars looks like the following:
# ((grad0_gpu0, var0_gpu0), ... , (grad0_gpuN, var0_gpuN))
grads = []
for g, _ in grad_and_vars:
expanded_g = tf.expand_dims(g, 0)
grads.append(expanded_g)
grad = tf.concat(axis=0, values=grads)
grad = tf.reduce_mean(grad, 0)
v = grad_and_vars[0][1]
grad_and_var = (grad, v)
average_grads.append(grad_and_var)
return average_grads
# this function is called only in build() under current graph.
def _build(self,input_shape,dtype,idx=None):
self._init_in_out_size()
tmp = tf.placeholder(dtype,input_shape)
if idx is None:
self._in = tmp
else:
self._in[idx] = tmp
for l in self.net_def:
tmp = l.build(tmp,idx)
if idx is None:
self._out = tmp
output_shape=self._out.get_shape().as_list()
output_dtype=self._out.dtype
self._true_out=tf.placeholder(dtype=output_dtype,shape=output_shape)
self._loss = self._compute_loss(idx)
else:
self._out[idx] = tmp
output_shape=self._out[idx].get_shape().as_list()
output_dtype=self._out[idx].dtype
      self._true_out[idx]=tf.placeholder(dtype=output_dtype,shape=output_shape)
self._loss[idx] = self._compute_loss(idx)
return self
def _initialize(self):
self.run_initor(self.initializer)
def _compute_loss(self,idx):
loss = self.losser.compute(idx)
if loss is None:
return loss
return loss + self.regularizer.compute()
@property
def loss(self):
return self._loss
def build_variables_table(self):
for l in self.net_def:
for k in l.variables:
v = l.variables[k]
self.variables[v.name] = v
def has_built(self):
if hasattr(self,'_has_run'):
if Network.build.__name__ in self._has_run:
return True
return False
def fit(self,dataset,batch_size,n_epoch,
shuffle_epoch=True,max_step=10000000):
if dataset.train.labels.shape[-1] != self.out_shape[-1]:
dataset = dataset.to_one_hot()
train_set = dataset.train
test_set = dataset.test
train_set.before_iter()
self.i_step = 0
self.n_epoch = 0
while True:
self.i_step += 1
self.n_epoch = train_set.epochs_completed
X,y = train_set.next_batch(batch_size,shuffle=shuffle_epoch)
self.step(X,y,self.i_step)
for v in self.monitor.values():
v.status(train_set,test_set,self.i_step,self.n_epoch)
if self.n_epoch>=n_epoch:
break
if self.i_step >= max_step:
break
return self
@property
def train_op(self):
if self._train_op is None:
self._train_op = self._get_train_op()
return self._train_op
@with_graph
def _get_train_op(self,step=None):
if self.loss is None:
return None
if self.grads is None:
self.compute_gradients()
op = self.optimizer.apply_gradients(self.grads,step)
# initialize the uninitalized variable (the optimizer would introduce
# uninitalized variable)
vars = self.optimizer.variables
self.run(tf.variables_initializer(vars.values()))
return op
def step(self,X,y,step):
self.run(self.train_op,feed_dict={self.input:X,self.true_output:y})
def predict(self,X):
if self.num_gpu==0:
_in = self.input
_out = self.output
else:
_in = self.input[0]
_out = self.output[0]
return self.run(_out,feed_dict={_in:X})
def eval_node_input(self,node,X):
_in = self.input
if isinstance(node,str):
_out = self.node_by_name(node).input
else:
_out = node.input
return self.run(_out,feed_dict={_in:X})
def eval_node(self,node,X):
_in = self.input
if isinstance(node,str):
_out = self.node_by_name(node).output
else:
_out = node.output
return self.run(_out,feed_dict={_in:X})
def function(self,input_tensors,output_tensors):
def _func(input_vals):
      feed = {t:v for t,v in zip(input_tensors,input_vals)}
return self.run(output_tensors,feed_dict=feed)
return _func
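  # Hypothetical usage sketch: f = net.function([net.input], net.output)
  # then f([X_batch]) feeds X_batch and returns the network's output.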
def score(self,datasubset):
y_pred = self.predict(datasubset.data)
y_pred = np.argmax(y_pred,1)
y_true = datasubset.labels
y_true = np.argmax(y_true,1)
return metrics.accuracy_score(y_true,y_pred)
def measure_loss(self,X,y):
if self.num_gpu==0:
_in = self.input
_true_out = self.true_output
_loss = self.loss
else:
_in = self.input[0]
_true_out = self.true_output[0]
_loss = self.loss[0]
return self.run(_loss,feed_dict={_in:X,_true_out:y})
def run(self,eval_list,feed_dict=None):
return self.sess.run(eval_list, feed_dict=feed_dict)
def run_initor(self,initor):
op = initor.compute()
return self.sess.run(op)
def save(self,filename):
self.save_def(filename)
to_save={}
for k,v in self.variables.items():
to_save[k]=self.run(v)
f=open(filename+'.model','wb')
pickle.dump(to_save,f)
f.close()
def save_def(self,filename):
self.net_def.save(filename+'.modeldef')
def load(self,filename):
self._init_graph_sess()
self.load_def(filename)
f=open(filename+'.model','rb')
data_dict=pickle.load(f)
f.close()
if self.has_built():
with self._graph.as_default():
op = self.initializer.op_by_value_table(data_dict)
self.run(op)
def load_def(self,filename):
self.net_def.load(filename+'.modeldef')
@property
def in_shape(self):
if self._in is not None:
if self.num_gpu==0:
return self._in.get_shape().as_list()
else:
return self._in[0].get_shape().as_list()
return None
@property
def dtype(self):
return self._dtype
@property
def out_shape(self):
if self._out is not None:
if self.num_gpu==0:
return self._out.get_shape().as_list()
else:
return self._out[0].get_shape().as_list()
return None
def copy(self):
obj = Network()
obj.loss_input_layer_name = self.loss_input_layer_name
obj.setup_with_def(self.net_def,self.in_shape)
return obj
def __str__(self):
return '\n'.join([str(l) for l in self.nodes])
def print_shape(self):
for l in self.nodes:
print('%-20s %20s %s %-20s'%(
l.name,
l.input.get_shape(),
'->',
l.output.get_shape()))
def subnet(self,begin_index,end_index):
obj = Network()
obj.setup_with_def(self.layers[begin_index:end_index])
return obj
def supported_layers(self):
return func_table.keys()
def conv2d(self,
ksize,
knum,
strides,
activation=ops.relu,
padding='SAME',
group=1,
biased=True,
name=None):
self.net_def.append(
func_table['conv2d'](
self,ksize,knum,strides,activation,padding,group,biased,name
))
return self
def fc(self,
outdim,
activation = ops.relu,
name=None):
self.net_def.append(
func_table['fc'](
self,outdim,activation,name
))
return self
def dropout(self,
keep_prob,
name=None):
self.net_def.append(
func_table['dropout'](
self,keep_prob,name
))
return self
def lrn(self,
radius,
alpha,
beta,
bias=1.0,
name=None):
self.net_def.append(
func_table['lrn'](
self,radius,alpha,beta,bias,name
))
return self
def bn(self,
scale_offset=True,
activation=ops.relu,
name=None):
self.net_def.append(
func_table['bn'](
self,scale_offset,activation,name
))
return self
def softmax(self,
name=None):
self.net_def.append(
func_table['softmax'](
self,name
))
return self
def maxpool(self,
ksize,
strides,
padding='SAME',
name=None):
self.net_def.append(
func_table['maxpool'](
self,ksize,strides,padding,name
))
return self
def avgpool(self,
ksize,
strides,
padding='SAME',
name=None):
self.net_def.append(
func_table['avgpool'](
self,ksize,strides,padding,name
))
return self
class CustomNetwork(Network):
"""Automatically called setup and build when construct
"""
def __init__(self):
Network.__init__(self)
self.default_in_shape = None
self.setup()
in_shape = self.default_in_shape
if not in_shape:
raise ValueError("must sepecify the default_in_shape attributes, or pass the shape as an argument when construction")
def setup(self):
raise NotImplementedError("CustomNetwork Must Implement setup Method")
def build(self,inshape=None):
inshape = inshape or self.default_in_shape
return Network.build(self,inshape)
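# Subclassing sketch (hypothetical shape and layers, for illustration only):
#
#   class MyNet(CustomNetwork):
#     def setup(self):
#       self.default_in_shape = [None, 28, 28, 1]
#       self.conv2d([5,5], 32, [1,1]).fc(10).softmax()
#
#   net = MyNet()
#   net.build()          # falls back to default_in_shape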
| crackhopper/TFS-toolbox | tfs/network/base.py | Python | mit | 15,008 |
"""
"""
import unittest
import arcpy
from gsfarc.test import config
class TestDataTypeStringArray(unittest.TestCase):
"""Tests the string array task datatype"""
@classmethod
def setUpClass(cls):
config.setup_idl_toolbox('test_datatype_stringarray','qa_idltaskengine_datatype_stringarray')
@classmethod
def tearDownClass(cls):
pass
def test_datatype_stringarray_one_dimension(self):
"""Verify a one dimensional array of strings returns a semicolon separated string list."""
input = ['foo', 'bar', 'baz']
expect_dims = [len(input)]
result = arcpy.QA_IDLTaskEngine_DataType_StringArray_GSF(input, expect_dims)
self.assertEqual(result[0], ';'.join(str(i) for i in input))
if __name__ == '__main__':
unittest.main() | geospatial-services-framework/gsfpyarc | gsfarc/test/test_datatype_stringarray.py | Python | mit | 803 |
# Import 'datasets' from 'sklearn'
import numpy as np
from sklearn import datasets
from sklearn.decomposition import PCA, RandomizedPCA
import matplotlib.pyplot as plt
# Load in the 'digits' data
digits = datasets.load_digits()
# Print the 'digits' data
print(digits)
# Print the keys
print(digits.keys())
# Print out the data
print(digits.data)
# Print out the target
print(digits.target)
# Print out the description of 'digits' data
print(digits.DESCR)
# Get the digits data
digits_data = digits.data
# Print the digits data
print(digits_data.shape)
# Get the target digits
digits_target = digits.target
# Print target data shape
print(digits_target.shape)
# Get the number of unique labels
number_digits = len(np.unique(digits_target))
# Print unique values
print(number_digits)
# Isolate the 'images'
digits_images = digits.images
# Inspect the shape
print(digits_images.shape)
# Figure size (width, height) in inches
fig = plt.figure(figsize=(6, 6))
# Adjust the subplots
fig.subplots_adjust(left=0, right=1, bottom=0, top=1, hspace=0.05, wspace=0.05)
# For each of the 64 images
for i in range(64):
# Initialize the subplots: add a subplot in the grid of 8 by 8, at the i+1-th position
ax = fig.add_subplot(8, 8, i + 1, xticks=[], yticks=[])
# Display an image at the i-th position
ax.imshow(digits.images[i], cmap=plt.cm.binary, interpolation='nearest')
# label the image with the target value
ax.text(0, 7, str(digits.target[i]))
# Show the plot
plt.show()
# Create a Randomized PCA model that takes two components
randomized_pca = RandomizedPCA(n_components=2)
# Fit and transform the data to the model
reduced_data_rpca = randomized_pca.fit_transform(digits.data)
# Create a regular PCA model
pca = PCA(n_components=2)
# Fit and transform the data to the model
reduced_data_pca = pca.fit_transform(digits.data)
# Inspect the shape
print(reduced_data_pca.shape)
# Print out the data
print(reduced_data_rpca)
print(reduced_data_pca)
colors = ['black', 'blue', 'purple', 'yellow', 'white', 'red', 'lime', 'cyan', 'orange', 'gray']
for i in range(len(colors)):
x = reduced_data_rpca[:, 0][digits.target == i]
y = reduced_data_rpca[:, 1][digits.target == i]
plt.scatter(x, y, c=colors[i])
plt.legend(digits.target_names, bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
plt.xlabel('First Principal Component')
plt.ylabel('Second Principal Component')
plt.title("PCA Scatter Plot")
plt.show()
| monal94/digits-scikit-learn | main.py | Python | mit | 2,459 |
import os
import datetime
import logging
ORDER = 999
POSTS_PATH = 'posts/'
POSTS = []
from django.template import Context
from django.template.loader import get_template
from django.template.loader_tags import BlockNode, ExtendsNode
def getNode(template, context=Context(), name='subject'):
"""
Get django block contents from a template.
http://stackoverflow.com/questions/2687173/
django-how-can-i-get-a-block-from-a-template
"""
for node in template:
if isinstance(node, BlockNode) and node.name == name:
return node.render(context)
elif isinstance(node, ExtendsNode):
return getNode(node.nodelist, context, name)
raise Exception("Node '%s' could not be found in template." % name)
def preBuild(site):
global POSTS
# Build all the posts
for page in site.pages():
if page.path.startswith(POSTS_PATH):
# Skip non html posts for obious reasons
if not page.path.endswith('.html'):
continue
# Find a specific defined variable in the page context,
# and throw a warning if we're missing it.
def find(name):
c = page.context()
if not name in c:
logging.info("Missing info '%s' for post %s" % (name, page.path))
return ''
return c.get(name, '')
# Build a context for each post
postContext = {}
postContext['title'] = find('title')
postContext['author'] = find('author')
postContext['date'] = find('date')
postContext['path'] = page.path
postContext['body'] = getNode(get_template(page.path), name="body")
# Parse the date into a date object
try:
postContext['date'] = datetime.datetime.strptime(postContext['date'], '%d-%m-%Y')
except Exception, e:
logging.warning("Date format not correct for page %s, should be dd-mm-yy\n%s" % (page.path, e))
continue
POSTS.append(postContext)
# Sort the posts by date
POSTS = sorted(POSTS, key=lambda x: x['date'])
POSTS.reverse()
indexes = xrange(0, len(POSTS))
for i in indexes:
if i+1 in indexes: POSTS[i]['prevPost'] = POSTS[i+1]
if i-1 in indexes: POSTS[i]['nextPost'] = POSTS[i-1]
def preBuildPage(site, page, context, data):
"""
Add the list of posts to every page context so we can
access them from wherever on the site.
"""
context['posts'] = POSTS
for post in POSTS:
if post['path'] == page.path:
context.update(post)
return context, data | Knownly/webcrafters.knownly.net | plugins/blog.py | Python | mit | 2,349 |
from __future__ import print_function
import os
import sys
import json
if __name__=='__main__':
SRC_ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.append(SRC_ROOT)
from datetime import datetime
import requests
from jinja2 import Template
from jinja2 import Environment, PackageLoader
from utils.msg_util import *
from github_issues.md_translate import translate_for_github
from github_issues.milestone_helper import MilestoneHelper
from github_issues.label_helper import LabelHelper
import csv
from settings.base import get_github_auth, REDMINE_SERVER
import pygithub3
class GithubIssueMaker:
"""
Given a Redmine issue in JSON format, create a GitHub issue.
These issues should be moved from Redmine in order of issue.id. This will allow mapping of Redmine issue ID's against newly created Github issued IDs. e.g., can translate related issues numbers, etc.
"""
ISSUE_STATE_CLOSED = 'closed'
def __init__(self, user_map_helper=None, label_mapping_filename=None, milestone_mapping_filename=None):
self.github_conn = None
self.comments_service = None
self.milestone_manager = MilestoneHelper(milestone_mapping_filename)
self.label_helper = LabelHelper(label_mapping_filename)
self.jinja_env = Environment(loader=PackageLoader('github_issues', 'templates'))
self.user_map_helper = user_map_helper
def get_comments_service(self):
if self.comments_service is None:
self.comments_service = pygithub3.services.issues.Comments(**get_github_auth())
return self.comments_service
def get_github_conn(self):
if self.github_conn is None:
self.github_conn = pygithub3.Github(**get_github_auth())
return self.github_conn
def format_name_for_github(self, author_name, include_at_sign=True):
"""
(1) Try the user map
(2) If no match, return the name
"""
if not author_name:
return None
if self.user_map_helper:
github_name = self.user_map_helper.get_github_user(author_name, include_at_sign)
if github_name is not None:
return github_name
return author_name
def get_redmine_assignee_name(self, redmine_issue_dict):
"""
If a redmine user has a github account mapped, add the person as the assignee
"assigned_to": {
"id": 4,
"name": "Philip Durbin"
},
/cc @kneath @jresig
"""
if not type(redmine_issue_dict) is dict:
return None
redmine_name = redmine_issue_dict.get('assigned_to', {}).get('name', None)
if redmine_name is None:
return None
return redmine_name
def get_assignee(self, redmine_issue_dict):
"""
If a redmine user has a github account mapped, add the person as the assignee
"assigned_to": {
"id": 4,
"name": "Philip Durbin"
},
/cc @kneath @jresig
"""
if not type(redmine_issue_dict) is dict:
return None
redmine_name = redmine_issue_dict.get('assigned_to', {}).get('name', None)
if redmine_name is None:
return None
github_username = self.format_name_for_github(redmine_name, include_at_sign=False)
return github_username
def update_github_issue_with_related(self, redmine_json_fname, redmine2github_issue_map):
"""
Update a GitHub issue with related tickets as specfied in Redmine
- Read the current github description
- Add related notes to the bottom of description
- Update the description
"relations": [
{
"delay": null,
"issue_to_id": 4160,
"issue_id": 4062,
"id": 438,
"relation_type": "relates"
},
{
"delay": null,
"issue_to_id": 3643,
"issue_id": 4160,
"id": 439,
"relation_type": "relates"
}
],
"id": 4160,
"""
if not os.path.isfile(redmine_json_fname):
msgx('ERROR. update_github_issue_with_related. file not found: %s' % redmine_json_fname)
#msg('issue map: %s' % redmine2github_issue_map)
json_str = open(redmine_json_fname, 'rU').read()
rd = json.loads(json_str) # The redmine issue as a python dict
#msg('rd: %s' % rd)
if rd.get('relations', None) is None:
msg('no relations')
return
redmine_issue_num = rd.get('id', None)
if redmine_issue_num is None:
return
github_issue_num = redmine2github_issue_map.get(str(redmine_issue_num), None)
if github_issue_num is None:
            msg('Redmine issue not in map')
return
# Related tickets under 'relations'
#
github_related_tickets = []
original_related_tickets = []
for rel in rd.get('relations'):
issue_to_id = rel.get('issue_to_id', None)
if issue_to_id is None:
continue
if rd.get('id') == issue_to_id: # skip relations pointing to this ticket
continue
original_related_tickets.append(issue_to_id)
related_github_issue_num = redmine2github_issue_map.get(str(issue_to_id), None)
msg(related_github_issue_num)
if related_github_issue_num:
github_related_tickets.append(related_github_issue_num)
github_related_tickets.sort()
original_related_tickets.sort()
#
# end: Related tickets under 'relations'
# Related tickets under 'children'
#
# "children": [{ "tracker": {"id": 2, "name": "Feature" }, "id": 3454, "subject": "Icons in results and facet" }, ...]
#
github_child_tickets = []
original_child_tickets = []
child_ticket_info = rd.get('children', [])
if child_ticket_info:
for ctick in child_ticket_info:
child_id = ctick.get('id', None)
if child_id is None:
continue
original_child_tickets.append(child_id)
child_github_issue_num = redmine2github_issue_map.get(str(child_id), None)
msg(child_github_issue_num)
if child_github_issue_num:
github_child_tickets.append(child_github_issue_num)
original_child_tickets.sort()
github_child_tickets.sort()
#
# end: Related tickets under 'children'
#
# Update github issue with related and child tickets
#
#
if len(original_related_tickets) == 0 and len(original_child_tickets)==0:
return
# Format related ticket numbers
#
original_issues_formatted = [ """[%s](%s)""" % (x, self.format_redmine_issue_link(x)) for x in original_related_tickets]
original_issues_str = ', '.join(original_issues_formatted)
related_issues_formatted = [ '#%d' % x for x in github_related_tickets]
related_issue_str = ', '.join(related_issues_formatted)
msg('Redmine related issues: %s' % original_issues_str)
msg('Github related issues: %s' % related_issue_str)
# Format children ticket numbers
#
original_children_formatted = [ """[%s](%s)""" % (x, self.format_redmine_issue_link(x)) for x in original_child_tickets]
original_children_str = ', '.join(original_children_formatted)
github_children_formatted = [ '#%d' % x for x in github_child_tickets]
github_children_str = ', '.join(github_children_formatted)
msg('Redmine sub-issues: %s' % original_children_str)
msg('Github sub-issues: %s' % github_children_str)
try:
issue = self.get_github_conn().issues.get(number=github_issue_num)
except pygithub3.exceptions.NotFound:
msg('Issue not found!')
return
template = self.jinja_env.get_template('related_issues.md')
template_params = { 'original_description' : issue.body\
, 'original_issues' : original_issues_str\
, 'related_issues' : related_issue_str\
, 'child_issues_original' : original_children_str\
, 'child_issues_github' : github_children_str\
}
updated_description = template.render(template_params)
issue = self.get_github_conn().issues.update(number=github_issue_num, data={'body':updated_description})
        msg('Issue updated!')
def format_redmine_issue_link(self, issue_id):
if issue_id is None:
return None
return os.path.join(REDMINE_SERVER, 'issues', '%d' % issue_id)
def close_github_issue(self, github_issue_num):
if not github_issue_num:
return False
msgt('Close issue: %s' % github_issue_num)
try:
issue = self.get_github_conn().issues.get(number=github_issue_num)
except pygithub3.exceptions.NotFound:
msg('Issue not found!')
return False
if issue.state == self.ISSUE_STATE_CLOSED:
msg('Already closed')
return True
updated_issue = self.get_github_conn().issues.update(number=github_issue_num, data={'state': self.ISSUE_STATE_CLOSED })
if not updated_issue:
msg('Failed to close issue')
return False
if updated_issue.state == self.ISSUE_STATE_CLOSED:
msg('Issue closed')
return True
msg('Failed to close issue')
return False
def make_github_issue(self, redmine_json_fname, **kwargs):
"""
Create a GitHub issue from JSON for a Redmine issue.
- Format the GitHub description to include original redmine info: author, link back to redmine ticket, etc
- Add/Create Labels
- Add/Create Milestones
"""
if not os.path.isfile(redmine_json_fname):
msgx('ERROR. make_github_issue. file not found: %s' % redmine_json_fname)
include_comments = kwargs.get('include_comments', True)
include_assignee = kwargs.get('include_assignee', True)
json_str = open(redmine_json_fname, 'rU').read()
rd = json.loads(json_str) # The redmine issue as a python dict
#msg(json.dumps(rd, indent=4))
msg('Attempt to create issue: [#%s][%s]' % (rd.get('id'), rd.get('subject') ))
# (1) Format the github issue description
#
#
template = self.jinja_env.get_template('description.md')
author_name = rd.get('author', {}).get('name', None)
author_github_username = self.format_name_for_github(author_name)
desc_dict = {'description' : translate_for_github(rd.get('description', 'no description'))\
, 'redmine_link' : self.format_redmine_issue_link(rd.get('id'))\
, 'redmine_issue_num' : rd.get('id')\
, 'start_date' : rd.get('start_date', None)\
, 'author_name' : author_name\
, 'author_github_username' : author_github_username\
, 'redmine_assignee' : self.get_redmine_assignee_name(rd)
}
description_info = template.render(desc_dict)
#
# (2) Create the dictionary for the GitHub issue--for the github API
#
#self.label_helper.clear_labels(151)
github_issue_dict = { 'title': rd.get('subject')\
, 'body' : description_info\
, 'labels' : self.label_helper.get_label_names_from_issue(rd)
}
milestone_number = self.milestone_manager.get_create_milestone(rd)
if milestone_number:
github_issue_dict['milestone'] = milestone_number
if include_assignee:
assignee = self.get_assignee(rd)
if assignee:
github_issue_dict['assignee'] = assignee
msg( github_issue_dict)
#
# (3) Create the issue on github
#
issue_obj = self.get_github_conn().issues.create(github_issue_dict)
#issue_obj = self.get_github_conn().issues.update(151, github_issue_dict)
msgt('Github issue created: %s' % issue_obj.number)
msg('issue id: %s' % issue_obj.id)
msg('issue url: %s' % issue_obj.html_url)
# Map the new github Issue number to the redmine issue number
#
#redmine2github_id_map.update({ rd.get('id', 'unknown') : issue_obj.number })
#print( redmine2github_id_map)
#
# (4) Add the redmine comments (journals) as github comments
#
if include_comments:
journals = rd.get('journals', None)
if journals:
self.add_comments_for_issue(issue_obj.number, journals)
#
# (5) Should this issue be closed?
#
if self.is_redmine_issue_closed(rd):
self.close_github_issue(issue_obj.number)
return issue_obj.number
def is_redmine_issue_closed(self, redmine_issue_dict):
"""
"status": {
"id": 5,
"name": "Completed"
},
"""
if not type(redmine_issue_dict) == dict:
return False
status_info = redmine_issue_dict.get('status', None)
if not status_info:
return False
        if status_info.get('id', None) == 5:
return True
return False
def add_comments_for_issue(self, issue_num, journals):
"""
Add comments
"""
if journals is None:
msg('no journals')
return
comment_template = self.jinja_env.get_template('comment.md')
for j in journals:
notes = j.get('notes', None)
if not notes:
continue
author_name = j.get('user', {}).get('name', None)
author_github_username = self.format_name_for_github(author_name)
note_dict = { 'description' : translate_for_github(notes)\
, 'note_date' : j.get('created_on', None)\
, 'author_name' : author_name\
, 'author_github_username' : author_github_username\
}
comment_info = comment_template.render(note_dict)
comment_obj = None
try:
comment_obj = self.get_comments_service().create(issue_num, comment_info)
except requests.exceptions.HTTPError as e:
msgt('Error creating comment: %s' % e.message)
continue
if comment_obj:
dashes()
msg('comment created')
msg('comment id: %s' % comment_obj.id)
msg('api issue_url: %s' % comment_obj.issue_url)
msg('api comment url: %s' % comment_obj.url)
msg('html_url: %s' % comment_obj.html_url)
#msg(dir(comment_obj))
if __name__=='__main__':
#auth = dict(login=GITHUB_LOGIN, password=GITHUB_PASSWORD_OR_PERSONAL_ACCESS_TOKEN, repo=GITHUB_TARGET_REPOSITORY, user=GITHUB_TARGET_USERNAME)
#milestone_service = pygithub3.services.issues.Milestones(**auth)
#comments_service = pygithub3.services.issues.Comments(**auth)
    #fname = '03385.json'
#gm.make_github_issue(fname, {})
import time
issue_filename = '/Users/rmp553/Documents/iqss-git/redmine2github/working_files/redmine_issues/2014-0702/04156.json'
gm = GithubIssueMaker()
for x in range(100, 170):
gm.close_github_issue(x)
#gm.make_github_issue(issue_filename, {})
sys.exit(0)
root_dir = '/Users/rmp553/Documents/iqss-git/redmine2github/working_files/redmine_issues/2014-0702/'
cnt =0
for fname in os.listdir(root_dir):
if fname.endswith('.json'):
num = int(fname.replace('.json', ''))
if num < 3902: continue
msg('Add issue from: %s' % fname)
cnt+=1
fullname = os.path.join(root_dir, fname)
gm.make_github_issue(fullname, {})
if cnt == 150:
break
if cnt%50 == 0:
msg('sleep 2 secs')
time.sleep(2)
#sys.exit(0)
| jperelli/redmine2github | src/github_issues/github_issue_maker.py | Python | mit | 17,531 |
import pile
import matplotlib.pyplot as plt
import time
import random
x,y = [],[]
for i in range(0,10):
p = 10 ** i
print(i)
start = time.time()
pile.pile(p)
final = time.time()
delta = final - start
x.append(p)
y.append(delta)
plt.plot(x,y)
plt.ylabel("The time taken to compute the pile splitting of a pile os size n")
print(y)
plt.savefig("data.jpg")
plt.show()
def cutter(tsize,dsize):
p = [tsize]
soma = 0
size = 1
for i in range(dsize):
if size == 0:
break
update = []
for n in p:
if n == 1:
soma += 0
else:
a = random.randint(1,n-1)
b = n - a
soma += a*b
update.append(a)
update.append(b)
p = list(update)
size = len(p)
print(update,soma)
return(p,soma)
print(cutter(30,99))
| victor-cortez/Heimdall | pile/timer.py | Python | mit | 914 |
import os, sys
PATH = os.path.join(os.path.dirname(__file__), '..')
sys.path += [
os.path.join(PATH, 'project/apps'),
os.path.join(PATH, 'project'),
os.path.join(PATH, '..'),
PATH]
os.environ['DJANGO_SETTINGS_MODULE'] = 'project.settings.production'
import django.core.handlers.wsgi
application = django.core.handlers.wsgi.WSGIHandler()
| claudiob/pypeton | pypeton/files/django/deploy/django_wsgi_production.py | Python | mit | 344 |
from django.core.urlresolvers import reverse
from django.test import TestCase as DjangoTestCase
from blognajd.models import Story, SiteSettings
from blognajd.sitemaps import StaticSitemap, StoriesSitemap
class StaticSitemap1TestCase(DjangoTestCase):
fixtures = ['sitesettings_tests.json']
def test_staticsitemap_items_disabled(self):
sitesettings = SiteSettings.objects.get(pk=1)
sitesettings.has_about_page = False
sitesettings.has_projects_page = False
sitesettings.has_contact_page = False
sitesettings.save()
self.assertEqual([i for i in StaticSitemap().items()], ['blog'])
def test_staticsitemap_items_enabled(self):
sitesettings = SiteSettings.objects.get(pk=1)
sitesettings.has_about_page = True
sitesettings.has_projects_page = True
sitesettings.has_contact_page = True
sitesettings.save()
self.assertEqual(sorted([i for i in StaticSitemap().items()]),
['about', 'blog', 'contact', 'projects'])
def test_staticsitemap_location(self):
sitemap = StaticSitemap()
for item in sitemap.items():
if item == 'contact':
urlname = 'contactme-get-contact-form'
else:
urlname = item
self.assertEqual(sitemap.location(item), reverse(urlname))
class StoriesSitemapTestCase(DjangoTestCase):
fixtures = ['story_tests.json']
def setUp(self):
self.story = Story.objects.get(pk=1)
self.sitemap = StoriesSitemap()
def test_storiessitemap_items(self):
self.assertEqual(len(self.sitemap.items()), 1)
def test_storiessitemap_lastmod(self):
for item in self.sitemap.items():
self.assertEqual(self.sitemap.lastmod(item), self.story.mod_date)
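# These tests can be run on their own with, e.g.:
#   python manage.py test blognajd.tests.test_sitemaps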
| danirus/blognajd | blognajd/tests/test_sitemaps.py | Python | mit | 1,817 |
#!/usr/bin/python
import subprocess
import os
import time
import platform
import glob
import shutil
import csbuild
from csbuild import log
csbuild.Toolchain("gcc").Compiler().SetCppStandard("c++11")
csbuild.Toolchain("gcc").SetCxxCommand("clang++")
csbuild.Toolchain("gcc").Compiler().AddWarnFlags("all", "extra", "ctor-dtor-privacy", "overloaded-virtual", "init-self", "missing-include-dirs", "switch-default", "no-switch-enum", "undef", "no-old-style-cast")
csbuild.DisablePrecompile()
csbuild.AddOption("--with-mongo", action="store", help="Path to mongo include directory. If not specified, mongo will not be built.", nargs="?", default=None, const="/usr")
csbuild.AddOption("--with-boost", action="store", help="Path to boost include directory. If not specified, mongo will not be built.", nargs="?", default=None, const="/usr")
csbuild.AddOption("--no-threads", action="store_true", help="Build without thread support")
csbuild.AddOption("--no-exceptions", action="store_true", help="Build without exception support")
csbuild.AddOption("--no-unit-tests", action="store_true", help="Don't automatically run unit tests as part of build")
csbuild.SetHeaderInstallSubdirectory("sprawl/{project.name}")
csbuild.SetUserData("subdir", platform.system())
if platform.system() == "Darwin":
csbuild.Toolchain("gcc").AddDefines("_XOPEN_SOURCE");
csbuild.Toolchain("gcc").SetCppStandardLibrary("libc++")
csbuild.SetOutputDirectory("lib/{project.userData.subdir}/{project.activeToolchainName}/{project.outputArchitecture}/{project.targetName}")
csbuild.SetIntermediateDirectory("Intermediate/{project.userData.subdir}/{project.activeToolchainName}/{project.outputArchitecture}/{project.targetName}/{project.name}")
csbuild.Toolchain("msvc").AddCompilerFlags(
"/fp:fast",
"/wd\"4530\"",
"/wd\"4067\"",
"/wd\"4351\"",
"/constexpr:steps1000000",
)
if not csbuild.GetOption("no_threads"):
csbuild.Toolchain("gcc", "ios", "android").AddCompilerFlags("-pthread")
if csbuild.GetOption("no_exceptions"):
csbuild.Toolchain("gcc", "ios", "android").AddCompilerFlags("-fno-exceptions")
else:
csbuild.Toolchain("msvc").AddCompilerFlags("/EHsc")
@csbuild.project("collections", "collections")
def collections():
csbuild.SetOutput("libsprawl_collections", csbuild.ProjectType.StaticLibrary)
csbuild.EnableHeaderInstall()
@csbuild.project("tag", "tag")
def tag():
csbuild.SetOutput("libsprawl_tag", csbuild.ProjectType.StaticLibrary)
csbuild.EnableHeaderInstall()
@csbuild.project("if", "if")
def if_():  # 'if' is a reserved word, hence the underscore
csbuild.SetOutput("libsprawl_if", csbuild.ProjectType.StaticLibrary)
csbuild.EnableHeaderInstall()
@csbuild.project("network", "network")
def network():
csbuild.SetOutput("libsprawl_network", csbuild.ProjectType.StaticLibrary)
csbuild.EnableOutputInstall()
csbuild.EnableHeaderInstall()
@csbuild.project("serialization", "serialization")
def serialization():
csbuild.SetOutput("libsprawl_serialization", csbuild.ProjectType.StaticLibrary)
csbuild.AddExcludeDirectories("serialization/mongo")
csbuild.EnableOutputInstall()
csbuild.EnableHeaderInstall()
@csbuild.project("time", "time")
def timeProject():
csbuild.SetOutput("libsprawl_time", csbuild.ProjectType.StaticLibrary)
csbuild.Toolchain("gcc").AddExcludeFiles("time/*_windows.cpp")
if platform.system() == "Darwin":
csbuild.Toolchain("gcc").AddExcludeFiles("time/*_linux.cpp")
else:
csbuild.Toolchain("gcc").AddExcludeFiles("time/*_osx.cpp")
csbuild.Toolchain("msvc").AddExcludeFiles("time/*_linux.cpp", "time/*_osx.cpp")
csbuild.EnableOutputInstall()
csbuild.EnableHeaderInstall()
@csbuild.project("filesystem", "filesystem")
def filesystem():
csbuild.SetOutput("libsprawl_filesystem", csbuild.ProjectType.StaticLibrary)
csbuild.Toolchain("gcc").AddExcludeFiles("filesystem/*_windows.cpp")
csbuild.Toolchain("msvc").AddExcludeFiles("filesystem/*_linux.cpp")
csbuild.EnableOutputInstall()
csbuild.EnableHeaderInstall()
@csbuild.project("threading", "threading")
def threading():
csbuild.SetOutput("libsprawl_threading", csbuild.ProjectType.StaticLibrary)
if platform.system() != "Darwin":
@csbuild.scope(csbuild.ScopeDef.Final)
def finalScope():
csbuild.Toolchain("gcc").Linker().AddLinkerFlags("-pthread")
csbuild.Toolchain("gcc").AddExcludeFiles("threading/*_windows.cpp")
if platform.system() == "Darwin":
csbuild.Toolchain("gcc").AddExcludeFiles("threading/event_linux.cpp")
else:
csbuild.Toolchain("gcc").AddExcludeFiles("threading/event_osx.cpp")
csbuild.Toolchain("msvc").AddExcludeFiles(
"threading/*_linux.cpp",
"threading/*_osx.cpp"
)
csbuild.EnableOutputInstall()
csbuild.EnableHeaderInstall()
MongoDir = csbuild.GetOption("with_mongo")
BoostDir = csbuild.GetOption("with_boost")
if (not MongoDir) ^ (not BoostDir):
	log.LOG_ERROR("Both mongo and boost directories must be specified to build MongoSerializer.")
csbuild.Exit(1)
if MongoDir and BoostDir:
MongoDir = os.path.abspath(MongoDir)
BoostDir = os.path.abspath(BoostDir)
@csbuild.project("serialization-mongo", "serialization/mongo")
	def serialization_mongo():
csbuild.SetOutput("libsprawl_serialization-mongo", csbuild.ProjectType.StaticLibrary)
csbuild.AddDefines("BOOST_ALL_NO_LIB")
csbuild.AddIncludeDirectories(
"./serialization",
os.path.join(MongoDir, "include"),
os.path.join(BoostDir, "include")
)
csbuild.AddLibraryDirectories(
os.path.join(MongoDir, "lib"),
os.path.join(BoostDir, "lib")
)
csbuild.SetHeaderInstallSubdirectory("sprawl/serialization")
csbuild.EnableOutputInstall()
csbuild.EnableHeaderInstall()
@csbuild.project("memory", "memory")
def memory():
csbuild.SetOutput("libsprawl_memory", csbuild.ProjectType.StaticLibrary)
csbuild.EnableHeaderInstall()
@csbuild.project("string", "string")
def string():
csbuild.SetOutput("libsprawl_string", csbuild.ProjectType.StaticLibrary)
csbuild.EnableOutputInstall()
csbuild.EnableHeaderInstall()
@csbuild.project("hash", "hash")
def hash():
csbuild.SetOutput("libsprawl_hash", csbuild.ProjectType.StaticLibrary)
csbuild.EnableOutputInstall()
csbuild.EnableHeaderInstall()
@csbuild.project("logging", "logging")
def logging():
csbuild.SetOutput("libsprawl_logging", csbuild.ProjectType.StaticLibrary)
@csbuild.scope(csbuild.ScopeDef.Final)
def finalScope():
if platform.system() != "Darwin":
csbuild.Toolchain("gcc").AddLibraries(
"bfd",
)
csbuild.Toolchain("msvc").AddLibraries(
"DbgHelp"
)
csbuild.Toolchain("gcc").AddExcludeFiles("logging/*_windows.cpp")
if platform.system() == "Darwin":
csbuild.Toolchain("gcc").AddExcludeFiles("logging/*_linux.cpp")
else:
csbuild.Toolchain("gcc").AddExcludeFiles("logging/*_osx.cpp")
csbuild.Toolchain("msvc").AddExcludeFiles(
"logging/*_linux.cpp",
"logging/*_osx.cpp"
)
csbuild.EnableOutputInstall()
csbuild.EnableHeaderInstall()
@csbuild.project("common", "common")
def common():
csbuild.SetOutput("libsprawl_common", csbuild.ProjectType.StaticLibrary)
csbuild.EnableHeaderInstall()
UnitTestDepends = ["serialization", "string", "hash", "time", "threading", "filesystem", "logging"]
if MongoDir:
UnitTestDepends.append("serialization-mongo")
@csbuild.project("UnitTests", "UnitTests", UnitTestDepends)
def UnitTests():
csbuild.DisableChunkedBuild()
csbuild.SetOutput("SprawlUnitTest")
csbuild.SetOutputDirectory("bin/{project.userData.subdir}/{project.activeToolchainName}/{project.outputArchitecture}/{project.targetName}")
csbuild.EnableOutputInstall()
csbuild.AddIncludeDirectories(
"UnitTests/gtest",
"UnitTests/gtest/include",
)
csbuild.Toolchain("gcc").Compiler().AddWarnFlags("no-undef", "no-switch-enum", "no-missing-field-initializers")
csbuild.AddExcludeFiles(
"UnitTests/gtest/src/gtest-death-test.cc",
"UnitTests/gtest/src/gtest-filepath.cc",
"UnitTests/gtest/src/gtest-internal-inl.h",
"UnitTests/gtest/src/gtest-port.cc",
"UnitTests/gtest/src/gtest-printers.cc",
"UnitTests/gtest/src/gtest-test-part.cc",
"UnitTests/gtest/src/gtest-typed-test.cc",
"UnitTests/gtest/src/gtest.cc",
)
if MongoDir:
csbuild.AddIncludeDirectories(
"./serialization",
os.path.join(MongoDir, "include"),
os.path.join(BoostDir, "include")
)
csbuild.AddLibraryDirectories(
os.path.join(MongoDir, "lib"),
os.path.join(BoostDir, "lib")
)
csbuild.AddLibraries(
"mongoclient",
"boost_filesystem",
"boost_system",
"boost_thread",
"boost_program_options",
"ssl",
"crypto",
)
csbuild.Toolchain("gcc").AddLibraries("pthread")
csbuild.Toolchain("gcc").AddCompilerFlags("-pthread")
csbuild.AddDefines("WITH_MONGO")
else:
csbuild.AddExcludeFiles(
"UnitTests/UnitTests_MongoReplicable.cpp",
)
@csbuild.project("QueueTests", "QueueTests", ["time", "threading"])
def QueueTests():
csbuild.DisableChunkedBuild()
csbuild.SetOutput("QueueTests")
csbuild.SetOutputDirectory("bin/{project.userData.subdir}/{project.activeToolchainName}/{project.outputArchitecture}/{project.targetName}")
csbuild.EnableOutputInstall()
csbuild.Toolchain("gcc").Compiler().AddWarnFlags("no-undef", "no-switch-enum", "no-missing-field-initializers")
csbuild.AddIncludeDirectories("QueueTests/ext/include")
csbuild.AddLibraryDirectories("QueueTests/ext/lib/{project.userData.subdir}-{project.outputArchitecture}")
csbuild.AddExcludeDirectories("QueueTests/ext")
csbuild.AddLibraries("tbb")
if platform.system() == "Windows":
@csbuild.postMakeStep
def postMake(project):
for f in glob.glob("QueueTests/ext/lib/{project.userData.subdir}-{project.outputArchitecture}/*".format(project=project)):
basename = os.path.basename(f)
dest = os.path.join(project.outputDir, basename)
if not os.path.exists(dest):
print("Copying {} to {}".format(f, dest))
					shutil.copyfile(f, dest)
 | 3Jade/Sprawl | make.py | Python | mit | 9,814 |
#
# This file is part of Gruvi. Gruvi is free software available under the
# terms of the MIT license. See the file "LICENSE" that was provided
# together with this source file for the licensing terms.
#
# Copyright (c) 2012-2014 the Gruvi authors. See the file "AUTHORS" for a
# complete list.
from __future__ import absolute_import, print_function, division
import time
import unittest
from gruvi.http import HttpProtocol, HttpServer, HttpClient
from support import PerformanceTest, MockTransport
def hello_app(environ, start_response):
headers = [('Content-Type', 'text/plain')]
start_response('200 OK', headers)
return [b'Hello!']
class PerfHttp(PerformanceTest):
def perf_parsing_speed(self):
transport = MockTransport()
protocol = HttpProtocol()
transport.start(protocol)
r = b'HTTP/1.1 200 OK\r\nContent-Length: 10000\r\n\r\n'
r += b'x' * 10000
reqs = 4 * r
nbytes = 0
t0 = t1 = time.time()
while t1 - t0 < 1.0:
protocol.data_received(reqs)
del protocol._queue._heap[:]
nbytes += len(reqs)
t1 = time.time()
speed = nbytes / (t1 - t0) / (1024 * 1024)
self.add_result(speed)
def perf_server_throughput(self):
server = HttpServer(hello_app)
server.listen(('localhost', 0))
addr = server.addresses[0]
client = HttpClient()
client.connect(addr)
nrequests = 0
pipeline = 10
t0 = t1 = time.time()
while t1 - t0 < 1.0:
for i in range(pipeline):
client.request('GET', '/')
for i in range(pipeline):
resp = client.getresponse()
self.assertEqual(resp.body.read(), b'Hello!')
nrequests += pipeline
t1 = time.time()
throughput = nrequests / (t1 - t0)
self.add_result(throughput)
server.close()
client.close()
if __name__ == '__main__':
unittest.defaultTestLoader.testMethodPrefix = 'perf'
unittest.main()
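# Note: overriding testMethodPrefix above makes unittest collect the perf_*
# methods of PerfHttp when this file is run directly.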
| geertj/gruvi | tests/perf_http.py | Python | mit | 2,074 |
#randNNIWalks.py
#writes random SPR walks to files
#calls GTP on each NNI random walk file to get
#the ditances between each tree and the first tree of the sequence
#the results are written to csv files with lines delimited by \t
import tree_utils as tu
import w_tree_utils as wtu
import os
import sys
import numpy as np
import random
from math import sqrt
__pid__ = 0
__prefix__ = "NNI_"
#daf: distance algorithm file
def randNNIwalk(daf,size,steps,runs,seed,weighted = False):
global __pid__
global __prefix__
#set the seed
random.seed(seed)
np.random.seed(seed)
#select tree utils module
if weighted:
tum = wtu
genRandBinTree = lambda leaves: wtu.genRandBinTree(leaves,np.random.exponential)
else:
tum = tu
genRandBinTree = lambda leaves: tu.genRandBinTree(leaves)
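    # NOTE: treeNorm is overridden with a constant below, so every tree
    # contributes the same norm (sqrt(0.25) = 0.5) to the .norms output.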
tum.treeNorm = lambda x: 0.25
out_file_name = __prefix__ + str(size) + "_" + str(steps) + "_" +\
str(runs) + "_" + str(seed)
normsfile_name = out_file_name + '.norms'
#create a file for each spr sequence
for k in range(runs):
rand_tree = genRandBinTree(list(range(size)))
total_nodes = size-1
#write current sequence to file
infile_prefix = "tmpnniseq" + str(__pid__)
infile = infile_prefix + str(k)
with open(infile,'w') as treefile, open(normsfile_name,'w') as nrmfile:
treefile.write(tum.toNewickTree(rand_tree) + "\n")
current_tree = rand_tree
#write tree norms-----
#save norm of first tree
norm1 = sqrt(tum.treeNorm(rand_tree))
walknorms = ''
for i in range(steps):
current_tree = tum.randNNI(current_tree,total_nodes)
treefile.write(tum.toNewickTree(current_tree) + "\n")
#write ||T1|| + ||T2||
walknorms += str(norm1 + sqrt(tum.treeNorm(current_tree))) + ','
#write norms sequence
nrmfile.write(walknorms[0:-1] + '\n')
#assumes GTP file is in current working directory
outfile = "tempseq" + str(__pid__) + ".csv"
infile_prefix = "tmpnniseq" + str(__pid__)
infile = infile_prefix + str(k)
os.system("java -jar " + daf + " -r 0 -o " + outfile + " " + infile)
#append output to final sequence file
os.system("cat " + outfile + " | ./toLines.py >> " + out_file_name)
#cleanup
os.system("rm " + outfile)
os.system("rm " + infile_prefix + "*")
if __name__=='__main__':
if len(sys.argv)<6:
print ("Too few arguments!!")
print ("Usage: [-w] <distance algorithm file .jar> <size or size range> <no. NNI steps> <no. runs> <seed or seed range>")
sys.exit(-1)
WEIGHTED = False
if len(sys.argv) == 7:
WEIGHTED = sys.argv.pop(1) == '-w'
dist_algo_file = sys.argv[1]
if dist_algo_file != "gtp.jar":
__prefix__ = "RNI_"
if WEIGHTED:
__prefix__ = 'W' + __prefix__
else:
__prefix__ = 'U' + __prefix__
#take a single size or a range of sizes
if ":" in sys.argv[2]:
size_start, size_end = map(lambda x: int(x),sys.argv[2].split(':'))
else:
size_start = int(sys.argv[2])
size_end = size_start + 1
size_range = range(size_start,size_end)
steps = int(sys.argv[3])
runs = int(sys.argv[4])
#take a single seed or a range of seeds
if ":" in sys.argv[5]:
seed_start,seed_end = map(lambda x: int(x),sys.argv[5].split(':'))
else:
seed_start = int(sys.argv[5])
seed_end = seed_start + 1
seed_range = range(seed_start,seed_end)
#set pid property before calling randSPRWalk
__pid__ = os.getpid()
for size in size_range:
for seed in seed_range:
            randNNIwalk(dist_algo_file, size, steps, runs, seed, WEIGHTED)
 | alejandro-mc/trees | randNNIWalks.py | Python | mit | 4,063 |
# -*- coding: utf-8 -*-
import cv2
# device number "0"
cap = cv2.VideoCapture(0)
while(True):
# Capture a frame
ret, frame = cap.read()
# show on display
cv2.imshow('frame',frame)
# waiting for keyboard input
key = cv2.waitKey(1) & 0xFF
# Exit if "q" pressed
if key == ord('q'):
break
# Save if "s" pressed
if key == ord('s'):
path = "photo.jpg"
cv2.imwrite(path,frame)
# When everything done, release the capture
cap.release()
cv2.destroyAllWindows()
| tomoyuki-nakabayashi/ICS-IoT-hackathon | sample/python-camera/python-camera.py | Python | mit | 475 |
import importlib
from django.conf import settings
from django.views import View
class BaseView(View):
"""后台管理基类"""
def __init__(self):
self.context = {}
self.context["path"] = {}
def dispatch(self,request,*args,**kwargs):
_path = request.path_info.split("/")[1:]
self.context["path"]["app"] = _path[0]
self.context["path"]["module"] = _path[1]
self.context["path"]["action"] = _path[-1]
imp_module_path = self.context["path"]["app"]+".views."+self.context["path"]["module"]
imp_module = importlib.import_module(imp_module_path)
imp_cls = getattr(imp_module,self.context["path"]["module"].capitalize())
return getattr(imp_cls,self.context["path"]["action"])(self,request)
| wdcxc/blog | admin/views/base.py | Python | mit | 789 |
import _plotly_utils.basevalidators
class SymmetricValidator(_plotly_utils.basevalidators.BooleanValidator):
def __init__(
self, plotly_name="symmetric", parent_name="scatter3d.error_y", **kwargs
):
super(SymmetricValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "calc"),
role=kwargs.pop("role", "info"),
**kwargs
)
| plotly/python-api | packages/python/plotly/plotly/validators/scatter3d/error_y/_symmetric.py | Python | mit | 472 |
from flask import request, render_template
from flask.ext.login import current_user, login_user
from mysite.weibo import Client
from mysite import app, db
from mysite.models import Wuser, User
from . import weibo
@weibo.route('/oauthreturn')
def oauthreturn():
code = request.args.get('code', '')
if code:
client = Client(app.config['API_KEY'], app.config['API_SECRET'], app.config['REDIRECT_URI'])
client.set_code(code)
uid = client.token['uid']
profile = client.get('users/show', access_token=client.access_token, uid=uid)
wuser = Wuser.query.filter_by(uid=uid).first()
if wuser:
login_user(wuser.user)
else:
user = User()
wuser = Wuser(uid=uid)
wuser.user = user
db.session.add(user)
login_user(user)
wuser.update_access_token(client.token['access_token'])
wuser.update_profile(profile)
db.session.add(wuser)
db.session.commit()
return render_template("weibo/profile.html", wuser=wuser) | liyigerry/caixiang | mysite/views/weibo/oauthreturn.py | Python | mit | 941 |
"""
WSGI config for billboards project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
from os.path import abspath, dirname
from sys import path
SITE_ROOT = dirname(dirname(abspath(__file__)))
path.append(SITE_ROOT)
# We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks
# if running multiple sites in the same mod_wsgi process. To fix this, use
# mod_wsgi daemon mode with each site in its own daemon process, or use
# os.environ["DJANGO_SETTINGS_MODULE"] = "jajaja.settings"
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "billboards.settings.production")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
| serefimov/billboards | billboards/billboards/wsgi.py | Python | mit | 1,568 |
# !/usr/bin/env python
# coding: utf-8
__author__ = 'Moch'
import tornado.ioloop
import tornado.options
import tornado.httpserver
from application import application
from tornado.options import define, options
define("port", default=8000, help="run on the given port", type=int)
def main():
tornado.options.parse_command_line()
http_server = tornado.httpserver.HTTPServer(application)
http_server.listen(options.port)
print("Development server is runing at http://127.0.0.1:{}".format(options.port))
print("Quit the server with Control-C")
tornado.ioloop.IOLoop.instance().start()
if __name__ == "__main__":
main()
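# The port can be overridden on the command line, e.g.:
#   python server.py --port=8888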
| snownothing/Python | web/server.py | Python | mit | 651 |
from stard.services import BaseService
class Service(BaseService):
def init_service(self):
self.add_parent('multiuser')
self.add_parent('dhcpcd')
self.add_parent('getty', terminal=1)
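        # Prerequisites: this service starts after 'multiuser', 'dhcpcd',
        # and a getty on terminal 1 (the parameterized parent above).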
| DexterLB/stard | src/etc/stard/init.py | Python | mit | 212 |
import json
import enum
from urllib.parse import urlencode
from urllib.request import urlopen
from urllib import request
class APINonSingle:
def __init__(self, api_key, agent = "webnews-python", webnews_base = "https://webnews.csh.rit.edu/"):
self.agent = agent
self.api_key = api_key
self.webnews_base = webnews_base
class Actions(enum.Enum):
user = "user"
unread_counts = "unread_counts"
newsgroups = "newsgroups"
search = "search"
compose = "compose"
def POST(self, action, args={}):
if type(action) == API.Actions:
action = action.value
args['api_key'] = self.api_key
args['api_agent'] = self.agent
args = urlencode(args).encode('utf-8')
req = request.Request(self.webnews_base+ action)
req.add_header('Accept', 'application/json')
resp = urlopen(req, args).read().decode('utf-8')
return json.loads(resp)
def GET(self, action, args={}):
if type(action) == API.Actions:
action = action.value
args['api_key'] = self.api_key
args['api_agent'] = self.agent
args = urlencode(args)
req = request.Request(self.webnews_base + action + '?' + args)
req.add_header('Accept', 'application/json')
resp = urlopen(req).read().decode('utf-8')
return json.loads(resp)
def user(self):
return self.GET(API.Actions.user)
def unread_counts(self):
return self.GET(API.Actions.unread_counts)
def newsgroups(self):
return self.GET(API.Actions.newsgroups)
def newsgroups_search(self, newsgroup):
return self.GET("newsgroups/" + newsgroup)
def newsgroup_posts(self, newsgroup, params={}):
return self.GET(newsgroup + '/index', params)
def search(self, params = {}):
return self.GET(API.Actions.search, params)
def post_specifics(self, newsgroup, index, params={}):
return self.GET(str(newsgroup)+"/"+str(index), params)
def compose(self, newsgroup, subject, body, params={}):
params['subject'] = subject
params['body'] = body
params['newsgroup'] = newsgroup
return self.POST(API.Actions.compose, params)
"""
Wrap the APINonSingle object so that
only a single object for each key will exist.
Optimization for object implementation
"""
class API(APINonSingle):
_instance = {}
def __new__(cls, *args, **kwargs):
if not args[0] in cls._instance:
cls._instance[args[0]] = APINonSingle(*args, **kwargs)
return cls._instance[args[0]]
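# Example usage (illustrative; the key below is a placeholder):
#   api = API('my-api-key')
#   print(api.user())
# Because instances are cached per key, API('my-api-key') returns the same
# underlying object on every call.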
| AndrewHanes/Python-Webnews | webnews/api.py | Python | mit | 2,609 |
from django.contrib import admin
from .models import User
from application.models import (Contact, Personal, Wife, Occupation, Children,
Hod, Committee, UserCommittee, Legal)
# Register your models here.
class ContactInline(admin.StackedInline):
model = Contact
class PersonalInline(admin.StackedInline):
model = Personal
class WifeInline(admin.StackedInline):
model = Wife
class OccupationInline(admin.StackedInline):
model = Occupation
class HodInline(admin.StackedInline):
model = Hod
class ChildrenInline(admin.StackedInline):
model = Children
class UserCommitteeInline(admin.StackedInline):
model = UserCommittee
class UserAdmin(admin.ModelAdmin):
inlines = [
ContactInline,
PersonalInline,
WifeInline,
OccupationInline,
HodInline,
ChildrenInline,
UserCommitteeInline
]
class LegalAdmin(admin.ModelAdmin):
model = Legal
admin.site.register(User, UserAdmin)
admin.site.register(Legal, LegalAdmin)
admin.site.site_header = 'Hebrew Order of David Administration'
| dhosterman/hebrew_order_david | accounts/admin.py | Python | mit | 1,111 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import functools
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
from ..._vendor import _convert_request
from ...operations._agent_pools_operations import build_create_or_update_request_initial, build_delete_request_initial, build_get_available_agent_pool_versions_request, build_get_request, build_get_upgrade_profile_request, build_list_request, build_upgrade_node_image_version_request_initial
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class AgentPoolsOperations:
"""AgentPoolsOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.containerservice.v2022_01_02_preview.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
@distributed_trace
def list(
self,
resource_group_name: str,
resource_name: str,
**kwargs: Any
) -> AsyncIterable["_models.AgentPoolListResult"]:
"""Gets a list of agent pools in the specified managed cluster.
Gets a list of agent pools in the specified managed cluster.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param resource_name: The name of the managed cluster resource.
:type resource_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either AgentPoolListResult or the result of cls(response)
:rtype:
~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.containerservice.v2022_01_02_preview.models.AgentPoolListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.AgentPoolListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_list_request(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
resource_name=resource_name,
template_url=self.list.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_list_request(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
resource_name=resource_name,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("AgentPoolListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/agentPools'} # type: ignore
@distributed_trace_async
async def get(
self,
resource_group_name: str,
resource_name: str,
agent_pool_name: str,
**kwargs: Any
) -> "_models.AgentPool":
"""Gets the specified managed cluster agent pool.
Gets the specified managed cluster agent pool.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param resource_name: The name of the managed cluster resource.
:type resource_name: str
:param agent_pool_name: The name of the agent pool.
:type agent_pool_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: AgentPool, or the result of cls(response)
:rtype: ~azure.mgmt.containerservice.v2022_01_02_preview.models.AgentPool
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.AgentPool"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_request(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
resource_name=resource_name,
agent_pool_name=agent_pool_name,
template_url=self.get.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('AgentPool', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/agentPools/{agentPoolName}'} # type: ignore
async def _create_or_update_initial(
self,
resource_group_name: str,
resource_name: str,
agent_pool_name: str,
parameters: "_models.AgentPool",
**kwargs: Any
) -> "_models.AgentPool":
cls = kwargs.pop('cls', None) # type: ClsType["_models.AgentPool"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
_json = self._serialize.body(parameters, 'AgentPool')
request = build_create_or_update_request_initial(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
resource_name=resource_name,
agent_pool_name=agent_pool_name,
content_type=content_type,
json=_json,
template_url=self._create_or_update_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('AgentPool', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('AgentPool', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/agentPools/{agentPoolName}'} # type: ignore
@distributed_trace_async
async def begin_create_or_update(
self,
resource_group_name: str,
resource_name: str,
agent_pool_name: str,
parameters: "_models.AgentPool",
**kwargs: Any
) -> AsyncLROPoller["_models.AgentPool"]:
"""Creates or updates an agent pool in the specified managed cluster.
Creates or updates an agent pool in the specified managed cluster.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param resource_name: The name of the managed cluster resource.
:type resource_name: str
:param agent_pool_name: The name of the agent pool.
:type agent_pool_name: str
:param parameters: The agent pool to create or update.
:type parameters: ~azure.mgmt.containerservice.v2022_01_02_preview.models.AgentPool
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either AgentPool or the result of
cls(response)
:rtype:
~azure.core.polling.AsyncLROPoller[~azure.mgmt.containerservice.v2022_01_02_preview.models.AgentPool]
:raises: ~azure.core.exceptions.HttpResponseError
"""
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.AgentPool"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._create_or_update_initial(
resource_group_name=resource_group_name,
resource_name=resource_name,
agent_pool_name=agent_pool_name,
parameters=parameters,
content_type=content_type,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('AgentPool', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/agentPools/{agentPoolName}'} # type: ignore
async def _delete_initial(
self,
resource_group_name: str,
resource_name: str,
agent_pool_name: str,
**kwargs: Any
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_delete_request_initial(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
resource_name=resource_name,
agent_pool_name=agent_pool_name,
template_url=self._delete_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/agentPools/{agentPoolName}'} # type: ignore
@distributed_trace_async
async def begin_delete(
self,
resource_group_name: str,
resource_name: str,
agent_pool_name: str,
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Deletes an agent pool in the specified managed cluster.
Deletes an agent pool in the specified managed cluster.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param resource_name: The name of the managed cluster resource.
:type resource_name: str
:param agent_pool_name: The name of the agent pool.
:type agent_pool_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises: ~azure.core.exceptions.HttpResponseError
"""
polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
resource_name=resource_name,
agent_pool_name=agent_pool_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/agentPools/{agentPoolName}'} # type: ignore
@distributed_trace_async
async def get_upgrade_profile(
self,
resource_group_name: str,
resource_name: str,
agent_pool_name: str,
**kwargs: Any
) -> "_models.AgentPoolUpgradeProfile":
"""Gets the upgrade profile for an agent pool.
Gets the upgrade profile for an agent pool.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param resource_name: The name of the managed cluster resource.
:type resource_name: str
:param agent_pool_name: The name of the agent pool.
:type agent_pool_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: AgentPoolUpgradeProfile, or the result of cls(response)
:rtype: ~azure.mgmt.containerservice.v2022_01_02_preview.models.AgentPoolUpgradeProfile
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.AgentPoolUpgradeProfile"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_upgrade_profile_request(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
resource_name=resource_name,
agent_pool_name=agent_pool_name,
template_url=self.get_upgrade_profile.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('AgentPoolUpgradeProfile', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_upgrade_profile.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/agentPools/{agentPoolName}/upgradeProfiles/default'} # type: ignore
@distributed_trace_async
async def get_available_agent_pool_versions(
self,
resource_group_name: str,
resource_name: str,
**kwargs: Any
) -> "_models.AgentPoolAvailableVersions":
"""Gets a list of supported Kubernetes versions for the specified agent pool.
See `supported Kubernetes versions
<https://docs.microsoft.com/azure/aks/supported-kubernetes-versions>`_ for more details about
the version lifecycle.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param resource_name: The name of the managed cluster resource.
:type resource_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: AgentPoolAvailableVersions, or the result of cls(response)
:rtype: ~azure.mgmt.containerservice.v2022_01_02_preview.models.AgentPoolAvailableVersions
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.AgentPoolAvailableVersions"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_available_agent_pool_versions_request(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
resource_name=resource_name,
template_url=self.get_available_agent_pool_versions.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('AgentPoolAvailableVersions', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_available_agent_pool_versions.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/availableAgentPoolVersions'} # type: ignore
async def _upgrade_node_image_version_initial(
self,
resource_group_name: str,
resource_name: str,
agent_pool_name: str,
**kwargs: Any
) -> Optional["_models.AgentPool"]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.AgentPool"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_upgrade_node_image_version_request_initial(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
resource_name=resource_name,
agent_pool_name=agent_pool_name,
template_url=self._upgrade_node_image_version_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
response_headers = {}
if response.status_code == 202:
response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation'))
deserialized = self._deserialize('AgentPool', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, response_headers)
return deserialized
_upgrade_node_image_version_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/agentPools/{agentPoolName}/upgradeNodeImageVersion'} # type: ignore
@distributed_trace_async
async def begin_upgrade_node_image_version(
self,
resource_group_name: str,
resource_name: str,
agent_pool_name: str,
**kwargs: Any
) -> AsyncLROPoller["_models.AgentPool"]:
"""Upgrades the node image version of an agent pool to the latest.
Upgrading the node image version of an agent pool applies the newest OS and runtime updates to
the nodes. AKS provides one new image per week with the latest updates. For more details on
node image versions, see: https://docs.microsoft.com/azure/aks/node-image-upgrade.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param resource_name: The name of the managed cluster resource.
:type resource_name: str
:param agent_pool_name: The name of the agent pool.
:type agent_pool_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either AgentPool or the result of
cls(response)
:rtype:
~azure.core.polling.AsyncLROPoller[~azure.mgmt.containerservice.v2022_01_02_preview.models.AgentPool]
:raises: ~azure.core.exceptions.HttpResponseError
"""
polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.AgentPool"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._upgrade_node_image_version_initial(
resource_group_name=resource_group_name,
resource_name=resource_name,
agent_pool_name=agent_pool_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response_headers = {}
response = pipeline_response.http_response
response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation'))
deserialized = self._deserialize('AgentPool', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, response_headers)
return deserialized
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_upgrade_node_image_version.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/agentPools/{agentPoolName}/upgradeNodeImageVersion'} # type: ignore
| Azure/azure-sdk-for-python | sdk/containerservice/azure-mgmt-containerservice/azure/mgmt/containerservice/v2022_01_02_preview/aio/operations/_agent_pools_operations.py | Python | mit | 30,148 |
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
from __future__ import print_function
import os
import sys
import uuid
import logging
import simplejson as json
import paho.mqtt.client as mqtt
from time import sleep
try:
sys.path.append(os.path.dirname(os.path.realpath(__file__)) + '/../../')
from sanji.connection.connection import Connection
except ImportError as e:
print(e)
print("Please check the python PATH for import test module.")
exit(1)
_logger = logging.getLogger("sanji.sdk.connection.mqtt")
class Mqtt(Connection):
"""
Mqtt
"""
def __init__(
self,
broker_host=os.getenv('BROKER_PORT_1883_TCP_ADDR', "localhost"),
broker_port=os.getenv('BROKER_PORT_1883_TCP_PORT', 1883),
broker_keepalive=60
):
# proerties
self.tunnels = {
"internel": (uuid.uuid4().hex, None),
"model": (None, None),
"view": (None, None)
}
self.broker_host = broker_host
self.broker_port = broker_port
self.broker_keepalive = broker_keepalive
self.client = mqtt.Client()
self.connect_delay = 3
# methods
self.subscribe = self.client.subscribe
self.unsubscribe = self.client.unsubscribe
self.message_callback_add = self.client.message_callback_add
self.message_callback_remove = self.client.message_callback_remove
self.client.on_log = self.on_log
def on_log(self, mosq, obj, level, string):
pass
def connect(self):
"""
connect
"""
_logger.debug("Start connecting to broker")
while True:
try:
self.client.connect(self.broker_host, self.broker_port,
self.broker_keepalive)
break
except Exception:
_logger.debug(
"Connect failed. wait %s sec" % self.connect_delay)
sleep(self.connect_delay)
self.client.loop_forever()
def disconnect(self):
"""
disconnect
"""
_logger.debug("Disconnect to broker")
self.client.loop_stop()
def set_tunnel(self, tunnel_type, tunnel, callback=None):
"""
set_tunnel(self, tunnel_type, tunnel, callback=None):
"""
orig_tunnel = self.tunnels.get(tunnel_type, (None, None))[0]
if orig_tunnel is not None:
_logger.debug("Unsubscribe: %s", (orig_tunnel,))
self.client.unsubscribe(str(orig_tunnel))
self.tunnels[tunnel_type] = (tunnel, callback)
if callback is not None:
self.message_callback_add(tunnel, callback)
self.client.subscribe(str(tunnel))
_logger.debug("Subscribe: %s", (tunnel,))
def set_tunnels(self, tunnels):
"""
set_tunnels(self, tunnels):
"""
        for tunnel_type, (tunnel, callback) in tunnels.items():
if tunnel is None:
continue
self.set_tunnel(tunnel_type, tunnel, callback)
def set_on_connect(self, func):
"""
set_on_connect
"""
self.client.on_connect = func
def set_on_message(self, func):
"""
set_on_message
"""
self.client.on_message = func
def set_on_publish(self, func):
"""
set_on_publish
"""
self.client.on_publish = func
def publish(self, topic="/controller", qos=0, payload=None):
"""
publish(self, topic, payload=None, qos=0, retain=False)
Returns a tuple (result, mid), where result is MQTT_ERR_SUCCESS to
indicate success or MQTT_ERR_NO_CONN if the client is not currently
connected. mid is the message ID for the publish request. The mid
value can be used to track the publish request by checking against the
mid argument in the on_publish() callback if it is defined.
"""
result = self.client.publish(topic,
payload=json.dumps(payload),
qos=qos)
if result[0] == mqtt.MQTT_ERR_NO_CONN:
raise RuntimeError("No connection")
return result[1]
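# Illustrative sketch (assumption) of how this class is typically driven:
#   conn = Mqtt(broker_host="localhost", broker_port=1883)
#   conn.set_tunnel("view", "/test/tunnel")
#   conn.connect()   # blocks in loop_forever(); run it in a thread so that
#                    # publish() can be called from elsewhere, e.g.
#   conn.publish(topic="/controller", qos=1, payload={"data": "hello"})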
| imZack/sanji | sanji/connection/mqtt.py | Python | mit | 4,240 |
# -*- coding: utf-8 -*-
from rest_framework import viewsets
from . import serializers, models
class FileViewSet(viewsets.ModelViewSet):
queryset = models.File.objects.all()
serializer_class = serializers.FileSerializer
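# Illustrative wiring (assumption): expose the viewset through a DRF router,
#   router = routers.DefaultRouter()
#   router.register(r'files', FileViewSet)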
| nathanhi/deepserve | deepserve/fileupload/views.py | Python | mit | 231 |
import json
with open('stations.json') as f:
    data = json.load(f)  # renamed from `dict` to avoid shadowing the built-in
tables = ["|station_id|group_id|name|type|", "|---:|---:|:--:|:--:|"]
row = "|%s|%s|%s|%s|"
stations = data['stations']
for s in stations:
r = row % (s['station_id'], s['group_id'], s['station_name'], s['station_type'])
tables.append(r)
md = '\n'.join(tables)
md_file_name = "station.md"
with open(md_file_name, 'w') as f:
f.write(md)
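# Expected shape of stations.json (inferred from the keys used above):
#   {"stations": [{"station_id": 1, "group_id": 2,
#                  "station_name": "...", "station_type": "..."}, ...]}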
| upamune/weatherhist | tools/gen_table.py | Python | mit | 422 |
#!/usr/bin/env python
from nodes import Node
class Input(Node):
char = "z"
args = 0
results = 1
contents = ""
def func(self):
"""input() or Input.contents"""
        return input() or Input.contents
 | muddyfish/PYKE | node/input.py | Python | mit | 228 |
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import DataMigration
from django.conf import settings
from django.db import models
class Migration(DataMigration):
def forwards(self, orm):
"""Set site domain and name."""
Site = orm['sites.Site']
site = Site.objects.get(id=settings.SITE_ID)
site.domain = "example.com"
site.name = "distadmin"
site.save()
def backwards(self, orm):
"""Revert site domain and name to default."""
Site = orm['sites.Site']
site = Site.objects.get(id=settings.SITE_ID)
site.domain = 'example.com'
site.name = 'example.com'
site.save()
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'sites.site': {
'Meta': {'ordering': "(u'domain',)", 'object_name': 'Site', 'db_table': "u'django_site'"},
'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'users.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
}
}
complete_apps = ['sites', 'users']
symmetrical = True | somethingnew2-0/distadmin | distadmin/users/migrations/0002_set_site_domain_and_name.py | Python | mit | 4,351 |
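# A minimal sketch of what forwards() above boils down to, using the plain
# Django ORM instead of South's frozen orm (illustrative only; assumes
# settings.SITE_ID points at an existing Site row):
from django.conf import settings
from django.contrib.sites.models import Site
site = Site.objects.get(id=settings.SITE_ID)
site.domain = "example.com"
site.name = "distadmin"
site.save()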
from django.db import models
from cms.models import CMSPlugin
from community.models import Clan, Member, Game
from django.utils.translation import ugettext as _
class Match(models.Model):
datetime = models.DateTimeField(_("Date"))
game = models.ForeignKey(Game, verbose_name=_("Game"))
clanA = models.ForeignKey(Clan, related_name="clanA_set")
clanB = models.ForeignKey(Clan, related_name="clanB_set")
winner = models.ForeignKey(Clan, null=True, blank=True)
def __unicode__(self):
return "%s vs %s" % (self.clanA, self.clanB)
class MemberScore(models.Model):
match = models.ForeignKey(Match, verbose_name=_("Match"))
player = models.ForeignKey(Member, verbose_name=_("Player"))
kills = models.IntegerField(_("Kills"), null=True, blank=True)
deaths = models.IntegerField(_("Deaths"), null=True, blank=True)
assists = models.IntegerField(_("Assists"), null=True, blank=True)
class LastMatchesPlugin(CMSPlugin):
amount = models.IntegerField(_("Aantal")) | Zundrium/djangocms-gamegroup | matches/models.py | Python | mit | 967 |
from....import a
from...import b
from..import c
from.import d
from : keyword.control.import.python, source.python
.... : punctuation.separator.period.python, source.python
import : keyword.control.import.python, source.python
: source.python
a : source.python
from : keyword.control.import.python, source.python
... : punctuation.separator.period.python, source.python
import : keyword.control.import.python, source.python
: source.python
b : source.python
from : keyword.control.import.python, source.python
.. : punctuation.separator.period.python, source.python
import : keyword.control.import.python, source.python
: source.python
c : source.python
from : keyword.control.import.python, source.python
. : punctuation.separator.period.python, source.python
import : keyword.control.import.python, source.python
: source.python
d : source.python
| MagicStack/MagicPython | test/statements/import3.py | Python | mit | 1,061 |
"""
WSGI config for Courseware project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "Courseware.settings")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
| shayan72/Courseware | Courseware/wsgi.py | Python | mit | 395 |
import numpy as np
import six
import tensorflow as tf
from tensorflow_probability.python.bijectors.masked_autoregressive import (
AutoregressiveNetwork, _create_degrees, _create_input_order,
_make_dense_autoregressive_masks, _make_masked_constraint,
_make_masked_initializer)
from tensorflow_probability.python.internal import prefer_static as ps
from tensorflow_probability.python.internal import tensorshape_util
__all__ = ['AutoregressiveDense']
class AutoregressiveDense(AutoregressiveNetwork):
""" Masked autoregressive network - a generalized version of MADE.
  MADE is an autoencoder that requires the input and output to have the
  same number of dimensions; MAN allows these two numbers to differ.
"""
def build(self, input_shape):
"""See tfkl.Layer.build."""
    assert self._event_shape is not None, \
      'Unlike MADE, MAN requires `event_shape` to be specified at __init__'
    # The event shape was fixed at __init__; only the input size is inferred
    # from `input_shape` here, and it may differ from the event size.
self._input_size = input_shape[-1]
# Construct the masks.
self._input_order = _create_input_order(
self._input_size,
self._input_order_param,
)
units = [] if self._hidden_units is None else list(self._hidden_units)
units.append(self._event_size)
masks = _make_dense_autoregressive_masks(
params=self._params,
event_size=self._input_size,
hidden_units=units,
input_order=self._input_order,
hidden_degrees=self._hidden_degrees,
)
masks = masks[:-1]
masks[-1] = np.reshape(
np.tile(masks[-1][..., tf.newaxis], [1, 1, self._params]),
[masks[-1].shape[0], self._event_size * self._params])
self._masks = masks
    # Create the Keras input placeholder and seed the list of layer outputs.
inputs = tf.keras.Input((self._input_size,), dtype=self.dtype)
outputs = [inputs]
if self._conditional:
conditional_input = tf.keras.Input((self._conditional_size,),
dtype=self.dtype)
inputs = [inputs, conditional_input]
# Input-to-hidden, hidden-to-hidden, and hidden-to-output layers:
# [..., self._event_size] -> [..., self._hidden_units[0]].
# [..., self._hidden_units[k-1]] -> [..., self._hidden_units[k]].
# [..., self._hidden_units[-1]] -> [..., event_size * self._params].
layer_output_sizes = list(
self._hidden_units) + [self._event_size * self._params]
for k in range(len(self._masks)):
autoregressive_output = tf.keras.layers.Dense(
layer_output_sizes[k],
activation=None,
use_bias=self._use_bias,
kernel_initializer=_make_masked_initializer(self._masks[k],
self._kernel_initializer),
bias_initializer=self._bias_initializer,
kernel_regularizer=self._kernel_regularizer,
bias_regularizer=self._bias_regularizer,
kernel_constraint=_make_masked_constraint(self._masks[k],
self._kernel_constraint),
bias_constraint=self._bias_constraint,
dtype=self.dtype)(outputs[-1])
if (self._conditional and
((self._conditional_layers == 'all_layers') or
((self._conditional_layers == 'first_layer') and (k == 0)))):
conditional_output = tf.keras.layers.Dense(
layer_output_sizes[k],
activation=None,
use_bias=False,
kernel_initializer=self._kernel_initializer,
bias_initializer=None,
kernel_regularizer=self._kernel_regularizer,
bias_regularizer=None,
kernel_constraint=self._kernel_constraint,
bias_constraint=None,
dtype=self.dtype)(conditional_input)
outputs.append(
tf.keras.layers.Add()([autoregressive_output, conditional_output]))
else:
outputs.append(autoregressive_output)
# last hidden layer, activation
if k + 1 < len(self._masks):
outputs.append(
tf.keras.layers.Activation(self._activation)(outputs[-1]))
self._network = tf.keras.models.Model(inputs=inputs, outputs=outputs[-1])
# Allow network to be called with inputs of shapes that don't match
# the specs of the network's input layers.
self._network.input_spec = None
# Record that the layer has been built.
super(AutoregressiveNetwork, self).build(input_shape)
def call(self, x, conditional_input=None):
"""Transforms the inputs and returns the outputs.
Suppose `x` has shape `batch_shape + event_shape` and `conditional_input`
has shape `conditional_batch_shape + conditional_event_shape`. Then, the
output shape is:
`broadcast(batch_shape, conditional_batch_shape) + event_shape + [params]`.
Also see `tfkl.Layer.call` for some generic discussion about Layer calling.
Args:
x: A `Tensor`. Primary input to the layer.
      conditional_input: A `Tensor`. Conditional input to the layer. This is
required iff the layer is conditional.
Returns:
y: A `Tensor`. The output of the layer. Note that the leading dimensions
follow broadcasting rules described above.
"""
with tf.name_scope(self.name or 'MaskedAutoregressiveNetwork_call'):
x = tf.convert_to_tensor(x, dtype=self.dtype, name='x')
input_shape = ps.shape(x)
if tensorshape_util.rank(x.shape) == 1:
x = x[tf.newaxis, ...]
if self._conditional:
if conditional_input is None:
raise ValueError('`conditional_input` must be passed as a named '
'argument')
conditional_input = tf.convert_to_tensor(conditional_input,
dtype=self.dtype,
name='conditional_input')
conditional_batch_shape = ps.shape(conditional_input)[:-1]
if tensorshape_util.rank(conditional_input.shape) == 1:
conditional_input = conditional_input[tf.newaxis, ...]
x = [x, conditional_input]
output_shape = ps.concat([
ps.broadcast_shape(conditional_batch_shape, input_shape[:-1]),
(self._event_size,)
],
axis=0)
else:
output_shape = ps.concat([input_shape[:-1], (self._event_size,)],
axis=0)
return tf.reshape(self._network(x),
tf.concat([output_shape, [self._params]], axis=0))
| imito/odin | odin/bay/layers/autoregressive_layers.py | Python | mit | 6,507 |
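# A minimal usage sketch of the layer above. Assumption: the constructor is
# inherited unchanged from tfp's AutoregressiveNetwork, so `params`,
# `event_shape` and `hidden_units` are accepted; the shapes are illustrative.
import tensorflow as tf
man = AutoregressiveDense(params=2, event_shape=4, hidden_units=[32, 32])
x = tf.zeros([8, 10])  # the input size (10) may differ from the event size (4)
y = man(x)             # -> shape [8, 4, 2], i.e. event_shape + [params]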
import os
from pyelliptic.openssl import OpenSSL
def randomBytes(n):
try:
return os.urandom(n)
except NotImplementedError:
return OpenSSL.rand(n)
| hb9kns/PyBitmessage | src/helper_random.py | Python | mit | 172 |
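# Usage sketch: either code path returns n raw bytes, e.g. for key material.
key = randomBytes(32)
assert len(key) == 32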
from abc import ABC, abstractmethod
from typing import Dict, List, Optional, Tuple
from limits.storage.registry import StorageRegistry
from limits.util import LazyDependency
class Storage(LazyDependency, metaclass=StorageRegistry):
"""
Base class to extend when implementing an async storage backend.
.. warning:: This is a beta feature
.. versionadded:: 2.1
"""
STORAGE_SCHEME: Optional[List[str]]
"""The storage schemes to register against this implementation"""
def __init__(self, uri: Optional[str] = None, **options: Dict) -> None:
super().__init__()
@abstractmethod
async def incr(
self, key: str, expiry: int, elastic_expiry: bool = False, amount: int = 1
) -> int:
"""
increments the counter for a given rate limit key
:param key: the key to increment
:param expiry: amount in seconds for the key to expire in
:param elastic_expiry: whether to keep extending the rate limit
window every hit.
:param amount: the number to increment by
"""
raise NotImplementedError
@abstractmethod
async def get(self, key: str) -> int:
"""
:param key: the key to get the counter value for
"""
raise NotImplementedError
@abstractmethod
async def get_expiry(self, key: str) -> int:
"""
:param key: the key to get the expiry for
"""
raise NotImplementedError
@abstractmethod
async def check(self) -> bool:
"""
check if storage is healthy
"""
raise NotImplementedError
@abstractmethod
async def reset(self) -> Optional[int]:
"""
reset storage to clear limits
"""
raise NotImplementedError
@abstractmethod
async def clear(self, key: str) -> int:
"""
resets the rate limit key
:param key: the key to clear rate limits for
"""
raise NotImplementedError
class MovingWindowSupport(ABC):
"""
Abstract base for storages that intend to support
the moving window strategy
.. warning:: This is a beta feature
.. versionadded:: 2.1
"""
async def acquire_entry(
self, key: str, limit: int, expiry: int, amount: int = 1
) -> bool:
"""
:param key: rate limit key to acquire an entry in
:param limit: amount of entries allowed
:param expiry: expiry of the entry
:param amount: the number of entries to acquire
"""
raise NotImplementedError
async def get_moving_window(self, key, limit, expiry) -> Tuple[int, int]:
"""
returns the starting point and the number of entries in the moving
window
:param key: rate limit key
:param expiry: expiry of entry
:return: (start of window, number of acquired entries)
"""
raise NotImplementedError
| alisaifee/limits | limits/aio/storage/base.py | Python | mit | 2,931 |
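# A minimal in-memory sketch of the async Storage interface above. The class
# and scheme names are hypothetical, expired keys are never evicted, and real
# deployments would use the storage backends shipped with limits.
import time

class DemoMemoryStorage(Storage):
    STORAGE_SCHEME = ["demo-memory"]

    def __init__(self, uri=None, **options):
        super().__init__(uri, **options)
        self._counters = {}
        self._expiries = {}

    async def incr(self, key, expiry, elastic_expiry=False, amount=1):
        # Start the window on first hit; slide it forward if elastic.
        if key not in self._counters or elastic_expiry:
            self._expiries[key] = int(time.time()) + expiry
        self._counters[key] = self._counters.get(key, 0) + amount
        return self._counters[key]

    async def get(self, key):
        return self._counters.get(key, 0)

    async def get_expiry(self, key):
        return self._expiries.get(key, int(time.time()))

    async def check(self):
        return True

    async def reset(self):
        cleared = len(self._counters)
        self._counters.clear()
        self._expiries.clear()
        return cleared

    async def clear(self, key):
        self._counters.pop(key, None)
        self._expiries.pop(key, None)
        return 0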
def DataToTreat(Catalogue = 'WHT_observations'):
Catalogue_Dictionary = {}
if Catalogue == 'WHT_observations':
Catalogue_Dictionary['Folder'] = '/home/vital/Dropbox/Astrophysics/Data/WHT_observations/'
Catalogue_Dictionary['Datatype'] = 'WHT'
Catalogue_Dictionary['Obj_Folder'] = '/home/vital/Dropbox/Astrophysics/Data/WHT_observations/' + 'objects/'
Catalogue_Dictionary['Data_Folder'] = '/home/vital/Dropbox/Astrophysics/Data/WHT_observations/' + 'data/'
Catalogue_Dictionary['dataframe'] = '/home/vital/Dropbox/Astrophysics/Data/WHT_observations/catalogue_df'
if Catalogue == 'WHT_HII_Galaxies':
Catalogue_Dictionary['Folder'] = '/home/vital/Dropbox/Astrophysics/Data/WHT_Catalogue_SulfurRegression/'
Catalogue_Dictionary['Datatype'] = 'WHT'
Catalogue_Dictionary['Obj_Folder'] = '/home/vital/Dropbox/Astrophysics/Data/WHT_Catalogue_SulfurRegression/' + 'Objects/SHOC579/'
Catalogue_Dictionary['Data_Folder'] = '/home/vital/Dropbox/Astrophysics/Data/WHT_Catalogue_SulfurRegression/' + 'Data/'
if Catalogue == 'WHT_CandiatesObjects':
Catalogue_Dictionary['Folder'] = "Dropbox/Astrophysics/Data/WHT_CandiatesObjects/"
Catalogue_Dictionary['Datatype'] = "dr10"
Catalogue_Dictionary['Obj_Folder'] = "Dropbox/Astrophysics/Data/WHT_CandiatesObjects/"
if Catalogue == 'WHT_CandiatesObjectsFabian':
Catalogue_Dictionary['Folder'] = '/home/vital/Dropbox/Astrophysics/Data/Fabian_Catalogue/'
Catalogue_Dictionary['Datatype'] = "dr10"
Catalogue_Dictionary['Obj_Folder'] = '/home/vital/Dropbox/Astrophysics/Data/Fabian_Catalogue/'
if Catalogue == 'Marta_Catalogue':
Catalogue_Dictionary['Folder'] = "/home/vital/Dropbox/Astrophysics/Data/WHT_MartaCandidates_2016/"
Catalogue_Dictionary['Datatype'] = "dr10"
Catalogue_Dictionary['Obj_Folder'] = "/home/vital/Dropbox/Astrophysics/Data/WHT_MartaCandidates_2016/Objects/"
if Catalogue == 'SDSS_Catalogue':
Catalogue_Dictionary['Folder'] = "Dropbox/Astrophysics/Data/Fabian_Catalogue/"
Catalogue_Dictionary['Datatype'] = "dr10"
Catalogue_Dictionary['Obj_Folder'] = "Dropbox/Astrophysics/Data/Fabian_Catalogue/"
if Catalogue == 'Testing_Pypeline':
Catalogue_Dictionary['Folder'] = "Dropbox/Astrophysics/Data/ToCompare/"
Catalogue_Dictionary['Datatype'] = "dr10"
Catalogue_Dictionary['Obj_Folder'] = "Dropbox/Astrophysics/Data/ToCompare/"
return Catalogue_Dictionary
| Delosari/dazer | bin/user_conf/ManageFlow.py | Python | mit | 2,633 |
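# Usage sketch: select a catalogue by name and read its paths off the dict.
catalogue = DataToTreat('WHT_observations')
print(catalogue['Folder'])    # /home/vital/Dropbox/Astrophysics/Data/WHT_observations/
print(catalogue['Datatype'])  # WHT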
# coding: utf-8
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
"""
FILE: sample_get_operations.py
DESCRIPTION:
This sample demonstrates how to list/get all document model operations (succeeded, in-progress, failed)
associated with the Form Recognizer resource. Kinds of operations returned are "documentModelBuild",
"documentModelCompose", and "documentModelCopyTo". Note that operation information only persists for
24 hours. If the operation was successful, the document model can be accessed using get_model or list_models APIs.
USAGE:
python sample_get_operations.py
Set the environment variables with your own values before running the sample:
1) AZURE_FORM_RECOGNIZER_ENDPOINT - the endpoint to your Cognitive Services resource.
2) AZURE_FORM_RECOGNIZER_KEY - your Form Recognizer API key
"""
import os
def sample_get_operations():
# [START list_operations]
from azure.core.credentials import AzureKeyCredential
from azure.ai.formrecognizer import DocumentModelAdministrationClient
endpoint = os.environ["AZURE_FORM_RECOGNIZER_ENDPOINT"]
key = os.environ["AZURE_FORM_RECOGNIZER_KEY"]
document_model_admin_client = DocumentModelAdministrationClient(endpoint=endpoint, credential=AzureKeyCredential(key))
operations = list(document_model_admin_client.list_operations())
print("The following document model operations exist under my resource:")
for operation in operations:
print("\nOperation ID: {}".format(operation.operation_id))
print("Operation kind: {}".format(operation.kind))
print("Operation status: {}".format(operation.status))
print("Operation percent completed: {}".format(operation.percent_completed))
print("Operation created on: {}".format(operation.created_on))
print("Operation last updated on: {}".format(operation.last_updated_on))
print("Resource location of successful operation: {}".format(operation.resource_location))
# [END list_operations]
# [START get_operation]
# Get an operation by ID
if operations:
print("\nGetting operation info by ID: {}".format(operations[0].operation_id))
operation_info = document_model_admin_client.get_operation(operations[0].operation_id)
if operation_info.status == "succeeded":
print("My {} operation is completed.".format(operation_info.kind))
result = operation_info.result
print("Model ID: {}".format(result.model_id))
elif operation_info.status == "failed":
print("My {} operation failed.".format(operation_info.kind))
error = operation_info.error
print("{}: {}".format(error.code, error.message))
else:
print("My operation status is {}".format(operation_info.status))
else:
print("No operations found.")
# [END get_operation]
if __name__ == '__main__':
sample_get_operations()
| Azure/azure-sdk-for-python | sdk/formrecognizer/azure-ai-formrecognizer/samples/v3.2-beta/sample_get_operations.py | Python | mit | 3,206 |
import tornado.ioloop
import tornado.web
import socket
import os
import sys
import time
import signal
# import datetime
import h5py
from datetime import datetime, date
import tornado.httpserver
from browserhandler import BrowseHandler
from annotationhandler import AnnotationHandler
from projecthandler import ProjectHandler
from helphandler import HelpHandler
from defaulthandler import DefaultHandler
base_path = os.path.dirname(__file__)
sys.path.insert(1,os.path.join(base_path, '../common'))
from utility import Utility
from database import Database
from paths import Paths
MAX_WAIT_SECONDS_BEFORE_SHUTDOWN = 0.5
class Application(tornado.web.Application):
def __init__(self):
handlers = [
(r"/", DefaultHandler),
(r"/browse.*", BrowseHandler),
(r"/project.*", ProjectHandler),
(r"/annotate.*", AnnotationHandler),
(r'/help*', HelpHandler),
(r'/settings/(.*)', tornado.web.StaticFileHandler, {'path': 'resources/settings/'}),
(r'/js/(.*)', tornado.web.StaticFileHandler, {'path': 'resources/js/'}),
(r'/js/vendors/(.*)', tornado.web.StaticFileHandler, {'path': 'resources/js/vendors/'}),
(r'/css/(.*)', tornado.web.StaticFileHandler, {'path': 'resources/css/'}),
(r'/uikit/(.*)', tornado.web.StaticFileHandler, {'path': 'resources/uikit/'}),
(r'/images/(.*)', tornado.web.StaticFileHandler, {'path': 'resources/images/'}),
(r'/open-iconic/(.*)', tornado.web.StaticFileHandler, {'path': 'resources/open-iconic/'}),
(r'/input/(.*)', tornado.web.StaticFileHandler, {'path': 'resources/input/'}),
(r'/train/(.*)', tornado.web.StaticFileHandler, {'path': 'resources/input/'}),
(r'/validate/(.*)', tornado.web.StaticFileHandler, {'path': 'resources/input/'}),
#(r"/annotate/(.*)", AnnotationHandler, dict(logic=self)),
]
settings = {
"template_path": 'resources',
"static_path": 'resources',
}
tornado.web.Application.__init__(self, handlers, **settings)
import numpy as np
class Server():
def __init__(self, name, port):
self.name = name
self.port = port
application = Application()
self.http_server = tornado.httpserver.HTTPServer( application )
hostname = socket.gethostname()
print 'hostname:', hostname
self.ip = hostname #socket.gethostbyname( hostname )
def print_status(self):
Utility.print_msg ('.')
Utility.print_msg ('\033[93m'+ self.name + ' running/' + '\033[0m', True)
Utility.print_msg ('.')
Utility.print_msg ('open ' + '\033[92m'+'http://' + self.ip + ':' + str(self.port) + '/' + '\033[0m', True)
Utility.print_msg ('.')
def start(self):
self.print_status()
self.http_server.listen( self.port )
tornado.ioloop.IOLoop.instance().start()
def stop(self):
msg = 'shutting down %s in %s seconds'%(self.name, MAX_WAIT_SECONDS_BEFORE_SHUTDOWN)
Utility.print_msg ('\033[93m'+ msg + '\033[0m', True)
io_loop = tornado.ioloop.IOLoop.instance()
deadline = time.time() + MAX_WAIT_SECONDS_BEFORE_SHUTDOWN
def stop_loop():
now = time.time()
if now < deadline and (io_loop._callbacks or io_loop._timeouts):
io_loop.add_timeout(now + 1, stop_loop)
else:
io_loop.stop()
Utility.print_msg ('\033[93m'+ 'shutdown' + '\033[0m', True, 'done')
stop_loop()
def sig_handler(sig, frame):
msg = 'caught interrupt signal: %s'%sig
Utility.print_msg ('\033[93m'+ msg + '\033[0m', True)
tornado.ioloop.IOLoop.instance().add_callback(shutdown)
def shutdown():
server.stop()
def main():
global server
signal.signal(signal.SIGTERM, sig_handler)
signal.signal(signal.SIGINT, sig_handler)
port = 8888
name = 'icon webserver'
server = Server(name, port)
server.start()
if __name__ == "__main__":
main()
| fegonda/icon_demo | code/web/server.py | Python | mit | 3,926 |
import numpy as np
import cPickle
import math
import string
import re
import subprocess
from datetime import datetime
from cwsm.performance import Performance
def cafferun(params):
# load general and optimization parameters
with open('../tmp/optparams.pkl', 'rb') as f:
paramdescr = cPickle.load(f)
with open('../tmp/genparams.pkl', 'rb') as f:
genparams = cPickle.load(f)
CAFFE_ROOT = genparams['CAFFE_ROOT']
optimize = genparams['optimize']
# transform parameters accoring to transformation specified in the model file
print params
for p in params:
if paramdescr[p].get('transform', None) is not None:
# X<>: multiplier where <> stands for any number (examples: X10, X100, X22)
if paramdescr[p]['transform'][0] == 'X':
multiplier = int(paramdescr[p]['transform'][1:])
params[p][0] *= multiplier
            # LOG<>: the parameter passed on is the log with base <> of the
            # number which comes from Spearmint (example: a value of 100 with
            # LOG10 becomes 2)
if paramdescr[p]['transform'][0:3] == 'LOG':
base = int(paramdescr[p]['transform'][3:])
params[p][0] = math.log(params[p][0], base)
# NEGEXP<>: where <> is the base, the number which goes to Spearmint is negative of the
# exponent (example: value 3 with NEGEXP10 means 10^-3 and correpsonds to 0.001)
if paramdescr[p]['transform'][0:6] == 'NEGEXP':
negexp = float(paramdescr[p]['transform'][6:])
params[p] = [negexp ** float(-params[p][0])]
# unique prefix for this run
prefix = datetime.now().strftime('%Y-%d-%m-%H-%M-%S')
# generate .prototxt files with current set of paramters
trainnet = open('../tmp/template_trainval.prototxt', 'r').read()
solver = open('../tmp/template_solver.prototxt', 'r').read()
for p in params:
trainnet = string.replace(trainnet, 'OPTIMIZE_' + p, str(params[p][0]), 1)
solver = string.replace(solver, 'OPTIMIZE_' + p, str(params[p][0]), 1)
# kappa optimizer has a special treatment
if optimize == 'kappa':
valnet = open('../tmp/template_val.prototxt', 'r').read()
for p in params:
valnet = string.replace(valnet, 'OPTIMIZE_' + p, str(params[p][0]), 1)
# update paths for this run
solver = string.replace(solver, 'PLACEHOLDER_NET', '../tmp/%s_trainval.prototxt' % prefix, 1)
solver = string.replace(solver, 'PLACEHOLDER_MODEL_STORE', '../caffeout/%s' % prefix, 1)
# store .prototxt for this run
with open('../tmp/%s_trainval.prototxt' % prefix, 'w') as f:
f.write(trainnet)
if optimize == 'kappa':
with open('../tmp/%s_val.prototxt' % prefix, 'w') as f:
f.write(valnet)
with open('../tmp/%s_solver.prototxt' % prefix, 'w') as f:
f.write(solver)
# run caffe training procedure
caffe_return_code = subprocess.call(CAFFE_ROOT + '/build/tools/caffe train --solver ../tmp/%s_solver.prototxt 2> ../caffeout/%s_log.txt' % (prefix, prefix), shell=True)
print 'CAFFE RETURN CODE ' + str(caffe_return_code)
# set result to None by default
result = None
# if Caffe ran successfully update the result
if int(caffe_return_code) == 0:
        # run the performance measure estimator
if optimize == 'loss':
result = Performance.loss(prefix)
elif optimize == 'accuracy':
result = Performance.accuracy(prefix)
elif optimize == 'kappa':
result = Performance.kappasq(prefix, CAFFE_ROOT)
else:
            print 'ERROR: Unknown performance measure %s' % optimize
print '-----------------------------'
print prefix, result
print '-----------------------------'
return result
# Write a function like this called 'main'
def main(job_id, params):
return cafferun(params)
| mylxiaoyi/caffe-with-spearmint | cwsm/cafferun.py | Python | mit | 3,964 |
import curses
from curses.textpad import Textbox, rectangle
import asyncio
import readline
import rlcompleter
# import readline # optional, will allow Up/Down/History in the console
# import code
# vars = globals().copy()
# vars.update(locals())
# shell = code.InteractiveConsole(vars)
# shell.interact()
class CursesUi:
def __init__(self):
self.screen = curses.initscr()
self.screen.clear()
def __enter__(self):
print('__enter__')
return self
def __exit__(self, exc_type, exc_val, exc_tb):
print('__exit__')
curses.nocbreak()
self.screen.keypad(False)
curses.echo()
curses.endwin()
def main(stdscr):
stdscr.addstr(0, 0, "Enter IM message: (hit Ctrl-G to send)")
editwin = curses.newwin(5,30, 2,1)
rectangle(stdscr, 1,0, 1+5+1, 1+30+1)
stdscr.refresh()
box = Textbox(editwin, True)
# Let the user edit until Ctrl-G is struck.
box.edit()
# Get resulting contents
message = box.gather()
if __name__ == '__main__':
curses.wrapper(main)
class MyCompleter(rlcompleter.Completer):
def complete(self, text, state):
print(text)
print(state)
if state == 2:
return None
return text+str(state) | manly-man/moodle-destroyer-tools | frontend/interactive.py | Python | mit | 1,271 |
import _plotly_utils.basevalidators
class MinexponentValidator(_plotly_utils.basevalidators.NumberValidator):
def __init__(
self,
plotly_name="minexponent",
parent_name="histogram.marker.colorbar",
**kwargs
):
super(MinexponentValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "colorbars"),
min=kwargs.pop("min", 0),
**kwargs
)
| plotly/plotly.py | packages/python/plotly/plotly/validators/histogram/marker/colorbar/_minexponent.py | Python | mit | 507 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .sub_resource import SubResource
class IPConfiguration(SubResource):
"""IP configuration.
:param id: Resource ID.
:type id: str
:param private_ip_address: The private IP address of the IP configuration.
:type private_ip_address: str
:param private_ip_allocation_method: The private IP allocation method.
Possible values are 'Static' and 'Dynamic'. Possible values include:
'Static', 'Dynamic'
:type private_ip_allocation_method: str or
~azure.mgmt.network.v2017_08_01.models.IPAllocationMethod
:param subnet: The reference of the subnet resource.
:type subnet: ~azure.mgmt.network.v2017_08_01.models.Subnet
:param public_ip_address: The reference of the public IP resource.
:type public_ip_address:
~azure.mgmt.network.v2017_08_01.models.PublicIPAddress
:param provisioning_state: Gets the provisioning state of the public IP
resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
:type provisioning_state: str
:param name: The name of the resource that is unique within a resource
group. This name can be used to access the resource.
:type name: str
:param etag: A unique read-only string that changes whenever the resource
is updated.
:type etag: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'private_ip_address': {'key': 'properties.privateIPAddress', 'type': 'str'},
'private_ip_allocation_method': {'key': 'properties.privateIPAllocationMethod', 'type': 'str'},
'subnet': {'key': 'properties.subnet', 'type': 'Subnet'},
'public_ip_address': {'key': 'properties.publicIPAddress', 'type': 'PublicIPAddress'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
}
def __init__(self, *, id: str=None, private_ip_address: str=None, private_ip_allocation_method=None, subnet=None, public_ip_address=None, provisioning_state: str=None, name: str=None, etag: str=None, **kwargs) -> None:
super(IPConfiguration, self).__init__(id=id, **kwargs)
self.private_ip_address = private_ip_address
self.private_ip_allocation_method = private_ip_allocation_method
self.subnet = subnet
self.public_ip_address = public_ip_address
self.provisioning_state = provisioning_state
self.name = name
self.etag = etag
| lmazuel/azure-sdk-for-python | azure-mgmt-network/azure/mgmt/network/v2017_08_01/models/ip_configuration_py3.py | Python | mit | 2,962 |
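# A minimal construction sketch for the keyword-only model above (the address
# and name are illustrative placeholders):
ip_config = IPConfiguration(
    private_ip_address='10.0.0.4',
    private_ip_allocation_method='Static',
    name='ipconfig1',
)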
"""
Liana REST API client
Copyright Liana Technologies Ltd 2018
"""
import json
import hashlib
import hmac
import requests
import time
class APIException(Exception):
pass
class RestClient:
def __init__(self, user_id, api_secret, api_url, api_version, api_realm):
self._response = None
self._user_id = user_id
self._api_secret = api_secret
self._api_url = api_url
self._api_realm = api_realm
self._api_version = api_version
self._content_type = 'application/json'
def call(self, path, params=[], method='POST'):
""" Perform API request and return the API result"""
request_function = getattr(requests, method.lower())
self._set_request_data(path, params, method)
self._response = request_function(
self._api_url + self._full_path,
headers=self._get_headers(),
data=self._json_string
)
        self._response_body = self._response.text
if self._response.status_code >= 400:
raise APIException('API response with status code ' +str(self._response.status_code))
try:
            data = json.loads(self._response_body)
except ValueError: # Python 2.x
raise APIException('API did not return a valid json string')
except json.decoder.JSONDecodeError: # Python 3.5+
raise APIException('API did not return a valid json string')
if 'succeed' in data.keys() and not data['succeed']:
raise APIException(data['message'])
if 'result' in data.keys():
return data['result']
return data
def get_http_response(self):
""" Returns the raw response object of last performed API request """
return self._response
""" INTERNAL METHODS FOLLOW """
def _get_new_timestamp(self):
""" Returns a fresh timestamp in proper format """
return time.strftime('%Y-%m-%dT%H:%M:%S%z')
def _get_hash(self):
""" Form and return the parameters hash for the API request """
md5 = hashlib.md5()
md5.update(self._json_string.encode('utf-8'))
return md5.hexdigest()
def _get_message(self):
""" Return the message in the format which is used to create signature of the request """
message = "\n".join([
self._method,
self._get_hash(),
self._content_type,
self._timestamp,
self._json_string,
self._full_path
])
return message.encode('utf-8')
def _get_signature(self):
""" Get signature for the API request """
return hmac.new(
self._api_secret.encode('utf-8'),
self._get_message(),
hashlib.sha256
).hexdigest()
def _get_headers(self):
""" Get headers for the API HTTP request """
return {
'Content-Type': self._content_type,
'Content-MD5': self._get_hash(),
'Date': self._timestamp,
'Authorization': self._api_realm + ' ' + str(self._user_id) + ':' + self._get_signature(),
}
def _set_request_data(self, path, params, method):
""" Set API request data """
self._full_path = '/api/v' + str(self._api_version) + '/' + path
self._json_string = json.dumps(params)
if method == 'GET':
self._json_string = ''
self._timestamp = self._get_new_timestamp()
self._method = method
| LianaTech/rest-client | python/RestClient.py | Python | mit | 3,505 |
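# A minimal usage sketch; the credentials, realm and endpoint path are all
# hypothetical placeholders:
client = RestClient(
    user_id=123,
    api_secret='my-api-secret',
    api_url='https://rest.example.com',
    api_version=1,
    api_realm='EXAMPLE',
)
result = client.call('echo/message', params={'hello': 'world'})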
from datetime import datetime
from email.mime import text as mime_text
from unittest.mock import MagicMock
from unittest.mock import Mock
from unittest.mock import patch
import cauldron as cd
from cauldron.session import reloading
from cauldron.test import support
from cauldron.test.support import scaffolds
from cauldron.test.support.messages import Message
class TestSessionReloading(scaffolds.ResultsTest):
"""Test suite for the reloading module"""
def test_watch_bad_argument(self):
"""Should not reload a module"""
self.assertFalse(
reloading.refresh(datetime, force=True),
Message('Should not reload not a module')
)
def test_watch_good_argument(self):
"""Should reload the specified package/subpackage"""
self.assertTrue(
reloading.refresh('datetime', force=True),
Message('Should reload the datetime module')
)
def test_watch_not_needed(self):
"""Don't reload modules that haven't changed."""
support.create_project(self, 'betty')
project = cd.project.get_internal_project()
project.current_step = project.steps[0]
self.assertFalse(
reloading.refresh(mime_text),
Message('Expect no reload if the step has not been run before.')
)
support.run_command('run')
project.current_step = project.steps[0]
self.assertFalse(
reloading.refresh(mime_text),
Message('Expect no reload if module has not changed recently.')
)
def test_watch_recursive(self):
"""Should reload the email module."""
self.assertTrue(
reloading.refresh('email', recursive=True, force=True),
Message('Expected email module to be reloaded.')
)
def test_get_module_name(self):
"""Should get the module name from the name of its spec."""
target = MagicMock()
target.__spec__ = MagicMock()
target.__spec__.name = 'hello'
self.assertEqual('hello', reloading.get_module_name(target))
def test_get_module_name_alternate(self):
"""
Should get the module name from its dunder name if the spec name
does not exist.
"""
target = Mock(['__name__'])
target.__name__ = 'hello'
self.assertEqual('hello', reloading.get_module_name(target))
@patch('cauldron.session.reloading.os.path')
@patch('cauldron.session.reloading.importlib.reload')
def test_do_reload_error(self, reload: MagicMock, os_path: MagicMock):
"""Should fail to import the specified module and so return False."""
target = MagicMock()
target.__file__ = None
target.__path__ = ['fake']
os_path.getmtime.return_value = 10
reload.side_effect = ImportError('FAKE')
self.assertFalse(reloading.do_reload(target, 0))
self.assertEqual(1, reload.call_count)
@patch('cauldron.session.reloading.os.path')
@patch('cauldron.session.reloading.importlib.reload')
def test_do_reload(self, reload: MagicMock, os_path: MagicMock):
"""Should import the specified module and return True."""
target = MagicMock()
target.__file__ = 'fake'
os_path.getmtime.return_value = 10
self.assertTrue(reloading.do_reload(target, 0))
self.assertEqual(1, reload.call_count)
@patch('cauldron.session.reloading.os.path')
@patch('cauldron.session.reloading.importlib.reload')
def test_do_reload_skip(self, reload: MagicMock, os_path: MagicMock):
"""
Should skip reloading the specified module because it hasn't been
modified and return False.
"""
target = MagicMock()
target.__file__ = 'fake'
os_path.getmtime.return_value = 0
self.assertFalse(reloading.do_reload(target, 10))
self.assertEqual(0, reload.call_count)
def test_reload_children_module(self):
"""Should abort as False for a module that has no children."""
target = Mock()
reloading.reload_children(target, 10)
| sernst/cauldron | cauldron/test/session/test_session_reloading.py | Python | mit | 4,115 |
import logging
from ..report.individual import IndividualReport
class IndividualGenerator(object):
logger = logging.getLogger("ddvt.rep_gen.ind")
def __init__(self, test):
self.test = test
async def generate(self, parent):
test_group = None
try:
test_group = self.test(parent.filename)
except OSError as e:
parent.report.valid = False
parent.report.reports.append(IndividualReport("FileValid", 0,
{'error': str(e)}))
return
for test in test_group._tests_:
self.logger.info("Starting Test: {}".format(test))
try:
result, status = getattr(test_group, test)()
parent.report.reports.append(IndividualReport(test, status, result))
# TODO: Figure out what to do next
except Exception as e:
self.logger.warning("failed test")
parent.report.valid = False
parent.report.reports.append(IndividualReport(test, 0,
{'error': str(e)}))
| HEP-DL/dl_data_validation_toolset | dl_data_validation_toolset/framework/report_gen/individual.py | Python | mit | 1,048 |
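# A sketch of the test-class protocol generate() above relies on: the class is
# constructed with a filename, lists its test method names in `_tests_`, and
# each method returns a (result_dict, status) tuple. All names are
# hypothetical.
class ExampleTestGroup(object):
    _tests_ = ['test_opens']

    def __init__(self, filename):
        self.filename = filename

    def test_opens(self):
        return {'detail': 'file readable'}, 1

generator = IndividualGenerator(ExampleTestGroup)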
#
# YetAnotherPythonSnake 0.94
# Author: Simone Cingano ([email protected])
# Web: http://simonecingano.it
# Licence: MIT
#
import pygame
import os
# YASP common imports
import data
if pygame.mixer:
pygame.mixer.init()
class dummysound:
def play(self): pass
class SoundPlayer:
def __init__(self, sounds):
self.sounds = {}
for s in sounds:
self.load(*s)
def play(self, sound):
self.sounds[sound].play()
def load(self, key, filename):
self.sounds[key] = self.load_sound(filename)
def load_sound(self, filename):
if not pygame.mixer:
return dummysound()
filepath = data.filepath("sfx", filename)
if filepath:
sound = pygame.mixer.Sound(filepath)
return sound
else:
return dummysound()
EXTENSION = os.name == 'nt' and '.mp3' or '.ogg'
class MusicPlayer:
def __init__(self, track=None):
if track is not None:
self.load(track)
def load(self, track):
pygame.mixer.music.load(data.filepath("music", track + EXTENSION))
def play(self):
pygame.mixer.music.play(-1)
def once(self):
pygame.mixer.music.play()
def stop(self):
pygame.mixer.music.stop()
| yupswing/yaps | lib/sound_engine.py | Python | mit | 1,288 |
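# Usage sketch (keys and filenames are hypothetical; SoundPlayer resolves
# files under data/sfx, MusicPlayer under data/music with the
# platform-dependent EXTENSION):
sfx = SoundPlayer([('eat', 'eat.ogg'), ('crash', 'crash.ogg')])
sfx.play('eat')
music = MusicPlayer('theme')  # loads data/music/theme.ogg (.mp3 on Windows)
music.play()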
from sacred import Experiment
ex = Experiment('my_commands')
@ex.config
def cfg():
name = 'kyle'
@ex.command
def greet(name):
print('Hello {}! Nice to greet you!'.format(name))
@ex.command
def shout():
print('WHAZZZUUUUUUUUUUP!!!????')
@ex.automain
def main():
print('This is just the main command. Try greet or shout.') | zzsza/TIL | python/sacred/my_command.py | Python | mit | 345 |
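# Usage sketch: sacred exposes the functions above as CLI commands, e.g.
#   python my_command.py                       # runs main()
#   python my_command.py greet                 # Hello kyle! Nice to greet you!
#   python my_command.py greet with name=bob   # overrides the config value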
from OpenGLCffi.GLES3 import params
@params(api='gles3', prms=['first', 'count', 'v'])
def glViewportArrayvNV(first, count, v):
pass
@params(api='gles3', prms=['index', 'x', 'y', 'w', 'h'])
def glViewportIndexedfNV(index, x, y, w, h):
pass
@params(api='gles3', prms=['index', 'v'])
def glViewportIndexedfvNV(index, v):
pass
@params(api='gles3', prms=['first', 'count', 'v'])
def glScissorArrayvNV(first, count, v):
pass
@params(api='gles3', prms=['index', 'left', 'bottom', 'width', 'height'])
def glScissorIndexedNV(index, left, bottom, width, height):
pass
@params(api='gles3', prms=['index', 'v'])
def glScissorIndexedvNV(index, v):
pass
@params(api='gles3', prms=['first', 'count', 'v'])
def glDepthRangeArrayfvNV(first, count, v):
pass
@params(api='gles3', prms=['index', 'n', 'f'])
def glDepthRangeIndexedfNV(index, n, f):
pass
@params(api='gles3', prms=['target', 'index', 'data'])
def glGetFloati_vNV(target, index):
pass
@params(api='gles3', prms=['target', 'index'])
def glEnableiNV(target, index):
pass
@params(api='gles3', prms=['target', 'index'])
def glDisableiNV(target, index):
pass
@params(api='gles3', prms=['target', 'index'])
def glIsEnablediNV(target, index):
pass
| cydenix/OpenGLCffi | OpenGLCffi/GLES3/EXT/NV/viewport_array.py | Python | mit | 1,222 |
import os
import atexit
import string
import importlib
import threading
import socket
from time import sleep
def BYTE(message):
return bytes("%s\r\n" % message, "UTF-8")
class UserInput(threading.Thread):
isRunning = False
parent = None
def __init__(self, bot):
super().__init__()
self.parent = bot
self.setDaemon(True)
self.isRunning = False
self.start()
def createMessage(self, message):
temp = ""
for i in range(len(message)):
if (i != len(message) - 1):
temp += message[i] + " "
else:
temp += message[i]
return temp
def run(self):
self.isRunning = True
while (self.isRunning):
try:
message = input()
message = message.split(" ")
if (message[0] != ""):
if (message[0] == "/r" or message[0] == "/reload"):
self.parent.reloadAll()
elif (message[0] == "/q" or message[0] == "/quit"):
print("Quitting.")
self.parent.quit()
self.isRunning = False
elif (message[0] == "/j" or message[0] == "/join"):
if (len(message) < 2 or len(message) > 2):
print("Incorrect usage.")
else:
self.parent.switch(message[1])
elif (message[0] == "/l" or message[0] == "/leave"):
if (len(message) >= 2):
if (len(message) > 2):
for i in range(1, len(message)):
self.parent.leave(message[i], False)
if (len(self.parent.channels) > 0):
self.parent.focusedChannel = self.parent.channels[0]
print("Left channels. Focusing on %s" % self.parent.focusedChannel)
else:
print("No channels left.")
else:
self.parent.leave(message[1], False)
if (len(self.parent.channels) > 0):
self.parent.focusedChannel = self.parent.channels[0]
print("Left %s. Focusing on %s" % (message[1], self.parent.focusedChannel))
else:
print("No channels left.")
else:
print("Incorrect usage.")
elif (message[0] == "/?" or message[0] == "/help"):
print("1. Type anything to chat with others in %s." % self.parent.focusedChannel)
print("2. /? or /help -- Bring up the bot commands.")
print("3. /j or /join -- Join a new channel. Channel focus will switch over.")
print("4. /l or /leave -- Leave channel. Channel focus will change.")
print("5. /r or /reload -- Reload all plugins. (Hotswapping is supported.)")
print("6. /q or /quit -- Quit the bot.")
else:
self.parent.s.send(BYTE("PRIVMSG %s :%s" % (self.parent.focusedChannel, self.createMessage(message))))
except WindowsError as winError:
print(winError)
if (self.parent.s != None):
                    self.parent.s.shutdown(socket.SHUT_RDWR)
                    self.parent.s.close()
self.parent.s = None
self.parent.connect()
except Exception as error:
print(error)
| tommai78101/IRCBot | UserInput.py | Python | mit | 2,759 |
# Configuration file for ipython.
c = get_config() # noqa: F821
c.Completer.use_jedi = False
# ------------------------------------------------------------------------------
# InteractiveShellApp configuration
# ------------------------------------------------------------------------------
# A Mixin for applications that start InteractiveShell instances.
#
# Provides configurables for loading extensions and executing files as part of
# configuring a Shell environment.
#
# The following methods should be called by the :meth:`initialize` method of the
# subclass:
#
# - :meth:`init_path`
# - :meth:`init_shell` (to be implemented by the subclass)
# - :meth:`init_gui_pylab`
# - :meth:`init_extensions`
# - :meth:`init_code`
# Execute the given command string.
# c.InteractiveShellApp.code_to_run = ''
# Pre-load matplotlib and numpy for interactive use, selecting a particular
# matplotlib backend and loop integration.
# c.InteractiveShellApp.pylab = None
# Run the file referenced by the PYTHONSTARTUP environment variable at IPython
# startup.
# c.InteractiveShellApp.exec_PYTHONSTARTUP = True
# lines of code to run at IPython startup.
c.InteractiveShellApp.exec_lines = [
"import biokbase.narrative.magics",
"from biokbase.narrative.services import *",
"from biokbase.narrative.widgetmanager import WidgetManager",
"from biokbase.narrative.jobs import *",
]
# Enable GUI event loop integration with any of ('glut', 'gtk', 'gtk3', 'osx',
# 'pyglet', 'qt', 'qt5', 'tk', 'wx').
# c.InteractiveShellApp.gui = None
# Reraise exceptions encountered loading IPython extensions?
# c.InteractiveShellApp.reraise_ipython_extension_failures = False
# Configure matplotlib for interactive use with the default matplotlib backend.
# c.InteractiveShellApp.matplotlib = None
# If true, IPython will populate the user namespace with numpy, pylab, etc. and
# an ``import *`` is done from numpy and pylab, when using pylab mode.
#
# When False, pylab mode should not import any names into the user namespace.
# c.InteractiveShellApp.pylab_import_all = True
# A list of dotted module names of IPython extensions to load.
# c.InteractiveShellApp.extensions = []
# Run the module as a script.
# c.InteractiveShellApp.module_to_run = ''
# Should variables loaded at startup (by startup files, exec_lines, etc.) be
# hidden from tools like %who?
# c.InteractiveShellApp.hide_initial_ns = True
# dotted module name of an IPython extension to load.
# c.InteractiveShellApp.extra_extension = ''
# List of files to run at IPython startup.
# c.InteractiveShellApp.exec_files = []
# A file to be run
# c.InteractiveShellApp.file_to_run = ''
# ------------------------------------------------------------------------------
# TerminalIPythonApp configuration
# ------------------------------------------------------------------------------
# TerminalIPythonApp will inherit config from: BaseIPythonApplication,
# Application, InteractiveShellApp
# Run the file referenced by the PYTHONSTARTUP environment variable at IPython
# startup.
# c.TerminalIPythonApp.exec_PYTHONSTARTUP = True
# Pre-load matplotlib and numpy for interactive use, selecting a particular
# matplotlib backend and loop integration.
# c.TerminalIPythonApp.pylab = None
# Create a massive crash report when IPython encounters what may be an internal
# error. The default is to append a short message to the usual traceback
# c.TerminalIPythonApp.verbose_crash = False
# Run the module as a script.
# c.TerminalIPythonApp.module_to_run = ''
# The date format used by logging formatters for %(asctime)s
# c.TerminalIPythonApp.log_datefmt = '%Y-%m-%d %H:%M:%S'
# Whether to overwrite existing config files when copying
# c.TerminalIPythonApp.overwrite = False
# Execute the given command string.
# c.TerminalIPythonApp.code_to_run = ''
# Set the log level by value or name.
# c.TerminalIPythonApp.log_level = 30
# lines of code to run at IPython startup.
# c.TerminalIPythonApp.exec_lines = []
# Suppress warning messages about legacy config files
# c.TerminalIPythonApp.ignore_old_config = False
# Path to an extra config file to load.
#
# If specified, load this config file in addition to any other IPython config.
# c.TerminalIPythonApp.extra_config_file = u''
# Should variables loaded at startup (by startup files, exec_lines, etc.) be
# hidden from tools like %who?
# c.TerminalIPythonApp.hide_initial_ns = True
# dotted module name of an IPython extension to load.
# c.TerminalIPythonApp.extra_extension = ''
# A file to be run
# c.TerminalIPythonApp.file_to_run = ''
# The IPython profile to use.
# c.TerminalIPythonApp.profile = u'default'
# Configure matplotlib for interactive use with the default matplotlib backend.
# c.TerminalIPythonApp.matplotlib = None
# If a command or file is given via the command-line, e.g. 'ipython foo.py',
# start an interactive shell after executing the file or command.
# c.TerminalIPythonApp.force_interact = False
# If true, IPython will populate the user namespace with numpy, pylab, etc. and
# an ``import *`` is done from numpy and pylab, when using pylab mode.
#
# When False, pylab mode should not import any names into the user namespace.
# c.TerminalIPythonApp.pylab_import_all = True
# The name of the IPython directory. This directory is used for logging
# configuration (through profiles), history storage, etc. The default is usually
# $HOME/.ipython. This option can also be specified through the environment
# variable IPYTHONDIR.
# c.TerminalIPythonApp.ipython_dir = u''
# Whether to display a banner upon starting IPython.
# c.TerminalIPythonApp.display_banner = True
# Whether to install the default config files into the profile dir. If a new
# profile is being created, and IPython contains config files for that profile,
# then they will be staged into the new directory. Otherwise, default config
# files will be automatically generated.
# c.TerminalIPythonApp.copy_config_files = False
# List of files to run at IPython startup.
# c.TerminalIPythonApp.exec_files = []
# Enable GUI event loop integration with any of ('glut', 'gtk', 'gtk3', 'osx',
# 'pyglet', 'qt', 'qt5', 'tk', 'wx').
# c.TerminalIPythonApp.gui = None
# Reraise exceptions encountered loading IPython extensions?
# c.TerminalIPythonApp.reraise_ipython_extension_failures = False
# A list of dotted module names of IPython extensions to load.
# c.TerminalIPythonApp.extensions = []
# Start IPython quickly by skipping the loading of config files.
# c.TerminalIPythonApp.quick = False
# The Logging format template
# c.TerminalIPythonApp.log_format = '[%(name)s]%(highlevel)s %(message)s'
# ------------------------------------------------------------------------------
# TerminalInteractiveShell configuration
# ------------------------------------------------------------------------------
# TerminalInteractiveShell will inherit config from: InteractiveShell
# auto editing of files with syntax errors.
# c.TerminalInteractiveShell.autoedit_syntax = False
# Use colors for displaying information about objects. Because this information
# is passed through a pager (like 'less'), and some pagers get confused with
# color codes, this capability can be turned off.
# c.TerminalInteractiveShell.color_info = True
# A list of ast.NodeTransformer subclass instances, which will be applied to
# user input before code is run.
# c.TerminalInteractiveShell.ast_transformers = []
#
# c.TerminalInteractiveShell.history_length = 10000
# Don't call post-execute functions that have failed in the past.
# c.TerminalInteractiveShell.disable_failing_post_execute = False
# Show rewritten input, e.g. for autocall.
# c.TerminalInteractiveShell.show_rewritten_input = True
# Set the color scheme (NoColor, Linux, or LightBG).
# c.TerminalInteractiveShell.colors = 'LightBG'
# If True, anything that would be passed to the pager will be displayed as
# regular output instead.
# c.TerminalInteractiveShell.display_page = False
# Autoindent IPython code entered interactively.
# c.TerminalInteractiveShell.autoindent = True
#
# c.TerminalInteractiveShell.separate_in = '\n'
# Deprecated, use PromptManager.in2_template
# c.TerminalInteractiveShell.prompt_in2 = ' .\\D.: '
#
# c.TerminalInteractiveShell.separate_out = ''
# Deprecated, use PromptManager.in_template
# c.TerminalInteractiveShell.prompt_in1 = 'In [\\#]: '
# Make IPython automatically call any callable object even if you didn't type
# explicit parentheses. For example, 'str 43' becomes 'str(43)' automatically.
# The value can be '0' to disable the feature, '1' for 'smart' autocall, where
# it is not applied if there are no more arguments on the line, and '2' for
# 'full' autocall, where all callable objects are automatically called (even if
# no arguments are present).
# c.TerminalInteractiveShell.autocall = 0
# Number of lines of your screen, used to control printing of very long strings.
# Strings longer than this number of lines will be sent through a pager instead
# of directly printed. The default value for this is 0, which means IPython
# will auto-detect your screen size every time it needs to print certain
# potentially long strings (this doesn't change the behavior of the 'print'
# keyword, it's only triggered internally). If for some reason this isn't
# working well (it needs curses support), specify it yourself. Otherwise don't
# change the default.
# c.TerminalInteractiveShell.screen_length = 0
# Set the editor used by IPython (default to $EDITOR/vi/notepad).
# c.TerminalInteractiveShell.editor = 'vi'
# Deprecated, use PromptManager.justify
# c.TerminalInteractiveShell.prompts_pad_left = True
# The part of the banner to be printed before the profile
# c.TerminalInteractiveShell.banner1 = 'Python 2.7.6 (default, Nov 18 2013, 15:12:51) \nType "copyright", "credits" or "license" for more information.\n\nIPython 3.2.0-dev -- An enhanced Interactive Python.\n? -> Introduction and overview of IPython\'s features.\n%quickref -> Quick reference.\nhelp -> Python\'s own help system.\nobject? -> Details about \'object\', use \'object??\' for extra details.\n'
#
# c.TerminalInteractiveShell.readline_parse_and_bind = ['tab: complete', '"\\C-l": clear-screen', 'set show-all-if-ambiguous on', '"\\C-o": tab-insert', '"\\C-r": reverse-search-history', '"\\C-s": forward-search-history', '"\\C-p": history-search-backward', '"\\C-n": history-search-forward', '"\\e[A": history-search-backward', '"\\e[B": history-search-forward', '"\\C-k": kill-line', '"\\C-u": unix-line-discard']
# The part of the banner to be printed after the profile
# c.TerminalInteractiveShell.banner2 = ''
#
# c.TerminalInteractiveShell.separate_out2 = ''
#
# c.TerminalInteractiveShell.wildcards_case_sensitive = True
#
# c.TerminalInteractiveShell.debug = False
# Set to confirm when you try to exit IPython with an EOF (Control-D in Unix,
# Control-Z/Enter in Windows). By typing 'exit' or 'quit', you can force a
# direct exit without any confirmation.
# c.TerminalInteractiveShell.confirm_exit = True
#
# c.TerminalInteractiveShell.ipython_dir = ''
#
# c.TerminalInteractiveShell.readline_remove_delims = '-/~'
# Start logging to the default log file in overwrite mode. Use `logappend` to
# specify a log file to **append** logs to.
# c.TerminalInteractiveShell.logstart = False
# The name of the logfile to use.
# c.TerminalInteractiveShell.logfile = ''
# The shell program to be used for paging.
# c.TerminalInteractiveShell.pager = 'less'
# Enable magic commands to be called without the leading %.
# c.TerminalInteractiveShell.automagic = True
# Save multi-line entries as one entry in readline history
# c.TerminalInteractiveShell.multiline_history = True
#
# c.TerminalInteractiveShell.readline_use = True
# Enable deep (recursive) reloading by default. IPython can use the deep_reload
# module which reloads changes in modules recursively (it replaces the reload()
# function, so you don't need to change anything to use it). deep_reload()
# forces a full reload of modules whose code may have changed, which the default
# reload() function does not. When deep_reload is off, IPython will use the
# normal reload(), but deep_reload will still be available as dreload().
# c.TerminalInteractiveShell.deep_reload = False
# Start logging to the given file in append mode. Use `logfile` to specify a log
# file to **overwrite** logs to.
# c.TerminalInteractiveShell.logappend = ''
#
# c.TerminalInteractiveShell.xmode = 'Context'
#
# c.TerminalInteractiveShell.quiet = False
# Enable auto setting the terminal title.
# c.TerminalInteractiveShell.term_title = False
#
# c.TerminalInteractiveShell.object_info_string_level = 0
# Deprecated, use PromptManager.out_template
# c.TerminalInteractiveShell.prompt_out = 'Out[\\#]: '
# Set the size of the output cache. The default is 1000, you can change it
# permanently in your config file. Setting it to 0 completely disables the
# caching system, and the minimum value accepted is 20 (if you provide a value
# less than 20, it is reset to 0 and a warning is issued). This limit is
# defined because otherwise you'll spend more time re-flushing a too small cache
# than working
# c.TerminalInteractiveShell.cache_size = 1000
# 'all', 'last', 'last_expr' or 'none', specifying which nodes should be run
# interactively (displaying output from expressions).
# c.TerminalInteractiveShell.ast_node_interactivity = 'last_expr'
# Automatically call the pdb debugger after every exception.
# c.TerminalInteractiveShell.pdb = False
# ------------------------------------------------------------------------------
# PromptManager configuration
# ------------------------------------------------------------------------------
# This is the primary interface for producing IPython's prompts.
# Output prompt. '\#' will be transformed to the prompt number
# c.PromptManager.out_template = 'Out[\\#]: '
# Continuation prompt.
# c.PromptManager.in2_template = ' .\\D.: '
# If True (default), each prompt will be right-aligned with the preceding one.
# c.PromptManager.justify = True
# Input prompt. '\#' will be transformed to the prompt number
# c.PromptManager.in_template = 'In [\\#]: '
#
# c.PromptManager.color_scheme = 'Linux'
# ------------------------------------------------------------------------------
# HistoryManager configuration
# ------------------------------------------------------------------------------
# A class to organize all history-related functionality in one place.
# HistoryManager will inherit config from: HistoryAccessor
# Should the history database include output? (default: no)
# c.HistoryManager.db_log_output = False
# Write to database every x commands (higher values save disk access & power).
# Values of 1 or less effectively disable caching.
# c.HistoryManager.db_cache_size = 0
# Path to file to use for SQLite history database.
#
# By default, IPython will put the history database in the IPython profile
# directory. If you would rather share one history among profiles, you can set
# this value in each, so that they are consistent.
#
# Due to an issue with fcntl, SQLite is known to misbehave on some NFS mounts.
# If you see IPython hanging, try setting this to something on a local disk,
# e.g::
#
# ipython --HistoryManager.hist_file=/tmp/ipython_hist.sqlite
# c.HistoryManager.hist_file = u''
# Options for configuring the SQLite connection
#
# These options are passed as keyword args to sqlite3.connect when establishing
# database connections.
# c.HistoryManager.connection_options = {}
# enable the SQLite history
#
# set enabled=False to disable the SQLite history, in which case there will be
# no stored history, no SQLite connection, and no background saving thread.
# This may be necessary in some threaded environments where IPython is embedded.
# c.HistoryManager.enabled = True
# ------------------------------------------------------------------------------
# ProfileDir configuration
# ------------------------------------------------------------------------------
# An object to manage the profile directory and its resources.
#
# The profile directory is used by all IPython applications, to manage
# configuration, logging and security.
#
# This object knows how to find, create and manage these directories. This
# should be used by any code that wants to handle profiles.
# Set the profile location directly. This overrides the logic used by the
# `profile` option.
# c.ProfileDir.location = u''
# ------------------------------------------------------------------------------
# PlainTextFormatter configuration
# ------------------------------------------------------------------------------
# The default pretty-printer.
#
# This uses :mod:`IPython.lib.pretty` to compute the format data of the object.
# If the object cannot be pretty printed, :func:`repr` is used. See the
# documentation of :mod:`IPython.lib.pretty` for details on how to write pretty
# printers. Here is a simple example::
#
# def dtype_pprinter(obj, p, cycle):
# if cycle:
# return p.text('dtype(...)')
# if hasattr(obj, 'fields'):
# if obj.fields is None:
# p.text(repr(obj))
# else:
# p.begin_group(7, 'dtype([')
# for i, field in enumerate(obj.descr):
# if i > 0:
# p.text(',')
# p.breakable()
# p.pretty(field)
# p.end_group(7, '])')
# PlainTextFormatter will inherit config from: BaseFormatter
#
# c.PlainTextFormatter.type_printers = {}
# Truncate large collections (lists, dicts, tuples, sets) to this size.
#
# Set to 0 to disable truncation.
# c.PlainTextFormatter.max_seq_length = 1000
#
# c.PlainTextFormatter.float_precision = ''
#
# c.PlainTextFormatter.verbose = False
#
# c.PlainTextFormatter.deferred_printers = {}
#
# c.PlainTextFormatter.newline = '\n'
#
# c.PlainTextFormatter.max_width = 79
#
# c.PlainTextFormatter.pprint = True
#
# c.PlainTextFormatter.singleton_printers = {}
# ------------------------------------------------------------------------------
# IPCompleter configuration
# ------------------------------------------------------------------------------
# Extension of the completer class with IPython-specific features
# IPCompleter will inherit config from: Completer
# Instruct the completer to omit private method names
#
# Specifically, when completing on ``object.<tab>``.
#
# When 2 [default]: all names that start with '_' will be excluded.
#
# When 1: all 'magic' names (``__foo__``) will be excluded.
#
# When 0: nothing will be excluded.
# c.IPCompleter.omit__names = 2
# Whether to merge completion results into a single list
#
# If False, only the completion results from the first non-empty completer will
# be returned.
# c.IPCompleter.merge_completions = True
# Instruct the completer to use __all__ for the completion
#
# Specifically, when completing on ``object.<tab>``.
#
# When True: only those names in obj.__all__ will be included.
#
# When False [default]: the __all__ attribute is ignored
# c.IPCompleter.limit_to__all__ = False
# Activate greedy completion
#
# This will enable completion on elements of lists, results of function calls,
# etc., but can be unsafe because the code is actually evaluated on TAB.
# c.IPCompleter.greedy = False
# ------------------------------------------------------------------------------
# ScriptMagics configuration
# ------------------------------------------------------------------------------
# Magics for talking to scripts
#
# This defines a base `%%script` cell magic for running a cell with a program in
# a subprocess, and registers a few top-level magics that call %%script with
# common interpreters.
# Extra script cell magics to define
#
# This generates simple wrappers of `%%script foo` as `%%foo`.
#
# If you want to add script magics that aren't on your path, specify them in
# script_paths
# c.ScriptMagics.script_magics = []
# Dict mapping short 'ruby' names to full paths, such as '/opt/secret/bin/ruby'
#
# Only necessary for items in script_magics where the default path will not find
# the right interpreter.
# c.ScriptMagics.script_paths = {}
# ------------------------------------------------------------------------------
# StoreMagics configuration
# ------------------------------------------------------------------------------
# Lightweight persistence for python variables.
#
# Provides the %store magic.
# If True, any %store-d variables will be automatically restored when IPython
# starts.
# c.StoreMagics.autorestore = False
| kbase/narrative | kbase-extension/ipython/profile_default/ipython_config.py | Python | mit | 20,674 |
import string
from operator import ge as greater_than_or_equal, gt as greater_than
from collections import deque
OPERATOR_PRECEDENCE = {
'(':0,
'+':1,
'-':1,
'*':2,
'/':2,
'^':3,
}
RIGHT_ASSOCIATIVE_OPERATORS = '^'
LEFT_ASSOCIATIVE_OPERATORS = '+-/*'
def pop_operator_queue(operators, output, token):
"""
Pop operators from the queue. left associative and right assoc fns are compared slightly differently!
:type operators: deque
:type output: deque
:type token: str
:return: None
"""
comparison_op = greater_than if token in RIGHT_ASSOCIATIVE_OPERATORS else greater_than_or_equal
while operators and comparison_op(OPERATOR_PRECEDENCE[operators[-1]], OPERATOR_PRECEDENCE[token]):
output.append(operators.pop())
operators.append(token)
def to_postfix(infix):
infix = deque(infix)
output = deque()
operators = deque()
while infix:
token = infix.popleft()
if token in string.digits:
output.append(token)
elif token == '(':
operators.append(token)
elif token == ')':
while operators and operators[-1] != '(':
output.append(operators.pop())
output.append(operators.pop())
        elif token in LEFT_ASSOCIATIVE_OPERATORS or token in RIGHT_ASSOCIATIVE_OPERATORS:
            # pop_operator_queue chooses >= (left-assoc) or > (right-assoc)
            pop_operator_queue(operators, output, token)
while operators:
output.append(operators.pop())
return ''.join(output).replace('(','')
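# Worked example of the conversion (traced from the code above):
#   to_postfix("5+(6-2)*9")
#     5 -> output                 output: 5       stack:
#     + -> push                   output: 5       stack: +
#     ( -> push                   output: 5       stack: + (
#     6 -> output                 output: 56      stack: + (
#     - -> push ('(' never pops)  output: 56      stack: + ( -
#     2 -> output                 output: 562     stack: + ( -
#     ) -> pop until '('          output: 562-    stack: +
#     * -> push (outranks '+')    output: 562-    stack: + *
#     9 -> output                 output: 562-9   stack: + *
#     end -> drain stack          output: 562-9*+
# Note: tokens are single characters, so multi-digit numbers aren't handled,
# and the '(' popped into the output on ')' is stripped by the final replace().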
import unittest
class TestFirst(unittest.TestCase):
    def testFirst(self):
        # Codewars-style alias so the kata's Test.assert_equals calls run under unittest.
        Test = self
        Test.assert_equals = self.assertEqual
Test.assert_equals(to_postfix("2+7"), "27+")
Test.assert_equals(to_postfix("2+7+9"), "27+9+")
Test.assert_equals(to_postfix("2+7*5"), "275*+")
Test.assert_equals(to_postfix("99*6+"), "996*+")
Test.assert_equals("33*8/", to_postfix("3*3/8"))
Test.assert_equals("33*71+/", to_postfix("3*3/(7+1)"))
Test.assert_equals("562-9*+", to_postfix("5+(6-2)*9"))
Test.assert_equals("562-9*+36^+", to_postfix("5+(6-2)*9+3^6"))
Test.assert_equals("562-9*+371-^+", to_postfix("5+(6-2)*9+3^(7-1)"))
Test.assert_equals(to_postfix("(5-4-1)+9/5/2-7/1/7"), "54-1-95/2/+71/7/-")
| julzhk/codekata | InfixtoPostfixConverter.py | Python | mit | 2,483 |
#!/usr/bin/env python3
"""
Generates random trees
"""
import argparse
import random
alphabet = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"
def generate_random_item(length=8, chars=alphabet):
item = ""
for i in range(length):
index = random.randint(0, len(chars) - 1)
item += chars[index]
return item
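# Equivalent one-liner, kept out of the function above for readability:
#   ''.join(random.choice(chars) for _ in range(length))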
def generate_random_tree_lines(
depth,
items,
length,
chars=alphabet,
current_indentation=''):
lines = []
if depth > 0:
remaining_items_to_add = items
while remaining_items_to_add > 0:
lines.append('{0}{1}'.format(current_indentation, generate_random_item(length, chars)))
remaining_items_to_add -= 1
sub_lines = generate_random_tree_lines(
depth - 1,
items,
length,
chars,
current_indentation + ' ')
for sub_line in sub_lines:
lines.append(sub_line)
return lines
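# Note on shape: the recursive call runs once per level, *after* the sibling
# loop, so all items at a level share one indented subtree. With depth=2,
# items=2, length=3 the output looks like (content is random):
#   q3F
#   zT9
#       aB1
#       x7K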
if __name__ == '__main__':
parser = argparse.ArgumentParser('Tree sorting Stress Test')
parser.add_argument('--Depth',
help='The depth of the trees.',
type=int,
default=3)
parser.add_argument('--Items',
help='The number of items for each node of the tree.',
type=int,
default=10)
parser.add_argument('--Length',
help='The length of each item.',
type=int,
default=8)
parser.add_argument('--Alphabet',
help='The alphabet of allowed characters.',
type=str,
default=alphabet)
args = parser.parse_args()
random_tree_lines = generate_random_tree_lines(
args.Depth,
args.Items,
args.Length,
args.Alphabet)
for line in random_tree_lines:
print(line)
| robert-impey/tree-sorter | randomtrees.py | Python | mit | 2,051 |
# coding: utf-8
from .common import CommonTestCase
class SuggestionsTest(CommonTestCase):
def test_suggestion_url(self):
client = self.client
# self.assertEqual(client.suggestions.address.url, "https://suggestions.dadata.ru/suggestions/api/4_1/rs/suggest/address")
self.assertEqual(client.suggestions.address.url, "https://dadata.ru/api/v2/suggest/address")
def test_that_suggestion_url_is_not_private(self):
self.assertEqual(self.client.suggestions.address.private, False)
def test_that_assigned_data_is_query(self):
self.client.suggest_address = "test"
self.assertEqual(self.client.data, {'query' : 'test'})
| tigrus/dadata-python | tests/test_suggestions.py | Python | mit | 674 |
# Copyright (c) 2011, Found IT A/S and Piped Project Contributors.
# See LICENSE for details.
import json
from StringIO import StringIO
from twisted.application import service
from twisted.internet import defer, address
from twisted.python import filepath, failure
from twisted.trial import unittest
from twisted.web import resource, server, http_headers
from twisted.web.test import test_web
from piped import exceptions, util, processing, dependencies
from piped.providers import web_provider
class DummyRequest(test_web.DummyRequest, server.Request):
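    # Hybrid test double: twisted's in-memory DummyRequest plus the real
    # server.Request header logic, so getHeader/setHeader read and write the
    # same Headers objects that production code uses.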
channel = Ellipsis
headers = None
def __init__(self, *a, **kw):
test_web.DummyRequest.__init__(self, *a, **kw)
self.requestHeaders = http_headers.Headers()
self.content = StringIO()
def getHeader(self, key):
return server.Request.getHeader(self, key)
def setHeader(self, name, value):
return server.Request.setHeader(self, name, value)
def set_content(self, content):
if not hasattr(content, 'read'):
self.content = StringIO(content)
else:
self.content = content
def setResponseCode(self, code, message=None):
server.Request.setResponseCode(self, code, message)
@property
def written_as_string(self):
return ''.join(self.written)
class WebProviderTest(unittest.TestCase):
def setUp(self):
self.runtime_environment = processing.RuntimeEnvironment()
self.service = service.IService(self.runtime_environment.application)
self.dependency_manager = self.runtime_environment.dependency_manager
self.configuration_manager = self.runtime_environment.configuration_manager
self.resource_manager = self.runtime_environment.resource_manager
self.dependency_manager.configure(self.runtime_environment)
def tearDown(self):
if self.service.running:
self.service.stopService()
def _create_configured_web_resource(self, routing, site_configuration=None):
site_configuration = site_configuration or dict()
web_site = web_provider.WebSite('site_name', site_configuration)
web_resource = web_provider.WebResource(web_site, routing)
web_resource.configure(self.runtime_environment)
return web_resource
def assertConfiguredWithProcessor(self, web_resource, processor=None, no_resource_processor=None):
if processor:
self.assertNotEquals(web_resource.processor_dependency, None)
self.assertEquals(web_resource.processor_dependency.provider, processor)
else:
self.assertEquals(web_resource.processor_dependency, None)
if no_resource_processor:
self.assertNotEquals(web_resource.no_resource_processor_dependency, None)
self.assertEquals(web_resource.no_resource_processor_dependency.provider, no_resource_processor)
else:
self.assertEquals(web_resource.no_resource_processor_dependency, None)
def getResourceForFakeRequest(self, site, post_path=None, request=None):
if not request:
request = DummyRequest(post_path)
return site.factory.getResourceFor(request)
def getConfiguredWebSite(self, config):
web_site = web_provider.WebSite('site_name', config)
web_site.configure(self.runtime_environment)
return web_site
def test_enabled_web_sites_provided(self):
provider = web_provider.WebResourceProvider()
self.configuration_manager.set('web.my_site.routing',
dict(__config__=dict(processor='a_processor'))
)
self.configuration_manager.set('web.another_site.enabled', False)
self.configuration_manager.set('web.another_site.routing',
dict(__config__=dict(processor='a_processor'))
)
provider.configure(self.runtime_environment)
self.assertEquals(len(provider.services), 1)
def test_simple_processor_routing(self):
config = dict(
routing = dict(
__config__ = dict(processor='pipeline.a_pipeline')
)
)
web_site = self.getConfiguredWebSite(config)
web_resource = self.getResourceForFakeRequest(web_site, [''])
self.assertConfiguredWithProcessor(web_resource, 'pipeline.a_pipeline')
def test_no_resource_processor_routing(self):
config = dict(
routing = dict(
__config__ = dict(processor='pipeline.root_pipeline', no_resource_processor='pipeline.root_no_resource_pipeline'),
foo = dict(
__config__ = dict(processor = 'pipeline.foo_pipeline')
),
bar = dict(
baz = dict(
__config__ = dict(no_resource_processor = 'pipeline.baz_pipeline')
)
)
)
)
web_site = self.getConfiguredWebSite(config)
root_resource = self.getResourceForFakeRequest(web_site, [''])
self.assertConfiguredWithProcessor(root_resource, processor='pipeline.root_pipeline', no_resource_processor='pipeline.root_no_resource_pipeline')
        # nonexistent resources should be rendered by the closest ancestor with a no_resource_processor
self.assertEquals(self.getResourceForFakeRequest(web_site, ['nonexistent']), root_resource)
self.assertEquals(self.getResourceForFakeRequest(web_site, ['nonexistent', 'nested']), root_resource)
# since foo does not have a no_resource_processor, its no_resources should be rendered by the root_resource
self.assertEquals(self.getResourceForFakeRequest(web_site, ['foo', 'nonexistent']), root_resource)
self.assertEquals(self.getResourceForFakeRequest(web_site, ['foo', 'nonexistent', 'nested']), root_resource)
# since bar does not have a processor/no_resource_processor, it should be rendered by the root_resource
self.assertEquals(self.getResourceForFakeRequest(web_site, ['bar']), root_resource)
self.assertConfiguredWithProcessor(self.getResourceForFakeRequest(web_site, ['foo']), processor='pipeline.foo_pipeline')
self.assertConfiguredWithProcessor(self.getResourceForFakeRequest(web_site, ['foo', '']), processor='pipeline.foo_pipeline')
baz_resource = self.getResourceForFakeRequest(web_site, ['bar', 'baz'])
self.assertConfiguredWithProcessor(baz_resource, no_resource_processor='pipeline.baz_pipeline')
# since baz has a no_resource_processor, it is capable of rendering that itself doesn't have a "proper" resource/processor
self.assertEquals(self.getResourceForFakeRequest(web_site, ['bar', 'baz', '']), baz_resource)
self.assertEquals(self.getResourceForFakeRequest(web_site, ['bar', 'baz', 'nonexistent']), baz_resource)
self.assertEquals(self.getResourceForFakeRequest(web_site, ['bar', 'baz', 'nonexistent', 'nested']), baz_resource)
def test_web_resource_no_resource_request_processing(self):
""" Test that various web resources are being rendered with a request instance that
has its "postpath" instance variable set to the remaining / unhandled path segments.
"""
config = dict(
routing = dict(
__config__ = dict(processor='pipeline.root_pipeline', no_resource_processor='pipeline.root_no_resource_pipeline'),
foo = dict(
__config__ = dict(processor='pipeline.foo_pipeline')
),
bar = dict(
baz = dict(
__config__ = dict(no_resource_processor='pipeline.baz_pipeline')
)
)
)
)
web_site = self.getConfiguredWebSite(config)
batons = list()
# fake the pipelines being ready:
root_resource = self.getResourceForFakeRequest(web_site, [''])
foo_resource = self.getResourceForFakeRequest(web_site, ['foo'])
baz_resource = self.getResourceForFakeRequest(web_site, ['bar', 'baz'])
for resource in (root_resource, foo_resource, baz_resource):
if resource.processor_dependency:
resource.processor_dependency.on_resource_ready(batons.append)
if resource.no_resource_processor_dependency:
resource.no_resource_processor_dependency.on_resource_ready(batons.append)
def assertRequestRenderedWithPostPath(web_site, batons, request, post_path):
self.getResourceForFakeRequest(web_site, request=request).render(request)
self.assertEquals(batons, [dict(request=request)])
request = batons.pop()['request']
self.assertEquals(request.postpath, post_path)
for request_path, expected_postpath in (
# paths under the root resource, which has both a regular processor and a no resource processor
([''], []),
(['nonexistent'], ['nonexistent']),
(['nonexistent', 'nested'], ['nonexistent', 'nested']),
# paths under the foo/bar resource, which only has a regular processor
(['foo', 'bar'], ['foo', 'bar']),
(['foo', 'bar', ''], ['foo', 'bar', '']),
(['foo', 'bar', 'nested'], ['foo', 'bar', 'nested']),
# paths under the bar resource, which has a nested resource, but no processors at all
(['bar'], ['bar']),
(['bar', ''], ['bar', '']),
(['bar', 'nested'], ['bar', 'nested']),
# paths under the bar/baz resource, which only has a no resource processor
(['bar', 'baz'], []),
(['bar', 'baz', ''], ['']),
(['bar', 'baz', 'nested'], ['nested']),
(['bar', 'baz', 'nested', ''], ['nested', '']),
(['bar', 'baz', 'nested', 'deeply'], ['nested', 'deeply'])):
assertRequestRenderedWithPostPath(web_site, batons, DummyRequest(request_path), expected_postpath)
def test_static_preprocessors(self):
current_file = filepath.FilePath(__file__)
config = dict(
routing = dict(
__config__ = dict(
static = dict(
path = current_file.dirname(),
preprocessors = dict(
foo = "request: request.setHeader('foo', 'bar')"
)
)
)
)
)
web_site = self.getConfiguredWebSite(config)
# send a request for this file:
request = DummyRequest([current_file.basename()])
resource = web_site.factory.getResourceFor(request)
resource.render(request)
self.assertEquals(request.responseHeaders.getRawHeaders('foo'), ['bar'])
def test_processor_routing_with_nested_resources(self):
config = dict(
routing = dict(
__config__ = dict(
processor = 'pipeline.a_pipeline',
static = filepath.FilePath(__file__).dirname(),
),
nested = dict(
deeply = dict(
__config__ = dict(
processor = 'pipeline.another_pipeline'
)
)
)
)
)
web_site = self.getConfiguredWebSite(config)
web_resource = self.getResourceForFakeRequest(web_site, [''])
self.assertConfiguredWithProcessor(web_resource, 'pipeline.a_pipeline')
# if we request an existing file, a static file resource will be returned
filename = filepath.FilePath(__file__).basename()
static_resource = self.getResourceForFakeRequest(web_site, [filename])
self.assertIsInstance(static_resource, web_provider.StaticFile)
web_resource = self.getResourceForFakeRequest(web_site, ['nested'])
self.assertConfiguredWithProcessor(web_resource)
no_resource = self.getResourceForFakeRequest(web_site, ['nested', 'nonexistent'])
self.assertIsInstance(no_resource, resource.NoResource)
deeply_resource = self.getResourceForFakeRequest(web_site, ['nested', 'deeply'])
self.assertConfiguredWithProcessor(deeply_resource, 'pipeline.another_pipeline')
def test_web_resource_simple_request_processing(self):
web_resource = self._create_configured_web_resource(dict(__config__=dict(processor='pipeline.a_pipeline')))
request = DummyRequest([''])
batons = list()
web_resource.processor_dependency.on_resource_ready(batons.append)
# rendering the request should result in a baton being processed by the processor
web_resource.render(request)
self.assertEquals(batons, [dict(request=request)])
def test_web_resource_processing_handles_exceptions(self):
web_resource = self._create_configured_web_resource(dict(__config__=dict(processor='pipeline.a_pipeline')))
request = DummyRequest([''])
def raiser(baton):
raise Exception()
web_resource.processor_dependency.on_resource_ready(raiser)
# rendering the request should result in an exception response
web_resource.render(request)
self.assertIn('Processing Failed', ''.join(request.written))
self.assertEquals(request.code, 500)
def test_web_resource_processing_raises_with_debugging(self):
routing = dict(__config__=dict(processor='pipeline.a_pipeline'))
site_config = dict(debug=dict(allow=['localhost']))
web_resource = self._create_configured_web_resource(routing, site_config)
request = DummyRequest([''])
request.client = address.IPv4Address('TCP', 'localhost', 1234)
def raiser(baton):
raise Exception()
web_resource.processor_dependency.on_resource_ready(raiser)
# rendering the request should result in an exception response
web_resource.render(request)
self.assertIn('web.Server Traceback (most recent call last)', ''.join(request.written))
self.assertEquals(request.code, 500)
@defer.inlineCallbacks
def test_debug_handler_reaping(self):
# reap all debuggers every reactor iteration:
site_config = dict(routing=dict())
web_site = web_provider.WebSite('site_name', site_config)
debug_handler = web_provider.WebDebugHandler(web_site, reap_interval=0, max_inactive_time=0)
debug_handler.setServiceParent(self.service)
self.service.startService()
f = failure.Failure(Exception())
debug_handler.register_failure(f)
self.assertEquals(len(debug_handler.children), 1)
yield util.wait(0) # give the reaper one reactor iteration to reap the debugger
self.assertEquals(len(debug_handler.children), 0)
def test_debug_handler_allow(self):
site_config = dict(routing=dict())
web_site = self.getConfiguredWebSite(site_config)
debug_handler = web_provider.WebDebugHandler(web_site, allow=['some_host'])
debug_handler.setServiceParent(self.service)
f = failure.Failure(Exception())
path = debug_handler.register_failure(f)
request = DummyRequest([path])
# localhost is not allowed to debug:
request.client = address.IPv4Address('TCP', 'localhost', 1234)
forbidden = debug_handler.getChildWithDefault(path, request)
self.assertIsInstance(forbidden, resource.ForbiddenResource)
# but some_host is:
request.client = address.IPv4Address('TCP', 'some_host', 1234)
web_debugger = debug_handler.getChildWithDefault(path, request)
self.assertIsInstance(web_debugger, web_provider.WebDebugger)
def test_web_debugger(self):
# create a failure instance with an actual traceback:
        foo = 42 # this will become part of the debugger's namespace
try:
raise Exception()
except Exception as e:
f = util.NonCleaningFailure()
web_debugger = web_provider.WebDebugger(f)
request = DummyRequest([])
request.addArg('expr', 'foo')
result = web_debugger.render(request)
# the result should be json-encoded
self.assertEquals(result, json.dumps('42\n'))
def test_fails_if_both_static_and_concatenated_are_specified(self):
for invalid_routing in (dict(__config__=dict(static='', concatenated='')),
dict(nested=dict(__config__=dict(static='', concatenated='')))):
site = web_provider.WebSite('site_name', dict(routing=invalid_routing))
self.assertRaises(exceptions.ConfigurationError, site.configure, self.runtime_environment)
def test_request_finished_when_garbage_collected(self):
web_site = web_provider.WebSite('site_name', dict(routing=dict(__config__=dict(processor='pipeline.test_pipeline'))))
web_site.configure(self.runtime_environment)
batons = list()
web_resource = self.getResourceForFakeRequest(web_site, [])
web_resource.processor_dependency = dependencies.InstanceDependency(batons.append)
web_resource.processor_dependency.is_ready = True
request = DummyRequest([])
web_resource.render(request)
# the processor should have been asked to process a baton
self.assertEquals(len(batons), 1)
self.assertEquals(batons[0]['request'], request)
# the processor didn't finish the request:
self.assertEquals(request.finished, False)
# .. however, when the processor loses the reference to the request, it should be
# automatically finished:
batons.pop()
self.assertEquals(request.finished, True)
class TestConcatenatedFile(unittest.TestCase):
def test_concatenating_files(self):
test_data_path = filepath.FilePath(__file__).sibling('data')
file_paths = [test_data_path.child('foo'), test_data_path.child('bar')]
cf = web_provider.ConcatenatedFile('text/plain', file_paths)
request = DummyRequest([''])
text = cf.render_GET(request)
self.assertEquals(text, 'foo\nbar\n')
def test_concatenating_files_in_different_order(self):
test_data_path = filepath.FilePath(__file__).sibling('data')
file_paths = [test_data_path.child('bar'), test_data_path.child('foo')]
cf = web_provider.ConcatenatedFile('text/plain', file_paths)
request = DummyRequest([''])
text = cf.render_GET(request)
self.assertEquals(text, 'bar\nfoo\n')
def test_just_a_single_file(self):
test_data_path = filepath.FilePath(__file__).sibling('data')
file_paths = [test_data_path.child('foo')]
cf = web_provider.ConcatenatedFile('text/plain', file_paths)
request = DummyRequest([''])
text = cf.render_GET(request)
self.assertEquals(text, 'foo\n')
def test_no_files(self):
file_paths = []
cf = web_provider.ConcatenatedFile('text/plain', file_paths)
request = DummyRequest([''])
text = cf.render_GET(request)
self.assertEquals(text, '')
def test_ensure_the_right_content_type_is_set(self):
file_paths = []
cf = web_provider.ConcatenatedFile('text/plain', file_paths)
request = DummyRequest([''])
cf.render_GET(request)
self.assertEquals(request.responseHeaders.getRawHeaders('content-type'), ['text/plain'])
| foundit/Piped | piped/providers/test/test_web_provider.py | Python | mit | 19,532 |
#!/usr/bin/env python3
# sequence = []
defence = 0.4
for x in range(1):
print(x)
for x in range(10):
    if x == 0:
        defence = defence  # first pass keeps the base value
    else:
        defence = defence + (1 - defence) * (1 / 2)  # each further pass halves the gap to 1
print(defence)
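# Closed form of the loop above: after the x-th pass,
#   defence = 1 - (1 - 0.4) * 0.5 ** x
# so the value climbs toward (but never reaches) 1.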
# print(defence)
# print(sequence)
# print(sum(sequence))
# x = input()
# print(x)
| mishka28/NYU-Python | advance_python_class_3/Homework1/temptest.py | Python | mit | 307 |
def pig_it(text):
return ' '.join([x[1:]+x[0]+'ay' if x.isalpha() else x for x in text.split()])
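# Quick check:
#   pig_it("Pig latin is cool")  ->  "igPay atinlay siay oolcay"
#   pig_it("Hello world !")      ->  "elloHay orldway !"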
# Really just slicing and re-joining two string pieces — much simpler than
# moving characters in place, and a neat approach.
# The `a if cond else b` form handles the odd (non-alphabetic) tokens in one
# line; a standard idiom.
# Breakdown of the comprehension:
#   for x in text.split()            iterate over whitespace-separated tokens
#   x[1:] + x[0] + 'ay'              move the first letter to the end, add 'ay'
#   if x.isalpha() else x            pass punctuation through unchanged
#   ' '.join([...])                  reassemble the sentence
| lluxury/codewars | Simple Pig Latin.py | Python | mit | 564 |
import tkinter as tk
from tkinter import *
import spotipy
import webbrowser
from PIL import Image, ImageTk
import os
from twitter import *
from io import BytesIO
import urllib.request
import urllib.parse
import PIL.Image
from PIL import ImageTk
import simplejson
song1 = "spotify:artist:58lV9VcRSjABbAbfWS6skp"
song2 = 'spotify:artist:0PFtn5NtBbbUNbU9EAmIWF'
song3 = 'spotify:artist:5INjqkS1o8h1imAzPqGZBb'
song4 = 'spotify:artist:1HwM5zlC5qNWhJtM00yXzG'
song5 = 'spotify:artist:4tZwfgrHOc3mvqYlEYSvVi'
song6 = 'spotify:artist:3AA28KZvwAUcZuOKwyblJQ'
song7 = 'spotify:artist:5T0MSzX9RC5NA6gAI6irSn'
song8 = 'spotify:artist:0SwO7SWeDHJijQ3XNS7xEE'
song9 = 'spotify:artist:1dWEYMPtNmvSVaDNLgB6NV'
# Put in token, token_key, con_secret, con_secret_key
t = Twitter(
auth=OAuth('705153959368007680-F5OUf8pvmOlXku1b7gpJPSAToqzV4Fb', 'bEGLkUJBziLc17EuKLTAMio8ChmFxP9aHYADwRXnxDsoC',
'gYDgR8lcTGcVZS9ucuEIYsMuj', '1dwHsLDN2go3aleQ8Q2vcKRfLETc51ipsP8310ayizL2p3Ycii'))
numberOfTweets = 3
class SetUp(tk.Tk): #inheriting
    def __init__(self, *args, **kwargs): #method, initialising
tk.Tk.__init__(self, *args, **kwargs)
tk.Tk.wm_iconbitmap(self, default="favicon.ico")
container = tk.Frame(self) #container for holding everything
container.pack(side = "top", fill = None, expand = False)
container.pack_propagate(0) # don't shrink
container.grid_rowconfigure(0, weight = 1)
container.grid_columnconfigure(0, weight = 1)
self.frames = {} #dictionary of frames
for F in (StartPage, RadioPage, MapPage, DataPage, InvPage, StatsPage): #loop through the number of pages
frame = F(container, self)
self.frames[F] = frame
frame.grid(row = 0, column = 0, sticky = "nsew") #alignment plus stretch
self.show_frame(StartPage)
def show_frame(self, cont):
frame = self.frames[cont]
frame.tkraise() #raised to the front
def music(self, uri):
spotify = spotipy.Spotify()
results = spotify.artist_top_tracks(uri)
        # Take the first of the artist's top tracks and return its 30-second
        # MP3 preview URL (None if there are no tracks or no preview).
        text2 = None
        for track in results['tracks'][:1]:
            text2 = track['preview_url']
        return text2
def showTweets(self, x, num):
# display a number of new tweets and usernames
for i in range(0, num):
line1 = (x[i]['user']['screen_name'])
line2 = (x[i]['text'])
            #w = Label(self, text=line1 + "\n" + line2 + "\n\n")
            #w.pack()
            # pack() would override an earlier place() call, so use pack() alone
            self.label = Label(self, text=line1 + "\n" + line2 + "\n\n", width = 100)
            self.label.pack()
def getTweets(self):
x = t.statuses.home_timeline(screen_name="AndrewKLeech")
return x
def tweet(self):
text = entryWidget.get().strip()
if text == "":
print("Empty")
else:
t.statuses.update(status=text)
entryWidget.delete(0,END)
print("working")
def get_map(self,lat,lng):
latString = str(lat)
lngString = str(lng)
#Map url from google maps, has marker and colors included
url = ("https://maps.googleapis.com/maps/api/staticmap?center="+latString+","+lngString+"&size=450x250&zoom=16&style=feature:road.local%7Celement:geometry%7Ccolor:0x00ff00%7Cweight:1%7Cvisibility:on&style=feature:landscape%7Celement:geometry.fill%7Ccolor:0x000000%7Cvisibility:on&style=feature:landscape%7Celement:geometry.fill%7Ccolor:0x000000%7Cvisibility:on&style=feature:administrative%7Celement:labels%7Cweight:3.9%7Cvisibility:on%7Cinverse_lightness:true&style=feature:poi%7Cvisibility:simplified&markers=color:blue%7Clabel:H%7C"+latString+","+lngString+"&markers=size:tiny%7Ccolor:green%7CDelta+Junction,AK\&sensor=false")
buffer = BytesIO(urllib.request.urlopen(url).read())
pil_image = PIL.Image.open(buffer)
tk_image = ImageTk.PhotoImage(pil_image)
# put the image in program
mapLabel = Label(image=tk_image)
mapLabel.pack()
mainloop()
def get_coordinates(self,from_sensor=False):
if entryWidget2.get().strip() == "":
print("Empty")
mapLabel.pack_forget()
else:
query=entryWidget2.get().strip()
print("working")
query = query.encode('utf-8')
params = {
'address': query,
'sensor': "true" if from_sensor else "false"
}
            # URL used for the Google Geocoding API
googleGeocodeUrl = 'http://maps.googleapis.com/maps/api/geocode/json?'
url = googleGeocodeUrl + urllib.parse.urlencode(params)
json_response = urllib.request.urlopen(url)
response = simplejson.loads(json_response.read())
            if response['results']:
                location = response['results'][0]['geometry']['location']
                latitude, longitude = location['lat'], location['lng']
                print(query, latitude, longitude)
                self.get_map(latitude, longitude)
            else:
                print(query, "<no results>")
def game(self):
w, h = 500, 500
# Pack pygame in `embed`.
root = tk.Tk()
embed = tk.Frame(root, width=w, height=h)
embed.pack()
# Tell pygame's SDL window which window ID to use
os.environ['SDL_WINDOWID'] = str(embed.winfo_id())
# Show the window so it's assigned an ID.
root.update()
# Game for Pip-Boy
# Imports
import pygame
import random
# Initialise PyGame
pygame.init()
# Set display width and height
display_width = 500
display_height = 500
# Create a gameDisplay using display_width and display_height
gameDisplay = pygame.display.set_mode((display_width, display_height))
# Set the caption of the window to Turret Defense
pygame.display.set_caption('Tank War!')
# Create colours using RGB values
black = (0, 0, 0)
green = (0, 150, 0)
lightGreen = (0, 255, 0)
# Create fonts
smallFont = pygame.font.SysFont(None, 25)
mediumFont = pygame.font.SysFont(None, 50)
largeFont = pygame.font.SysFont(None, 75)
# Initialise the clock for FPS
clock = pygame.time.Clock()
# Tank part dimensions
tankWidth = 40
tankHeight = 20
turretWidth = 5
wheelWidth = 5
# Ground height
ground = .85 * display_height
# Load sounds
fireSound = pygame.mixer.Sound("fireSound.wav")
cannon = pygame.mixer.Sound("cannon.wav")
def text_objects(text, color, size="smallFont"): # Function returns text for blitting
if size == "smallFont":
textSurface = smallFont.render(text, True, color)
if size == "mediumFont":
textSurface = mediumFont.render(text, True, color)
if size == "largeFont":
textSurface = largeFont.render(text, True, color)
return textSurface, textSurface.get_rect()
def text_to_button(msg, color, buttonx, buttony, buttonwidth, buttonheight,
size="smallFont"): # Blits text to button
textSurface, textRect = text_objects(msg, color, size)
textRect.center = ((buttonx + buttonwidth / 2), buttony + (buttonheight / 2))
gameDisplay.blit(textSurface, textRect)
def message_to_screen(msg, color, y_displace=0, size="smallFont"): # Blits the text returned from text_objects
textSurface, textRect = text_objects(msg, color, size)
textRect.center = (int(display_width / 2), int(display_height / 2) + y_displace)
gameDisplay.blit(textSurface, textRect)
def tank(x, y, turretPosition): # Draws the tank and turret
# Casting x and y to be ints
x = int(x)
y = int(y)
# Set possible turret positions
turrets = [(x - 27, y - 2),
(x - 26, y - 5),
(x - 25, y - 8),
(x - 23, y - 12),
(x - 20, y - 14),
(x - 18, y - 15),
(x - 15, y - 17),
(x - 13, y - 19),
(x - 11, y - 21)]
# Draw the tank
pygame.draw.circle(gameDisplay, green, (int(x), int(y)), 10)
pygame.draw.rect(gameDisplay, green, (x - tankHeight, y, tankWidth, tankHeight))
pygame.draw.line(gameDisplay, green, (x, y), turrets[turretPosition], turretWidth)
# Draw the wheels
pygame.draw.circle(gameDisplay, green, (x - 15, y + 20), wheelWidth)
pygame.draw.circle(gameDisplay, green, (x - 10, y + 20), wheelWidth)
pygame.draw.circle(gameDisplay, green, (x - 5, y + 20), wheelWidth)
pygame.draw.circle(gameDisplay, green, (x + 0, y + 20), wheelWidth)
pygame.draw.circle(gameDisplay, green, (x + 5, y + 20), wheelWidth)
pygame.draw.circle(gameDisplay, green, (x + 10, y + 20), wheelWidth)
pygame.draw.circle(gameDisplay, green, (x + 15, y + 20), wheelWidth)
# Return the turret position
return turrets[turretPosition]
def enemyTank(x, y, turretPosition): # Draws the tank and turret
# Casting x and y to be ints
x = int(x)
y = int(y)
# Set possible turret positions
turrets = [(x + 27, y - 2),
(x + 26, y - 5),
(x + 25, y - 8),
(x + 23, y - 12),
(x + 20, y - 14),
(x + 18, y - 15),
(x + 15, y - 17),
(x + 13, y - 19),
(x + 11, y - 21)]
# Draw the tank
pygame.draw.circle(gameDisplay, green, (int(x), int(y)), 10)
pygame.draw.rect(gameDisplay, green, (x - tankHeight, y, tankWidth, tankHeight))
pygame.draw.line(gameDisplay, green, (x, y), turrets[turretPosition], turretWidth)
pygame.draw.circle(gameDisplay, green, (x - 15, y + 20), wheelWidth)
pygame.draw.circle(gameDisplay, green, (x - 10, y + 20), wheelWidth)
pygame.draw.circle(gameDisplay, green, (x - 5, y + 20), wheelWidth)
pygame.draw.circle(gameDisplay, green, (x + 0, y + 20), wheelWidth)
pygame.draw.circle(gameDisplay, green, (x + 5, y + 20), wheelWidth)
pygame.draw.circle(gameDisplay, green, (x + 10, y + 20), wheelWidth)
pygame.draw.circle(gameDisplay, green, (x + 15, y + 20), wheelWidth)
return turrets[turretPosition]
def explosion(x, y): # Draws an explosion on screen
# Play a sound
pygame.mixer.Sound.play(fireSound)
explode = True
while explode:
for event in pygame.event.get():
if event.type == pygame.QUIT:
pygame.quit()
choices = [green, lightGreen]
magnitude = 1
while magnitude < 50:
explodeBitX = x + random.randrange(-1 * magnitude, magnitude)
explodeBitY = y + random.randrange(-1 * magnitude, magnitude)
if explodeBitY > ground + 13:
pygame.draw.circle(gameDisplay, black, (explodeBitX, explodeBitY), random.randrange(1, 5))
else:
pygame.draw.circle(gameDisplay, choices[random.randrange(0, 2)], (explodeBitX, explodeBitY),
random.randrange(1, 5))
magnitude += 1
pygame.display.update()
clock.tick(100)
explode = False
def fire(pos, turretPos, gunPower, enemyTankX,
enemyTankY): # Function for shooting and controlling bullet physics
# Play a sound
pygame.mixer.Sound.play(cannon)
damage = 0
fire = True
startingPos = list(pos)
while fire:
for event in pygame.event.get():
if event.type == pygame.QUIT:
pygame.quit()
pygame.draw.circle(gameDisplay, green, (startingPos[0], startingPos[1]), 5)
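                    # Trajectory sketch: x advances faster for flatter turret
                    # angles; y follows a parabola in the horizontal distance
                    # (the quadratic term shrinks as gunPower grows), offset by
                    # a constant climb that increases with turret elevation.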
startingPos[0] -= (10 - turretPos) * 2
startingPos[1] += int((((startingPos[0] - pos[0]) * .015 / (gunPower / 50)) ** 2) - (
turretPos + turretPos / (12 - turretPos)))
# If the explosion is on the ground
if startingPos[1] > ground:
hitX = int((startingPos[0]))
hitY = int(startingPos[1])
# If the explosion hits the tank
# Various damages for how close it was
if enemyTankX + 10 > hitX > enemyTankX - 10:
damage = 25
elif enemyTankX + 15 > hitX > enemyTankX - 15:
damage = 20
elif enemyTankX + 20 > hitX > enemyTankX - 20:
damage = 15
elif enemyTankX + 30 > hitX > enemyTankX - 30:
damage = 5
explosion(hitX, hitY)
fire = False
pygame.display.update()
clock.tick(60)
return damage
def enemyFire(pos, turretPos, gunPower, playerX,
playerY): # Function for shooting and controlling bullet physics
# Play a sound
pygame.mixer.Sound.play(cannon)
damage = 0
currentPower = 1
powerFound = False
            # The AI picks its power by simulating shots at increasing power
            # (with per-step jitter) until one would land near the player,
            # then fires using that power.
while not powerFound:
currentPower += 1
if currentPower > 100:
powerFound = True
fire = True
startingPos = list(pos)
while fire:
for event in pygame.event.get():
if event.type == pygame.QUIT:
pygame.quit()
startingPos[0] += (10 - turretPos) * 2
# Make currentPower random between 80% and 120% of the chosen power
gunPower = random.randrange(int(currentPower * .8), int(currentPower * 1.2))
startingPos[1] += int((((startingPos[0] - pos[0]) * .015 / (gunPower / 50)) ** 2) - (
turretPos + turretPos / (12 - turretPos)))
# If the explosion is on the ground
if startingPos[1] > ground:
hitX = int((startingPos[0]))
hitY = int(startingPos[1])
if playerX + 15 > hitX > playerX - 15:
powerFound = True
fire = False
fire = True
startingPos = list(pos)
# When the power is decided, it shoots
while fire:
for event in pygame.event.get():
if event.type == pygame.QUIT:
pygame.quit()
pygame.draw.circle(gameDisplay, green, (startingPos[0], startingPos[1]), 5)
startingPos[0] += (10 - turretPos) * 2
startingPos[1] += int((((startingPos[0] - pos[0]) * .015 / (gunPower / 50)) ** 2) - (
turretPos + turretPos / (12 - turretPos)))
# If the explosion is on the ground
if startingPos[1] > ground:
hitX = int((startingPos[0]))
hitY = int(startingPos[1])
# If the explosion hits the tank
# Various damages for how close it was
if playerX + 10 > hitX > playerX - 10:
damage = 25
elif playerX + 15 > hitX > playerX - 15:
damage = 20
elif playerX + 20 > hitX > playerX - 20:
damage = 15
elif playerX + 30 > hitX > playerX - 30:
damage = 5
explosion(hitX, hitY)
fire = False
pygame.display.update()
clock.tick(60)
return damage
def power(level): # Blits the power level
text = smallFont.render("Power: " + str(level) + "%", True, green)
gameDisplay.blit(text, [display_width * .75, 10])
def game_controls(): # Function for controls screen
controls = True
while controls:
for event in pygame.event.get():
if event.type == pygame.QUIT:
pygame.quit()
quit()
gameDisplay.fill(black)
message_to_screen("Controls!", green, -100, size="largeFont")
message_to_screen("Left and right arrow keys to move the tank!", green, 10, size="smallFont")
message_to_screen("Up and down arrow keys to move the tank's turret!", green, 40, size="smallFont")
message_to_screen("A and D keys change the turret's power!", green, 70, size="smallFont")
message_to_screen("P to pause the game!", green, 100, size="smallFont")
# Buttons
button("Play", 25, 400, 100, 50, green, lightGreen, action="play")
button("Quit", 375, 400, 100, 50, green, lightGreen, action="quit")
pygame.display.update()
clock.tick(15)
def button(text, x, y, width, height, colour, active_colour,
action): # Creates the button, both active and inactive
cursor = pygame.mouse.get_pos()
click = pygame.mouse.get_pressed()
if x + width > cursor[0] > x and y + height > cursor[1] > y:
pygame.draw.rect(gameDisplay, active_colour, (x, y, width, height))
if click[0] == 1 and action != None:
if action == "play":
gameLoop()
if action == "controls":
game_controls()
if action == "quit":
pygame.quit()
quit()
else:
pygame.draw.rect(gameDisplay, colour, (x, y, width, height))
text_to_button(text, black, x, y, width, height)
def pause(): # Pauses the game
paused = True
message_to_screen("Paused", green, -225, size="largeFont")
message_to_screen("C to continue playing", green, -175, size="smallFont")
message_to_screen("Q to quit", green, -150, size="smallFont")
pygame.display.update()
while paused:
for event in pygame.event.get():
if event.type == pygame.QUIT:
pygame.quit()
quit()
if event.type == pygame.KEYDOWN:
if event.key == pygame.K_c:
paused = False
elif event.key == pygame.K_q:
pygame.quit()
quit()
clock.tick(5)
def game_intro(): # Function for game introduction screen
intro = True
while intro:
for event in pygame.event.get():
if event.type == pygame.QUIT:
pygame.quit()
quit()
gameDisplay.fill(black)
message_to_screen("Tank War!", green, -200, size="largeFont")
message_to_screen("Kill the enemy tank before it kills you!", green, -50, size="smallFont")
message_to_screen("Press play to play!", green, 0, size="smallFont")
message_to_screen("Press controls to view the game's controls!", green, 50, size="smallFont")
message_to_screen("Press quit to exit the game!", green, 100, size="smallFont")
# Text on the buttons
button("Play", 25, 400, 100, 50, green, lightGreen, action="play")
button("Controls", 200, 400, 100, 50, green, lightGreen, action="controls")
button("Quit", 375, 400, 100, 50, green, lightGreen, action="quit")
pygame.display.update()
clock.tick(15)
def gameWin(): # Function for game introduction screen
win = True
while win:
for event in pygame.event.get():
if event.type == pygame.QUIT:
pygame.quit()
quit()
gameDisplay.fill(black)
message_to_screen("You won!", green, -100, size="largeFont")
message_to_screen("Your enemy's tank was destroyed!", green, 0, size="smallFont")
message_to_screen("Replay to replay or quit to quit!", green, 100, size="smallFont")
# Text on the buttons
button("Replay", 25, 400, 100, 50, green, lightGreen, action="play")
button("Quit", 375, 400, 100, 50, green, lightGreen, action="quit")
pygame.display.update()
clock.tick(15)
def over(): # Function for game introduction screen
over = True
while over:
for event in pygame.event.get():
if event.type == pygame.QUIT:
pygame.quit()
quit()
gameDisplay.fill(black)
message_to_screen("Game over!", green, -100, size="largeFont")
message_to_screen("Your tank was destroyed!", green, 0, size="smallFont")
message_to_screen("Replay to replay or quit to quit!", green, 100, size="smallFont")
# Text on the buttons
button("Replay", 25, 400, 100, 50, green, lightGreen, action="play")
button("Quit", 375, 400, 100, 50, green, lightGreen, action="quit")
pygame.display.update()
clock.tick(15)
def health(playerHealth, enemyHealth, pX, eX): # Health bars
# Player health
if playerHealth > 50:
playerColour = lightGreen
else:
playerColour = green
# Enemy health
if enemyHealth > 50:
enemyColour = lightGreen
else:
enemyColour = green
# Draw the health bars
pygame.draw.rect(gameDisplay, playerColour, (pX - 100, display_height * .7, playerHealth, 10))
pygame.draw.rect(gameDisplay, enemyColour, (eX, display_height * .7, enemyHealth, 10))
def gameLoop(): # Main game loop
gameExit = False
gameOver = False
FPS = 15
# Tank positioning
mainTankX = display_width * .8
mainTankY = display_height * .8
tankMove = 0
curTurretPosition = 0
changeTurretPosition = 0
# Fire power
firePower = 50
change = 0
# enemyTank positioning
enemyTankX = display_width * .2
enemyTankY = display_height * .8
tankMove = 0
# Health
playerHealth = 100
enemyHealth = 100
while not gameExit:
if gameOver == True:
pygame.display.update()
while gameOver == True:
for event in pygame.event.get():
if event.type == pygame.QUIT:
gameExit = True
gameOver = False
for event in pygame.event.get():
if event.type == pygame.QUIT:
gameExit = True
# Movement for tank
if event.type == pygame.KEYDOWN:
if event.key == pygame.K_LEFT:
tankMove = -5
elif event.key == pygame.K_RIGHT:
tankMove = 5
elif event.key == pygame.K_UP:
changeTurretPosition = 1
elif event.key == pygame.K_DOWN:
changeTurretPosition = -1
elif event.key == pygame.K_p:
pause()
elif event.key == pygame.K_SPACE:
# Player's shot
damage = fire(bullet, curTurretPosition, firePower, enemyTankX, enemyTankY)
enemyHealth -= damage
# Enemy moves
movements = ['f', 'b']
move = random.randrange(0, 2)
for x in range(random.randrange(0, 10)):
if display_width * .33 > enemyTankX > display_width * .05:
if movements[move] == "f":
enemyTankX += 5
elif movements[move] == "r":
enemyTankX -= 5
# If the tank moves, re draw the screen
gameDisplay.fill(black)
health(playerHealth, enemyHealth, pX, eX)
bullet = tank(mainTankX, mainTankY, curTurretPosition)
enemyBullet = enemyTank(enemyTankX, enemyTankY, 8)
pygame.draw.rect(gameDisplay, green, (0, ground, display_width, 10))
pygame.display.update()
clock.tick(FPS)
# Enemy's shot
damage = enemyFire(enemyBullet, 8, 33, mainTankX, mainTankY)
playerHealth -= damage
elif event.key == pygame.K_a:
change = -1
elif event.key == pygame.K_d:
change = 1
# If user stops pressing the button, stop moving the tank
elif event.type == pygame.KEYUP:
if event.key == pygame.K_LEFT or event.key == pygame.K_RIGHT:
tankMove = 0
if event.key == pygame.K_UP or event.key == pygame.K_DOWN:
changeTurretPosition = 0
if event.key == pygame.K_a or event.key == pygame.K_d:
change = 0
# Draw the game screen
mainTankX += tankMove
pX = mainTankX
eX = enemyTankX
gameDisplay.fill(black)
health(playerHealth, enemyHealth, pX, eX)
bullet = tank(mainTankX, mainTankY, curTurretPosition)
enemyBullet = enemyTank(enemyTankX, enemyTankY, 8)
pygame.draw.rect(gameDisplay, green, (0, ground, display_width, 10))
# Change power of the bullet
firePower += change
if firePower <= 1:
firePower = 1
if firePower >= 100:
firePower = 100
power(firePower)
# Check if gameOver or gameWin
if playerHealth < 1:
over()
elif enemyHealth < 1:
gameWin()
# Turret positioning
curTurretPosition += changeTurretPosition
if curTurretPosition > 8:
curTurretPosition = 8
elif curTurretPosition < 0:
curTurretPosition = 0
# Avoid tank and walls collision
if mainTankX > display_width:
mainTankX -= 5
if mainTankX < display_width * .66:
mainTankX += 5
pygame.display.update()
clock.tick(FPS)
pygame.quit()
quit()
game_intro()
gameLoop()
class StartPage(tk.Frame):
def __init__(self, parent, controller):
tk.Frame.__init__(self, parent)
tk.Frame.configure(self, bg = "black")
radio = tk.Button(self, text ="RADIO", bg="black", fg="green", width = 10,
command = lambda: controller.show_frame(RadioPage))
radio.place(x = 15, y = 0)
map = tk.Button(self, text ="MAP", bg="black", fg="green", width = 10,
command = lambda: controller.show_frame(MapPage))
map.place(x = 95, y = 0)
data = tk.Button(self, text="DATA", bg="black", fg="green", width = 10,
command = lambda: controller.show_frame(DataPage))
data.place(x = 175, y = 0)
inv = tk.Button(self, text ="INV", bg="black", fg="green", width = 10,
command = lambda: controller.game())
inv.place(x = 255, y = 0)
stats = tk.Button(self, text ="STATS", bg="black", fg="green", width = 10,
command = lambda: controller.show_frame(StatsPage))
stats.place(x = 335, y = 0)
image = Image.open("Pip Boy Images\mrPip.gif")
photo = ImageTk.PhotoImage(image)
label = tk.Label(self, image = photo, bg = "black", fg = "white", height = 40, width = 40)
        label.image = photo #keeping reference
label.pack(side = BOTTOM, padx = 10, pady = 10)
#to make width for now
label = tk.Label(self, width = 60, bg = "black")
label.pack(side = BOTTOM, pady = 120)
class RadioPage(tk.Frame):
def __init__(self, parent, controller):
tk.Frame.__init__(self, parent)
tk.Frame.configure(self, bg = "black")
radio = tk.Button(self, text ="RADIO", bg="black", fg="green", width = 10,
command = lambda: controller.show_frame(RadioPage))
radio.place(x = 15, y = 0)
map = tk.Button(self, text ="MAP", bg="black", fg="green", width = 10,
command = lambda: controller.show_frame(MapPage))
map.place(x = 95, y = 0)
data = tk.Button(self, text="DATA", bg="black", fg="green", width = 10,
command = lambda: controller.show_frame(DataPage))
data.place(x = 175, y = 0)
inv = tk.Button(self, text ="INV", bg="black", fg="green", width = 10,
command = lambda: controller.game())
inv.place(x = 255, y = 0)
stats = tk.Button(self, text ="STATS", bg="black", fg="green", width = 10,
command = lambda: controller.show_frame(StatsPage))
stats.place(x = 335, y = 0)
#opening images for buttons
bonjovi1 = Image.open("coverart\Bonjovi.gif")
bonjovi = ImageTk.PhotoImage(bonjovi1)
toto1 = Image.open("coverart\Toto.gif")
toto = ImageTk.PhotoImage(toto1)
tameimpala1 = Image.open("coverart\Tameimpala.gif")
tameimpala = ImageTk.PhotoImage(tameimpala1)
dmx1 = Image.open("coverart\Dmx.gif")
dmx = ImageTk.PhotoImage(dmx1)
daftpunk1 = Image.open("coverart\Daftpunk.gif")
daftpunk = ImageTk.PhotoImage(daftpunk1)
gorrillaz1 = Image.open("coverart\Gorrillaz.gif")
gorrillaz = ImageTk.PhotoImage(gorrillaz1)
estelle1 = Image.open("coverart\estelle.gif")
estelle = ImageTk.PhotoImage(estelle1)
mgmt1 = Image.open("coverart\Mgmt.gif")
mgmt = ImageTk.PhotoImage(mgmt1)
saintmotel1 = Image.open("coverart\Saintmotel.gif")
saintmotel = ImageTk.PhotoImage(saintmotel1)
music1 = tk.Button(self, image = bonjovi, fg = "white", bg = "black", cursor = "hand2", width = 75, height = 75,
command = lambda: webbrowser.open_new(controller.music(song1)))
        music1.image = bonjovi #keeping reference
music1.place(x = 70, y = 70)
music2 = tk.Button(self, image = toto, bg = "black", fg = "white", cursor = "hand2", width = 75, height = 75,
command = lambda: webbrowser.open_new(controller.music(song2)))
music2.image = toto
music2.place(x = 70, y = 145)
music3 = tk.Button(self, image = tameimpala, bg = "black", fg = "white", cursor = "hand2", width = 75, height = 75,
command = lambda: webbrowser.open_new(controller.music(song3)))
music3.image = tameimpala
music3.place(x = 70, y = 220)
music4 = tk.Button(self, image = dmx, bg = "black", fg = "white", cursor = "hand2", width = 75, height = 75,
command = lambda: webbrowser.open_new(controller.music(song4)))
music4.image = dmx
music4.place(x = 175 , y = 70)
music5 = tk.Button(self, image = daftpunk, bg = "black", fg = "white", cursor = "hand2", width = 75, height = 75,
command = lambda: webbrowser.open_new(controller.music(song5)))
music5.image = daftpunk
music5.place( x = 175 , y = 145)
music6 = tk.Button(self, image = gorrillaz, bg = "black", fg = "white", cursor = "hand2", width = 75, height = 75,
command = lambda: webbrowser.open_new(controller.music(song6)))
music6.image = gorrillaz
music6.place(x = 175, y = 220)
music7 = tk.Button(self, image = estelle, bg = "black", fg = "white", cursor = "hand2", width = 75, height = 75,
command = lambda: webbrowser.open_new(controller.music(song7)))
music7.image = estelle
music7.place(x = 280, y = 70)
music8 = tk.Button(self, image = mgmt, bg = "black", fg = "white", cursor = "hand2", width = 75, height = 75,
command = lambda: webbrowser.open_new(controller.music(song8)))
music8.image = mgmt
music8.place(x = 280, y = 145)
music9 = tk.Button(self, image = saintmotel, bg = "black", fg = "white", cursor = "hand2", width = 75, height = 75,
command = lambda: webbrowser.open_new(controller.music(song9)))
music9.image = saintmotel
music9.place(x = 280, y = 220)
class MapPage(tk.Frame):
def __init__(self, parent, controller):
tk.Frame.__init__(self, parent)
tk.Frame.configure(self, bg = "black")
radio = tk.Button(self, text ="RADIO", bg="black", fg="green", width = 10,
command = lambda: controller.show_frame(RadioPage))
radio.place(x = 15, y = 0)
map = tk.Button(self, text ="MAP", bg="black", fg="green", width = 10,
command = lambda: controller.show_frame(MapPage))
map.place(x = 95, y = 0)
data = tk.Button(self, text="DATA", bg="black", fg="green", width = 10,
command = lambda: controller.show_frame(DataPage))
data.place(x = 175, y = 0)
inv = tk.Button(self, text ="INV", bg="black", fg="green", width = 10,
command = lambda: controller.game())
inv.place(x = 255, y = 0)
stats = tk.Button(self, text ="STATS", bg="black", fg="green", width = 10,
command = lambda: controller.show_frame(StatsPage))
stats.place(x = 335, y = 0)
label = tk.Label(self, text = "map functionality", bg = "black", fg = "white")
label.pack(side = BOTTOM)
global entryWidget2
global mapLabel
# Create a text frame to hold the text Label and the Entry widget
textFrame = Frame(self)
#Create a Label in textFrame
entryLabel = Label(self)
entryLabel["text"] = "Where are you?"
entryLabel.pack(side=LEFT)
# Create an Entry Widget in textFrame
entryWidget2 = Entry(self)
entryWidget2["width"] = 50
entryWidget2.pack(side=LEFT)
textFrame.pack()
mapLabel = Label(self)
button = Button(self, text="Submit", command=controller.get_coordinates)
button.pack(side=BOTTOM)
class DataPage(tk.Frame):
def __init__(self, parent, controller):
tk.Frame.__init__(self, parent)
tk.Frame.configure(self, bg = "black")
radio = tk.Button(self, text ="RADIO", bg="black", fg="green", width = 10,
command = lambda: controller.show_frame(RadioPage))
radio.place(x = 15, y = 0)
map = tk.Button(self, text ="MAP", bg="black", fg="green", width = 10,
command = lambda: controller.show_frame(MapPage))
map.place(x = 95, y = 0)
data = tk.Button(self, text="DATA", bg="black", fg="green", width = 10,
command = lambda: controller.show_frame(DataPage))
data.place(x = 175, y = 0)
inv = tk.Button(self, text ="INV", bg="black", fg="green", width = 10,
command = lambda: controller.game())
inv.place(x = 255, y = 0)
stats = tk.Button(self, text ="STATS", bg="black", fg="green", width = 10,
command = lambda: controller.show_frame(StatsPage))
stats.place(x = 335, y = 0)
global entryWidget
#Create a Label in textFrame
#controller.showTweets(controller.getTweets(), numberOfTweets)
entryLabel = Label(self)
entryLabel["text"] = "Make a new Tweet:"
entryLabel.pack(side = LEFT)
# Create an Entry Widget in textFrame
entryWidget = Entry(self)
entryWidget["width"] = 50
entryWidget.pack(side=LEFT)
buttonGet = Button(self, text="Get Tweets", command = lambda: controller.showTweets(controller.getTweets(), numberOfTweets))
buttonGet.pack()
button = Button(self, text="Submit", command = controller.tweet)
button.pack()
class InvPage(tk.Frame):
def __init__(self, parent, controller):
tk.Frame.__init__(self, parent)
tk.Frame.configure(self, bg = "black")
radio = tk.Button(self, text ="RADIO", bg="black", fg="green", width = 10,
command = lambda: controller.show_frame(RadioPage))
radio.place(x = 15, y = 0)
map = tk.Button(self, text ="MAP", bg="black", fg="green", width = 10,
command = lambda: controller.show_frame(MapPage))
map.place(x = 95, y = 0)
data = tk.Button(self, text="DATA", bg="black", fg="green", width = 10,
command = lambda: controller.show_frame(DataPage))
data.place(x = 175, y = 0)
inv = tk.Button(self, text ="INV", bg="black", fg="green", width = 10,
command = lambda: controller.game())
inv.place(x = 255, y = 0)
stats = tk.Button(self, text ="STATS", bg="black", fg="green", width = 10,
command = lambda: controller.show_frame(StatsPage))
stats.place(x = 335, y = 0)
class StatsPage(tk.Frame):
def __init__(self, parent, controller):
tk.Frame.__init__(self, parent)
tk.Frame.configure(self, bg = "black")
radio = tk.Button(self, text ="RADIO", bg="black", fg="green", width = 10,
command = lambda: controller.show_frame(RadioPage))
radio.place(x = 15, y = 0)
map = tk.Button(self, text ="MAP", bg="black", fg="green", width = 10,
command = lambda: controller.show_frame(MapPage))
map.place(x = 95, y = 0)
data = tk.Button(self, text="DATA", bg="black", fg="green", width = 10,
command = lambda: controller.show_frame(DataPage))
data.place(x = 175, y = 0)
inv = tk.Button(self, text ="INV", bg="black", fg="green", width = 10,
command = lambda: controller.game())
inv.place(x = 255, y = 0)
stats = tk.Button(self, text ="STATS", bg="black", fg="green", width = 10,
command = lambda: controller.show_frame(StatsPage))
stats.place(x = 335, y = 0)
#new buttons
strength = tk.Button(self, text ="STRENGTH", bg="black", fg="green", width = 20,
command = lambda: self.ImageShow("Pip Boy Images\Strength.gif"))
strength.place(x = 35, y = 50)
perception = tk.Button(self, text ="PERCEPTION", bg="black", fg="green", width = 20,
command = lambda: self.ImageShow("Pip Boy Images\Perception.gif"))
perception.place(x = 35, y = 75)
endurance = tk.Button(self, text ="ENDURANCE", bg="black", fg="green", width = 20,
command = lambda: self.ImageShow("Pip Boy Images\Endurance.gif"))
endurance.place(x = 35, y = 100)
charisma = tk.Button(self, text ="CHARISMA", bg="black", fg="green", width = 20,
command = lambda: self.ImageShow("Pip Boy Images\Charisma.gif"))
charisma.place(x = 35, y = 125)
intelligence = tk.Button(self, text ="INTELLIGENCE", bg="black", fg="green", width = 20,
command = lambda: self.ImageShow("Pip Boy Images\Intelligence.gif"))
intelligence.place(x = 35, y = 150)
agility = tk.Button(self, text ="AGILITY", bg="black", fg="green", width = 20,
command = lambda: self.ImageShow("Pip Boy Images\Agility.gif"))
agility.place(x = 35, y = 175)
luck = tk.Button(self, text ="LUCK", bg="black", fg="green", width = 20,
command = lambda: self.ImageShow("Pip Boy Images\Luck.gif"))
luck.place(x = 35, y = 200)
def ImageShow(self, path):
label = tk.Label(self, bg = "black", width = 40, height = 40)
label.place(x = 215, y = 75)
image = Image.open(path)
photo = ImageTk.PhotoImage(image)
label = tk.Label(self, image = photo, bg = "black", fg = "white")
        label.image = photo #keeping reference
label.place(x = 200, y = 75)
app = SetUp()
app.mainloop()
| AndrewKLeech/Pip-Boy | Game.py | Python | mit | 42161 |
# encoding: utf-8
"""
Plot-related objects. A plot is known as a chart group in the MS API. A chart
can have more than one plot overlayed on each other, such as a line plot
layered over a bar plot.
"""
from __future__ import absolute_import, print_function, unicode_literals
from .category import Categories
from .datalabel import DataLabels
from ..enum.chart import XL_CHART_TYPE as XL
from ..oxml.ns import qn
from ..oxml.simpletypes import ST_BarDir, ST_Grouping
from .series import SeriesCollection
from ..util import lazyproperty
class _BasePlot(object):
"""
A distinct plot that appears in the plot area of a chart. A chart may
have more than one plot, in which case they appear as superimposed
layers, such as a line plot appearing on top of a bar chart.
"""
def __init__(self, xChart, chart):
super(_BasePlot, self).__init__()
self._element = xChart
self._chart = chart
@lazyproperty
def categories(self):
"""
Returns a |category.Categories| sequence object containing
a |category.Category| object for each of the category labels
associated with this plot. The |category.Category| class derives from
``str``, so the returned value can be treated as a simple sequence of
strings for the common case where all you need is the labels in the
order they appear on the chart. |category.Categories| provides
additional properties for dealing with hierarchical categories when
required.
"""
return Categories(self._element)
@property
def chart(self):
"""
The |Chart| object containing this plot.
"""
return self._chart
@property
def data_labels(self):
"""
|DataLabels| instance providing properties and methods on the
collection of data labels associated with this plot.
"""
dLbls = self._element.dLbls
if dLbls is None:
raise ValueError(
"plot has no data labels, set has_data_labels = True first"
)
return DataLabels(dLbls)
@property
def has_data_labels(self):
"""
Read/write boolean, |True| if the series has data labels. Assigning
        |True| causes data labels to be added to the plot. Assigning |False|
removes any existing data labels.
"""
return self._element.dLbls is not None
@has_data_labels.setter
def has_data_labels(self, value):
"""
Add, remove, or leave alone the ``<c:dLbls>`` child element depending
on current state and assigned *value*. If *value* is |True| and no
``<c:dLbls>`` element is present, a new default element is added with
default child elements and settings. When |False|, any existing dLbls
element is removed.
"""
if bool(value) is False:
self._element._remove_dLbls()
else:
if self._element.dLbls is None:
dLbls = self._element._add_dLbls()
dLbls.showVal.val = True
@lazyproperty
def series(self):
"""
A sequence of |Series| objects representing the series in this plot,
in the order they appear in the plot.
"""
return SeriesCollection(self._element)
@property
def vary_by_categories(self):
"""
Read/write boolean value specifying whether to use a different color
for each of the points in this plot. Only effective when there is
a single series; PowerPoint automatically varies color by series when
more than one series is present.
"""
varyColors = self._element.varyColors
if varyColors is None:
return True
return varyColors.val
@vary_by_categories.setter
def vary_by_categories(self, value):
self._element.get_or_add_varyColors().val = bool(value)
class AreaPlot(_BasePlot):
"""
An area plot.
"""
class Area3DPlot(_BasePlot):
"""
A 3-dimensional area plot.
"""
class BarPlot(_BasePlot):
"""
A bar chart-style plot.
"""
@property
def gap_width(self):
"""
Width of gap between bar(s) of each category, as an integer
percentage of the bar width. The default value for a new bar chart is
150, representing 150% or 1.5 times the width of a single bar.
"""
gapWidth = self._element.gapWidth
if gapWidth is None:
return 150
return gapWidth.val
@gap_width.setter
def gap_width(self, value):
gapWidth = self._element.get_or_add_gapWidth()
gapWidth.val = value
@property
def overlap(self):
"""
Read/write int value in range -100..100 specifying a percentage of
the bar width by which to overlap adjacent bars in a multi-series bar
chart. Default is 0. A setting of -100 creates a gap of a full bar
width and a setting of 100 causes all the bars in a category to be
superimposed. A stacked bar plot has overlap of 100 by default.
"""
overlap = self._element.overlap
if overlap is None:
return 0
return overlap.val
@overlap.setter
def overlap(self, value):
"""
        Set the value of the ``<c:overlap>`` child element to *value*,
        or remove the overlap element if *value* is 0.
"""
if value == 0:
self._element._remove_overlap()
return
self._element.get_or_add_overlap().val = value
class BubblePlot(_BasePlot):
"""
A bubble chart plot.
"""
@property
def bubble_scale(self):
"""
An integer between 0 and 300 inclusive indicating the percentage of
the default size at which bubbles should be displayed. Assigning
|None| produces the same behavior as assigning `100`.
"""
bubbleScale = self._element.bubbleScale
if bubbleScale is None:
return 100
return bubbleScale.val
@bubble_scale.setter
def bubble_scale(self, value):
bubbleChart = self._element
bubbleChart._remove_bubbleScale()
if value is None:
return
bubbleScale = bubbleChart._add_bubbleScale()
bubbleScale.val = value
class DoughnutPlot(_BasePlot):
"""
    A doughnut plot.
"""
class LinePlot(_BasePlot):
"""
A line chart-style plot.
"""
class PiePlot(_BasePlot):
"""
A pie chart-style plot.
"""
class RadarPlot(_BasePlot):
"""
A radar-style plot.
"""
class XyPlot(_BasePlot):
"""
An XY (scatter) plot.
"""
def PlotFactory(xChart, chart):
"""
Return an instance of the appropriate subclass of _BasePlot based on the
tagname of *xChart*.
"""
try:
PlotCls = {
qn("c:areaChart"): AreaPlot,
qn("c:area3DChart"): Area3DPlot,
qn("c:barChart"): BarPlot,
qn("c:bubbleChart"): BubblePlot,
qn("c:doughnutChart"): DoughnutPlot,
qn("c:lineChart"): LinePlot,
qn("c:pieChart"): PiePlot,
qn("c:radarChart"): RadarPlot,
qn("c:scatterChart"): XyPlot,
}[xChart.tag]
except KeyError:
raise ValueError("unsupported plot type %s" % xChart.tag)
return PlotCls(xChart, chart)
class PlotTypeInspector(object):
"""
"One-shot" service object that knows how to identify the type of a plot
as a member of the XL_CHART_TYPE enumeration.
"""
@classmethod
def chart_type(cls, plot):
"""
Return the member of :ref:`XlChartType` that corresponds to the chart
type of *plot*.
"""
try:
chart_type_method = {
"AreaPlot": cls._differentiate_area_chart_type,
"Area3DPlot": cls._differentiate_area_3d_chart_type,
"BarPlot": cls._differentiate_bar_chart_type,
"BubblePlot": cls._differentiate_bubble_chart_type,
"DoughnutPlot": cls._differentiate_doughnut_chart_type,
"LinePlot": cls._differentiate_line_chart_type,
"PiePlot": cls._differentiate_pie_chart_type,
"RadarPlot": cls._differentiate_radar_chart_type,
"XyPlot": cls._differentiate_xy_chart_type,
}[plot.__class__.__name__]
except KeyError:
raise NotImplementedError(
"chart_type() not implemented for %s" % plot.__class__.__name__
)
return chart_type_method(plot)
@classmethod
def _differentiate_area_3d_chart_type(cls, plot):
return {
ST_Grouping.STANDARD: XL.THREE_D_AREA,
ST_Grouping.STACKED: XL.THREE_D_AREA_STACKED,
ST_Grouping.PERCENT_STACKED: XL.THREE_D_AREA_STACKED_100,
}[plot._element.grouping_val]
@classmethod
def _differentiate_area_chart_type(cls, plot):
return {
ST_Grouping.STANDARD: XL.AREA,
ST_Grouping.STACKED: XL.AREA_STACKED,
ST_Grouping.PERCENT_STACKED: XL.AREA_STACKED_100,
}[plot._element.grouping_val]
@classmethod
def _differentiate_bar_chart_type(cls, plot):
barChart = plot._element
if barChart.barDir.val == ST_BarDir.BAR:
return {
ST_Grouping.CLUSTERED: XL.BAR_CLUSTERED,
ST_Grouping.STACKED: XL.BAR_STACKED,
ST_Grouping.PERCENT_STACKED: XL.BAR_STACKED_100,
}[barChart.grouping_val]
if barChart.barDir.val == ST_BarDir.COL:
return {
ST_Grouping.CLUSTERED: XL.COLUMN_CLUSTERED,
ST_Grouping.STACKED: XL.COLUMN_STACKED,
ST_Grouping.PERCENT_STACKED: XL.COLUMN_STACKED_100,
}[barChart.grouping_val]
raise ValueError("invalid barChart.barDir value '%s'" % barChart.barDir.val)
@classmethod
def _differentiate_bubble_chart_type(cls, plot):
def first_bubble3D(bubbleChart):
results = bubbleChart.xpath("c:ser/c:bubble3D")
return results[0] if results else None
bubbleChart = plot._element
bubble3D = first_bubble3D(bubbleChart)
if bubble3D is None:
return XL.BUBBLE
if bubble3D.val:
return XL.BUBBLE_THREE_D_EFFECT
return XL.BUBBLE
@classmethod
def _differentiate_doughnut_chart_type(cls, plot):
doughnutChart = plot._element
explosion = doughnutChart.xpath("./c:ser/c:explosion")
return XL.DOUGHNUT_EXPLODED if explosion else XL.DOUGHNUT
@classmethod
def _differentiate_line_chart_type(cls, plot):
lineChart = plot._element
def has_line_markers():
matches = lineChart.xpath('c:ser/c:marker/c:symbol[@val="none"]')
if matches:
return False
return True
if has_line_markers():
return {
ST_Grouping.STANDARD: XL.LINE_MARKERS,
ST_Grouping.STACKED: XL.LINE_MARKERS_STACKED,
ST_Grouping.PERCENT_STACKED: XL.LINE_MARKERS_STACKED_100,
}[plot._element.grouping_val]
else:
return {
ST_Grouping.STANDARD: XL.LINE,
ST_Grouping.STACKED: XL.LINE_STACKED,
ST_Grouping.PERCENT_STACKED: XL.LINE_STACKED_100,
}[plot._element.grouping_val]
@classmethod
def _differentiate_pie_chart_type(cls, plot):
pieChart = plot._element
explosion = pieChart.xpath("./c:ser/c:explosion")
return XL.PIE_EXPLODED if explosion else XL.PIE
@classmethod
def _differentiate_radar_chart_type(cls, plot):
radarChart = plot._element
radar_style = radarChart.xpath("c:radarStyle")[0].get("val")
def noMarkers():
matches = radarChart.xpath("c:ser/c:marker/c:symbol")
if matches and matches[0].get("val") == "none":
return True
return False
if radar_style is None:
return XL.RADAR
if radar_style == "filled":
return XL.RADAR_FILLED
if noMarkers():
return XL.RADAR
return XL.RADAR_MARKERS
@classmethod
def _differentiate_xy_chart_type(cls, plot):
scatterChart = plot._element
def noLine():
return bool(scatterChart.xpath("c:ser/c:spPr/a:ln/a:noFill"))
def noMarkers():
symbols = scatterChart.xpath("c:ser/c:marker/c:symbol")
if symbols and symbols[0].get("val") == "none":
return True
return False
scatter_style = scatterChart.xpath("c:scatterStyle")[0].get("val")
if scatter_style == "lineMarker":
if noLine():
return XL.XY_SCATTER
if noMarkers():
return XL.XY_SCATTER_LINES_NO_MARKERS
return XL.XY_SCATTER_LINES
if scatter_style == "smoothMarker":
if noMarkers():
return XL.XY_SCATTER_SMOOTH_NO_MARKERS
return XL.XY_SCATTER_SMOOTH
return XL.XY_SCATTER
| scanny/python-pptx | pptx/chart/plot.py | Python | mit | 13,213 |
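A short usage sketch exercising the plot API above through python-pptx's public interface; the sample data and output filename are arbitrary:
from pptx import Presentation
from pptx.chart.data import CategoryChartData
from pptx.enum.chart import XL_CHART_TYPE
from pptx.util import Inches

prs = Presentation()
slide = prs.slides.add_slide(prs.slide_layouts[5])  # title-only layout

chart_data = CategoryChartData()
chart_data.categories = ["East", "West", "Midwest"]
chart_data.add_series("Series 1", (19.2, 21.4, 16.7))

graphic_frame = slide.shapes.add_chart(
    XL_CHART_TYPE.COLUMN_CLUSTERED,
    Inches(1), Inches(1), Inches(8), Inches(5),
    chart_data,
)
plot = graphic_frame.chart.plots[0]   # a BarPlot instance from plot.py above

plot.gap_width = 100         # narrower gap between category bar groups
plot.has_data_labels = True  # adds a default <c:dLbls> element
plot.vary_by_categories = False

# chart_type round-trips through PlotTypeInspector internally
print(graphic_frame.chart.chart_type)  # -> COLUMN_CLUSTERED

prs.save("chart_demo.pptx")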
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import codecs
import os
import re
import sys
from setuptools import setup
root_dir = os.path.abspath(os.path.dirname(__file__))
def get_version(package_name):
version_re = re.compile(r"^__version__ = [\"']([\w_.-]+)[\"']$")
package_components = package_name.split('.')
init_path = os.path.join(root_dir, *(package_components + ['__init__.py']))
with codecs.open(init_path, 'r', 'utf-8') as f:
for line in f:
match = version_re.match(line[:-1])
if match:
return match.groups()[0]
return '0.1.0'
if sys.version_info[0:2] < (2, 7): # pragma: no cover
test_loader = 'unittest2:TestLoader'
else:
test_loader = 'unittest:TestLoader'
PACKAGE = 'factory'
setup(
name='factory_boy',
version=get_version(PACKAGE),
description="A versatile test fixtures replacement based on thoughtbot's factory_bot for Ruby.",
long_description=codecs.open(os.path.join(root_dir, 'README.rst'), 'r', 'utf-8').read(),
author='Mark Sandstrom',
author_email='[email protected]',
maintainer='Raphaël Barrois',
maintainer_email='[email protected]',
url='https://github.com/FactoryBoy/factory_boy',
keywords=['factory_boy', 'factory', 'fixtures'],
packages=['factory'],
zip_safe=False,
license='MIT',
python_requires=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*",
install_requires=[
'Faker>=0.7.0',
],
setup_requires=[
'setuptools>=0.8',
],
tests_require=[
#'mock',
],
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"Framework :: Django",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: Implementation :: PyPy",
"Topic :: Software Development :: Testing",
"Topic :: Software Development :: Libraries :: Python Modules",
],
test_suite='tests',
test_loader=test_loader,
)
| rrauenza/factory_boy | setup.py | Python | mit | 2,431 |
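A quick illustration of how get_version()'s regex extracts the version string from a package __init__.py; the value '2.11.1' is illustrative, not an asserted release number:
import re

version_re = re.compile(r"^__version__ = [\"']([\w_.-]+)[\"']$")

sample_line = "__version__ = '2.11.1'\n"           # illustrative line
match = version_re.match(sample_line[:-1])         # [:-1] strips the newline, as in setup.py
print(match.groups()[0])                           # -> 2.11.1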
from tornado.httpclient import HTTPRequest, HTTPError
import ujson
import abc
import socket
from urllib import parse
from .. import admin as a
from ..social import SocialNetworkAPI, APIError, SocialPrivateKey
class XsollaAPI(SocialNetworkAPI, metaclass=abc.ABCMeta):
XSOLLA_API = "https://api.xsolla.com"
NAME = "xsolla"
def __init__(self, cache):
super(XsollaAPI, self).__init__(XsollaAPI.NAME, cache)
async def api_get(self, operation, merchant_id, api_key, **kwargs):
request = HTTPRequest(
XsollaAPI.XSOLLA_API + "/merchant/merchants/" +
str(merchant_id) + "/" + operation + "?" + parse.urlencode(kwargs),
method="GET",
auth_mode="basic",
auth_username=str(merchant_id),
auth_password=str(api_key),
headers={
"Content-Type": "application/json",
"Accept": "application/json"
})
result = await self.client.fetch(request)
try:
response_object = ujson.loads(result.body)
except (KeyError, ValueError):
raise APIError(500, "Corrupted xsolla response")
return response_object
async def api_post(self, operation, merchant_id, api_key, **kwargs):
request = HTTPRequest(
XsollaAPI.XSOLLA_API + "/merchant/merchants/" + str(merchant_id) + "/" + operation,
body=ujson.dumps(kwargs),
method="POST",
auth_mode="basic",
auth_username=str(merchant_id),
auth_password=str(api_key),
headers={
"Content-Type": "application/json",
"Accept": "application/json"
})
try:
result = await self.client.fetch(request)
except socket.error as e:
raise APIError(500, "Connection error: " + str(e))
except HTTPError as e:
try:
parsed = ujson.loads(e.response.body)
except (KeyError, ValueError):
raise APIError(e.code, "Internal API error")
else:
code = parsed.get("http_status_code", e.code)
message = parsed.get("message", "Internal API error")
raise APIError(code, message)
try:
response_object = ujson.loads(result.body)
except (KeyError, ValueError):
raise APIError(500, "Corrupted xsolla response")
return response_object
def has_private_key(self):
return True
def new_private_key(self, data):
return XsollaPrivateKey(data)
class XsollaPrivateKey(SocialPrivateKey):
def __init__(self, key):
super(XsollaPrivateKey, self).__init__(key)
self.api_key = self.data["api_key"] if self.data else None
self.project_key = self.data["project_key"] if self.data else None
self.merchant_id = self.data["merchant_id"] if self.data else None
def get_app_id(self):
return self.merchant_id
def dump(self):
return {
"api_key": self.api_key,
"project_key": self.project_key,
"merchant_id": self.merchant_id,
}
def has_ui(self):
return True
def get(self):
return {
"api_key": self.api_key,
"project_key": self.project_key,
"merchant_id": self.merchant_id
}
def render(self):
        return {
            "merchant_id": a.field(
                "Merchant ID", "text", "primary", "non-empty",
                order=1),
            "project_key": a.field(
                "Project Key", "text", "primary", "non-empty",
                order=2),
            "api_key": a.field(
                "API Key", "text", "primary", "non-empty",
                order=3)
        }
def update(self, merchant_id, project_key, api_key, **ignored):
self.merchant_id = merchant_id
self.project_key = project_key
self.api_key = api_key
| anthill-services/anthill-common | anthill/common/social/xsolla.py | Python | mit | 4,007 |
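A reduced sketch of the basic-auth JSON round-trip that api_post() performs, written against plain Tornado; the merchant id, API key, endpoint, and payload shape below are placeholder assumptions, not values from the original module:
import ujson
from tornado.httpclient import AsyncHTTPClient, HTTPRequest
from tornado.ioloop import IOLoop

MERCHANT_ID = 12345          # placeholder
API_KEY = "secret-api-key"   # placeholder

async def create_token():
    request = HTTPRequest(
        "https://api.xsolla.com/merchant/merchants/%d/token" % MERCHANT_ID,
        method="POST",
        body=ujson.dumps({"user": {"id": {"value": "user-1"}}}),  # illustrative payload
        auth_mode="basic",
        auth_username=str(MERCHANT_ID),
        auth_password=API_KEY,
        headers={"Content-Type": "application/json", "Accept": "application/json"},
    )
    result = await AsyncHTTPClient().fetch(request)
    return ujson.loads(result.body)

# IOLoop.current().run_sync(create_token)  # requires valid credentials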
"""Constants for Sonarr."""
DOMAIN = "sonarr"
# Config Keys
CONF_BASE_PATH = "base_path"
CONF_DAYS = "days"
CONF_INCLUDED = "include_paths"
CONF_UNIT = "unit"
CONF_UPCOMING_DAYS = "upcoming_days"
CONF_WANTED_MAX_ITEMS = "wanted_max_items"
# Data
DATA_HOST_CONFIG = "host_config"
DATA_SONARR = "sonarr"
DATA_SYSTEM_STATUS = "system_status"
# Defaults
DEFAULT_UPCOMING_DAYS = 1
DEFAULT_VERIFY_SSL = False
DEFAULT_WANTED_MAX_ITEMS = 50
| rohitranjan1991/home-assistant | homeassistant/components/sonarr/const.py | Python | mit | 436 |
import numpy as np
# sigmoid function
def nonlin(x, deriv=False):
    if deriv:
        return x*(1-x)
    return 1/(1+np.exp(-x))
# input dataset
X = np.array([[0,0,1],
[0,1,1],
[1,0,1],
[1,1,1]])
# output dataset
y = np.array([[0,0,1,1]]).T
# seed random numbers to make calculation
# deterministic (just a good practice)
np.random.seed(1)
# initialize weights randomly with mean 0
syn0 = 2*np.random.random((3,1)) - 1
for step in range(10000):
# forward propagation
l0 = X
l1 = nonlin(np.dot(l0,syn0))
# how much did we miss?
l1_error = y - l1
# multiply how much we missed by the
# slope of the sigmoid at the values in l1
l1_delta = l1_error * nonlin(l1,True)
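    # nonlin(l1, True) returns l1*(1-l1); that equals the sigmoid derivative here
    # only because l1 is already a sigmoid output: sigma'(z) = y*(1-y) with y = sigma(z)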
# update weights
syn0 += np.dot(l0.T,l1_delta)
print("Output After Training:")
print(l1)
| jatinmistry13/BasicNeuralNetwork | two_layer_neural_network.py | Python | mit | 877 |
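A small numeric check, under arbitrary but generous tolerances, of the identity the training loop above leans on, sigma'(z) = y*(1-y):
import numpy as np

def sigmoid(z):
    return 1.0 / (1.0 + np.exp(-z))

z = np.linspace(-4.0, 4.0, 9)
y = sigmoid(z)

analytic = y * (1.0 - y)                                  # what nonlin(y, deriv=True) computes
eps = 1e-6
numeric = (sigmoid(z + eps) - sigmoid(z - eps)) / (2.0 * eps)  # central difference

assert np.allclose(analytic, numeric, atol=1e-8)
print("max abs error:", np.abs(analytic - numeric).max())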