'''
This is a framework for a bot configured for Twitch.
Using it will require some knowledge of Python, since there are no commands
to begin with. Once the username, channels and OAuth token have been filled
out, the program will simply print out the chat of the channels it connects to.
You can send a message by using the following function:
sendmsg(chan, msg)
--
chan = The channel you want to send the message to; make sure it has a #
in front of it (string)
msg = The message you want to send to the channel, must be a string
--
sendwhis(user, msg)
--
user = The username of the person you want to send the message to (string)
msg = The message you want to send to the user, must be a string
--
'''
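# A minimal usage sketch (channel and user names below are assumed examples,
# shown only as comments so the framework itself stays unchanged): once the
# bot is connected you could call
#   sendmsg('#somechannel', 'Hello chat!')      # channel needs the leading '#'
#   sendwhis('someuser', 'Hello from the bot!')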
# Import necessary libraries.
import datetime
import socket
import select
import re
''' Change the following settings if you wish to run the program '''
channels = [
'CHANNEL_NAME',
'ANOTHER_CHANNEL_NAME'
]
username = 'USERNAME'
oauth = 'OAUTH_TOKEN'
# Definitions to use while connected
def ping():
''' Respond to the server's PING to keep the connection alive '''
socks[0].send('PONG :pingis\n')
print('PONG: Client > tmi.twitch.tv')
def sendmsg(chan,msg):
''' Send specified message to the channel '''
socks[0].send('PRIVMSG '+chan+' :'+msg+'\n')
print('[BOT] -> '+chan+': '+msg+'\n')
def sendwhis(user,msg):
socks[1].send('PRIVMSG #jtv :/w '+user+' '+msg+'\n')
print('[BOT] -> '+user+': '+msg+'\n')
def getmsg(msg):
''' GET IMPORTANT MESSAGE '''
if(re.findall('@(.*).tmi.twitch.tv PRIVMSG (.*) :(.*)',msg)):
msg_edit = msg.split(':',2)
if(len(msg_edit) > 2):
user = msg_edit[1].split('!',1)[0] # User
message = msg_edit[2] # Message
channel = re.findall('PRIVMSG (.*)',msg_edit[1]) # Channel
privmsg = re.findall('@(.*).tmi.twitch.tv PRIVMSG (.*) :(.*)',msg)
''' CONVERT TO ARRAY '''
privmsg = [x for xs in privmsg for x in xs]
datelog = datetime.datetime.now()
''' PRINT TO CONSOLE '''
if(len(privmsg) > 0):
print('['+str(datelog.hour)+':'+str(datelog.minute)+':'+str(datelog.second)+'] '+user+' @ '+channel[0][:-1]+': '+message)
if(re.findall('@(.*).tmi.twitch.tv WHISPER (.*) :(.*)',msg)):
whisper = re.findall('@(.*).tmi.twitch.tv WHISPER (.*) :(.*)',msg)
whisper = [x for xs in whisper for x in xs]
''' PRINT TO CONSOLE '''
if(len(whisper) > 0):
''' PRINT WHISPER TO CONSOLE '''
print('*WHISPER* '+whisper[0]+': '+whisper[2])
# Connect to the server using the provided details
socks = [socket.socket(),socket.socket()]
''' Connect to the server using port 6667 & 443 '''
socks[0].connect(('irc.twitch.tv',6667))
#socks[1].connect(('GROUP_CHAT_IP',GROUP_CHAT_PORT))
'''Authenticate with the server '''
socks[0].send('PASS '+oauth+'\n')
#socks[1].send('PASS OAUTH_TOKEN\n')
''' Assign the client with the nick '''
socks[0].send('NICK '+username+'\n')
#socks[1].send('NICK USER\n')
''' Join the specified channel '''
for val in channels:
socks[0].send('JOIN #'+val+'\n')
#socks[1].send('JOIN GROUP_CHAT_CHANNEL\n')
''' Send special requests to the server '''
# Used to receive and send whispers!
#socks[1].send('CAP REQ :twitch.tv/commands\n')
print('Connected to irc.twitch.tv on port 6667')
print('USER: '+username)
print('OAUTH: oauth:'+'*'*30)
print('\n')
temp = 0
while True:
(sread,swrite,sexc) = select.select(socks,socks,[],120)
for sock in sread:
''' Receive data from the server '''
msg = sock.recv(2048)
if(msg == ''):
temp += 1
if(temp > 5):
print('Connection might have been terminated')
''' Remove any linebreaks from the message '''
msg = msg.strip('\n\r')
''' DISPLAY MESSAGE IN SHELL '''
getmsg(msg)
#print(msg)
# ANYTHING TO DO WITH CHAT FROM CHANNELS
''' GET THE INFO FROM THE SERVER '''
check = re.findall('@(.*).tmi.twitch.tv PRIVMSG (.*) :(.*)',msg)
if(len(check) > 0):
msg_edit = msg.split(':',2)
if(len(msg_edit) > 2):
user = msg_edit[1].split('!',1)[0] # User
message = msg_edit[2] # Message
channel = msg_edit[1].split(' ',2)[2][:-1] # Channel
msg_split = str.split(message)
# ANYTHING TO DO WITH WHISPERS RECEIVED FROM USERS
check = re.findall('@(.*).tmi.twitch.tv WHISPER (.*) :(.*)',msg)
if(len(check) > 0):
msg_edit = msg.split(':',2)
if(len(msg_edit) > 2):
user = msg_edit[1].split('!',1)[0] # User
message = msg_edit[2] # Message
channel = msg_edit[1].split(' ',2)[2][:-1] # Channel
whis_split = str.split(message)
''' Respond to server pings '''
if msg.find('PING :') != -1:
print('PING: tmi.twitch.tv > Client')
ping()
| {
"content_hash": "15d269d3be712b71cb70431910a76363",
"timestamp": "",
"source": "github",
"line_count": 162,
"max_line_length": 137,
"avg_line_length": 31.561728395061728,
"alnum_prop": 0.5579894386857032,
"repo_name": "RubbixCube/Twitch-Chat-Bot-V2",
"id": "fe47cf2f67f7892a9a8554a33c9218309b84d848",
"size": "5132",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "bot.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "5132"
}
],
"symlink_target": ""
} |
from sklearn2sql_heroku.tests.classification import generic as class_gen
class_gen.test_model("DummyClassifier" , "BinaryClass_500" , "postgresql")
| {
"content_hash": "476821bb6f1c1d795dbaf9be544fd933",
"timestamp": "",
"source": "github",
"line_count": 4,
"max_line_length": 74,
"avg_line_length": 37.5,
"alnum_prop": 0.7933333333333333,
"repo_name": "antoinecarme/sklearn2sql_heroku",
"id": "065badaab9c5afea9bcc732b38431f1b415ed0eb",
"size": "150",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/classification/BinaryClass_500/ws_BinaryClass_500_DummyClassifier_postgresql_code_gen.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "507043"
},
{
"name": "Procfile",
"bytes": "37"
},
{
"name": "Python",
"bytes": "1021137"
},
{
"name": "R",
"bytes": "2521"
}
],
"symlink_target": ""
} |
import sublime
import sublime_plugin
from ..anaconda_lib.worker import Worker
from ..anaconda_lib.typing import Dict, Any
from ..anaconda_lib.callback import Callback
from ..anaconda_lib.helpers import get_settings
class AnacondaMcCabe(sublime_plugin.WindowCommand):
"""Execute McCabe complexity checker
"""
def run(self) -> None:
view = self.window.active_view()
code = view.substr(sublime.Region(0, view.size()))
data = {
'code': code,
'threshold': get_settings(view, 'mccabe_threshold', 7),
'filename': view.file_name(),
'method': 'mccabe',
'handler': 'qa'
}
Worker().execute(Callback(on_success=self.prepare_data), **data)
def is_enabled(self) -> bool:
"""Determine if this command is enabled or not
"""
view = self.window.active_view()
location = view.sel()[0].begin()
matcher = 'source.python'
return view.match_selector(location, matcher)
def prepare_data(self, data: Dict[str, Any]) -> None:
"""Prepare the data to present in the quick panel
"""
if not data['success'] or data['errors'] is None:
sublime.status_message('Unable to run McCabe checker...')
return
if len(data['errors']) == 0:
view = self.window.active_view()
threshold = get_settings(view, 'mccabe_threshold', 7)
sublime.status_message(
'No code complexity beyond {} was found'.format(threshold)
)
self._show_options(data['errors'])
def _show_options(self, options: Dict[str, Any]) -> None:
"""Show a dropdown quickpanel with options to jump
"""
self.options = [] # type: List[List[str]]
for option in options:
self.options.append(
[option['message'], 'line: {}'.format(option['line'])]
)
self.window.show_quick_panel(self.options, self._jump)
def _jump(self, item: int) -> None:
"""Jump to a line in the view buffer
"""
if item == -1:
return
lineno = int(self.options[item][1].split(':')[1].strip()) - 1
pt = self.window.active_view().text_point(lineno, 0)
self.window.active_view().sel().clear()
self.window.active_view().sel().add(sublime.Region(pt))
self.window.active_view().show(pt)
| {
"content_hash": "844ed38d112493262c951b53c434363b",
"timestamp": "",
"source": "github",
"line_count": 77,
"max_line_length": 74,
"avg_line_length": 31.68831168831169,
"alnum_prop": 0.5733606557377049,
"repo_name": "danalec/dotfiles",
"id": "5e7b919b6d6b3b662a603809943c0d9fd024e122",
"size": "2569",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "sublime/.config/sublime-text-3/Packages/Anaconda/commands/mccabe.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "699"
},
{
"name": "CSS",
"bytes": "32865"
},
{
"name": "GLSL",
"bytes": "10062"
},
{
"name": "HTML",
"bytes": "4806"
},
{
"name": "JavaScript",
"bytes": "817118"
},
{
"name": "Lua",
"bytes": "34246"
},
{
"name": "PHP",
"bytes": "2263892"
},
{
"name": "Python",
"bytes": "9571271"
},
{
"name": "Ruby",
"bytes": "56701"
},
{
"name": "Shell",
"bytes": "280060"
},
{
"name": "Smarty",
"bytes": "5128"
},
{
"name": "Vim script",
"bytes": "26736"
}
],
"symlink_target": ""
} |
from oslo_config import fixture as fixture_config
from nova import test
from nova.virt import driver
class DriverMethodTestCase(test.NoDBTestCase):
def setUp(self):
super(DriverMethodTestCase, self).setUp()
self.CONF = self.useFixture(fixture_config.Config()).conf
def test_is_xenapi_true(self):
self.CONF.set_override('compute_driver', 'xenapi.XenAPIDriver')
self.assertTrue(driver.is_xenapi())
def test_is_xenapi_false(self):
driver_names = ('libvirt.LibvirtDriver', 'fake.FakeDriver',
'ironic.IronicDriver', 'vmwareapi.VMwareVCDriver',
'hyperv.HyperVDriver', None)
for driver_name in driver_names:
self.CONF.set_override('compute_driver', driver_name)
self.assertFalse(driver.is_xenapi())
| {
"content_hash": "7114619679ffdc77538ea84510f9d02e",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 74,
"avg_line_length": 36.08695652173913,
"alnum_prop": 0.6578313253012048,
"repo_name": "rahulunair/nova",
"id": "716fa6e8371f2c8728eadb6ed4f4c8c9a7b4245c",
"size": "1485",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "nova/tests/unit/virt/test_driver.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "PHP",
"bytes": "3325"
},
{
"name": "Python",
"bytes": "22804450"
},
{
"name": "Shell",
"bytes": "41649"
},
{
"name": "Smarty",
"bytes": "472764"
}
],
"symlink_target": ""
} |
from xml.dom.minidom import parse, parseString, Element
from os import path
import re
class CypressParser:
patient_section_templateIds = ['2.16.840.1.113883.10.20.17.2.4', '2.16.840.1.113883.10.20.24.2.1']
# patient_id_root='2.16.840.1.113883.4.572'
patient_id_root="2.16.840.1.113883.3.1257"
def parsePatientFile(self, filename):
patient = CypressPatient(filename)
structuredBody = None
node = parse(filename)
metadata = self.parseMetadata(node, filename)
patientRole = node.getElementsByTagName('patientRole')
assert patientRole.length == 1
patient.setCharacteristics(self.parse_patient_role(patientRole[0]))
nl = node.getElementsByTagName('structuredBody')
if len(nl) != 1:
raise ValueError('more or less than 1 structuredBody element in clinical document')
structuredBody = nl[0]
patientSection = self.findPatientSection(structuredBody)
patient.setEvents(self.parsePatientSection(patientSection))
return {'metadata' : metadata, 'patient_data' : patient.asDict()}
def parseMetadata(self, node, filename):
metadata = dict()
metadata['filepath'] = filename
metadata['filename'] = path.basename(filename)
metadata['patient_number'] = re.sub('_.*', '', metadata['filename'])
return metadata
def parseGenericElement(self, element, expectedName):
result = None
for child in element.childNodes:
if child.localName == expectedName:
fn = getattr(self, 'parse_' + expectedName)
result = fn(child)
if (result != None):
break
elif child.localName != None:
raise ValueError('unexpected child ' + child.localName + ' of ' + element.localName + ' element')
return result
def findPatientSection(self, node):
return self.parseGenericElement(node, 'component')
def parse_component(self, node):
return self.parseGenericElement(node, 'section')
def parse_section(self, node):
patient_section = None
for child in node.childNodes:
if child.localName == 'templateId' and child.getAttribute('root') in self.patient_section_templateIds:
patient_section = node
patient_section.normalize()
return patient_section
def parsePatientSection(self, patientSection):
resultList = []
for child in patientSection.childNodes:
if child.localName == 'entry':
res = self.parseEntry(child)
if res != None:
resultList.append({child.localName : res})
elif child.localName in ['templateId', 'code', 'title', 'text', None]:
continue
else:
raise ValueError('unexpected ' + child.localName + ' in patient section')
return resultList
def parseEntry(self, entry):
for child in entry.childNodes:
if child.localName != None:
fn = getattr(self, 'parse_patient_' + child.localName)
result = fn(child)
if result != None:
break
return result
def parse_patient_role(self, node):
result = dict()
unique_id = None
for child in node.childNodes:
if child.localName == 'id' and child.getAttribute('root') == self.patient_id_root:
unique_id = child.getAttribute('extension')
if unique_id != None:
break
if unique_id != None:
result['unique_patient_id'] = unique_id
zipcode = self.getSingleValue(node.getElementsByTagName('postalCode'))
for child in zipcode.childNodes:
if child.nodeType == child.TEXT_NODE:
result['zip'] = child.data
patient = self.getSingleValue(node.getElementsByTagName('patient'))
if patient.localName != None:
if patient.attributes != None and patient.attributes.length > 0:
for i in range(patient.attributes.length):
a = patient.attributes.item(i)
result[a.localName] = a.value
for child in patient.childNodes:
if child.nodeType == child.TEXT_NODE:
result[child.localName] = child.data
elif child.nodeType != child.ATTRIBUTE_NODE:
cr = self.parse_patient_node(child)
if cr != None:
if result.get(child.localName) == None:
result[child.localName] = [cr]
else:
result[child.localName].append(cr)
if len(result.items()) == 0:
result = None
return result
def getSingleValue(self, list):
assert len(list) == 1
return list[0]
def parse_patient_node(self, node):
result = dict()
if node.localName != None:
if node.attributes != None and node.attributes.length > 0:
for i in range(node.attributes.length):
a = node.attributes.item(i)
result[a.localName] = a.value
for child in node.childNodes:
if child.nodeType == child.TEXT_NODE:
result[child.localName] = child.data
elif child.nodeType != child.ATTRIBUTE_NODE:
cr = self.parse_patient_node(child)
if cr != None:
if result.get(child.localName) == None:
result[child.localName] = [cr]
else:
result[child.localName].append(cr)
if len(result.items()) == 0:
result = None
return result
def parse_patient_observation(self, observation):
return {'observation' : self.parse_patient_node(observation)}
def parse_patient_procedure(self, procedure):
return {'procedure' : self.parse_patient_node(procedure)}
def parse_patient_substanceAdministration(self, substanceAdministration):
return {'substanceAdministration' : self.parse_patient_node(substanceAdministration)}
def parse_patient_act(self, act):
return {'act' : self.parse_patient_node(act)}
def parse_patient_supply(self, supply):
return {'supply' : self.parse_patient_node(supply)}
def parse_patient_encounter(self, encounter):
return {'encounter' : self.parse_patient_node(encounter)}
class CypressPatient:
fake_text_value_code='Unmapped text result'
fake_code_system_oid = '2.16.840.1.113883.5.4'
def __init__(self, patient_id):
self.patient_id = patient_id
self.canonical_representation = dict()
def setCharacteristics(self, characteristics):
self.characteristics = {'characteristics' : characteristics}
self.canonical_representation['individual_characteristics'] = self.canonicalize_characteristics(characteristics)
def setEvents(self, events):
self.events = {'events' : events}
self.canonical_representation['events'] = self.canonicalize_events(events)
def asDict(self):
return self.canonical_representation
def canonicalize_characteristics(self, characteristics):
res = dict()
res['birthTime'] = self.convertTime(characteristics['birthTime'])
res['name'] = self.getSingleDictListTextValue(characteristics['name'], 'given') + \
' ' + self.getSingleDictListTextValue(characteristics['name'], 'family')
assert len(characteristics['administrativeGenderCode']) == 1
for code in ['administrativeGenderCode', 'ethnicGroupCode', 'raceCode']:
res[code] = self.getSingleValue(characteristics[code])
res['unique_patient_id'] = characteristics['unique_patient_id']
res['zip'] = characteristics['zip']
return res
def canonicalize_events(self, events):
res = []
for event in self.events['events']:
cr = self.canonicalize_event(event)
if cr != None:
res = res + cr
return res
def canonicalize_event(self, event):
entry = event['entry']
if entry == None:
return None
return self.canonicalize_entry(entry)
def canonicalize_entry(self, entry):
if isinstance(entry, str):
return [{'notImplemented' : entry}]
keys = entry.keys()
assert len(keys) == 1
event_type = keys[0]
fn = getattr(self, 'canonicalize_' + event_type)
res = fn(entry.get(event_type), event_type)
if len(res) == 0:
return None
return res
def canonicalize_base(self, event, event_type):
res = dict()
res['event_type'] = event_type
unprocessed_items = self.fill_standard_event_values(res, event)
res['codes'] = []
codes = event.get('code')
if codes != None:
for code in event['code']:
res['codes'].append(self.canonicalize_code(code))
res['unprocessed_items'] = unprocessed_items
return res
def canonicalize_simple(self, event, event_type):
canonicalized_event = self.canonicalize_base(event, event_type)
result = [canonicalized_event]
d = canonicalized_event['unprocessed_items']
if d.has_key('entryRelationship'):
del d['entryRelationship']
for related_event in self.find_related_events(event):
if related_event != None:
for key in related_event.keys():
if not (key in [None, 'typeCode']):
entry = self.getSingleValue(related_event[key])
result = result + self.canonicalize_entry({key : entry})
return result
def canonicalize_encounter(self, encounter, event_type):
canonicalized_events = self.canonicalize_simple(encounter, event_type)
canonicalized_event = canonicalized_events[0]
u = canonicalized_event.get('unprocessed_items')
if u != None:
if u.has_key('participant'):
for p in u['participant']:
if p != None:
if p.get('typeCode') == 'LOC':
t = self.getSingleValue(p.get('time'))
high=self.getSingleValue(t.get('high'))
low=self.getSingleValue(t.get('low'))
if high != None and high.has_key('value'):
canonicalized_event['FACILITY_LOCATION_DEPARTURE_DATETIME'] = self.convertScalarTime(high.get('value'))
if low != None and low.has_key('value'):
canonicalized_event['FACILITY_LOCATION_ARRIVAL_DATETIME'] = self.convertScalarTime(low.get('value'))
if u.has_key('dischargeDispositionCode'):
canonicalized_event['dischargeDispositionCode'] = self.getSingleValue(u.get('dischargeDispositionCode'))
return canonicalized_events
def canonicalize_observation(self, observation, event_type):
return self.canonicalize_simple(observation, event_type)
def canonicalize_procedure(self, procedure, event_type):
return self.canonicalize_simple(procedure, event_type)
def canonicalize_substanceAdministration(self, substanceAdministration, event_type):
canonicalized_event = self.canonicalize_base(substanceAdministration, event_type)
canonicalized_event['codes'] = []
result = [canonicalized_event]
consumable = self.getSingleValue(substanceAdministration.get('consumable'))
if consumable != None:
product = self.getSingleValue(consumable.get('manufacturedProduct'))
codes = self.getCodesFromManufacturedProduct(product)
if codes != None:
for code in codes:
canonicalized_event['codes'].append(self.canonicalize_code(code))
return [canonicalized_event]
def canonicalize_supply(self, supply, event_type):
canonicalized_event = self.canonicalize_base(supply, event_type)
canonicalized_event['codes'] = []
result = [canonicalized_event]
product = self.getSingleValue(supply.get('product'))
if product != None:
mfgProduct = self.getSingleValue(product.get('manufacturedProduct'))
codes = self.getCodesFromManufacturedProduct(mfgProduct)
if codes != None:
for code in codes:
canonicalized_event['codes'].append(self.canonicalize_code(code))
return [canonicalized_event]
def getCodesFromManufacturedProduct(self, product):
if product == None:
return None
material = self.getSingleValue(product.get('manufacturedMaterial'))
if material != None:
codes = material.get('code')
if codes != None and len(codes) == 0:
codes = None
return codes
def canonicalize_act(self, act, event_type):
return self.canonicalize_simple(act, event_type)
def find_related_event(self, event):
rel = event.get('entryRelationship')
if rel == None:
return None
return self.getSingleValue(rel)
def find_related_events(self, event):
events = event.get('entryRelationship')
if events == None:
return []
else:
return events
def canonicalize_code(self, code):
if code == None:
return None
res = dict()
text = self.getTextValue(code.get('originalText'))
if text != None:
res['original_text'] = text
for k in code.keys():
if k != 'originalText':
res[k] = code.get(k)
if len(res.items()) == 0:
return None
return res
def fill_standard_event_values(self, res, event):
simple_event_keys = ['classCode', 'moodCode', 'templateId', 'id']
single_code_keys = ['routeCode', 'targetSiteCode', 'priorityCode']
other_handled_keys = ['effectiveTime', 'statusCode', 'code', 'text', 'value', 'negationInd']
effective_time = self.getEffectiveTime(event)
start_time = self.getSingleDictListValue(effective_time, 'low')
res['start_time'] = self.convertTime(start_time)
end_time = self.getSingleDictListValue(effective_time, 'high')
res['end_time'] = self.convertTime(end_time)
res['effective_time'] = self.convertTime(effective_time)
res['status_code'] = self.getSingleDictListValue(event.get('statusCode'), 'code')
res['text'] = self.getTextValue(event.get('text'))
if event.has_key('negationInd'):
res['negationInd'] = event.get('negationInd')
val = event.get('value')
if val != None:
val = self.getSingleValue(val)
if val.get('type') == 'ST':
val = self.make_fake_code(val)
res['value'] = self.canonicalize_code(val)
codelist = []
codes = event.get('code')
if codes != None:
for item in codes:
cr = self.canonicalize_code(item)
if cr != None:
codelist.append(cr)
for key in single_code_keys:
if event.has_key(key):
res[key] = self.getSingleValue(event.get(key))
for key in simple_event_keys:
res[key] = event.get(key)
unprocessed_items = dict()
for key in event.keys():
if not (key in simple_event_keys or key in single_code_keys or key in other_handled_keys):
unprocessed_items[key] = event.get(key)
return(unprocessed_items)
def make_fake_code(self, value):
fake = dict()
fake['type'] = 'CD'
fake['codeSystem'] = self.fake_code_system_oid
fake['original_text'] = value.get(None)
fake['code'] = self.fake_text_value_code
return fake
def getEffectiveTime(self, event):
times = event.get('effectiveTime')
if times == None:
return None
if len(times) == 1:
return times
possible_times = []
for t in times:
if (t.get('type') == None and t.get('xsi:type') == None) or t.get('type') == 'IVL_TS' or t.get('xsi:type') == 'IVL_TS':
possible_times.append(t)
return possible_times
def convertScalarTime(self, tstr):
if tstr == None:
return None
result = dict()
result['string'] = tstr
result['date'] = tstr[0:8]
result['time'] = tstr[8:]
return result
def convertTime(self, time):
if time == None:
return None
return self.convertScalarTime(self.getSingleDictListValue(time, 'value'))
def getSingleValue(self, list):
assert len(list) == 1
return list[0]
def getSingleDictListValue(self, dictlist, label):
if dictlist == None:
return None
assert len(dictlist) == 1
return dictlist[0].get(label)
def getSingleDictListTextValue(self, dictlist, label):
return(self.getTextValue(self.getSingleDictListValue(dictlist, label)))
def getTextValue(self, map):
return self.getSingleDictListValue(map, None)
| {
"content_hash": "79a6e994ec2d153cc0a708bd0653a17f",
"timestamp": "",
"source": "github",
"line_count": 430,
"max_line_length": 135,
"avg_line_length": 40.6953488372093,
"alnum_prop": 0.5878050174295674,
"repo_name": "pSCANNER/USC-source-code",
"id": "97da81e6527ba9e19ddf5d79f3322b7c69e67221",
"size": "17518",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "hqmf2sql/tests/CypressParser.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ANTLR",
"bytes": "76684"
},
{
"name": "HTML",
"bytes": "1056"
},
{
"name": "Makefile",
"bytes": "1392"
},
{
"name": "PLpgSQL",
"bytes": "35591401"
},
{
"name": "Python",
"bytes": "312412"
},
{
"name": "Ruby",
"bytes": "364"
},
{
"name": "SQLPL",
"bytes": "4128871"
},
{
"name": "Shell",
"bytes": "17166"
}
],
"symlink_target": ""
} |
from setuptools import setup
setup(name='enzynet',
description='EnzyNet: enzyme classification using 3D convolutional neural networks on spatial representation',
author='Afshine Amidi and Shervine Amidi',
author_email='<author1-lastname>@mit.edu, <author2-firstname>@stanford.edu',
license='MIT',
packages=['enzynet'])
| {
"content_hash": "7c72d72371a4037a93f2bbc966041f5e",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 116,
"avg_line_length": 43.75,
"alnum_prop": 0.7257142857142858,
"repo_name": "shervinea/enzynet",
"id": "ae6288463604622950f3794eb87ca0d1c58b5571",
"size": "350",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "61910"
}
],
"symlink_target": ""
} |
"""Full mock of the builtin 'os' module for blazing-fast unit-testing."""
# pylint: disable=import-self
from fakeos import FakeOS
from filesystem import (FakeFilesystem, FakeDirectory, FakeFile,
FakeFilesystemWithPermissions)
from environment import FakeEnvironment
from device import FakeDevice
from fakeuser import FakeUser, Root
from operating_system import FakeUnix, FakeWindows
| {
"content_hash": "4bb7b63077d446753c402fa0ae4b793e",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 73,
"avg_line_length": 45.22222222222222,
"alnum_prop": 0.7813267813267813,
"repo_name": "rinslow/fakeos",
"id": "20f677f6235b95439809140b6f00930c2e42395f",
"size": "407",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "53981"
}
],
"symlink_target": ""
} |
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'GithubCommentsConfig'
db.create_table(u'ghcomments_githubcommentsconfig', (
(u'datacollectorconfig_ptr', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['acquisition.DataCollectorConfig'], unique=True, primary_key=True)),
('repo_name', self.gf('django.db.models.fields.CharField')(max_length=255)),
('issue_nodes', self.gf('django.db.models.fields.BooleanField')(default=False)),
('issues_state', self.gf('django.db.models.fields.CharField')(max_length=63)),
))
db.send_create_signal(u'ghcomments', ['GithubCommentsConfig'])
def backwards(self, orm):
# Deleting model 'GithubCommentsConfig'
db.delete_table(u'ghcomments_githubcommentsconfig')
models = {
u'acquisition.acquisitionsessionconfig': {
'Meta': {'ordering': "['created']", 'object_name': 'AcquisitionSessionConfig'},
'completed': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'graph': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'started': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'temporary': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'thumbnail': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'})
},
u'acquisition.datacollectorconfig': {
'Meta': {'object_name': 'DataCollectorConfig'},
'completed': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'configurator': ('django.db.models.fields.CharField', [], {'max_length': '44'}),
'graph': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'output': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'polymorphic_ctype': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'polymorphic_acquisition.datacollectorconfig_set'", 'null': 'True', 'to': u"orm['contenttypes.ContentType']"}),
'result_id': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'running_instance_id': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'session_config': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'collectors'", 'to': u"orm['acquisition.AcquisitionSessionConfig']"}),
'started': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'ghcomments.githubcommentsconfig': {
'Meta': {'object_name': 'GithubCommentsConfig', '_ormbases': [u'acquisition.DataCollectorConfig']},
u'datacollectorconfig_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['acquisition.DataCollectorConfig']", 'unique': 'True', 'primary_key': 'True'}),
'issue_nodes': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'issues_state': ('django.db.models.fields.CharField', [], {'max_length': '63'}),
'repo_name': ('django.db.models.fields.CharField', [], {'max_length': '255'})
}
}
complete_apps = ['ghcomments']
| {
"content_hash": "4c6cc8343b68f11f1b16db1edea097f7",
"timestamp": "",
"source": "github",
"line_count": 68,
"max_line_length": 214,
"avg_line_length": 70.48529411764706,
"alnum_prop": 0.5916962236595035,
"repo_name": "GaretJax/csat",
"id": "a590b5ffc7fe5e06f80ac0886318c59597686352",
"size": "4817",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "csat/collectors/ghcomments/migrations/0001_initial.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "118593"
},
{
"name": "CoffeeScript",
"bytes": "214811"
},
{
"name": "JavaScript",
"bytes": "2053598"
},
{
"name": "Python",
"bytes": "304635"
},
{
"name": "Ruby",
"bytes": "264"
},
{
"name": "Scala",
"bytes": "232"
},
{
"name": "Shell",
"bytes": "6697"
}
],
"symlink_target": ""
} |
from steamworks import *
import time
#------------------------------------------------
# Initialize Steam
#------------------------------------------------
Steam.Init()
#------------------------------------------------
# Test Steam Apps functions
def appsTest():
print("\n#------------------------------------------------\nINFO: Running apps test.....\n")
print("Has Other App/Game (using Spacewars): %s") % (SteamApps.HasOtherApp(480))
print("DLC Count: %s") % (SteamApps.GetDlcCount())
print("Is App/Game Installed? (using Spacewars): %s") % (SteamApps.IsAppInstalled(480))
print("Current Game Language: %s") % (SteamApps.GetCurrentGameLanguage())
raw_input("\nINFO: Apps test done. Press any key to continue.\n")
#------------------------------------------------
# Test Steam Friends functions
def friendsTest():
print("\n#------------------------------------------------\nINFO: Running friends test.....\n")
print("Friend Count: %s") % (SteamFriends.GetFriendCount())
print("Player Name: %s") % (SteamFriends.GetPlayerName())
print("Player State: %s") % (SteamFriends.GetPlayerState())
print("Friend's Name (using Gramps): %s") % (SteamFriends.GetFriendPersonaName(76561198002965952))
raw_input("INFO: Friends test done. Press any key to continue.\n")
#------------------------------------------------
# Test Steam Music functions
def musicTest():
print("\n#------------------------------------------------\nINFO: Running music test.....\n")
print("Music Enabled?: "+str(SteamMusic.MusicIsEnabled()))
print("Music Playing?: "+str(SteamMusic.MusicIsPlaying()))
print("Music Volume: "+str(SteamMusic.MusicGetVolume()))
print("Attempting to play music...")
SteamMusic.MusicPlay()
time.sleep(3)
print("Attempting to pause music...")
SteamMusic.MusicPause()
time.sleep(3)
print("Attempting to play next song...")
SteamMusic.MusicPlayNext()
time.sleep(3)
print("Attempting to play previous song...")
SteamMusic.MusicPlayPrev()
time.sleep(3)
print("Setting volume to 5...")
SteamMusic.MusicSetVolume(5)
time.sleep(3)
raw_input("INFO: Music test done. Press any key to continue.\n")
#------------------------------------------------
# Test Steam Users functions
def userTest():
print("\n#------------------------------------------------\nINFO: Running user test.....\n")
print("Steam ID: %s") % (SteamUser.GetPlayerID())
print("Steam Level: %s") % (SteamUser.GetPlayerSteamLevel())
print("Steam User Folder: %s") % (SteamUser.GetUserDataFolder())
raw_input("INFO: User test done. Press any key to continue.\n")
#------------------------------------------------
# Test Steam User Stats functions
def statsTest():
print("\n#------------------------------------------------\nINFO: Running stats test.....\n")
print("This test only works in a game with statistics and achievements enabled. Sorry!")
raw_input("INFO: Stats test done. Press any key to continue.\n")
#------------------------------------------------
# Test Steam Utilities functions
def utilitiesTest():
print("\n#------------------------------------------------\nINFO: Running utilities test.....\n")
print("Computer Battery Power: "+str(SteamUtilities.GetCurrentBatteryPower()))
print("User Country: "+str(SteamUtilities.GetIPCountry()))
print("Seconds Since Game Active: "+str(SteamUtilities.GetSecondsSinceAppActive()))
print("Seconds Since Computer Active: "+str(SteamUtilities.GetSecondsSinceComputerActive()))
print("Server Time: "+str(SteamUtilities.GetServerRealTime()))
print("Steam Overlay Enabled?: %s") % (SteamUtilities.IsOverlayEnabled())
print("Steam VR Running?: %s") % (SteamUtilities.IsSteamRunningInVR())
print("Steam UI Language: %s") % (SteamUtilities.GetSteamUILanguage())
print("Steam App ID: %s") % (SteamUtilities.GetAppID())
raw_input("INFO: Utilities test done. Press any key to continue.\n")
#------------------------------------------------
# The main test loop
#------------------------------------------------
# Set some variables
STEAM_TEST = True
FIRST_RUN = False
# The loop itself
while STEAM_TEST:
if FIRST_RUN == False:
print("##################################################\nSTEAMWORKS PYTHON API TEST\n##################################################\n")
print("This will loop through different sections of the SteamworksPy API system for testing purposes.\n")
print("Some functions are omitted as they will not work properly without being in a game, transmitting data, or inputting the ID's, etc.\n")
print("Please try these functions out in your own game and report any issues you find.\n")
print("Which test do you want to use?\n")
print("(A)pps, (F)riends, (M)usic, (U)ser, User (S)tats, U(t)ilities, (Q)uit")
LOOP = raw_input("Run which test?: ")
# Running apps test
if LOOP == 'A' or LOOP == 'a':
appsTest()
elif LOOP == 'F' or LOOP == 'f':
friendsTest()
elif LOOP == 'M' or LOOP == 'm':
musicTest()
elif LOOP == 'U' or LOOP == 'u':
userTest()
elif LOOP == 'S' or LOOP == 's':
statsTest()
elif LOOP == 'T' or LOOP == 't':
utilitiesTest()
elif LOOP == 'Q' or LOOP == 'q':
break
else:
pass
| {
"content_hash": "88cd802b6291f7ded2c732de9844e003",
"timestamp": "",
"source": "github",
"line_count": 108,
"max_line_length": 143,
"avg_line_length": 47.05555555555556,
"alnum_prop": 0.5834317197953561,
"repo_name": "Gramps/SteamworksForPython",
"id": "4bd3c22d35ef61f732a5f643dde2c8cc83c52f62",
"size": "5362",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/legacy/_legacy_steamworks-test.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "20440"
},
{
"name": "Makefile",
"bytes": "119"
},
{
"name": "Python",
"bytes": "109373"
}
],
"symlink_target": ""
} |
from django.contrib import admin
from hq.models import *
from django.contrib.auth.models import Group, User
from reporters.models import Reporter
from locations.models import Location
from resources.models import *
'''
'''
class ResourceAdmin(admin.ModelAdmin):
list_display = ('name', 'code', 'category','domain')
list_filter = ['domain', 'category', 'status']
search_fields = ('name','code')
admin.site.register(Resource,ResourceAdmin)
class ResourceCategoryAdmin(admin.ModelAdmin):
list_display = ('name',)
admin.site.register(ResourceCategory,ResourceCategoryAdmin)
class TrackResourceAdmin(admin.ModelAdmin):
list_display = ('resource', 'status', 'user', 'date_tracked')
date_hierarchy = 'date_tracked'
list_filter = ('status', 'date_tracked')
search_fields = ('resource', 'user')
admin.site.register(TrackResource, TrackResourceAdmin)
class ResourceSupplyRequestAdmin(admin.ModelAdmin):
list_display = ('user', 'resource', 'request_date')
date_hierarchy = 'request_date'
search_fields = ('user', 'resource')
# list_filter = ['location']
admin.site.register(ResourceSupplyRequest,ResourceSupplyRequestAdmin) | {
"content_hash": "d73c0fefd7f7e6773faccecca1d8534d",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 69,
"avg_line_length": 36.34375,
"alnum_prop": 0.7343078245915735,
"repo_name": "icomms/wqmanager",
"id": "06b45753e608e662c74a76e6383285a33e030ba9",
"size": "1163",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "apps/resources/admin.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "JavaScript",
"bytes": "793418"
},
{
"name": "PHP",
"bytes": "2863"
},
{
"name": "Python",
"bytes": "3735941"
},
{
"name": "Shell",
"bytes": "383"
}
],
"symlink_target": ""
} |
'''
'''
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import time
import threading
import requests
import argparse
def gen(slow_time):
for _ in range(slow_time):
yield b'a'
time.sleep(1)
def slow_post(port, slow_time):
requests.post('http://127.0.0.1:{0}/'.format(port, ), data=gen(slow_time))
def makerequest(port, connection_limit):
client_timeout = 3
for _ in range(connection_limit):
t = threading.Thread(target=slow_post, args=(port, client_timeout + 10))
t.daemon = True
t.start()
time.sleep(1)
r = requests.get('http://127.0.0.1:{0}/'.format(port,))
print(r.status_code)
def main():
parser = argparse.ArgumentParser()
parser.add_argument("--port", "-p",
type=int,
help="Port to use")
parser.add_argument("--connectionlimit", "-c",
type=int,
help="connection limit")
args = parser.parse_args()
makerequest(args.port, args.connectionlimit)
if __name__ == '__main__':
main()
| {
"content_hash": "13d8a1594caa918a4e3ea57d3e56c6a0",
"timestamp": "",
"source": "github",
"line_count": 59,
"max_line_length": 80,
"avg_line_length": 31,
"alnum_prop": 0.6495352651722253,
"repo_name": "bryancall/trafficserver",
"id": "47fb4b4639ebab584e613382196c4e3da462e76c",
"size": "1829",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/gold_tests/slow_post/slow_post_client.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "1402820"
},
{
"name": "C++",
"bytes": "15545891"
},
{
"name": "CMake",
"bytes": "19248"
},
{
"name": "Dockerfile",
"bytes": "6693"
},
{
"name": "Java",
"bytes": "9881"
},
{
"name": "Lua",
"bytes": "63183"
},
{
"name": "M4",
"bytes": "197455"
},
{
"name": "Makefile",
"bytes": "240525"
},
{
"name": "Objective-C",
"bytes": "3722"
},
{
"name": "Perl",
"bytes": "128281"
},
{
"name": "Python",
"bytes": "1151937"
},
{
"name": "SWIG",
"bytes": "25971"
},
{
"name": "Shell",
"bytes": "169276"
},
{
"name": "Vim Script",
"bytes": "192"
}
],
"symlink_target": ""
} |
import os
import sys
from buildsupport.GlobalSettings import g_compiler
from buildsupport.GlobalSettings import g_platformSettings
from buildsupport.GlobalSettings import g_sourceDir
from buildsupport.GlobalSettings import g_targetOsName, g_targetProcArch
from buildsupport.GlobalSettings import g_verbose
from buildsupport.Errors import BuildError
from buildsupport.Util import runCommandWatchStderr
def produceExecutable(outputDir,
outputName,
libraries,
libDirs,
modules,
optimize,
disableThreading):
"""
Calls the linker (via the compiler executable in real life) to produce an
executable out of the object files.
PARAMETERS:
outputDir -- a string denoting the path to the directory where the resulting
executable will be created;
outputName -- a string denoting the base file name of the resulting
executable;
libraries -- a list of libraries to link with;
libDirs -- a list of directories to search for in order to find the
libraries to link with;
modules -- the modules to be linked into the executable; a module name is a
Unix path to the source file (from which an object file was generated),
relative to the src directory, without any suffixes/extensions.
optimize -- the build optimization setting, might alter the options that
are passed to the linker
disableThreading -- disables passing any threading-enabling options to the
linker
EXCEPTIONS
Will raise BuildError if it is unable to produce the executable.
RETURNS
Nothing. If it doesn't raise a BuildError, that means it succeeded.
"""
assert os.path.isdir(outputDir)
assert isinstance(outputName, str)
assert isinstance(libraries, list)
assert isinstance(libDirs, list)
assert isinstance(modules, list)
outputFile = os.path.join(outputDir,
g_platformSettings.executableName(outputName))
inputFiles = [os.path.join(*[outputDir] + x.split('/')) + \
g_platformSettings.objectFileSuffix \
for x in modules]
linkerArgv = g_compiler.getExecutableLinkerArgv(inputFiles,
outputFile,
libDirs,
libraries,
optimize,
disableThreading)
if g_verbose: print(linkerArgv)
resultcode = runCommandWatchStderr(linkerArgv)
if resultcode == 0:
return outputFile
else:
errMsg = "Error while linking %s (error code %d)" % \
(outputFile, resultcode)
raise BuildError(errMsg)
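# A hypothetical invocation sketch (directory, library and module names are
# illustrative, not taken from this project): linking two modules into one
# executable, with modules given as src-relative Unix paths without suffixes.
#   produceExecutable(outputDir='build', outputName='myapp',
#                     libraries=['m'], libDirs=['/usr/lib'],
#                     modules=['core/main', 'core/util'],
#                     optimize=True, disableThreading=False)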
def produceSharedLibrary(outputDir,
outputName,
libraries,
libDirs,
majorVersion,
minorVersion,
modules,
optimize,
disableThreading):
"""
Calls the linker (via the compiler executable in real life) to produce a
shared library out of the object files.
PARAMETERS:
outputDir -- a string denoting the path to the directory where the resulting
shared library will be created;
outputName -- a string denoting the base file name of the resulting
shared library;
libraries -- a list of other libraries to link with;
libDirs -- a list of directories to search for in order to find the
libraries to link with;
modules -- the modules to be linked into the shared library; a module name
is a Unix path to the source file (from which an object file was generated),
relative to the src directory, without any suffixes/extensions.
optimize -- the build optimization setting, might alter the options that
are passed to the linker
disableThreading -- disables passing any threading-enabling options to the
linker
EXCEPTIONS
Will raise BuildError if it is unable to produce the shared library.
RETURNS
Nothing. If it doesn't raise a BuildError, that means it succeeded.
"""
assert os.path.isdir(outputDir)
outputFile = os.path.join(outputDir,
g_platformSettings.sharedLibName(outputName,
majorVersion,
minorVersion))
soName = g_platformSettings.soName(outputName, majorVersion, minorVersion)
soLinkerName = g_platformSettings.soLinkerName(outputName, majorVersion, minorVersion)
inputFiles = [os.path.join(*[outputDir] + x.split('/')) + \
g_platformSettings.objectFileSuffix \
for x in modules]
linkerArgv = g_compiler.getSharedLinkerArgv(inputFiles,
outputFile,
majorVersion,
minorVersion,
soName,
libDirs,
libraries,
optimize,
disableThreading)
if g_verbose: print(linkerArgv)
resultcode = runCommandWatchStderr(linkerArgv)
if resultcode == 0:
if soName:
soPath = os.path.join(outputDir, soName)
if os.path.exists(soPath):
if os.path.islink(soPath):
os.remove(soPath)
else:
errMsg = "Error while linking %s: %s exists and is not a " \
"symlink, not risking attempting to remove the " \
"file" % (outputFile, soPath)
raise BuildError(errMsg)
os.symlink(os.path.basename(outputFile), soPath)
if soLinkerName:
soLinkerPath = os.path.join(outputDir, soLinkerName)
if os.path.exists(soLinkerPath):
if os.path.islink(soLinkerPath):
os.remove(soLinkerPath)
else:
errMsg = "Error while linking %s: %s exists and is " \
"not a symlink, not risking attempting to " \
"remove the file" % (outputFile, soLinkerPath)
raise BuildError(errMsg)
os.symlink(soName, soLinkerPath)
return outputFile
else:
errMsg = "Error while linking %s (error code %d)" % \
(outputFile, resultcode)
raise BuildError(errMsg)
def produceStaticLibrary(outputDir,
outputName,
modules,
optimize,
disableThreading):
"""
Calls the linker to produce a static library out of the object files.
PARAMETERS:
outputDir -- a string denoting the path to the directory where the resulting
static library will be created;
outputName -- a string denoting the base file name of the resulting
static library;
modules -- the modules to be linked into the static library; a module name
is a Unix path to the source file (from which an object file was generated),
relative to the src directory, without any suffixes/extensions.
optimize -- the build optimization setting, might alter the options that
are passed to the linker
disableThreading -- disables passing any threading-enabling options to the
linker
EXCEPTIONS
Will raise BuildError if it is unable to produce the static library.
RETURNS
Nothing. If it doesn't raise a BuildError, that means it succeeded.
"""
assert os.path.isdir(outputDir)
outputFile = os.path.join(outputDir,
g_platformSettings.staticLibName(outputName))
inputFiles = [os.path.join(*[outputDir] + x.split('/')) + \
g_platformSettings.objectFileSuffix \
for x in modules]
linkerArgv = g_platformSettings.staticLinker(
outputFile,
inputFiles,
optimize,
disableThreading)
if g_verbose: print(linkerArgv)
resultcode = runCommandWatchStderr(linkerArgv)
if resultcode == 0:
return outputFile
else:
errMsg = "Error while linking %s (error code %d)" % \
(outputFile, resultcode)
raise BuildError(errMsg)
| {
"content_hash": "aa5d90b2ab76bacf407e6bc9da445ef7",
"timestamp": "",
"source": "github",
"line_count": 254,
"max_line_length": 90,
"avg_line_length": 34.732283464566926,
"alnum_prop": 0.574699614599864,
"repo_name": "rodyakin/chilly-build",
"id": "58347e73db810b7ad89fcdccf028b88ff99da4d6",
"size": "10108",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "share/chilly-build/buildsupport/Linking.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1886"
},
{
"name": "C++",
"bytes": "23352"
},
{
"name": "Python",
"bytes": "126933"
},
{
"name": "Shell",
"bytes": "4844"
}
],
"symlink_target": ""
} |
from django.shortcuts import render_to_response
from django import newforms as forms
from citeweb.citeimport import models
import re
import hashlib
import logging
import random
random.seed()
import htmlentitydefs
class ImportForm(forms.Form):
url_field = forms.CharField(widget=forms.Textarea(attrs={'cols':'100', 'rows':'10', }))
def convert_html_entities(s):
matches = re.findall("&#\d+;", s)
if len(matches) > 0:
hits = set(matches)
for hit in hits:
name = hit[2:-1]
try:
entnum = int(name)
s = s.replace(hit, unichr(entnum))
except ValueError:
pass
matches = re.findall("&\w+;", s)
hits = set(matches)
amp = "&"
if amp in hits:
hits.remove(amp)
for hit in hits:
name = hit[1:-1]
if htmlentitydefs.name2codepoint.has_key(name):
s = s.replace(hit, unichr(htmlentitydefs.name2codepoint[name]))
s = s.replace(amp, "&")
return s
def parse_urls(s):
s = s.strip()
s = re.sub(r"\r?\n", " ", s)
if s.startswith("<"): s = convert_html_entities(s)
papers = [ x for x in re.findall(r"viewType=fullRecord&(?:amp;)?U+T.*?>([^<>]*?)</a>", s) if x.strip() ]
urls = re.findall(r"http://rss.[a-z]+.com/rss\?e=\w*&(?:amp;)?c=\w*", s)
urls = [ url.replace("&", "&") for url in urls ]
# logging.info(s)
# logging.info(str(urls))
# logging.info("\n".join(papers))
# logging.info("%d %d" % (len(urls), len(papers)))
if len(urls) != len(papers):
logging.debug(s)
pass
assert len(urls) == len(papers)
assert all( "<" not in s and ">" not in s for s in papers )
return (papers, urls)
def index(request):
urls = ()
if request.POST:
(papers, urls) = parse_urls(request.POST["url_field"])
url_hash = hashlib.sha1("\n".join(urls)).hexdigest()
proposed_user_hash = hashlib.sha1(url_hash + str(random.random())).hexdigest()
if not models.URLList.objects.filter(url_hash = url_hash):
url_list = models.URLList.objects.create(url_hash = url_hash, urls = "\n".join(urls), papers = "\n".join(papers))
paper_urls = []
for (paper, url) in zip(papers, urls):
paper_urls.append( { "paper": paper, "url" : url })
return render_to_response('import.html', locals())
def save(request):
if request.GET:
url_hash = request.GET.get("url_hash")
user_hash = request.GET.get("user_hash")
if len(user_hash) >= 40:
userprefs_l = models.UserPrefs.objects.filter(user_hash = user_hash)
if not userprefs_l:
# if this is a new user, generate a hash and store the data
userprefs = models.UserPrefs(user = user_hash, user_hash = user_hash, url_hash = url_hash)
else:
# if it is old, leave the user hash unchanged and just adapt the url hash
userprefs = userprefs_l[0]
userprefs.url_hash = url_hash
userprefs.save()
return render_to_response('save.html', locals())
| {
"content_hash": "002efd019f7314d22e7b73ce0ee80848",
"timestamp": "",
"source": "github",
"line_count": 111,
"max_line_length": 125,
"avg_line_length": 28.72972972972973,
"alnum_prop": 0.5647538413295704,
"repo_name": "mkuhn/cite-web",
"id": "235c8b2ec8d3d52a832e38fa0fd52b1392a37b1d",
"size": "3216",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "citeweb/citeimport/views.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "6684"
},
{
"name": "Python",
"bytes": "16374"
}
],
"symlink_target": ""
} |
"""
Boilerplate functions used in defining binary operations.
"""
from __future__ import annotations
from functools import wraps
from typing import Callable
from pandas._libs.lib import item_from_zerodim
from pandas._libs.missing import is_matching_na
from pandas._typing import F
from pandas.core.dtypes.generic import (
ABCDataFrame,
ABCIndex,
ABCSeries,
)
def unpack_zerodim_and_defer(name: str) -> Callable[[F], F]:
"""
Boilerplate for pandas conventions in arithmetic and comparison methods.
Parameters
----------
name : str
Returns
-------
decorator
"""
def wrapper(method: F) -> F:
return _unpack_zerodim_and_defer(method, name)
return wrapper
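# A minimal sketch of how the decorator factory is applied (the class below is
# hypothetical, not part of this module): the returned wrapper unpacks 0-dim
# ndarrays and defers to "senior" pandas classes before the operator runs.
#   class MyArrayLike:
#       @unpack_zerodim_and_defer("__add__")
#       def __add__(self, other):
#           return self._arith_method(other, operator.add)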
def _unpack_zerodim_and_defer(method, name: str):
"""
Boilerplate for pandas conventions in arithmetic and comparison methods.
Ensure method returns NotImplemented when operating against "senior"
classes. Ensure zero-dimensional ndarrays are always unpacked.
Parameters
----------
method : binary method
name : str
Returns
-------
method
"""
is_cmp = name.strip("__") in {"eq", "ne", "lt", "le", "gt", "ge"}
@wraps(method)
def new_method(self, other):
if is_cmp and isinstance(self, ABCIndex) and isinstance(other, ABCSeries):
# For comparison ops, Index does *not* defer to Series
pass
else:
for cls in [ABCDataFrame, ABCSeries, ABCIndex]:
if isinstance(self, cls):
break
if isinstance(other, cls):
return NotImplemented
other = item_from_zerodim(other)
return method(self, other)
return new_method
def get_op_result_name(left, right):
"""
Find the appropriate name to pin to an operation result. This result
should always be either an Index or a Series.
Parameters
----------
left : {Series, Index}
right : object
Returns
-------
name : object
Usually a string
"""
if isinstance(right, (ABCSeries, ABCIndex)):
name = _maybe_match_name(left, right)
else:
name = left.name
return name
def _maybe_match_name(a, b):
"""
Try to find a name to attach to the result of an operation between
a and b. If only one of these has a `name` attribute, return that
name. Otherwise return a consensus name if they match or None if
they have different names.
Parameters
----------
a : object
b : object
Returns
-------
name : str or None
See Also
--------
pandas.core.common.consensus_name_attr
"""
a_has = hasattr(a, "name")
b_has = hasattr(b, "name")
if a_has and b_has:
try:
if a.name == b.name:
return a.name
elif is_matching_na(a.name, b.name):
# e.g. both are np.nan
return a.name
else:
return None
except TypeError:
# pd.NA
if is_matching_na(a.name, b.name):
return a.name
return None
except ValueError:
# e.g. np.int64(1) vs (np.int64(1), np.int64(2))
return None
elif a_has:
return a.name
elif b_has:
return b.name
return None
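# A short usage sketch (hypothetical Series objects, assuming pandas is
# importable in the caller): a name propagates only when both operands agree.
#   s1 = pd.Series([1, 2], name="a")
#   s2 = pd.Series([3, 4], name="a")
#   get_op_result_name(s1, s2)                        # -> "a"
#   get_op_result_name(s1, pd.Series([5], name="b"))  # -> None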
| {
"content_hash": "96a4f5bef802f66c37dd417413f6a9c8",
"timestamp": "",
"source": "github",
"line_count": 142,
"max_line_length": 82,
"avg_line_length": 23.62676056338028,
"alnum_prop": 0.5767511177347243,
"repo_name": "datapythonista/pandas",
"id": "f0e6aa3750cee5f5c25bd5d9d58a19e929f3569a",
"size": "3355",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "pandas/core/ops/common.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "131"
},
{
"name": "C",
"bytes": "355524"
},
{
"name": "CSS",
"bytes": "1662"
},
{
"name": "Cython",
"bytes": "1178139"
},
{
"name": "Dockerfile",
"bytes": "1933"
},
{
"name": "HTML",
"bytes": "456449"
},
{
"name": "Makefile",
"bytes": "505"
},
{
"name": "Python",
"bytes": "19048364"
},
{
"name": "Shell",
"bytes": "10511"
},
{
"name": "Smarty",
"bytes": "8486"
},
{
"name": "XSLT",
"bytes": "1196"
}
],
"symlink_target": ""
} |
'''
Freetype structured types
-------------------------
FT_Library: A handle to a FreeType library instance.
FT_Vector: A simple structure used to store a 2D vector.
FT_BBox: A structure used to hold an outline's bounding box.
FT_Matrix: A simple structure used to store a 2x2 matrix.
FT_UnitVector: A simple structure used to store a 2D vector unit vector.
FT_Bitmap: A structure used to describe a bitmap or pixmap to the raster.
FT_Data: Read-only binary data represented as a pointer and a length.
FT_Generic: Client applications generic data.
FT_Bitmap_Size: Metrics of a bitmap strike.
FT_Charmap: The base charmap structure.
FT_Glyph_Metrics:A structure used to model the metrics of a single glyph.
FT_Outline: This structure is used to describe an outline to the scan-line
converter.
FT_GlyphSlot: FreeType root glyph slot class structure.
FT_Glyph: The root glyph structure contains a given glyph image plus its
advance width in 16.16 fixed float format.
FT_Size_Metrics: The size metrics structure gives the metrics of a size object.
FT_Size: FreeType root size class structure.
FT_Face: FreeType root face class structure.
FT_Parameter: A simple structure used to pass more or less generic parameters
to FT_Open_Face.
FT_Open_Args: A structure used to indicate how to open a new font file or
stream.
FT_SfntName: A structure used to model an SFNT 'name' table entry.
FT_Stroker: Opaque handler to a path stroker object.
FT_BitmapGlyph: A structure used for bitmap glyph images.
'''
from visvis.text.freetype.ft_types import *
# -----------------------------------------------------------------------------
# A handle to a FreeType library instance. Each 'library' is completely
# independent from the others; it is the 'root' of a set of objects like fonts,
# faces, sizes, etc.
class FT_LibraryRec(Structure):
'''
A handle to a FreeType library instance. Each 'library' is completely
independent from the others; it is the 'root' of a set of objects like
fonts, faces, sizes, etc.
'''
_fields_ = [ ]
FT_Library = POINTER(FT_LibraryRec)
# -----------------------------------------------------------------------------
# A simple structure used to store a 2D vector; coordinates are of the FT_Pos
# type.
class FT_Vector(Structure):
'''
A simple structure used to store a 2D vector; coordinates are of the FT_Pos
type.
x: The horizontal coordinate.
y: The vertical coordinate.
'''
_fields_ = [('x', FT_Pos),
('y', FT_Pos)]
# -----------------------------------------------------------------------------
# A structure used to hold an outline's bounding box, i.e., the coordinates of
# its extrema in the horizontal and vertical directions.
#
# The bounding box is specified with the coordinates of the lower left and the
# upper right corner. In PostScript, those values are often called (llx,lly)
# and (urx,ury), respectively.
#
# If 'yMin' is negative, this value gives the glyph's descender. Otherwise, the
# glyph doesn't descend below the baseline. Similarly, if 'ymax' is positive,
# this value gives the glyph's ascender.
#
# 'xMin' gives the horizontal distance from the glyph's origin to the left edge
# of the glyph's bounding box. If 'xMin' is negative, the glyph extends to the
# left of the origin.
class FT_BBox(Structure):
'''
A structure used to hold an outline's bounding box, i.e., the coordinates
of its extrema in the horizontal and vertical directions.
The bounding box is specified with the coordinates of the lower left and
the upper right corner. In PostScript, those values are often called
(llx,lly) and (urx,ury), respectively.
If 'yMin' is negative, this value gives the glyph's descender. Otherwise,
the glyph doesn't descend below the baseline. Similarly, if 'ymax' is
positive, this value gives the glyph's ascender.
'xMin' gives the horizontal distance from the glyph's origin to the left
edge of the glyph's bounding box. If 'xMin' is negative, the glyph extends
to the left of the origin.
xMin: The horizontal minimum (left-most).
yMin: The vertical minimum (bottom-most).
xMax: The horizontal maximum (right-most).
yMax: The vertical maximum (top-most).
'''
_fields_ = [('xMin', FT_Pos),
('yMin', FT_Pos),
('xMax', FT_Pos),
('yMax', FT_Pos)]
# -----------------------------------------------------------------------------
# A simple structure used to store a 2x2 matrix. Coefficients are in 16.16
# fixed float format. The computation performed is:
# x' = x*xx + y*xy
# y' = x*yx + y*yy
class FT_Matrix(Structure):
'''
A simple structure used to store a 2x2 matrix. Coefficients are in 16.16
fixed float format. The computation performed is:
x' = x*xx + y*xy
y' = x*yx + y*yy
xx: Matrix coefficient.
xy: Matrix coefficient.
yx: Matrix coefficient.
yy: Matrix coefficient.
'''
_fields_ = [('xx', FT_Fixed),
('xy', FT_Fixed),
('yx', FT_Fixed),
('yy', FT_Fixed)]
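# -----------------------------------------------------------------------------
# Illustrative sketch (an assumption, not part of the FreeType API): applying
# the 2x2 fixed-point matrix above to an FT_Vector by hand, mirroring the
# formula in the comment. Coefficients are 16.16 fixed floats, so each product
# is shifted back by 16 bits to stay in the vector's coordinate space.
def _apply_matrix_example(matrix, vector):
    x = (vector.x * matrix.xx + vector.y * matrix.xy) >> 16
    y = (vector.x * matrix.yx + vector.y * matrix.yy) >> 16
    return FT_Vector(x, y)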
# -----------------------------------------------------------------------------
# A simple structure used to store a 2D unit vector. Uses FT_F2Dot14
# types.
class FT_UnitVector(Structure):
'''
    A simple structure used to store a 2D unit vector. Uses FT_F2Dot14
    types.
x: The horizontal coordinate.
y: The vertical coordinate.
'''
_fields_ = [('x', FT_F2Dot14),
('y', FT_F2Dot14)]
# -----------------------------------------------------------------------------
# A structure used to describe a bitmap or pixmap to the raster. Note that we
# now manage pixmaps of various depths through the 'pixel_mode' field.
class FT_Bitmap(Structure):
'''
A structure used to describe a bitmap or pixmap to the raster. Note that we
now manage pixmaps of various depths through the 'pixel_mode' field.
rows: The number of bitmap rows.
width: The number of pixels in bitmap row.
pitch: The pitch's absolute value is the number of bytes taken by one
bitmap row, including padding. However, the pitch is positive when
the bitmap has a 'down' flow, and negative when it has an 'up'
flow. In all cases, the pitch is an offset to add to a bitmap
pointer in order to go down one row.
Note that 'padding' means the alignment of a bitmap to a byte
border, and FreeType functions normally align to the smallest
possible integer value.
For the B/W rasterizer, 'pitch' is always an even number.
To change the pitch of a bitmap (say, to make it a multiple of 4),
use FT_Bitmap_Convert. Alternatively, you might use callback
functions to directly render to the application's surface; see the
file 'example2.py' in the tutorial for a demonstration.
buffer: A typeless pointer to the bitmap buffer. This value should be
aligned on 32-bit boundaries in most cases.
num_grays: This field is only used with FT_PIXEL_MODE_GRAY; it gives the
number of gray levels used in the bitmap.
pixel_mode: The pixel mode, i.e., how pixel bits are stored. See
FT_Pixel_Mode for possible values.
palette_mode: This field is intended for paletted pixel modes; it indicates
how the palette is stored. Not used currently.
palette: A typeless pointer to the bitmap palette; this field is intended
for paletted pixel modes. Not used currently.
'''
_fields_ = [
('rows', c_int),
('width', c_int),
('pitch', c_int),
# declaring buffer as c_char_p confuses ctypes
('buffer', POINTER(c_ubyte)),
('num_grays', c_short),
('pixel_mode', c_ubyte),
('palette_mode', c_char),
('palette', c_void_p) ]
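# -----------------------------------------------------------------------------
# Illustrative sketch (an assumption, not part of the FreeType API): reading a
# single pixel from an 8-bit gray FT_Bitmap. Assuming a positive pitch (a
# 'down' flow), pixel (row, col) lives at byte offset row*pitch + col.
def _bitmap_gray_pixel_example(bitmap, row, col):
    assert 0 <= row < bitmap.rows and 0 <= col < bitmap.width
    return bitmap.buffer[row * bitmap.pitch + col]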
# -----------------------------------------------------------------------------
# Read-only binary data represented as a pointer and a length.
class FT_Data(Structure):
'''
Read-only binary data represented as a pointer and a length.
pointer: The data.
length: The length of the data in bytes.
'''
_fields_ = [('pointer', POINTER(FT_Byte)),
                ('length', FT_Int)]
# -----------------------------------------------------------------------------
# Client applications often need to associate their own data to a variety of
# FreeType core objects. For example, a text layout API might want to associate
# a glyph cache to a given size object.
#
# Most FreeType objects contain a 'generic' field, of type FT_Generic, whose
# usage is left to client applications and font servers.
#
# It can be used to store a pointer to client-specific data, as well as the
# address of a 'finalizer' function, which will be called by FreeType when the
# object is destroyed (for example, the previous client example would put the
# address of the glyph cache destructor in the 'finalizer' field).
class FT_Generic(Structure):
'''
Client applications often need to associate their own data to a variety of
FreeType core objects. For example, a text layout API might want to
associate a glyph cache to a given size object.
    Most FreeType objects contain a 'generic' field, of type FT_Generic, whose
    usage is left to client applications and font servers.
It can be used to store a pointer to client-specific data, as well as the
address of a 'finalizer' function, which will be called by FreeType when
the object is destroyed (for example, the previous client example would put
the address of the glyph cache destructor in the 'finalizer' field).
data: A typeless pointer to any client-specified data. This field is
completely ignored by the FreeType library.
finalizer: A pointer to a 'generic finalizer' function, which will be
called when the object is destroyed. If this field is set to
NULL, no code will be called.
'''
_fields_ = [('data', c_void_p),
('finalizer', FT_Generic_Finalizer)]
# -----------------------------------------------------------------------------
# This structure models the metrics of a bitmap strike (i.e., a set of glyphs
# for a given point size and resolution) in a bitmap font. It is used for the
# 'available_sizes' field of FT_Face.
class FT_Bitmap_Size(Structure):
'''
This structure models the metrics of a bitmap strike (i.e., a set of glyphs
for a given point size and resolution) in a bitmap font. It is used for the
'available_sizes' field of FT_Face.
height: The vertical distance, in pixels, between two consecutive
baselines. It is always positive.
width: The average width, in pixels, of all glyphs in the strike.
size: The nominal size of the strike in 26.6 fractional points. This field
is not very useful.
x_ppem: The horizontal ppem (nominal width) in 26.6 fractional pixels.
y_ppem: The vertical ppem (nominal height) in 26.6 fractional pixels.
'''
_fields_ = [
('height', FT_Short),
('width', FT_Short),
('size', FT_Pos),
('x_ppem', FT_Pos),
('y_ppem', FT_Pos) ]
# -----------------------------------------------------------------------------
# The base charmap structure.
class FT_CharmapRec(Structure):
'''
The base charmap structure.
face : A handle to the parent face object.
encoding : An FT_Encoding tag identifying the charmap. Use this with
FT_Select_Charmap.
platform_id: An ID number describing the platform for the following
encoding ID. This comes directly from the TrueType
specification and should be emulated for other formats.
encoding_id: A platform specific encoding number. This also comes from the
TrueType specification and should be emulated similarly.
'''
_fields_ = [
        ('face', c_void_p), # Should be FT_Face
('encoding', FT_Encoding),
('platform_id', FT_UShort),
('encoding_id', FT_UShort),
]
FT_Charmap = POINTER(FT_CharmapRec)
# -----------------------------------------------------------------------------
# A structure used to model the metrics of a single glyph. The values are
# expressed in 26.6 fractional pixel format; if the flag FT_LOAD_NO_SCALE has
# been used while loading the glyph, values are expressed in font units
# instead.
class FT_Glyph_Metrics(Structure):
'''
A structure used to model the metrics of a single glyph. The values are
expressed in 26.6 fractional pixel format; if the flag FT_LOAD_NO_SCALE has
been used while loading the glyph, values are expressed in font units
instead.
width: The glyph's width.
height: The glyph's height.
horiBearingX: Left side bearing for horizontal layout.
horiBearingY: Top side bearing for horizontal layout.
horiAdvance: Advance width for horizontal layout.
vertBearingX: Left side bearing for vertical layout.
vertBearingY: Top side bearing for vertical layout.
vertAdvance: Advance height for vertical layout.
'''
_fields_ = [
('width', FT_Pos),
('height', FT_Pos),
('horiBearingX', FT_Pos),
('horiBearingY', FT_Pos),
('horiAdvance', FT_Pos),
('vertBearingX', FT_Pos),
('vertBearingY', FT_Pos),
('vertAdvance', FT_Pos),
]
# -----------------------------------------------------------------------------
# This structure is used to describe an outline to the scan-line converter.
class FT_Outline(Structure):
'''
This structure is used to describe an outline to the scan-line converter.
n_contours: The number of contours in the outline.
n_points: The number of points in the outline.
points: A pointer to an array of 'n_points' FT_Vector elements, giving the
outline's point coordinates.
tags: A pointer to an array of 'n_points' chars, giving each outline
point's type.
If bit 0 is unset, the point is 'off' the curve, i.e., a Bezier
control point, while it is 'on' if set.
Bit 1 is meaningful for 'off' points only. If set, it indicates a
third-order Bezier arc control point; and a second-order control
point if unset.
If bit 2 is set, bits 5-7 contain the drop-out mode (as defined in
the OpenType specification; the value is the same as the argument to
the SCANMODE instruction).
Bits 3 and 4 are reserved for internal purposes.
contours: An array of 'n_contours' shorts, giving the end point of each
contour within the outline. For example, the first contour is
defined by the points '0' to 'contours[0]', the second one is
defined by the points 'contours[0]+1' to 'contours[1]', etc.
flags: A set of bit flags used to characterize the outline and give hints
to the scan-converter and hinter on how to convert/grid-fit it. See
FT_OUTLINE_FLAGS.
'''
_fields_ = [
('n_contours', c_short),
('n_points', c_short),
('points', POINTER(FT_Vector)),
        # declaring tags as c_char_p would prevent us from accessing all tags
('tags', POINTER(c_ubyte)),
('contours', POINTER(c_short)),
('flags', c_int),
]
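# -----------------------------------------------------------------------------
# Illustrative sketch (an assumption, not part of the FreeType API): decoding
# the per-point tag bits documented above. Bit 0 says whether the point is on
# the curve; bit 1 only matters for off-curve points and selects third-order
# (cubic) versus second-order (quadratic) control points.
def _describe_outline_tag_example(tag):
    if tag & 0x01:
        return 'on-curve'
    return 'cubic control' if tag & 0x02 else 'quadratic control'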
# -----------------------------------------------------------------------------
# The root glyph structure contains a given glyph image plus its advance width
# in 16.16 fixed float format.
class FT_GlyphRec(Structure):
'''
The root glyph structure contains a given glyph image plus its advance
width in 16.16 fixed float format.
library: A handle to the FreeType library object.
clazz: A pointer to the glyph's class. Private.
format: The format of the glyph's image.
advance: A 16.16 vector that gives the glyph's advance width.
'''
_fields_ = [
('library', FT_Library),
('clazz', c_void_p),
('format', FT_Glyph_Format),
('advance', FT_Vector)
]
FT_Glyph = POINTER(FT_GlyphRec)
# -----------------------------------------------------------------------------
# FreeType root glyph slot class structure. A glyph slot is a container where
# individual glyphs can be loaded, be they in outline or bitmap format.
class FT_GlyphSlotRec(Structure):
'''
FreeType root glyph slot class structure. A glyph slot is a container where
individual glyphs can be loaded, be they in outline or bitmap format.
library: A handle to the FreeType library instance this slot belongs to.
face: A handle to the parent face object.
next: In some cases (like some font tools), several glyph slots per face
object can be a good thing. As this is rare, the glyph slots are
listed through a direct, single-linked list using its 'next' field.
generic: A typeless pointer which is unused by the FreeType library or any
of its drivers. It can be used by client applications to link
their own data to each glyph slot object.
metrics: The metrics of the last loaded glyph in the slot. The returned
values depend on the last load flags (see the FT_Load_Glyph API
function) and can be expressed either in 26.6 fractional pixels or
font units.
Note that even when the glyph image is transformed, the metrics
are not.
linearHoriAdvance: The advance width of the unhinted glyph. Its value is
expressed in 16.16 fractional pixels, unless
FT_LOAD_LINEAR_DESIGN is set when loading the
glyph. This field can be important to perform correct
WYSIWYG layout. Only relevant for outline glyphs.
linearVertAdvance: The advance height of the unhinted glyph. Its value is
expressed in 16.16 fractional pixels, unless
FT_LOAD_LINEAR_DESIGN is set when loading the
glyph. This field can be important to perform correct
WYSIWYG layout. Only relevant for outline glyphs.
advance: This shorthand is, depending on FT_LOAD_IGNORE_TRANSFORM, the
transformed advance width for the glyph (in 26.6 fractional pixel
format). As specified with FT_LOAD_VERTICAL_LAYOUT, it uses either
the 'horiAdvance' or the 'vertAdvance' value of 'metrics' field.
format: This field indicates the format of the image contained in the glyph
slot. Typically FT_GLYPH_FORMAT_BITMAP, FT_GLYPH_FORMAT_OUTLINE, or
FT_GLYPH_FORMAT_COMPOSITE, but others are possible.
bitmap: This field is used as a bitmap descriptor when the slot format is
FT_GLYPH_FORMAT_BITMAP. Note that the address and content of the
bitmap buffer can change between calls of FT_Load_Glyph and a few
other functions.
bitmap_left: This is the bitmap's left bearing expressed in integer
pixels. Of course, this is only valid if the format is
FT_GLYPH_FORMAT_BITMAP.
bitmap_top: This is the bitmap's top bearing expressed in integer
pixels. Remember that this is the distance from the baseline to
the top-most glyph scanline, upwards y coordinates being
positive.
outline: The outline descriptor for the current glyph image if its format
is FT_GLYPH_FORMAT_OUTLINE. Once a glyph is loaded, 'outline' can
             be transformed, distorted, emboldened, etc. However, it must not be
freed.
num_subglyphs: The number of subglyphs in a composite glyph. This field is
only valid for the composite glyph format that should
normally only be loaded with the FT_LOAD_NO_RECURSE
flag. For now this is internal to FreeType.
subglyphs: An array of subglyph descriptors for composite glyphs. There are
'num_subglyphs' elements in there. Currently internal to
FreeType.
control_data: Certain font drivers can also return the control data for a
given glyph image (e.g. TrueType bytecode, Type 1
charstrings, etc.). This field is a pointer to such data.
control_len: This is the length in bytes of the control data.
other: Really wicked formats can use this pointer to present their own
glyph image to client applications. Note that the application needs
to know about the image format.
lsb_delta: The difference between hinted and unhinted left side bearing
while autohinting is active. Zero otherwise.
rsb_delta: The difference between hinted and unhinted right side bearing
while autohinting is active. Zero otherwise.
'''
_fields_ = [
('library', FT_Library),
('face', c_void_p),
('next', c_void_p),
('reserved', c_uint),
('generic', FT_Generic),
('metrics', FT_Glyph_Metrics),
('linearHoriAdvance', FT_Fixed),
('linearVertAdvance', FT_Fixed),
('advance', FT_Vector),
('format', FT_Glyph_Format),
('bitmap', FT_Bitmap),
('bitmap_left', FT_Int),
('bitmap_top', FT_Int),
('outline', FT_Outline),
('num_subglyphs', FT_UInt),
('subglyphs', c_void_p),
('control_data', c_void_p),
('control_len', c_long),
('lsb_delta', FT_Pos),
('rsb_delta', FT_Pos),
('other', c_void_p),
('internal', c_void_p),
]
FT_GlyphSlot = POINTER(FT_GlyphSlotRec)
# -----------------------------------------------------------------------------
# The size metrics structure gives the metrics of a size object.
class FT_Size_Metrics(Structure):
'''
The size metrics structure gives the metrics of a size object.
x_ppem: The width of the scaled EM square in pixels, hence the term 'ppem'
(pixels per EM). It is also referred to as 'nominal width'.
y_ppem: The height of the scaled EM square in pixels, hence the term 'ppem'
(pixels per EM). It is also referred to as 'nominal height'.
x_scale: A 16.16 fractional scaling value used to convert horizontal
metrics from font units to 26.6 fractional pixels. Only relevant
for scalable font formats.
y_scale: A 16.16 fractional scaling value used to convert vertical metrics
from font units to 26.6 fractional pixels. Only relevant for
scalable font formats.
ascender: The ascender in 26.6 fractional pixels. See FT_FaceRec for the
details.
descender: The descender in 26.6 fractional pixels. See FT_FaceRec for the
details.
height: The height in 26.6 fractional pixels. See FT_FaceRec for the
details.
max_advance: The maximal advance width in 26.6 fractional pixels. See
FT_FaceRec for the details.
'''
_fields_ = [
('x_ppem', FT_UShort),
('y_ppem', FT_UShort),
('x_scale', FT_Fixed),
('y_scale', FT_Fixed),
('ascender', FT_Pos),
('descender', FT_Pos),
('height', FT_Pos),
('max_advance', FT_Pos),
]
# -----------------------------------------------------------------------------
# FreeType root size class structure. A size object models a face object at a
# given size.
class FT_SizeRec(Structure):
'''
FreeType root size class structure. A size object models a face object at a
given size.
face: Handle to the parent face object.
generic: A typeless pointer, which is unused by the FreeType library or any
of its drivers. It can be used by client applications to link
their own data to each size object.
metrics: Metrics for this size object. This field is read-only.
'''
_fields_ = [
('face', c_void_p),
('generic', FT_Generic),
('metrics', FT_Size_Metrics),
('internal', c_void_p),
]
FT_Size = POINTER(FT_SizeRec)
# -----------------------------------------------------------------------------
# FreeType root face class structure. A face object models a typeface in a font
# file.
class FT_FaceRec(Structure):
'''
FreeType root face class structure. A face object models a typeface in a
font file.
num_faces: The number of faces in the font file. Some font formats can have
multiple faces in a font file.
face_index: The index of the face in the font file. It is set to 0 if there
is only one face in the font file.
face_flags: A set of bit flags that give important information about the
face; see FT_FACE_FLAG_XXX for the details.
style_flags: A set of bit flags indicating the style of the face; see
FT_STYLE_FLAG_XXX for the details.
num_glyphs: The number of glyphs in the face. If the face is scalable and
has sbits (see 'num_fixed_sizes'), it is set to the number of
outline glyphs.
For CID-keyed fonts, this value gives the highest CID used in
the font.
family_name: The face's family name. This is an ASCII string, usually in
English, which describes the typeface's family (like 'Times
New Roman', 'Bodoni', 'Garamond', etc). This is a least common
denominator used to list fonts. Some formats (TrueType &
OpenType) provide localized and Unicode versions of this
string. Applications should use the format specific interface
to access them. Can be NULL (e.g., in fonts embedded in a PDF
file).
style_name: The face's style name. This is an ASCII string, usually in
English, which describes the typeface's style (like 'Italic',
'Bold', 'Condensed', etc). Not all font formats provide a style
name, so this field is optional, and can be set to NULL. As for
'family_name', some formats provide localized and Unicode
versions of this string. Applications should use the format
specific interface to access them.
num_fixed_sizes: The number of bitmap strikes in the face. Even if the face
is scalable, there might still be bitmap strikes, which
are called 'sbits' in that case.
available_sizes: An array of FT_Bitmap_Size for all bitmap strikes in the
face. It is set to NULL if there is no bitmap strike.
num_charmaps: The number of charmaps in the face.
charmaps: An array of the charmaps of the face.
generic: A field reserved for client uses. See the FT_Generic type
description.
bbox: The font bounding box. Coordinates are expressed in font units (see
'units_per_EM'). The box is large enough to contain any glyph from
the font. Thus, 'bbox.yMax' can be seen as the 'maximal ascender',
and 'bbox.yMin' as the 'minimal descender'. Only relevant for
scalable formats.
Note that the bounding box might be off by (at least) one pixel for
hinted fonts. See FT_Size_Metrics for further discussion.
units_per_EM: The number of font units per EM square for this face. This is
typically 2048 for TrueType fonts, and 1000 for Type 1
fonts. Only relevant for scalable formats.
ascender: The typographic ascender of the face, expressed in font
units. For font formats not having this information, it is set to
'bbox.yMax'. Only relevant for scalable formats.
descender: The typographic descender of the face, expressed in font
units. For font formats not having this information, it is set
to 'bbox.yMin'. Note that this field is usually negative. Only
relevant for scalable formats.
height: The height is the vertical distance between two consecutive
baselines, expressed in font units. It is always positive. Only
relevant for scalable formats.
max_advance_width: The maximal advance width, in font units, for all glyphs
in this face. This can be used to make word wrapping
computations faster. Only relevant for scalable formats.
max_advance_height: The maximal advance height, in font units, for all
glyphs in this face. This is only relevant for vertical
layouts, and is set to 'height' for fonts that do not
provide vertical metrics. Only relevant for scalable
formats.
underline_position: The position, in font units, of the underline line for
this face. It is the center of the underlining
stem. Only relevant for scalable formats.
underline_thickness: The thickness, in font units, of the underline for
this face. Only relevant for scalable formats.
glyph: The face's associated glyph slot(s).
size: The current active size for this face.
charmap: The current active charmap for this face.
'''
_fields_ = [
('num_faces', FT_Long),
('face_index', FT_Long),
('face_flags', FT_Long),
('style_flags', FT_Long),
('num_glyphs', FT_Long),
('family_name', FT_String_p),
('style_name', FT_String_p),
('num_fixed_sizes', FT_Int),
('available_sizes', POINTER(FT_Bitmap_Size)),
('num_charmaps', c_int),
('charmaps', POINTER(FT_Charmap)),
('generic', FT_Generic),
# The following member variables (down to `underline_thickness')
# are only relevant to scalable outlines; cf. @FT_Bitmap_Size
# for bitmap fonts.
('bbox', FT_BBox),
('units_per_EM', FT_UShort),
('ascender', FT_Short),
('descender', FT_Short),
('height', FT_Short),
('max_advance_width', FT_Short),
('max_advance_height', FT_Short),
('underline_position', FT_Short),
('underline_thickness', FT_Short),
('glyph', FT_GlyphSlot),
('size', FT_Size),
('charmap', FT_Charmap),
# private
('driver', c_void_p),
('memory', c_void_p),
('stream', c_void_p),
('sizes_list_head', c_void_p),
('sizes_list_tail', c_void_p),
('autohint', FT_Generic),
('extensions', c_void_p),
('internal', c_void_p),
]
FT_Face = POINTER(FT_FaceRec)
# -----------------------------------------------------------------------------
# A simple structure used to pass more or less generic parameters to
# FT_Open_Face.
class FT_Parameter(Structure):
'''
A simple structure used to pass more or less generic parameters to
FT_Open_Face.
tag: A four-byte identification tag.
data: A pointer to the parameter data
'''
_fields_ = [
('tag', FT_ULong),
('data', FT_Pointer) ]
FT_Parameter_p = POINTER(FT_Parameter)
# -----------------------------------------------------------------------------
# A structure used to indicate how to open a new font file or stream. A pointer
# to such a structure can be used as a parameter for the functions FT_Open_Face
# and FT_Attach_Stream.
class FT_Open_Args(Structure):
'''
A structure used to indicate how to open a new font file or stream. A pointer
to such a structure can be used as a parameter for the functions FT_Open_Face
and FT_Attach_Stream.
flags: A set of bit flags indicating how to use the structure.
memory_base: The first byte of the file in memory.
memory_size: The size in bytes of the file in memory.
pathname: A pointer to an 8-bit file pathname.
stream: A handle to a source stream object.
driver: This field is exclusively used by FT_Open_Face; it simply specifies
the font driver to use to open the face. If set to 0, FreeType
tries to load the face with each one of the drivers in its list.
num_params: The number of extra parameters.
params: Extra parameters passed to the font driver when opening a new face.
'''
_fields_ = [
('flags', FT_UInt),
('memory_base', POINTER(FT_Byte)),
('memory_size', FT_Long),
('pathname', FT_String_p),
('stream', c_void_p),
('driver', c_void_p),
('num_params', FT_Int),
('params', FT_Parameter_p) ]
# -----------------------------------------------------------------------------
# A structure used to model an SFNT 'name' table entry.
class FT_SfntName(Structure):
'''
platform_id: The platform ID for 'string'.
encoding_id: The encoding ID for 'string'.
language_id: The language ID for 'string'
name_id: An identifier for 'string'
string: The 'name' string. Note that its format differs depending on the
(platform,encoding) pair. It can be a Pascal String, a UTF-16 one,
etc.
Generally speaking, the string is not zero-terminated. Please refer
to the TrueType specification for details.
string_len: The length of 'string' in bytes.
'''
_fields_ = [
('platform_id', FT_UShort),
('encoding_id', FT_UShort),
('language_id', FT_UShort),
('name_id', FT_UShort),
# this string is *not* null-terminated!
('string', POINTER(FT_Byte)),
('string_len', FT_UInt) ]
# -----------------------------------------------------------------------------
# Opaque handler to a path stroker object.
class FT_StrokerRec(Structure):
'''
Opaque handler to a path stroker object.
'''
_fields_ = [ ]
FT_Stroker = POINTER(FT_StrokerRec)
# -----------------------------------------------------------------------------
# A structure used for bitmap glyph images. This really is a 'sub-class' of
# FT_GlyphRec.
#
class FT_BitmapGlyphRec(Structure):
'''
A structure used for bitmap glyph images. This really is a 'sub-class' of
FT_GlyphRec.
'''
_fields_ = [
('root' , FT_GlyphRec),
('left', FT_Int),
('top', FT_Int),
('bitmap', FT_Bitmap)
]
FT_BitmapGlyph = POINTER(FT_BitmapGlyphRec)
| {
"content_hash": "53365e7bc5afca82e794c6f430e180d0",
"timestamp": "",
"source": "github",
"line_count": 936,
"max_line_length": 81,
"avg_line_length": 37.7724358974359,
"alnum_prop": 0.6002828454249752,
"repo_name": "pbfy0/visvis",
"id": "ec9b96fdc33ada93b8b78cc87634ba38b95471fd",
"size": "35690",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "text/freetype/ft_structs.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "158972"
},
{
"name": "C++",
"bytes": "44817"
},
{
"name": "Python",
"bytes": "1475236"
}
],
"symlink_target": ""
} |
from unittest import TestCase
from cate.util.im.cmaps import get_cmaps
class CmapsTest(TestCase):
def test_get_cmaps_returns_singleton(self):
cmaps = get_cmaps()
self.assertIs(cmaps, get_cmaps())
self.assertIs(cmaps, get_cmaps())
    def test_get_cmaps_returns_equal_size_recs(self):
cmaps = get_cmaps()
rec_len = len(cmaps[0])
self.assertEqual(rec_len, 3)
for cmap in cmaps:
self.assertEqual(len(cmap), rec_len)
def test_get_cmaps_categories(self):
cmaps = get_cmaps()
self.assertGreaterEqual(len(cmaps), 6)
self.assertEqual(cmaps[0][0], 'Perceptually Uniform Sequential')
self.assertEqual(cmaps[1][0], 'Sequential 1')
self.assertEqual(cmaps[2][0], 'Sequential 2')
self.assertEqual(cmaps[3][0], 'Diverging')
self.assertEqual(cmaps[4][0], 'Qualitative')
self.assertEqual(cmaps[5][0], 'Miscellaneous')
def test_get_cmaps_category_descr(self):
cmaps = get_cmaps()
self.assertEqual(cmaps[0][1], 'For many applications, a perceptually uniform colormap is the best choice - '
'one in which equal steps in data are perceived as equal steps in the color '
'space')
def test_get_cmaps_category_tuples(self):
cmaps = get_cmaps()
category_tuple = cmaps[0][2]
self.assertEqual(len(category_tuple), 4)
self.assertEqual(category_tuple[0][0], 'viridis')
self.assertEqual(category_tuple[0][1],
'iVBORw0KGgoAAAANSUhEUgAAAQAAAAACCAYAAAC3zQLZAAAAzklEQVR4nO2TQZLFIAhEX7dXmyPM/Y8SZwEqMcnU3/9QZTU8GszC6Ee/HQlk5FAsJIENqVGv/piZ3uqf3nX6Vtd+l8D8UwNOLhZL3+BLh796OXvMdWaqtrrqnZ/tjvuZT/0XxnN/5f25z9X7tIMTKzV7/5yrME3NHoPlUzvplgOevOcz6ZO5eCqzOmark1nHDQveHuuYaazZkTcdmE110HJu6doR3tgfPHyL51zNc0fd2xjf0vPukUPL36YBTcpcWArFyY0RTca88cYbXxt/gUOJC8yRF1kAAAAASUVORK5CYII=')
self.assertEqual(category_tuple[1][0], 'inferno')
self.assertEqual(category_tuple[2][0], 'plasma')
self.assertEqual(category_tuple[3][0], 'magma')
def main():
cmaps = get_cmaps()
html_head = '<!DOCTYPE html>\n' + \
'<html lang="en">\n' + \
'<head>' + \
'<meta charset="UTF-8">' + \
'<title>matplotlib Color Maps</title>' + \
'</head>\n' + \
'<body style="padding: 0.2em">\n'
html_body = ''
html_foot = '</body>\n' + \
'</html>\n'
for cmap_cat, cmap_desc, cmap_bars in cmaps:
html_body += ' <h2>%s</h2>\n' % cmap_cat
html_body += ' <p><i>%s</i></p>\n' % cmap_desc
        html_body += '    <table style="border: 0">\n'
for cmap_bar in cmap_bars:
cmap_name, cmap_data = cmap_bar
cmap_image = '<img src="data:image/png;base64,%s" width="100%%" height="20px"/>' % cmap_data
html_body += ' <tr><td style="width: 5em">%s:</td><td style="width: 40em">%s</td></tr>\n' % (
cmap_name, cmap_image)
html_body += ' </table>\n'
html_page = html_head + html_body + html_foot
html_filename = 'test_cmaps.html'
with open(html_filename, 'w') as fp:
fp.write(html_page)
import webbrowser
webbrowser.open_new_tab(html_filename)
if __name__ == '__main__':
main()
| {
"content_hash": "966fcce834e557ef4e40a6214513111f",
"timestamp": "",
"source": "github",
"line_count": 89,
"max_line_length": 380,
"avg_line_length": 38.10112359550562,
"alnum_prop": 0.5903863167207314,
"repo_name": "CCI-Tools/ect-core",
"id": "9cc8a462feb9bf696e16bd50923d9011ab81e5e7",
"size": "3391",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "test/util/im/test_cmaps.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "184"
},
{
"name": "Jupyter Notebook",
"bytes": "992448"
},
{
"name": "Python",
"bytes": "649945"
}
],
"symlink_target": ""
} |
"""Module to run Applescript internally with Foundation class."""
import logging
import platform
try:
import Foundation # pylint: disable=import-error,g-import-not-at-top
except ImportError:
# hack to make unit tests work
if not platform.platform().startswith('Linux-'):
raise
class Error(Exception):
"""Base error."""
class AppleScriptError(Error):
"""AppleScript error."""
class AppleScriptTimeoutError(Error):
"""AppleScript dialog timed out before user action."""
class AppleScriptRunner(object):
"""Run AppleScript without shelling out to osascript."""
# TODO(user): add a timeout for all of these dialogs
GENERIC_DIALOG = (
'tell application "Finder"\n'
' activate\n'
' set myResult to (display dialog "%s" %s )\n'
' set myReplyText to text returned of myResult\n'
' set myGaveUpState to False\n'
' set myReply to {myReplyText, myGaveUpState}\n'
'end tell\n'
)
GENERIC_TIMEOUT_DIALOG = (
'tell application "Finder"\n'
' activate\n'
' set myResult to (display dialog "%s" %s )\n'
' set myReplyText to text returned of myResult\n'
' set myGaveUpState to gave up of myResult as string\n'
' set myReply to {myReplyText, myGaveUpState}\n'
'end tell\n'
)
BUTTON_DIALOG = (
'tell application "Finder"\n'
' activate\n'
' set myResult to (display dialog "%s" %s )\n'
' set myReplyText to button returned of myResult as string\n'
' set myGaveUpState to False\n'
' set myReply to {myReplyText, myGaveUpState}\n'
'end tell\n'
)
BUTTON_TIMEOUT_DIALOG = (
'tell application "Finder"\n'
' activate\n'
' set myResult to (display dialog "%s" %s )\n'
' set myReplyText to button returned of myResult as string\n'
' set myGaveUpState to gave up of myResult as string\n'
' set myReply to {myReplyText, myGaveUpState}\n'
'end tell\n'
)
def _EscapeScriptValue(self, v):
"""Returns an script safe version of v if v is a str, or returns v."""
if type(v) in [unicode, str]:
return v.replace('\\', '\\\\').replace('"', '\\"')
else:
return v
def _IsNSAppleEventDescriptor(self, x):
"""Returns true if x is NSAppleEventDescriptor instance."""
try:
if x.__class__.__name__ == 'NSAppleEventDescriptor':
return True
except AttributeError:
pass
return False
def Execute(self, osa_script, *args):
"""Execute script with optional arguments.
Parsing the return value yourself may not be necessary, see
ExecuteAndUnpack.
Be careful to put user-supplied values into args, not osa_script, or
code injection attacks could occur.
Args:
osa_script: str, the script to run
*args: array of arguments to pass in
Returns:
NSAppleEventDescriptor instance
Raises:
      AppleScriptError: if an error occurred at the AppleScript layer
"""
if args:
safe_args = tuple([self._EscapeScriptValue(x) for x in args])
osa_script %= safe_args
logging.debug('AppleScript: %s', osa_script)
script = Foundation.NSAppleScript.initWithSource_(
Foundation.NSAppleScript.alloc(),
osa_script)
ret, err = script.executeAndReturnError_(None)
logging.debug('AppleScript return: %s, %s', ret, err)
if err:
raise AppleScriptError(err)
if not self._IsNSAppleEventDescriptor(ret):
raise AppleScriptError('expecting NSAppleEventDescriptor return')
return ret
def ExecuteAndUnpack(self, osa_script, unpack_fmt, *args):
"""Execute script with optional arguments and unpack the return values.
Be careful to put user-supplied values into args, not osa_script, or
code injection attacks could occur.
The unpack_fmt string is a str of single characters which defines each
expected return value from AppleScript. Each character can be one of:
's': unicode string
'b': boolean
'i': int
e.g. a string of 'sb' indicates that AppleScript will be returning 2
values, first a unicode string, and then a boolean. You will receive
native Python types containing the values.
Args:
osa_script: str, the script to run
unpack_fmt: str, format string to use when parsing the return values
*args: array of arguments to pass in
Returns:
list of values as parsed by format string
Raises:
      AppleScriptError: if an error occurred at the AppleScript layer
Error: if unpack_fmt has invalid format characters
"""
ret = self.Execute(osa_script, *args)
noi = ret.numberOfItems()
lf = len(unpack_fmt)
if noi != lf:
raise AppleScriptError(
'numberOfItems %d != unpack_fmt len %d' % (noi, lf))
values = []
idx = 1
for f in unpack_fmt:
d = ret.descriptorAtIndex_(idx)
if f == 's': # unicode string
values.append(unicode(d.stringValue()))
elif f == 'b': # bool
values.append(d.booleanValue())
elif f == 'i': # int32
values.append(d.int32Value())
else:
raise Error('unknown unpack_fmt char %s', f)
idx += 1
return values
def DialogGetString(
self, prompt, timeout=None, hidden=False, default=None, args=()):
"""Prompt the user for a string input via a GUI dialog.
Do not put user-supplied data into the prompt value. Use string formatting
and put the values into args.
Args:
prompt: str, the prompt to supply to the user
timeout: int, optional, number of seconds to wait before giving up
hidden: bool, optional, if true the input field is obfuscated
default: str, optional, default value to place into input field
args: list, optional, arguments to supply to Execute().
Returns:
str
Raises:
AppleScriptTimeoutError: dialog timed out before user action
"""
opts = []
if timeout is not None:
opts.append('giving up after %d' % timeout)
base_script = self.GENERIC_TIMEOUT_DIALOG
else:
base_script = self.GENERIC_DIALOG
if hidden:
opts.append('with hidden answer')
if default is not None:
opts.append('default answer "%s"' % self._EscapeScriptValue(default))
else:
opts.append('default answer ""')
osa_script = base_script % (self._EscapeScriptValue(prompt), ' '.join(opts))
# The GENERIC_*DIALOG scripts return 2 values, the button text
    # and a boolean for whether timeout occurred or not.
ret = self.ExecuteAndUnpack(osa_script, 'sb', *args)
if ret[1]:
raise AppleScriptTimeoutError(ret[0])
return ret[0]
def DialogDisplay(self, prompt, timeout=None, args=(), buttons=None):
"""Show the user a dialog with OK button.
Do not put user-supplied data into the prompt value. Use string formatting
and put the values into args.
Args:
prompt: str, the prompt to supply to the user
timeout: int, optional, number of seconds to wait before giving up
args: list, optional, arguments to supply to Execute().
buttons: list of strs, optional, default "OK", buttons to display
Returns:
str, the name of the button pressed, in this case "OK"
Raises:
AppleScriptTimeoutError: dialog timed out before user action
"""
if buttons is None:
buttons = ['OK']
opts = [
'buttons {"'
+ '","'.join([self._EscapeScriptValue(b) for b in buttons])
+ '"}',
]
if timeout is not None:
opts.append('giving up after %d' % timeout)
base_script = self.BUTTON_TIMEOUT_DIALOG
else:
base_script = self.BUTTON_DIALOG
osa_script = base_script % (self._EscapeScriptValue(prompt), ' '.join(opts))
ret = self.ExecuteAndUnpack(osa_script, 'sb', *args)
# The BUTTON_*DIALOG scripts return 2 values, the button text
    # and a boolean for whether timeout occurred or not.
if ret[1]:
raise AppleScriptTimeoutError(ret[0])
return ret[0]
# Provide module-global shortcuts for these commonly called methods.
_SINGLETON = AppleScriptRunner()
def DialogDisplay(*args, **kwargs):
return _SINGLETON.DialogDisplay(*args, **kwargs)
def DialogGetString(*args, **kwargs):
return _SINGLETON.DialogGetString(*args, **kwargs)
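# Illustrative usage sketch (an assumption, not part of the original module):
# exercising the module-level shortcuts. Guarded so importing this module never
# pops a dialog; running it directly on a Mac with the Foundation bridge will.
if __name__ == '__main__':
  try:
    answer = DialogGetString('Enter a value for %s:', timeout=30, args=('demo',))
    DialogDisplay('You entered %s', args=(answer,), buttons=['OK'])
  except AppleScriptTimeoutError:
    logging.info('dialog timed out without user input')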
| {
"content_hash": "bd05be664c7e61f0ec5ad7821c5762fd",
"timestamp": "",
"source": "github",
"line_count": 271,
"max_line_length": 80,
"avg_line_length": 30.583025830258304,
"alnum_prop": 0.653957528957529,
"repo_name": "jflinter/macops",
"id": "5c416e144f2f5495e54d6f57f4e826b9986b9c6a",
"size": "8288",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "gmacpyutil/gmacpyutil/applescript.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "3500"
},
{
"name": "M",
"bytes": "816"
},
{
"name": "Makefile",
"bytes": "2128"
},
{
"name": "Objective-C",
"bytes": "87539"
},
{
"name": "Python",
"bytes": "400525"
},
{
"name": "Ruby",
"bytes": "2545"
},
{
"name": "Shell",
"bytes": "10252"
}
],
"symlink_target": ""
} |
import pytest
# Local imports
from uplink.clients.io import interfaces, state, transitions
@pytest.fixture
def request_execution_mock(mocker):
return mocker.Mock(spec=interfaces.RequestExecution)
@pytest.fixture
def request_state_mock(mocker):
return mocker.Mock(spec=interfaces.RequestState)
class BasicStateTest(object):
def create_state(self, request):
raise NotImplementedError
@staticmethod
def _create_request_mock():
return object()
def test_prepare(self):
request = self._create_request_mock()
target = self.create_state(request)
output = target.prepare(request)
assert output == state.BeforeRequest(request)
def test_send(self):
request = self._create_request_mock()
target = self.create_state(request)
output = target.send(request)
assert output == state.SendRequest(request)
def test_fail(self):
request = self._create_request_mock()
target = self.create_state(request)
error = Exception()
output = target.fail(Exception, error, None)
assert output == state.Fail(request, Exception, error, None)
def test_finish(self):
request = self._create_request_mock()
response = object()
target = self.create_state(request)
output = target.finish(response)
assert output == state.Finish(request, response)
def test_sleep(self):
request = self._create_request_mock()
target = self.create_state(request)
output = target.sleep(10)
assert output == state.Sleep(request, 10)
def test_request_property(self):
request = self._create_request_mock()
target = self.create_state(request)
assert target.request == request
class TestBeforeRequest(BasicStateTest):
def create_state(self, request):
return state.BeforeRequest(request)
class TestAfterResponse(BasicStateTest):
def create_state(self, request):
return state.AfterResponse(request, object())
class TestAfterException(BasicStateTest):
def create_state(self, request):
return state.AfterException(request, Exception, Exception(), None)
class TestSleep(object):
def test_execute(self, request_execution_mock):
request = object()
sleep = state.Sleep(request, 10)
sleep.execute(request_execution_mock)
assert request_execution_mock.sleep.called
args, _ = request_execution_mock.sleep.call_args
callback = args[1]
assert isinstance(callback, interfaces.SleepCallback)
callback.on_success()
assert request_execution_mock.state == state.BeforeRequest(request)
error = Exception()
callback.on_failure(Exception, error, None)
assert request_execution_mock.state == state.AfterException(
request, Exception, error, None
)
class TestSendRequest(object):
def test_execute(self, request_execution_mock):
request = object()
send_request = state.SendRequest(request)
send_request.execute(request_execution_mock)
assert request_execution_mock.send.called
args, _ = request_execution_mock.send.call_args
callback = args[1]
assert isinstance(callback, interfaces.InvokeCallback)
response = object()
callback.on_success(response)
assert request_execution_mock.state == state.AfterResponse(
request, response
)
error = Exception()
callback.on_failure(Exception, error, None)
assert request_execution_mock.state == state.AfterException(
request, Exception, error, None
)
class TestFail(object):
def test_execute(self, request_execution_mock):
request, error = object(), Exception()
fail = state.Fail(request, type(error), error, None)
fail.execute(request_execution_mock)
request_execution_mock.fail.assert_called_with(Exception, error, None)
class TestFinish(object):
def test_execute(self, request_execution_mock):
request, response = object(), object()
finish = state.Finish(request, response)
finish.execute(request_execution_mock)
request_execution_mock.finish.assert_called_with(response)
def test_sleep_transition(request_state_mock):
transitions.sleep(10)(request_state_mock)
request_state_mock.sleep.assert_called_with(10)
def test_send_transition(request_state_mock):
request = object()
transitions.send(request)(request_state_mock)
request_state_mock.send.assert_called_with(request)
def test_finish_transition(request_state_mock):
response = object()
transitions.finish(response)(request_state_mock)
request_state_mock.finish.assert_called_with(response)
def test_fail_transition(request_state_mock):
error = Exception()
transitions.fail(Exception, error, None)(request_state_mock)
request_state_mock.fail.assert_called_with(Exception, error, None)
def test_prepare_transition(request_state_mock):
request = object()
transitions.prepare(request)(request_state_mock)
request_state_mock.prepare.assert_called_with(request)
| {
"content_hash": "3a99a88bbd5cb0ba6c099d95791cb345",
"timestamp": "",
"source": "github",
"line_count": 165,
"max_line_length": 78,
"avg_line_length": 31.484848484848484,
"alnum_prop": 0.6802694898941289,
"repo_name": "prkumar/uplink",
"id": "c0e6ae4888422adbb4df91f356375eeade882240",
"size": "5217",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/unit/test_io.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "348057"
}
],
"symlink_target": ""
} |
'''
Created on Aug 21, 2014
@author: moloyc
'''
import re
import os
import yaml
import platform
import datetime
import shutil
from netaddr import IPNetwork
import netifaces
import logging.config
from crypt import Cryptic
#__all__ = ['getPortNamesForDeviceFamily', 'expandPortName']
configLocation = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'conf')
TWO_STAGE_CONFIGURATOR_DEFAULT_ATTEMPT=5
TWO_STAGE_CONFIGURATOR_DEFAULT_INTERVAL=30 # in seconds
TWO_STAGE_CONFIGURATOR_DEFAULT_VCP_LLDP_DELAY=40 # in seconds
conf = None
def loadConfig(confFile = 'openclos.yaml', appName = None):
'''
Loads global configuration and creates hash 'conf'
'''
global conf
if conf:
return conf
try:
confStream = open(os.path.join(configLocation, confFile), 'r')
conf = yaml.load(confStream)
if conf is not None:
if 'dbUrl' in conf:
if 'dbDialect' in conf:
print "Warning: dbUrl and dbDialect both exist. dbDialect ignored"
# dbUrl is used by sqlite only
conf['dbUrl'] = fixSqlliteDbUrlForRelativePath(conf['dbUrl'])
elif 'dbDialect' in conf:
                db_pass = Cryptic().decrypt(conf['dbPassword'])
conf['dbUrl'] = conf['dbDialect'] + '://' + conf['dbUser'] + ':' + db_pass + '@' + conf['dbHost'] + '/' + conf['dbName']
if 'outputDir' in conf:
conf['outputDir'] = fixOutputDirForRelativePath(conf['outputDir'])
except (OSError, IOError) as e:
print "File error:", e
return None
except (yaml.scanner.ScannerError) as e:
print "YAML error:", e
confStream.close()
return None
finally:
pass
loadLoggingConfig(appName = appName)
return conf
def fixOutputDirForRelativePath(outputDir):
# /absolute-path/out
# relative-path/out
if (os.path.abspath(outputDir) != outputDir):
return os.path.join(os.path.dirname(os.path.abspath(__file__)), outputDir)
else:
return outputDir
def fixSqlliteDbUrlForRelativePath(dbUrl):
# sqlite:////absolute-path/sqllite3.db
# sqlite:///relative-path/sqllite3.db
match = re.match(r"sqlite:(\/+)(.*)\/(.*)", dbUrl)
if match is not None:
isRelative = (len(match.group(1)) == 3)
if isRelative:
relativeDir = match.group(2)
absoluteDir = os.path.join(os.path.dirname(os.path.abspath(__file__)), relativeDir)
dbUrl = 'sqlite:///' + absoluteDir + os.path.sep + match.group(3)
return dbUrl
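# Example (illustrative): a relative URL such as 'sqlite:///data/sqllite3.db'
# becomes 'sqlite:///<package-dir>/data/sqllite3.db', while an absolute URL
# like 'sqlite:////var/lib/openclos/sqllite3.db' (four slashes) is returned
# unchanged.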
def loadClosDefinition(closDefination = os.path.join(configLocation, 'closTemplate.yaml')):
'''
Loads clos definition from yaml file
'''
try:
stream = open(closDefination, 'r')
yamlStream = yaml.load(stream)
return yamlStream
except (OSError, IOError) as e:
print "File error:", e
except (yaml.scanner.ScannerError) as e:
print "YAML error:", e
stream.close()
finally:
pass
def getSupportedDeviceFamily(conf):
'''
:param dict: conf -- device family configuration in dict format, not the whole conf, conf['deviceFamily']
    :returns list: device model/family (exactly as it appears on junos)
'''
if conf is None:
raise ValueError("Missing configuration data")
return conf.keys()
def getPortNamesForDeviceFamily(deviceFamily, conf):
'''
returns all port names for a device family grouped by uplink/downlink
ex - xe-0/0/0, xe-0/0/1 ... xe-0/0/47
For some device family (qfx5100-24q-2p) there is no specific uplink/downlink,
for those it is just a list in the dict.
:param str: deviceFamily -- example qfx5100-24q-2p
:param dict: conf -- device family configuration in dict format, example in openclos.yaml
:returns dict: portNames
uplinkPorts:
downlinkPorts:
ports: list of ports that are not tagged, example qfx5100-24q-2p
'''
if conf is None:
raise ValueError("Missing configuration data")
if deviceFamily not in conf:
raise ValueError("Unknown device family: %s" % (deviceFamily))
portMapping = conf[deviceFamily]
portNames = {'uplinkPorts': [], 'downlinkPorts': [], 'ports': []}
if 'uplinkPorts' in portMapping:
portNames['uplinkPorts'] = expandPortName(portMapping['uplinkPorts'])
if 'downlinkPorts' in portMapping:
portNames['downlinkPorts'] = expandPortName(portMapping['downlinkPorts'])
if 'ports' in portMapping:
portNames['ports'] = expandPortName(portMapping['ports'])
return portNames
portNameRegx = re.compile(r"([a-z]+-\d\/\d\/\[)(\d{1,3})-(\d{1,3})(\])")
def expandPortName(portName):
'''
Expands portname regular expression to a list
ex - [xe-0/0/0, xe-0/0/1 ... xe-0/0/47]
Currently it does not expands all junos regex, only few limited
Keyword arguments:
portName -- port name in junos regular expression.
it could be a single string in format: xe-0/0/[0-10]
or it could be a list of strings where each string is in format: ['xe-0/0/[0-10]', 'et-0/0/[0-3]']
'''
if not portName or portName == '':
return []
portList = []
if isinstance(portName, list) == True:
portList = portName
else:
portList.append(portName)
portNames = []
for port in portList:
match = portNameRegx.match(port)
if match is None:
raise ValueError("Port name regular expression is not formatted properly: %s, example: xe-0/0/[0-10]" % (port))
preRegx = match.group(1) # group index starts with 1, NOT 0
postRegx = match.group(4)
startNum = int(match.group(2))
endNum = int(match.group(3))
for id in range(startNum, endNum + 1):
portNames.append(preRegx[:-1] + str(id) + postRegx[1:])
return portNames
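# Example (illustrative): expandPortName('xe-0/0/[0-3]') returns
# ['xe-0/0/0', 'xe-0/0/1', 'xe-0/0/2', 'xe-0/0/3']; a list argument such as
# ['et-0/0/[48-49]', 'xe-0/0/[0-1]'] expands each member in order into one
# flat list.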
def isPlatformUbuntu():
#return 'ubuntu' in platform.platform().lower()
result = os.popen("grep -i ubuntu /etc/*-release").read()
return result is not None and len(result) > 0
def isPlatformCentos():
#return 'centos' in platform.platform().lower()
result = os.popen("grep -i centos /etc/*-release").read()
return result is not None and len(result) > 0
def isPlatformWindows():
return 'windows' in platform.platform().lower()
def backupDatabase(conf):
if conf is not None and 'dbUrl' in conf:
match = re.match(r"sqlite:\/\/\/(.*)", conf['dbUrl'])
if match is not None:
dbFileName = match.group(1)
if dbFileName != '':
timestamp = datetime.datetime.now().strftime('%Y%m%d-%H%M%S')
backupDbFileName = dbFileName + '.' + timestamp
shutil.copyfile(dbFileName, backupDbFileName)
def getMgmtIps(prefix, startingIP, mask, count):
'''
returns list of management IP for given number of devices
Keyword arguments:
prefix -- ip prefix, example 1.2.3.4/24
count -- number of devices
'''
mgmtIps = []
cidr = None
if startingIP is not None and mask is not None:
cidr = startingIP + '/' + str(mask)
else:
cidr = prefix
if cidr is not None:
ipNetwork = IPNetwork(cidr)
ipNetworkList = list(ipNetwork)
start = ipNetworkList.index(ipNetwork.ip)
end = start + count
ipList = ipNetworkList[start:end]
for ip in ipList:
mgmtIps.append(str(ip) + '/' + str(ipNetwork.prefixlen))
return mgmtIps
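# Example (illustrative): getMgmtIps('192.168.2.10/24', None, None, 3) returns
# ['192.168.2.10/24', '192.168.2.11/24', '192.168.2.12/24']; passing
# startingIP='192.168.2.10' with mask=24 instead of a prefix gives the same
# result.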
def getMgmtIpsForLeaf():
return []
def isZtpStaged(conf):
if conf is not None and conf.get('deploymentMode') is not None:
return conf['deploymentMode'].get('ztpStaged', False)
return False
def getZtpStagedInterval(conf):
if isZtpStaged(conf) == True:
return conf['deploymentMode'].get('ztpStagedInterval', TWO_STAGE_CONFIGURATOR_DEFAULT_INTERVAL)
else:
return None
def getZtpStagedAttempt(conf):
if isZtpStaged(conf) == True:
return conf['deploymentMode'].get('ztpStagedAttempt', TWO_STAGE_CONFIGURATOR_DEFAULT_ATTEMPT)
else:
return None
def getTwoStageConfigurationCallback(conf):
if isZtpStaged(conf) == True:
return conf.get('twoStageConfigurationCallback')
else:
return None
def getVcpLldpDelay(conf):
if isZtpStaged(conf) == True:
return conf['deploymentMode'].get('ztpVcpLldpDelay', TWO_STAGE_CONFIGURATOR_DEFAULT_VCP_LLDP_DELAY)
else:
return None
def enumerateRoutableIpv4Addresses():
addrs = []
intfs = netifaces.interfaces()
for intf in intfs:
if intf != 'lo':
addrDict = netifaces.ifaddresses(intf)
ipv4AddrInfoList = addrDict.get(netifaces.AF_INET)
if ipv4AddrInfoList is not None:
for ipv4AddrInfo in ipv4AddrInfoList:
addrs.append(ipv4AddrInfo['addr'])
return addrs
def loadLoggingConfig(logConfFile = 'logging.yaml', appName = None):
logConf = getLoggingHandlers(logConfFile, appName)
if logConf is not None:
logging.config.dictConfig(logConf)
def getLoggingHandlers(logConfFile = 'logging.yaml', appName = None):
'''
    Loads logging configuration and returns hash 'logConf'
'''
try:
logConfStream = open(os.path.join(configLocation, logConfFile), 'r')
logConf = yaml.load(logConfStream)
if logConf is not None:
handlers = logConf.get('handlers')
if handlers is not None:
if appName is None:
removeLoggingHandler('file', logConf)
for handlerName, handlerDict in handlers.items():
filename = handlerDict.get('filename')
if filename is not None:
filename = filename.replace('%(appName)', appName)
handlerDict['filename'] = filename
return logConf
except (OSError, IOError) as e:
print "File error:", e
except (yaml.scanner.ScannerError) as e:
print "YAML error:", e
finally:
logConfStream.close()
def removeLoggingHandler(name, logConf):
for key, logger in logConf['loggers'].iteritems():
logger['handlers'].remove(name)
logConf['handlers'].pop(name)
def getImageNameForDevice(pod, device):
if device.role == 'spine':
return pod.spineJunosImage
elif device.role == 'leaf':
for leafSetting in pod.leafSettings:
if leafSetting.deviceFamily == device.family:
return leafSetting.junosImage
return None
def isSqliteUsed(conf):
return 'sqlite' in conf.get('dbUrl')
fpcPicPortRegx = re.compile(r"[a-z]+-(\d)\/(\d)\/(\d{1,3})\.?(\d{0,2})")
fakeNameRegx = re.compile(r"uplink-(\d{1,3})\.?(\d{0,2})")
otherPortRegx = re.compile(r"[0-9A-Za-z]+\.?(\d{0,2})")
def interfaceNameToUniqueSequenceNumber(interfaceName):
'''
:param str: name, examples:
IFD: et-0/0/1, et-0/0/0, et-0/0/101, lo0, irb, vme
IFL: et-0/0/1.0, et-0/0/0.0, et-0/0/0.99, lo0.0
IFD with fake name: uplink-0, uplink-1
IFL with fake name: uplink-0.0, uplink-1.0, uplink-1.99
'''
if interfaceName is None or interfaceName == '':
return None
match = fpcPicPortRegx.match(interfaceName)
if match is not None:
fpc = match.group(1)
pic = match.group(2)
port = match.group(3)
unit = match.group(4)
if not unit:
unit = 0
sequenceNum = 10000 * int(fpc) + 1000 * int(pic) + int(port)
if unit != 0:
sequenceNum = 10000000 + 100 * sequenceNum + int(unit)
return sequenceNum
match = fakeNameRegx.match(interfaceName)
if match is not None:
port = match.group(1)
unit = match.group(2)
if not unit:
unit = 0
sequenceNum = 20000000 + int(port)
if unit != 0:
sequenceNum = 21000000 + 100 * int(port) + int(unit)
return sequenceNum
match = otherPortRegx.match(interfaceName)
if match is not None:
return int(interfaceName.encode('hex'), 16)
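# Example (illustrative): 'et-1/0/5' maps to 10005 (10000*fpc + 1000*pic +
# port), its logical unit 'et-1/0/5.1' lands in the 10000000+ range, fake
# uplink names such as 'uplink-3' land in the 20000000+ range, and anything
# else (e.g. 'lo0') falls back to the hex encoding of the name.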
def getOutFolderPath(conf, ipFabric):
if 'outputDir' in conf:
outputDir = os.path.join(conf['outputDir'], ipFabric.id+'-'+ipFabric.name)
else:
outputDir = os.path.join('out', ipFabric.id+'-'+ipFabric.name)
return outputDir
def createOutFolder(conf, ipFabric):
path = getOutFolderPath(conf, ipFabric)
if not os.path.exists(path):
os.makedirs(path)
return path
def deleteOutFolder(conf, ipFabric):
path = getOutFolderPath(conf, ipFabric)
shutil.rmtree(path, ignore_errors=True)
def getDbUrl():
if conf is None:
raise ValueError('Configuration is not loaded using "util.loadConfig"')
elif conf.get('dbUrl') is None or conf.get('dbUrl') == '':
raise ValueError('DB Url is empty')
return conf['dbUrl']
def stripNetmaskFromIpString(ipString):
pos = ipString.find('/')
if pos != -1:
return ipString[:pos]
else:
return ipString
def stripPlusSignFromIpString(ipString):
pos = ipString.find('+')
if pos != -1:
return ipString[:pos]
else:
return ipString
| {
"content_hash": "0005edc5adb416fcf19c0170c3e1e948",
"timestamp": "",
"source": "github",
"line_count": 420,
"max_line_length": 137,
"avg_line_length": 32.333333333333336,
"alnum_prop": 0.6092047128129602,
"repo_name": "plucena24/OpenClos",
"id": "26fbd0426bea54a811c77685110af3569dbd442d",
"size": "13580",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "jnpr/openclos/util.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "400851"
},
{
"name": "Shell",
"bytes": "211"
}
],
"symlink_target": ""
} |
import mock
import six
from rally import osclients
from rally.plugins.openstack.scenarios.fuel import utils
from tests.unit import test
UTILS = "rally.plugins.openstack.scenarios.fuel.utils."
class ModuleTestCase(test.TestCase):
@mock.patch(UTILS + "six")
@mock.patch(UTILS + "FuelClient", return_value="fuel_client")
def test_fuel(self, mock_fuel_client, mock_six):
mock_six.moves.urllib.parse.urlparse().hostname = "foo_host"
clients_ins = mock.Mock(endpoint=mock.Mock(username="foo_user",
password="foo_pass"))
client = utils.fuel(clients_ins)
mock_fuel_client.assert_called_once_with(
version="v1", server_address="foo_host", server_port=8000,
username="foo_user", password="foo_pass")
self.assertEqual("fuel_client", client)
def test_fuel_is_registered(self):
six.moves.reload_module(osclients)
self.assertFalse(hasattr(osclients.Clients, "fuel"))
six.moves.reload_module(utils)
self.assertTrue(hasattr(osclients.Clients, "fuel"))
# NOTE(amaretskiy): Now we can finally mock utils.FuelClient,
# since `reload_module' above destroys mocks
with mock.patch(UTILS + "FuelClient",
mock.Mock(return_value="fuel_client")):
with mock.patch(UTILS + "six"):
clients = osclients.Clients(mock.Mock())
self.assertEqual("fuel_client", clients.fuel())
class FuelEnvTestCase(test.TestCase):
def test___init__(self):
env = utils.FuelEnvManager("some_client")
self.assertEqual("some_client", env.client)
def test_get(self):
client = mock.Mock()
fenv = utils.FuelEnvManager(client)
result = fenv.get("some_id")
client.get_by_id.assert_called_once_with("some_id")
self.assertEqual(result, client.get_by_id("some_id"))
client.get_by_id.side_effect = BaseException
self.assertIsNone(fenv.get("some_id"))
def test_list(self):
client = mock.Mock()
envs = [
{"name": "one"},
{"name": "two"},
{"name": "three"}]
client.get_all.return_value = envs
fenv = utils.FuelEnvManager(client)
self.assertEqual(envs, fenv.list())
def test_list_exception(self):
client = mock.Mock()
client.get_all = mock.Mock(side_effect=SystemExit)
fenv = utils.FuelEnvManager(client)
self.assertRaises(RuntimeError, fenv.list)
def test_create(self):
client = mock.Mock()
client.create.return_value = "env"
fenv = utils.FuelEnvManager(client)
kwargs = {"release_id": 42, "network_provider": "testprov",
"deployment_mode": "some_mode", "net_segment_type": "bar"}
self.assertEqual("env", fenv.create("some_env", **kwargs))
client.create.assert_called_once_with("some_env", 42, "testprov",
"some_mode", "bar")
client.create.side_effect = SystemExit
self.assertRaises(RuntimeError, fenv.create, "some_env", **kwargs)
def test_create_env_not_returned(self):
client = mock.Mock()
client.create.return_value = None
kwargs = {"release_id": 42, "network_provider": "testprov",
"deployment_mode": "some_mode", "net_segment_type": "bar"}
fenv = utils.FuelEnvManager(client)
self.assertRaises(RuntimeError, fenv.create, "some_env", **kwargs)
@mock.patch(UTILS + "scenario.OpenStackScenario")
def test_delete(self, mock_open_stack_scenario):
mock_open_stack_scenario.RESOURCE_NAME_PREFIX = ""
envs = [{"id": "some_one", "name": "one"}]
client = mock.Mock()
client.get_all.return_value = envs
client.delete_by_id.side_effect = SystemExit
fenv = utils.FuelEnvManager(client)
self.assertRaises(RuntimeError, fenv.delete, "some_one", retries=2)
self.assertEqual(3, len(client.delete_by_id.mock_calls))
@mock.patch(UTILS + "scenario.OpenStackScenario")
def test_delete_error(self, mock_open_stack_scenario):
mock_open_stack_scenario.RESOURCE_NAME_PREFIX = ""
envs = [{"id": "some_one", "name": "one"}]
client = mock.Mock()
client.delete_by_id.side_effect = SystemExit
client.get_all.return_value = envs
fenv = utils.FuelEnvManager(client)
self.assertRaises(RuntimeError, fenv.delete, "some_one", retries=1)
self.assertEqual(2, len(client.delete_by_id.mock_calls))
class FuelClientTestCase(test.TestCase):
@mock.patch(UTILS + "FuelEnvManager")
@mock.patch(UTILS + "os")
def test___init__(self, mock_os, mock_fuel_env_manager):
mock_os.environ = {}
mock_fuelclient = mock.Mock(get_client=lambda *args, **kw: [args, kw])
with mock.patch.dict("sys.modules", {"fuelclient": mock_fuelclient}):
client = utils.FuelClient(version="foo_version",
server_address="foo_address",
server_port=1234,
username="foo_user",
password="foo_pass")
expected_environ = {"KEYSTONE_PASS": "foo_pass",
"KEYSTONE_USER": "foo_user",
"LISTEN_PORT": "1234",
"SERVER_ADDRESS": "foo_address"}
self.assertEqual(expected_environ, mock_os.environ)
self.assertEqual(mock_fuel_env_manager.return_value,
client.environment)
self.assertEqual([("node",), {"version": "foo_version"}],
client.node)
self.assertEqual([("task",), {"version": "foo_version"}],
client.task)
mock_fuel_env_manager.assert_called_once_with(
[("environment",),
{"version": "foo_version"}])
class FuelScenarioTestCase(test.ScenarioTestCase):
def test__list_environments(self):
self.admin_clients("fuel").environment.list.return_value = [
{"name": "some_name1"}, {"name": "rally_name2"}]
scenario = utils.FuelScenario(self.context)
self.assertEqual([{"name": "rally_name2"}],
scenario._list_environments())
self.admin_clients("fuel").environment.list.assert_called_once_with()
self._test_atomic_action_timer(scenario.atomic_actions(),
"fuel.list_environments")
def test__create_environment(self):
self.admin_clients("fuel").environment.create.return_value = {"id": 42}
fuel_scenario = utils.FuelScenario()
fuel_scenario.admin_clients = self.admin_clients
fuel_scenario._generate_random_name = mock.Mock(
return_value="random_name")
result = fuel_scenario._create_environment()
self.assertEqual(
self.admin_clients("fuel").environment.create.return_value["id"],
result)
fuel_scenario._generate_random_name.assert_called_once_with(
prefix=fuel_scenario.RESOURCE_NAME_PREFIX)
tmp_mck = self.admin_clients("fuel").environment.create
tmp_mck.assert_called_once_with(
fuel_scenario._generate_random_name.return_value, 1, "neutron",
"ha_compact", "vlan")
def test__delete_environment(self):
fuel_scenario = utils.FuelScenario()
fuel_scenario.admin_clients = self.admin_clients
fuel_scenario._delete_environment(42, 33)
tmp_mock = fuel_scenario.admin_clients("fuel")
tmp_mock.environment.delete.assert_called_once_with(42, 33)
| {
"content_hash": "aa8984204d608b1e922d7ebb709b20ab",
"timestamp": "",
"source": "github",
"line_count": 182,
"max_line_length": 79,
"avg_line_length": 42.87362637362637,
"alnum_prop": 0.5918236575676022,
"repo_name": "aplanas/rally",
"id": "7a2ad4be724e8fe9df813ba3920f72d31ed17f03",
"size": "8401",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "tests/unit/plugins/openstack/scenarios/fuel/test_utils.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Mako",
"bytes": "48167"
},
{
"name": "Python",
"bytes": "2620059"
},
{
"name": "Shell",
"bytes": "43889"
}
],
"symlink_target": ""
} |
"""
This module tests the test API. These are high-level integration tests.
"""
import os
import pytest
from conda_build import api
from .utils import metadata_dir
@pytest.mark.sanity
def test_recipe_test(testing_workdir, testing_config):
"""Test calling conda build -t <recipe dir>"""
recipe = os.path.join(metadata_dir, 'has_prefix_files')
metadata = api.render(recipe, config=testing_config)[0][0]
api.build(metadata, notest=True, anaconda_upload=False)
api.test(recipe, config=metadata.config)
@pytest.mark.sanity
def test_package_test(testing_workdir, testing_config):
"""Test calling conda build -t <package file> - rather than <recipe dir>"""
recipe = os.path.join(metadata_dir, 'has_prefix_files')
metadata = api.render(recipe, config=testing_config)[0][0]
outputs = api.build(metadata, notest=True, anaconda_upload=False)
api.test(outputs[0], config=metadata.config)
def test_package_test_without_recipe_in_package(testing_workdir, testing_metadata):
"""Can't test packages after building if recipe is not included. Not enough info to go on."""
testing_metadata.config.include_recipe = False
output = api.build(testing_metadata, notest=True, copy_test_source_files=True)[0]
api.test(output, config=testing_metadata.config)
def test_package_with_jinja2_does_not_redownload_source(testing_workdir, testing_config, mocker):
recipe = os.path.join(metadata_dir, 'jinja2_build_str')
metadata = api.render(recipe, config=testing_config, dirty=True)[0][0]
outputs = api.build(metadata, notest=True, anaconda_upload=False)
# this recipe uses jinja2, which should trigger source download, except that source download
# will have already happened in the build stage.
# https://github.com/conda/conda-build/issues/1451
provide = mocker.patch('conda_build.source.provide')
api.test(outputs[0], config=metadata.config)
assert not provide.called
@pytest.mark.sanity
def test_api_extra_dep(testing_metadata):
testing_metadata.meta['test']['imports'] = ['click']
output = api.build(testing_metadata, notest=True, anaconda_upload=False)[0]
# extra_deps will add it in
api.test(output, config=testing_metadata.config, extra_deps=['click'])
# missing click dep will fail tests
with pytest.raises(SystemExit):
api.test(output, config=testing_metadata.config)
| {
"content_hash": "778dc783a3a7ded28924ccae989bb157",
"timestamp": "",
"source": "github",
"line_count": 60,
"max_line_length": 98,
"avg_line_length": 39.85,
"alnum_prop": 0.7252195734002509,
"repo_name": "pelson/conda-build",
"id": "22f7b789a47c33ea3ae305603724c78f9c17abd7",
"size": "2391",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "tests/test_api_test.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
import atexit
import logging
import os
import cherrypy
import ifcfg
import requests
from django.conf import settings
from kolibri.content.utils import paths
from .system import kill_pid, pid_exists
logger = logging.getLogger(__name__)
# Status codes for kolibri
STATUS_RUNNING = 0
STATUS_STOPPED = 1
STATUS_STARTING_UP = 4
STATUS_NOT_RESPONDING = 5
STATUS_FAILED_TO_START = 6
STATUS_UNCLEAN_SHUTDOWN = 7
STATUS_UNKNOWN_INSTANCE = 8
STATUS_SERVER_CONFIGURATION_ERROR = 9
STATUS_PID_FILE_READ_ERROR = 99
STATUS_PID_FILE_INVALID = 100
STATUS_UNKNOWN = 101
# Used to store PID and port number (both in foreground and daemon mode)
PID_FILE = os.path.join(os.environ['KOLIBRI_HOME'], "server.pid")
# Used to PID, port during certain exclusive startup process, before we fork
# to daemon mode
STARTUP_LOCK = os.path.join(os.environ['KOLIBRI_HOME'], "server.lock")
# This is a special file with daemon activity. It logs ALL stderr output, some
# might not have made it to the log file!
DAEMON_LOG = os.path.join(os.environ['KOLIBRI_HOME'], "server.log")
# Currently non-configurable until we know how to properly handle this
LISTEN_ADDRESS = "0.0.0.0"
class NotRunning(Exception):
"""
Raised when server was expected to run, but didn't. Contains a status
code explaining why.
"""
def __init__(self, status_code):
self.status_code = status_code
super(NotRunning, self).__init__()
def start(port=8080):
"""
Starts the server.
:param: port: Port number (default: 8080)
"""
# Write the new PID
with open(PID_FILE, 'w') as f:
f.write("%d\n%d" % (os.getpid(), port))
# This should be run every time the server is started for now.
# Events to trigger it are hard, because of copying a content folder into
# ~/.kolibri, or deleting a channel DB on disk
from kolibri.content.utils.annotation import update_channel_metadata_cache
update_channel_metadata_cache()
def rm_pid_file():
os.unlink(PID_FILE)
atexit.register(rm_pid_file)
run_server(port=port)
def stop(pid=None, force=False):
"""
Stops the Kolibri server, either from a given PID or by reading the PID file
:param pid: process ID of the server to kill
:param force: if True, read the PID from the PID file before killing
:raises: NotRunning
"""
if not force:
# Kill the Kolibri server
kill_pid(pid)
else:
try:
pid, __ = _read_pid_file(PID_FILE)
kill_pid(pid)
except ValueError:
logger.error("Could not find PID in .pid file\n")
except OSError:
logger.error("Could not read .pid file\n")
# TODO: Check that server has in fact been killed, otherwise we should
# raise an error...
# Finally, remove the PID file
os.unlink(PID_FILE)
def run_server(port):
# Mount the application
from kolibri.deployment.default.wsgi import application
cherrypy.tree.graft(application, "/")
cherrypy.config.update({"environment": "production"})
serve_static_dir(settings.STATIC_ROOT, settings.STATIC_URL)
serve_static_dir(settings.CONTENT_DATABASE_DIR,
paths.get_content_database_url("/"))
serve_static_dir(settings.CONTENT_STORAGE_DIR,
paths.get_content_storage_url("/"))
# Unsubscribe the default server
cherrypy.server.unsubscribe()
# Instantiate a new server object
server = cherrypy._cpserver.Server()
# Configure the server
server.socket_host = LISTEN_ADDRESS
server.socket_port = port
server.thread_pool = 30
# Subscribe this server
server.subscribe()
# Start the server engine (Option 1 *and* 2)
cherrypy.engine.start()
cherrypy.engine.block()
def serve_static_dir(root, url):
static_handler = cherrypy.tools.staticdir.handler(
section="/",
dir=os.path.split(root)[1],
root=os.path.abspath(os.path.split(root)[0]))
cherrypy.tree.mount(static_handler, url)
def _read_pid_file(filename):
"""
Reads a pid file and returns the contents. PID files have 1 or 2 lines;
- first line is always the pid
- optional second line is the port the server is listening on.
:param filename: Path of PID to read
:return: (pid, port): with the PID in the file and the port number
if it exists. If the port number doesn't exist, then
port is None.
"""
pid_file_lines = open(filename, "r").readlines()
if len(pid_file_lines) == 2:
pid, port = pid_file_lines
pid, port = int(pid), int(port)
elif len(pid_file_lines) == 1:
# The file only had one line
pid, port = int(pid_file_lines[0]), None
else:
raise ValueError("PID file must have one or two lines")
return pid, port
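# Illustrative sketch (not part of the original module): a two-line PID file
# containing "1234" and "8080" (as written by _write_pid_file below) is read
# back as (1234, 8080); a single-line file containing only "1234" yields
# (1234, None). The PID and port values here are hypothetical.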
def _write_pid_file(filename, port):
"""
Writes a PID file in the format Kolibri parses
:param: filename: Path of file to write
:param: port: Listening port number which the server is assigned
"""
with open(filename, 'w') as f:
f.write("%d\n%d" % (os.getpid(), port))
def get_status(): # noqa: max-complexity=16
"""
Tries to get the PID of a running server.
The behavior is also quite redundant given that `kolibri start` should
always create a PID file, and if it's been started directly with the
runserver command, then it's up to the developer to know what's happening.
:returns: (PID, address, port), where address is not currently detected in
a valid way because it's not configurable, and we might be
listening on several IPs.
:raises: NotRunning
"""
# There is no PID file (created by server daemon)
if not os.path.isfile(PID_FILE):
# Is there a startup lock?
if os.path.isfile(STARTUP_LOCK):
try:
pid, port = _read_pid_file(STARTUP_LOCK)
# Does the PID in there still exist?
if pid_exists(pid):
raise NotRunning(STATUS_STARTING_UP)
# It's dead so assuming the startup went badly
else:
raise NotRunning(STATUS_FAILED_TO_START)
# Couldn't parse to int or empty PID file
except (TypeError, ValueError):
raise NotRunning(STATUS_STOPPED)
raise NotRunning(STATUS_STOPPED) # Stopped
# PID file exists, check if it is running
try:
pid, port = _read_pid_file(PID_FILE)
except (ValueError, OSError):
raise NotRunning(STATUS_PID_FILE_INVALID) # Invalid PID file
# PID file exists, but process is dead
if pid is None or not pid_exists(pid):
if os.path.isfile(STARTUP_LOCK):
raise NotRunning(STATUS_FAILED_TO_START) # Failed to start
raise NotRunning(STATUS_UNCLEAN_SHUTDOWN) # Unclean shutdown
listen_port = port
try:
# Timeout is 3 seconds, we don't want the status command to be slow
# TODO: Using 127.0.0.1 is a hardcode default from KA Lite, it could
# be configurable
# TODO: HTTP might not be the protocol if server has SSL
response = requests.get(
"http://{}:{}".format("127.0.0.1", listen_port), timeout=3)
except (requests.exceptions.ReadTimeout,
requests.exceptions.ConnectionError):
raise NotRunning(STATUS_NOT_RESPONDING)
except (requests.exceptions.RequestException):
if os.path.isfile(STARTUP_LOCK):
raise NotRunning(STATUS_STARTING_UP) # Starting up
raise NotRunning(STATUS_UNCLEAN_SHUTDOWN)
if response.status_code == 404:
raise NotRunning(STATUS_UNKNOWN_INSTANCE) # Unknown HTTP server
if response.status_code != 200:
# Probably a mis-configured kolibri
raise NotRunning(STATUS_SERVER_CONFIGURATION_ERROR)
return pid, LISTEN_ADDRESS, listen_port # Correct PID !
# We don't detect this at present:
# Could be detected because we fetch the PID directly via HTTP, but this
# is dumb because kolibri could be running in a worker pool with different
# PID from the PID file..
# raise NotRunning(STATUS_UNKNOWN_INSTANCE)
# This would be the fallback when we know it's not running, but we can't
# give a proper reason...
# raise NotRunning(STATUS_UNKNOWN)
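# Illustrative sketch (not part of the original module): typical use of
# get_status(), mirroring how get_urls() below consumes it.
# try:
#     pid, address, port = get_status()
#     logger.info("Kolibri is running as PID %d on %s:%d" % (pid, address, port))
# except NotRunning as e:
#     logger.info("Kolibri is not running, status code: %d" % e.status_code)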
def get_urls(listen_port=None):
"""
:param listen_port: if set, will not try to determine the listen port from
other running instances.
"""
try:
if listen_port:
port = listen_port
else:
__, __, port = get_status()
urls = []
interfaces = ifcfg.interfaces()
for interface in filter(lambda i: i['inet'], interfaces.values()):
urls.append("http://{}:{}/".format(interface['inet'], port))
return STATUS_RUNNING, urls
except NotRunning as e:
return e.status_code, []
| {
"content_hash": "6e4875e381ac34b3277a0779fff629de",
"timestamp": "",
"source": "github",
"line_count": 279,
"max_line_length": 78,
"avg_line_length": 32.007168458781365,
"alnum_prop": 0.6427771556550952,
"repo_name": "MingDai/kolibri",
"id": "436c5745886658692cd6a2df1ff5cce4e8844ec1",
"size": "8930",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "kolibri/utils/server.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "27623"
},
{
"name": "HTML",
"bytes": "4014"
},
{
"name": "JavaScript",
"bytes": "511003"
},
{
"name": "Makefile",
"bytes": "3914"
},
{
"name": "Python",
"bytes": "716654"
},
{
"name": "Shell",
"bytes": "10357"
},
{
"name": "Vue",
"bytes": "494571"
}
],
"symlink_target": ""
} |
"""Class to hold all alarm control panel accessories."""
import logging
from pyhap.const import CATEGORY_ALARM_SYSTEM
from homeassistant.components.alarm_control_panel import DOMAIN
from homeassistant.const import (
ATTR_CODE,
ATTR_ENTITY_ID,
SERVICE_ALARM_ARM_AWAY,
SERVICE_ALARM_ARM_HOME,
SERVICE_ALARM_ARM_NIGHT,
SERVICE_ALARM_DISARM,
STATE_ALARM_ARMED_AWAY,
STATE_ALARM_ARMED_HOME,
STATE_ALARM_ARMED_NIGHT,
STATE_ALARM_DISARMED,
STATE_ALARM_TRIGGERED,
)
from . import TYPES
from .accessories import HomeAccessory
from .const import (
CHAR_CURRENT_SECURITY_STATE,
CHAR_TARGET_SECURITY_STATE,
SERV_SECURITY_SYSTEM,
)
_LOGGER = logging.getLogger(__name__)
HASS_TO_HOMEKIT = {
STATE_ALARM_ARMED_HOME: 0,
STATE_ALARM_ARMED_AWAY: 1,
STATE_ALARM_ARMED_NIGHT: 2,
STATE_ALARM_DISARMED: 3,
STATE_ALARM_TRIGGERED: 4,
}
HOMEKIT_TO_HASS = {c: s for s, c in HASS_TO_HOMEKIT.items()}
STATE_TO_SERVICE = {
STATE_ALARM_ARMED_AWAY: SERVICE_ALARM_ARM_AWAY,
STATE_ALARM_ARMED_HOME: SERVICE_ALARM_ARM_HOME,
STATE_ALARM_ARMED_NIGHT: SERVICE_ALARM_ARM_NIGHT,
STATE_ALARM_DISARMED: SERVICE_ALARM_DISARM,
}
@TYPES.register("SecuritySystem")
class SecuritySystem(HomeAccessory):
"""Generate an SecuritySystem accessory for an alarm control panel."""
def __init__(self, *args):
"""Initialize a SecuritySystem accessory object."""
super().__init__(*args, category=CATEGORY_ALARM_SYSTEM)
self._alarm_code = self.config.get(ATTR_CODE)
self._flag_state = False
serv_alarm = self.add_preload_service(SERV_SECURITY_SYSTEM)
self.char_current_state = serv_alarm.configure_char(
CHAR_CURRENT_SECURITY_STATE, value=3
)
self.char_target_state = serv_alarm.configure_char(
CHAR_TARGET_SECURITY_STATE, value=3, setter_callback=self.set_security_state
)
def set_security_state(self, value):
"""Move security state to value if call came from HomeKit."""
_LOGGER.debug("%s: Set security state to %d", self.entity_id, value)
self._flag_state = True
hass_value = HOMEKIT_TO_HASS[value]
service = STATE_TO_SERVICE[hass_value]
params = {ATTR_ENTITY_ID: self.entity_id}
if self._alarm_code:
params[ATTR_CODE] = self._alarm_code
self.call_service(DOMAIN, service, params)
def update_state(self, new_state):
"""Update security state after state changed."""
hass_state = new_state.state
if hass_state in HASS_TO_HOMEKIT:
current_security_state = HASS_TO_HOMEKIT[hass_state]
self.char_current_state.set_value(current_security_state)
_LOGGER.debug(
"%s: Updated current state to %s (%d)",
self.entity_id,
hass_state,
current_security_state,
)
# SecuritySystemTargetState does not support triggered
if not self._flag_state and hass_state != STATE_ALARM_TRIGGERED:
self.char_target_state.set_value(current_security_state)
self._flag_state = False
| {
"content_hash": "029a9eb53f9874fa1d7852c0f2912373",
"timestamp": "",
"source": "github",
"line_count": 95,
"max_line_length": 88,
"avg_line_length": 33.463157894736845,
"alnum_prop": 0.6502044668134633,
"repo_name": "postlund/home-assistant",
"id": "345709eb7daac0a17e2a619eca426a0af743a33c",
"size": "3179",
"binary": false,
"copies": "7",
"ref": "refs/heads/dev",
"path": "homeassistant/components/homekit/type_security_systems.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "20215859"
},
{
"name": "Shell",
"bytes": "6663"
}
],
"symlink_target": ""
} |
"""Test max width of table in a terminal without wrapping."""
from textwrap import dedent
import pytest
from terminaltables import AsciiTable, UnixTable
from terminaltables.terminal_io import terminal_size
def test_terminal_width_height():
"""Test terminal width/height functions."""
assert (80, 24) == terminal_size()
@pytest.mark.parametrize('cls', [AsciiTable, UnixTable])
def test_empty(cls):
"""Test on empty tables."""
table = cls([])
with pytest.raises(IndexError):
table.column_max_width(0)
with pytest.raises(IndexError):
table.column_max_width(1)
table = cls([[]])
with pytest.raises(IndexError):
table.column_max_width(0)
with pytest.raises(IndexError):
table.column_max_width(1)
table = cls([['']])
assert 76 == table.column_max_width(0)
with pytest.raises(IndexError):
table.column_max_width(1)
table = cls([[' ']])
assert 76 == table.column_max_width(0)
with pytest.raises(IndexError):
table.column_max_width(1)
@pytest.mark.parametrize('cls', [AsciiTable, UnixTable])
def test_simple(cls):
"""Test on simple tables."""
table_data = [
['Name', 'Color', 'Type'],
['Avocado', 'green', 'nut'],
['Tomato', 'red', 'fruit'],
['Lettuce', 'green', 'vegetable'],
]
table = cls(table_data) # '| Lettuce | green | vegetable |'
assert 56 == table.column_max_width(0)
assert 54 == table.column_max_width(1)
assert 58 == table.column_max_width(2)
table_data.append(['Watermelon', 'green', 'fruit'])
assert 56 == table.column_max_width(0)
assert 51 == table.column_max_width(1)
assert 55 == table.column_max_width(2)
@pytest.mark.parametrize('cls', [AsciiTable, UnixTable])
def test_attributes(cls):
"""Test different table attributes."""
table_data = [
['Name', 'Color', 'Type'],
['Avocado', 'green', 'nut'],
['Tomato', 'red', 'fruit'],
['Lettuce', 'green', 'vegetable'],
]
table = cls(table_data) # '| Lettuce | green | vegetable |'
table.outer_border = False
assert 58 == table.column_max_width(0)
assert 56 == table.column_max_width(1)
assert 60 == table.column_max_width(2)
table.outer_border = True
table.inner_column_border = False
assert 58 == table.column_max_width(0)
assert 56 == table.column_max_width(1)
assert 60 == table.column_max_width(2)
table.outer_border = False
assert 60 == table.column_max_width(0)
assert 58 == table.column_max_width(1)
assert 62 == table.column_max_width(2)
table.outer_border = True
table.inner_column_border = True
table.padding_left = 0
assert 59 == table.column_max_width(0)
assert 57 == table.column_max_width(1)
assert 61 == table.column_max_width(2)
table.padding_right = 5
assert 47 == table.column_max_width(0)
assert 45 == table.column_max_width(1)
assert 49 == table.column_max_width(2)
@pytest.mark.parametrize('cls', [AsciiTable, UnixTable])
def test_multi_line(monkeypatch, cls):
"""Test multi-line tables."""
table_data = [
['Show', 'Characters'],
['Rugrats', dedent('Tommy Pickles, Chuckie Finster, Phillip DeVille, Lillian DeVille, Angelica Pickles,\n'
'Susie Carmichael, Dil Pickles, Kimi Finster, Spike')],
['South Park', 'Stan Marsh, Kyle Broflovski, Eric Cartman, Kenny McCormick']
]
table = cls(table_data)
assert -10 == table.column_max_width(0)
assert 63 == table.column_max_width(1)
monkeypatch.setattr('terminaltables.base_table.terminal_size', lambda: (100, 24))
assert 10 == table.column_max_width(0)
assert 83 == table.column_max_width(1)
| {
"content_hash": "6ace01ca0f48ece5585ec659aefe2a85",
"timestamp": "",
"source": "github",
"line_count": 117,
"max_line_length": 114,
"avg_line_length": 32.00854700854701,
"alnum_prop": 0.6299065420560748,
"repo_name": "rtulke/terminaltables",
"id": "21363f3fb0cd1000b9671e38ec47c70bb3d6095c",
"size": "3745",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_max_width.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "73168"
}
],
"symlink_target": ""
} |
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = """
---
module: ce_facts
version_added: "2.4"
author: "wangdezhuang (@QijunPan)"
short_description: Gets facts about HUAWEI CloudEngine switches.
description:
- Collects facts from CloudEngine devices running the CloudEngine
operating system. Fact collection is supported over Cli
transport. This module prepends all of the base network fact keys
with C(ansible_net_<fact>). The facts module will always collect a
base set of facts from the device and can enable or disable
collection of additional facts.
options:
gather_subset:
description:
- When supplied, this argument will restrict the facts collected
to a given subset. Possible values for this argument include
all, hardware, config, and interfaces. Can specify a
list of values to include a larger subset. Values can also be used
with an initial C(M(!)) to specify that a specific subset should
not be collected.
required: false
default: '!config'
"""
EXAMPLES = """
# Note: examples below use the following provider dict to handle
# transport and authentication to the node.
- name: CloudEngine facts test
hosts: cloudengine
connection: local
gather_facts: no
vars:
cli:
host: "{{ inventory_hostname }}"
port: "{{ ansible_ssh_port }}"
username: "{{ username }}"
password: "{{ password }}"
transport: cli
tasks:
- name: "Gather_subset is all"
ce_facts:
gather_subset: all
provider: "{{ cli }}"
- name: "Collect only the config facts"
ce_facts:
gather_subset: config
provider: "{{ cli }}"
- name: "Do not collect hardware facts"
ce_facts:
gather_subset: "!hardware"
provider: "{{ cli }}"
"""
RETURN = """
gather_subset:
description: The list of fact subsets collected from the device
returned: always
type: list
# default
BIOS Version:
description: The BIOS version running on the remote device
returned: always
type: str
Board Type:
description: The board type of the remote device
returned: always
type: str
CPLD1 Version:
description: The CPLD1 Version running the remote device
returned: always
type: str
CPLD2 Version:
description: The CPLD2 Version running the remote device
returned: always
type: str
MAB Version:
description: The MAB Version running the remote device
returned: always
type: str
PCB Version:
description: The PCB Version running the remote device
returned: always
type: str
hostname:
description: The hostname of the remote device
returned: always
type: str
# hardware
FAN:
description: The fan state on the device
returned: when hardware is configured
type: str
PWR:
description: The power state on the device
returned: when hardware is configured
type: str
filesystems:
description: The filesystems on the device
returned: when hardware is configured
type: str
flash_free:
description: The flash free space on the device
returned: when hardware is configured
type: str
flash_total:
description: The flash total space on the device
returned: when hardware is configured
type: str
memory_free:
description: The memory free space on the remote device
returned: when hardware is configured
type: str
memory_total:
description: The memory total space on the remote device
returned: when hardware is configured
type: str
# config
config:
description: The current system configuration on the device
returned: when config is configured
type: str
# interfaces
all_ipv4_addresses:
description: All IPv4 addresses configured on the device
returned: when interfaces is configured
type: list
interfaces:
description: A hash of all interfaces running on the system
returned: when interfaces is configured
type: dict
neighbors:
description: The list of LLDP neighbors from the remote device
returned: when interfaces is configured
type: dict
"""
import re
from ansible.module_utils.network.cloudengine.ce import run_commands
from ansible.module_utils.network.cloudengine.ce import ce_argument_spec, check_args
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six import iteritems
class FactsBase(object):
COMMANDS = frozenset()
def __init__(self, module):
self.module = module
self.facts = dict()
self.responses = None
def populate(self):
self.responses = run_commands(self.module, list(self.COMMANDS))
class Default(FactsBase):
""" Class default """
COMMANDS = [
'display version',
'display current-configuration | include sysname'
]
def populate(self):
""" Populate method """
super(Default, self).populate()
data = self.responses[0]
if data:
version = data.split("\n")
tmp_version = version[11:]
for item in tmp_version:
tmp_item = item.split()
tmp_key = tmp_item[1] + " " + tmp_item[2]
self.facts[tmp_key] = tmp_item[4]
data = self.responses[1]
if data:
tmp_value = re.findall(r'sysname (.*)', data)
self.facts['hostname'] = tmp_value[0]
class Config(FactsBase):
""" Class config """
COMMANDS = [
'display current-configuration configuration system'
]
def populate(self):
""" Populate method """
super(Config, self).populate()
data = self.responses[0]
if data:
self.facts['config'] = data.split("\n")
class Hardware(FactsBase):
""" Class hardware """
COMMANDS = [
'dir',
'display memory',
'display device'
]
def populate(self):
""" Populate method """
super(Hardware, self).populate()
data = self.responses[0]
if data:
self.facts['filesystems'] = re.findall(r'^Directory of (.*)/', data)[0]
self.facts['flash_total'] = re.findall(r'(.*) total', data)[0].replace(",", "")
self.facts['flash_free'] = re.findall(r'total \((.*) free\)', data)[0].replace(",", "")
data = self.responses[1]
if data:
memory_total = re.findall(r'Total Memory Used: (.*) Kbytes', data)[0]
use_percent = re.findall(r'Memory Using Percentage: (.*)%', data)[0]
memory_free = str(int(memory_total) - int(memory_total) * int(use_percent) / 100)
self.facts['memory_total'] = memory_total + " Kb"
self.facts['memory_free'] = memory_free + " Kb"
data = self.responses[2]
if data:
device_info = data.split("\n")
tmp_device_info = device_info[4:-1]
for item in tmp_device_info:
tmp_item = item.split()
if len(tmp_item) == 8:
self.facts[tmp_item[2]] = tmp_item[6]
elif len(tmp_item) == 7:
self.facts[tmp_item[0]] = tmp_item[5]
class Interfaces(FactsBase):
""" Class interfaces """
COMMANDS = [
'display interface brief',
'display ip interface brief',
'display lldp neighbor brief'
]
def populate(self):
""" Populate method"""
interface_dict = dict()
ipv4_addr_dict = dict()
neighbors_dict = dict()
super(Interfaces, self).populate()
data = self.responses[0]
if data:
interface_info = data.split("\n")
tmp_interface = interface_info[12:]
for item in tmp_interface:
tmp_item = item.split()
interface_dict[tmp_item[0]] = tmp_item[1]
self.facts['interfaces'] = interface_dict
data = self.responses[1]
if data:
ipv4_addr = data.split("\n")
tmp_ipv4 = ipv4_addr[11:]
for item in tmp_ipv4:
tmp_item = item.split()
ipv4_addr_dict[tmp_item[0]] = tmp_item[1]
self.facts['all_ipv4_addresses'] = ipv4_addr_dict
data = self.responses[2]
if data:
neighbors = data.split("\n")
tmp_neighbors = neighbors[2:]
for item in tmp_neighbors:
tmp_item = item.split()
neighbors_dict[tmp_item[0]] = tmp_item[3]
self.facts['neighbors'] = neighbors_dict
FACT_SUBSETS = dict(
default=Default,
hardware=Hardware,
interfaces=Interfaces,
config=Config,
)
VALID_SUBSETS = frozenset(FACT_SUBSETS.keys())
def main():
""" Module main """
spec = dict(
gather_subset=dict(default=['!config'], type='list')
)
spec.update(ce_argument_spec)
module = AnsibleModule(argument_spec=spec, supports_check_mode=True)
warnings = list()
check_args(module, warnings)
gather_subset = module.params['gather_subset']
runable_subsets = set()
exclude_subsets = set()
for subset in gather_subset:
if subset == 'all':
runable_subsets.update(VALID_SUBSETS)
continue
if subset.startswith('!'):
subset = subset[1:]
if subset == 'all':
exclude_subsets.update(VALID_SUBSETS)
continue
exclude = True
else:
exclude = False
if subset not in VALID_SUBSETS:
module.fail_json(msg='Bad subset')
if exclude:
exclude_subsets.add(subset)
else:
runable_subsets.add(subset)
if not runable_subsets:
runable_subsets.update(VALID_SUBSETS)
runable_subsets.difference_update(exclude_subsets)
runable_subsets.add('default')
facts = dict()
facts['gather_subset'] = list(runable_subsets)
instances = list()
for key in runable_subsets:
instances.append(FACT_SUBSETS[key](module))
for inst in instances:
inst.populate()
facts.update(inst.facts)
ansible_facts = dict()
for key, value in iteritems(facts):
# this is to maintain compatibility with nxos_facts 2.1
if key.startswith('_'):
ansible_facts[key[1:]] = value
else:
ansible_facts[key] = value
module.exit_json(ansible_facts=ansible_facts, warnings=warnings)
if __name__ == '__main__':
main()
| {
"content_hash": "03536af0cfb8022c894a72ba02a54d53",
"timestamp": "",
"source": "github",
"line_count": 385,
"max_line_length": 99,
"avg_line_length": 27.106493506493507,
"alnum_prop": 0.6156573399770027,
"repo_name": "SergeyCherepanov/ansible",
"id": "ea8f15d96749886c06772491a872283d2dcfaf7d",
"size": "11111",
"binary": false,
"copies": "22",
"ref": "refs/heads/master",
"path": "ansible/ansible/modules/network/cloudengine/ce_facts.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Shell",
"bytes": "824"
}
],
"symlink_target": ""
} |
from collections import defaultdict
class Query(object):
'''A representation of the structure of a SQL query. Only the select_clause and
from_clause are required for a valid query.
'''
def __init__(self, select_clause, from_clause):
self.with_clause = None
self.select_clause = select_clause
self.from_clause = from_clause
self.where_clause = None
self.group_by_clause = None
self.having_clause = None
self.union_clause = None
@property
def table_exprs(self):
'''Provides a list of all table_exprs that are declared by this query. This
includes table_exprs in the WITH and FROM sections.
'''
table_exprs = self.from_clause.table_exprs
if self.with_clause:
table_exprs += self.with_clause.table_exprs
return table_exprs
class SelectClause(object):
'''This encapsulates the SELECT part of a query. It is convenient to separate
non-agg items from agg items so that it is simple to know if the query
is an agg query or not.
'''
def __init__(self, non_agg_items=None, agg_items=None):
self.non_agg_items = non_agg_items or list()
self.agg_items = agg_items or list()
self.distinct = False
@property
def select_items(self):
'''Provides a consolidated view of all select items.'''
return self.non_agg_items + self.agg_items
class SelectItem(object):
'''A representation of any possible expr that would be valid in
SELECT <SelectItem>[, <SelectItem>...] FROM ...
Each SelectItem contains a ValExpr which will either be an instance of a
DataType (representing a constant), a Column, or a Func.
Ex: "SELECT int_col + smallint_col FROM alltypes" would have a val_expr of
Plus(Column(<alltypes.int_col>), Column(<alltypes.smallint_col>)).
'''
def __init__(self, val_expr, alias=None):
self.val_expr = val_expr
self.alias = alias
@property
def type(self):
'''Returns the DataType of this item.'''
return self.val_expr.type
@property
def is_agg(self):
'''Evaluates to True if this item contains an aggregate expression.'''
return self.val_expr.is_agg
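# Illustrative sketch (not part of the original module): building the
# "SELECT int_col + smallint_col FROM alltypes" example from the SelectItem
# docstring with the classes defined below; the table and column names are
# illustrative only.
# alltypes = Table('alltypes')
# int_col = Column(alltypes, 'int_col', Int)
# smallint_col = Column(alltypes, 'smallint_col', SmallInt)
# item = SelectItem(Plus(int_col, smallint_col))
# query = Query(SelectClause(non_agg_items=[item]), FromClause(alltypes))
# item.type evaluates to Int because Plus upcasts to the wider operand type.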
class ValExpr(object):
'''This is an AbstractClass that represents a generic expr that results in a
scalar. The abc module was not used because it caused problems for the pickle
module.
'''
@property
def type(self):
'''This is declared for documentation purposes; subclasses should override this to
return the DataType that this expr represents.
'''
pass
@property
def base_type(self):
'''Return the most fundamental data type that the expr evaluates to. Only
numeric types will result in a different val than would be returned by self.type.
Ex:
if self.type == BigInt:
assert self.base_type == Int
if self.type == Double:
assert self.base_type == Float
if self.type == String:
assert self.base_type == self.type
'''
if self.returns_int:
return Int
if self.returns_float:
return Float
return self.type
@property
def is_func(self):
return isinstance(self, Func)
@property
def is_agg(self):
'''Evaluates to True if this expression contains an aggregate function.'''
if isinstance(self, AggFunc):
return True
if self.is_func:
for arg in self.args:
if arg.is_agg:
return True
@property
def is_col(self):
return isinstance(self, Column)
@property
def is_constant(self):
return isinstance(self, DataType)
@property
def returns_boolean(self):
return issubclass(self.type, Boolean)
@property
def returns_number(self):
return issubclass(self.type, Number)
@property
def returns_int(self):
return issubclass(self.type, Int)
@property
def returns_float(self):
return issubclass(self.type, Float)
@property
def returns_string(self):
return issubclass(self.type, String)
@property
def returns_timestamp(self):
return issubclass(self.type, Timestamp)
class Column(ValExpr):
'''A representation of a col. All TableExprs will have Columns. So a Column
may belong to an InlineView as well as a standard Table.
This class is used in two ways:
1) As a piece of metadata in a table definition. In this usage the col isn't
intended to represent a val.
2) As an expr in a query, for example an item being selected or as part of
a join condition. In this usage the col is more like a val, which is why
it implements/extends ValExpr.
'''
def __init__(self, owner, name, type_):
self.owner = owner
self.name = name
self._type = type_
@property
def type(self):
return self._type
def __hash__(self):
return hash(self.name)
def __eq__(self, other):
if not isinstance(other, Column):
return False
if self is other:
return True
return self.name == other.name and self.owner.identifier == other.owner.identifier
def __repr__(self):
return '%s<name: %s, type: %s>' % (
type(self).__name__, self.name, self._type.__name__)
class FromClause(object):
'''A representation of a FROM clause. The member variable join_clauses may optionally
contain JoinClause items.
'''
def __init__(self, table_expr, join_clauses=None):
self.table_expr = table_expr
self.join_clauses = join_clauses or list()
@property
def table_exprs(self):
'''Provides a list of all table_exprs that are declared within this FROM
block.
'''
table_exprs = [join_clause.table_expr for join_clause in self.join_clauses]
table_exprs.append(self.table_expr)
return table_exprs
class TableExpr(object):
'''This is an AbstractClass that represents something that a query may use to select
from or join on. The abc module was not used because it caused problems for the
pickle module.
'''
def identifier(self):
'''Returns either a table name or alias if one has been declared.'''
pass
def cols(self):
pass
@property
def cols_by_base_type(self):
'''Group cols by their basic data type and return a dict of the results.
As an example, a "BigInt" would be considered as an "Int".
'''
return DataType.group_by_base_type(self.cols)
@property
def is_table(self):
return isinstance(self, Table)
@property
def is_inline_view(self):
return isinstance(self, InlineView)
@property
def is_with_clause_inline_view(self):
return isinstance(self, WithClauseInlineView)
def __eq__(self, other):
if not isinstance(other, type(self)):
return False
return self.identifier == other.identifier
class Table(TableExpr):
'''Represents a standard database table.'''
def __init__(self, name):
self.name = name
self._cols = []
self.alias = None
@property
def identifier(self):
return self.alias or self.name
@property
def cols(self):
return self._cols
@cols.setter
def cols(self, cols):
self._cols = cols
class InlineView(TableExpr):
'''Represents an inline view.
Ex: In the query "SELECT * FROM (SELECT * FROM foo) AS bar",
"(SELECT * FROM foo) AS bar" would be an inline view.
'''
def __init__(self, query):
self.query = query
self.alias = None
@property
def identifier(self):
return self.alias
@property
def cols(self):
return [Column(self, item.alias, item.type) for item in
self.query.select_clause.non_agg_items + self.query.select_clause.agg_items]
class WithClause(object):
'''Represents a WITH clause.
Ex: In the query "WITH bar AS (SELECT * FROM foo) SELECT * FROM bar",
"WITH bar AS (SELECT * FROM foo)" would be the with clause.
'''
def __init__(self, with_clause_inline_views):
self.with_clause_inline_views = with_clause_inline_views
@property
def table_exprs(self):
return self.with_clause_inline_views
class WithClauseInlineView(InlineView):
'''Represents the entries in a WITH clause. These are very similar to InlineViews but
may have an additional alias.
Ex: WITH bar AS (SELECT * FROM foo)
SELECT *
FROM bar as r
JOIN (SELECT * FROM baz) AS z ON ...
The WithClauseInlineView has aliases "bar" and "r" while the InlineView has
only the alias "z".
'''
def __init__(self, query, with_clause_alias):
self.query = query
self.with_clause_alias = with_clause_alias
self.alias = None
@property
def identifier(self):
return self.alias or self.with_clause_alias
class JoinClause(object):
'''A representation of a JOIN clause.
Ex: SELECT * FROM foo <join_type> JOIN <table_expr> [ON <boolean_expr>]
The member variable boolean_expr will be an instance of a boolean func
defined below.
'''
JOINS_TYPES = ['INNER', 'LEFT', 'RIGHT', 'FULL OUTER', 'CROSS']
def __init__(self, join_type, table_expr, boolean_expr=None):
self.join_type = join_type
self.table_expr = table_expr
self.boolean_expr = boolean_expr
class WhereClause(object):
'''The member variable boolean_expr will be an instance of a boolean func
defined below.
'''
def __init__(self, boolean_expr):
self.boolean_expr = boolean_expr
class GroupByClause(object):
def __init__(self, select_items):
self.group_by_items = select_items
class HavingClause(object):
'''The member variable boolean_expr will be an instance of a boolean func
defined below.
'''
def __init__(self, boolean_expr):
self.boolean_expr = boolean_expr
class UnionClause(object):
'''A representation of a UNION clause.
If the member variable "all" is True, the instance represents a "UNION ALL".
'''
def __init__(self, query):
self.query = query
self.all = False
@property
def queries(self):
queries = list()
query = self.query
while True:
queries.append(query)
if not query.union_clause:
break
query = query.union_clause.query
return queries
class DataTypeMetaclass(type):
'''Provides sorting of classes used to determine upcasting.'''
def __cmp__(cls, other):
return cmp(
getattr(cls, 'CMP_VALUE', cls.__name__),
getattr(other, 'CMP_VALUE', other.__name__))
class DataType(ValExpr):
'''Base class for data types.
Data types are represented as classes so inheritence can be used.
'''
__metaclass__ = DataTypeMetaclass
def __init__(self, val):
self.val = val
@property
def type(self):
return type(self)
@staticmethod
def group_by_base_type(vals):
'''Group cols by their basic data type and return a dict of the results.
As an example, a "BigInt" would be considered as an "Int".
'''
vals_by_type = defaultdict(list)
for val in vals:
type_ = val.type
if issubclass(type_, Int):
type_ = Int
elif issubclass(type_, Float):
type_ = Float
vals_by_type[type_].append(val)
return vals_by_type
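# Illustrative sketch (not part of the original module): group_by_base_type
# folds the Int and Float subclasses defined below into their base types, e.g.
# DataType.group_by_base_type([TinyInt(1), BigInt(2), Double(0.5)]) returns a
# dict with the TinyInt and BigInt vals under the Int key and the Double val
# under the Float key.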
class Boolean(DataType):
pass
class Number(DataType):
pass
class Int(Number):
# Used to compare with other numbers for determining upcasting
CMP_VALUE = 2
# Used during data generation to keep vals in range
MIN = -2 ** 31
MAX = -MIN - 1
# Aliases used when reading and writing table definitions
POSTGRESQL = ['INTEGER']
class TinyInt(Int):
CMP_VALUE = 0
MIN = -2 ** 7
MAX = -MIN - 1
POSTGRESQL = ['SMALLINT']
class SmallInt(Int):
CMP_VALUE = 1
MIN = -2 ** 15
MAX = -MIN - 1
class BigInt(Int):
CMP_VALUE = 3
MIN = -2 ** 63
MAX = -MIN - 1
class Float(Number):
CMP_VALUE = 4
POSTGRESQL = ['REAL']
class Double(Float):
CMP_VALUE = 5
MYSQL = ['DOUBLE', 'DECIMAL'] # Use double by default but add decimal synonym
POSTGRESQL = ['DOUBLE PRECISION']
class String(DataType):
MIN = 0
# The Impala limit is 32,767 but MySQL has a row size limit of 65,535. To allow 3+
# String cols per table, the limit will be lowered to 1,000. That should be fine
# for testing anyhow.
MAX = 1000
MYSQL = ['VARCHAR(%s)' % MAX]
POSTGRESQL = MYSQL + ['CHARACTER VARYING']
class Timestamp(DataType):
MYSQL = ['DATETIME']
POSTGRESQL = ['TIMESTAMP WITHOUT TIME ZONE']
NUMBER_TYPES = [Int, TinyInt, SmallInt, BigInt, Float, Double]
TYPES = NUMBER_TYPES + [Boolean, String, Timestamp]
class Func(ValExpr):
'''Base class for funcs'''
def __init__(self, *args):
self.args = list(args)
def __hash__(self):
return hash(type(self)) + hash(tuple(self.args))
def __eq__(self, other):
if not isinstance(other, type(self)):
return False
if self is other:
return True
return self.args == other.args
class UnaryFunc(Func):
def __init__(self, arg):
Func.__init__(self, arg)
class BinaryFunc(Func):
def __init__(self, left, right):
Func.__init__(self, left, right)
@property
def left(self):
return self.args[0]
@left.setter
def left(self, left):
self.args[0] = left
@property
def right(self):
return self.args[1]
@right.setter
def right(self, right):
self.args[1] = right
class BooleanFunc(Func):
@property
def type(self):
return Boolean
class IntFunc(Func):
@property
def type(self):
return Int
class DoubleFunc(Func):
@property
def type(self):
return Double
class StringFunc(Func):
@property
def type(self):
return String
class UpcastingFunc(Func):
@property
def type(self):
return max(arg.type for arg in self.args)
class AggFunc(Func):
# Avoid having a self.distinct because it would need to be __init__'d explicitly,
# which none of the AggFunc subclasses do (ex: Avg doesn't have its
# own __init__).
@property
def distinct(self):
return getattr(self, '_distinct', False)
@distinct.setter
def distinct(self, val):
return setattr(self, '_distinct', val)
# The classes below diverge from above by including the SQL representation. It's a lot
# easier this way because there are a lot of funcs but they all have the same
# structure. Non-standard funcs, such as string concatenation, would need to have
# their representation information elsewhere (like classes above).
Parentheses = type('Parentheses', (UnaryFunc, UpcastingFunc), {'FORMAT': '({0})'})
IsNull = type('IsNull', (UnaryFunc, BooleanFunc), {'FORMAT': '{0} IS NULL'})
IsNotNull = type('IsNotNull', (UnaryFunc, BooleanFunc), {'FORMAT': '{0} IS NOT NULL'})
And = type('And', (BinaryFunc, BooleanFunc), {'FORMAT': '{0} AND {1}'})
Or = type('Or', (BinaryFunc, BooleanFunc), {'FORMAT': '{0} OR {1}'})
Equals = type('Equals', (BinaryFunc, BooleanFunc), {'FORMAT': '{0} = {1}'})
NotEquals = type('NotEquals', (BinaryFunc, BooleanFunc), {'FORMAT': '{0} != {1}'})
GreaterThan = type('GreaterThan', (BinaryFunc, BooleanFunc), {'FORMAT': '{0} > {1}'})
LessThan = type('LessThan', (BinaryFunc, BooleanFunc), {'FORMAT': '{0} < {1}'})
GreaterThanOrEquals = type(
'GreaterThanOrEquals', (BinaryFunc, BooleanFunc), {'FORMAT': '{0} >= {1}'})
LessThanOrEquals = type(
'LessThanOrEquals', (BinaryFunc, BooleanFunc), {'FORMAT': '{0} <= {1}'})
Plus = type('Plus', (BinaryFunc, UpcastingFunc), {'FORMAT': '{0} + {1}'})
Minus = type('Minus', (BinaryFunc, UpcastingFunc), {'FORMAT': '{0} - {1}'})
Multiply = type('Multiply', (BinaryFunc, UpcastingFunc), {'FORMAT': '{0} * {1}'})
Divide = type('Divide', (BinaryFunc, DoubleFunc), {'FORMAT': '{0} / {1}'})
Floor = type('Floor', (UnaryFunc, IntFunc), {'FORMAT': 'FLOOR({0})'})
Concat = type('Concat', (BinaryFunc, StringFunc), {'FORMAT': 'CONCAT({0}, {1})'})
Length = type('Length', (UnaryFunc, IntFunc), {'FORMAT': 'LENGTH({0})'})
ExtractYear = type(
'ExtractYear', (UnaryFunc, IntFunc), {'FORMAT': "EXTRACT('YEAR' FROM {0})"})
# Formatting of agg funcs is a little trickier since they may have a distinct
Avg = type('Avg', (UnaryFunc, DoubleFunc, AggFunc), {})
Count = type('Count', (UnaryFunc, IntFunc, AggFunc), {})
Max = type('Max', (UnaryFunc, UpcastingFunc, AggFunc), {})
Min = type('Min', (UnaryFunc, UpcastingFunc, AggFunc), {})
Sum = type('Sum', (UnaryFunc, UpcastingFunc, AggFunc), {})
UNARY_BOOLEAN_FUNCS = [IsNull, IsNotNull]
BINARY_BOOLEAN_FUNCS = [And, Or]
RELATIONAL_OPERATORS = [
Equals, NotEquals, GreaterThan, LessThan, GreaterThanOrEquals, LessThanOrEquals]
MATH_OPERATORS = [Plus, Minus, Multiply] # Leaving out Divide
BINARY_STRING_FUNCS = [Concat]
AGG_FUNCS = [Avg, Count, Max, Min, Sum]
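# Illustrative sketch (not part of the original module): the generated classes
# above behave like the hand-written Funcs. For example, GreaterThan(left, right)
# is a BinaryFunc whose .type is Boolean and whose FORMAT is '{0} > {1}', while
# Sum(some_col) is an AggFunc whose .type is simply its argument's type via
# UpcastingFunc. The argument names here are placeholders.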
class If(Func):
FORMAT = 'CASE WHEN {0} THEN {1} ELSE {2} END'
def __init__(self, boolean_expr, consequent_expr, alternative_expr):
Func.__init__(
self, boolean_expr, consequent_expr, alternative_expr)
@property
def boolean_expr(self):
return self.args[0]
@property
def consequent_expr(self):
return self.args[1]
@property
def alternative_expr(self):
return self.args[2]
@property
def type(self):
return max(self.consequent_expr.type, self.alternative_expr.type)
class Greatest(BinaryFunc, UpcastingFunc, If):
def __init__(self, left, right):
BinaryFunc.__init__(self, left, right)
If.__init__(self, GreaterThan(left, right), left, right)
@property
def type(self):
return UpcastingFunc.type.fget(self)
class Cast(Func):
FORMAT = 'CAST({0} AS {1})'
def __init__(self, val_expr, resulting_type):
if resulting_type not in TYPES:
raise Exception('Unexpected type: {0}'.format(resulting_type))
Func.__init__(self, val_expr, resulting_type)
@property
def val_expr(self):
return self.args[0]
@property
def resulting_type(self):
return self.args[1]
@property
def type(self):
return self.resulting_type
| {
"content_hash": "c0af438c8c481d2685befd5debfd62a4",
"timestamp": "",
"source": "github",
"line_count": 727,
"max_line_length": 88,
"avg_line_length": 24.382393397524073,
"alnum_prop": 0.6555342434841476,
"repo_name": "sql-zuiwanyuan/Impala",
"id": "d1ead7a32f4b506d1a36d6711a4146e749a31f99",
"size": "18326",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "tests/comparison/model.py",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
from django.db import models
from django.conf import settings
class Event(models.Model):
who = models.ForeignKey(settings.AUTH_USER_MODEL, null=True, blank=True)
recorded_time = models.DateTimeField(auto_now_add=True)
when = models.DateTimeField(null=True, blank=True)
category = models.CharField(max_length=64, blank=True, null=True)
action = models.CharField(max_length=64, blank=True, null=True)
label = models.CharField(max_length=64, blank=True, null=True)
what = models.CharField(max_length=255, blank=True, null=True)
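# Illustrative sketch (not part of the original module): recording an event
# from a view, assuming access to request.user; the field values below are
# hypothetical.
# Event.objects.create(who=request.user, category='navigation',
#                      action='click', label='sidebar', what='/dashboard/')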
| {
"content_hash": "66c5617f81eff0456572be471f2ca061",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 76,
"avg_line_length": 42.76923076923077,
"alnum_prop": 0.7392086330935251,
"repo_name": "alexlovelltroy/django-telemetry",
"id": "a660bec95c3c5b375cbaa7ee92acd64774b91d2e",
"size": "556",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "telemetry/events/models.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1541"
},
{
"name": "JavaScript",
"bytes": "127353"
},
{
"name": "Python",
"bytes": "22488"
}
],
"symlink_target": ""
} |
from bs4 import BeautifulSoup
from argparse import ArgumentParser
from os import path
from zipfile import ZipFile, ZIP_DEFLATED
import hashlib
import sys
TEMPLATE_FILE = path.join(path.dirname(path.realpath(__file__)), 'templates', 'addon.xml')
AVD_PATH = path.expanduser('~/.android/avd')
if sys.platform == 'darwin':
SDK_PATH = path.expanduser('~/android-sdk-macosx')
elif sys.platform == 'win32':
SDK_PATH = path.abspath(r'C:\android-sdk-windows')
else: # we're going to assume that anything else is linux
SDK_PATH = path.expanduser('~/android-sdk-linux')
class ConfigFile(dict):
'''
Parses configuration files. Initialize class with a file object
'''
def __init__(self, file_handle):
for line in file_handle:
line = line.strip()
if len(line) == 0 or line[0] == '#':
continue
key, value = line.split('=')
self[key] = value
def write(self, file_handle):
'''
write contents of ConfigFile to file_handle
'''
for k in self.keys():
file_handle.write(k + '=' + self[k] + '\n')
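# Illustrative sketch (not part of the original script): parsing a manifest
# like the one passed via --config below. The keys shown are the ones this
# script looks up later (name, vendor, revision, api, description); the values
# are hypothetical.
# cf = ConfigFile(open('manifest.ini'))
# cf['vendor'], cf['name'], cf['revision'] -> ('ExampleVendor', 'MyAVD', '1')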
if __name__ == '__main__':
parser = ArgumentParser(description='Create an installable AVD package from an existing AVD')
parser.add_argument('base_avd', metavar='AVD', nargs=1, help='AVD to base package off of')
parser.add_argument('-c', '--config', metavar='FILE', action='store', type=str, required=True, dest='config_file', help='configuration file')
parser.add_argument('-o', '--output', metavar='PATH', action='store', type=str, dest='output_path', default='.', help="generate output in PATH (default is '.')")
parser.add_argument('-s', '--system', metavar='PATH', action='store', type=str, dest='system', default=False, help="path to a custom system.img file")
parser.add_argument('--avd_path', metavar='PATH', action='store', type=str, default=AVD_PATH, dest='avd_path',
help="android avd path (default is '~/.android/avd')")
parser.add_argument('--sdk_path', metavar='PATH', action='store', type=str, default=SDK_PATH, dest='sdk_path',
help="android sdk path (default is '~/android-sdk-linux')")
args = parser.parse_args()
base_avd = args.base_avd[0]
avd_path = args.avd_path
sdk_path = path.expanduser(args.sdk_path)
print ' [+] Building a package from AVD: ' + base_avd
#load config file
cf = ConfigFile(open(args.config_file))
#create zip
filename = path.join(args.output_path, cf['vendor'] + '_' + cf['name'] + '_' + cf['revision'] + '.zip')
zf = ZipFile(filename, 'w', ZIP_DEFLATED)
#add config file as manifest to zip
print ' [+] Writing manifest'
zf.write(args.config_file, path.join(filename, 'manifest.ini'))
#load image paths
base_avd_conf = ConfigFile(open(path.join(avd_path, base_avd + '.avd', 'config.ini')))
user_image_path = path.join(avd_path, base_avd + '.avd')
sdk_image_paths = [path.join(sdk_path, base_avd_conf[k]) for k in sorted(base_avd_conf.keys()) if k.startswith('image.sysdir') if path.exists(path.join(sdk_path, base_avd_conf[k]))]
#add image files to zf
print ' [+] Writing userdata.img'
if path.exists(path.join(user_image_path, 'userdata-qemu.img')):
zf.write(path.join(user_image_path, 'userdata-qemu.img'), path.join(filename, 'images/userdata.img'))
else:
zf.write(path.join(user_image_path, 'userdata-qemu.img'), path.join(filename, 'images/userdata-qemu.img'))
print ' [+] Writing system.img'
if args.system:
zf.write(path.join(sdk_image_paths[0], 'system.img'), path.join(filename, args.system))
else:
zf.write(path.join(sdk_image_paths[0], 'system.img'), path.join(filename, 'images/system.img'))
print ' [+] Writing ramdisk.img'
zf.write(path.join(sdk_image_paths[0], 'ramdisk.img'), path.join(filename, 'images/ramdisk.img'))
print ' [+] Deflating zip'
zf.close()
#compute checksum of zip file
print ' [+] Computing checksum'
sha1 = hashlib.sha1()
with open(filename, 'rb') as f:
for chunk in iter(lambda: f.read(8192), b''):
sha1.update(chunk)
#create addons.xml
print ' [+] Writing repository information'
soup = BeautifulSoup(open(TEMPLATE_FILE))
name = soup.find('sdk:name')
name.string = cf['name']
level = soup.find('sdk:api-level')
level.string = cf['api']
vendor = soup.find('sdk:vendor')
vendor.string = cf['vendor']
revision = soup.find('sdk:revision')
revision.string = cf['revision']
desc = soup.find('sdk:description')
desc.string = cf['description']
size = soup.find('sdk:size')
size.string = str(path.getsize(filename))
checksum = soup.find('sdk:checksum')
checksum.string = sha1.hexdigest()
url = soup.find('sdk:url')
url.string = filename
with open(path.join(args.output_path, 'addon.xml'), 'w') as f:
f.write(str(soup))
| {
"content_hash": "388459a931ef070386784c44ae3c0d66",
"timestamp": "",
"source": "github",
"line_count": 120,
"max_line_length": 185,
"avg_line_length": 41.575,
"alnum_prop": 0.6269793545800761,
"repo_name": "SecurityCompass/AVDClone",
"id": "94c787dd2ba2471fa4c58032ce4edc474f1ea2c1",
"size": "4989",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "AVDClone.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "4989"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='RegistrationProfile',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('activation_key', models.CharField(max_length=40, verbose_name='activation key')),
('user', models.ForeignKey(verbose_name='user', to=settings.AUTH_USER_MODEL, unique=True)),
],
options={
'verbose_name': 'registration profile',
'verbose_name_plural': 'registration profiles',
},
),
]
| {
"content_hash": "17091aae6bf5252422d1ae5d389c91fa",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 114,
"avg_line_length": 33.69230769230769,
"alnum_prop": 0.6004566210045662,
"repo_name": "torchbox/django-registration",
"id": "9253e3c6359d45f17bc416bd6665a4ac4f962b10",
"size": "900",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "registration/migrations/0001_initial.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "71682"
}
],
"symlink_target": ""
} |
from nose.tools import * # flake8: noqa
from website.models import Node
from tests.base import ApiTestCase
from api.base.settings.defaults import API_BASE
from tests.factories import UserFactory, ProjectFactory, FolderFactory, DashboardFactory
class TestUsers(ApiTestCase):
def setUp(self):
super(TestUsers, self).setUp()
self.user_one = UserFactory.build()
self.user_one.save()
self.user_two = UserFactory.build()
self.user_two.save()
def tearDown(self):
super(TestUsers, self).tearDown()
def test_returns_200(self):
res = self.app.get('/{}users/'.format(API_BASE))
assert_equal(res.status_code, 200)
def test_find_user_in_users(self):
url = "/{}users/".format(API_BASE)
res = self.app.get(url)
        user_json = res.json['data']
        ids = [each['id'] for each in user_json]
assert_in(self.user_two._id, ids)
def test_all_users_in_users(self):
url = "/{}users/".format(API_BASE)
res = self.app.get(url)
        user_json = res.json['data']
        ids = [each['id'] for each in user_json]
assert_in(self.user_one._id, ids)
assert_in(self.user_two._id, ids)
def test_find_multiple_in_users(self):
url = "/{}users/?filter[fullname]=fred".format(API_BASE)
res = self.app.get(url)
user_json = res.json['data']
ids = [each['id'] for each in user_json]
assert_in(self.user_one._id, ids)
assert_in(self.user_two._id, ids)
def test_find_single_user_in_users(self):
url = "/{}users/?filter[fullname]=my".format(API_BASE)
self.user_one.fullname = 'My Mom'
self.user_one.save()
res = self.app.get(url)
user_json = res.json['data']
ids = [each['id'] for each in user_json]
assert_in(self.user_one._id, ids)
assert_not_in(self.user_two._id, ids)
def test_find_no_user_in_users(self):
url = "/{}users/?filter[fullname]=NotMyMom".format(API_BASE)
res = self.app.get(url)
user_json = res.json['data']
ids = [each['id'] for each in user_json]
assert_not_in(self.user_one._id, ids)
assert_not_in(self.user_two._id, ids)
class TestUserDetail(ApiTestCase):
def setUp(self):
super(TestUserDetail, self).setUp()
self.user_one = UserFactory.build()
self.user_one.set_password('justapoorboy')
self.user_one.social['twitter'] = 'howtopizza'
self.user_one.save()
self.auth_one = (self.user_one.username, 'justapoorboy')
self.user_two = UserFactory.build()
self.user_two.set_password('justapoorboy')
self.user_two.save()
self.auth_two = (self.user_two.username, 'justapoorboy')
def tearDown(self):
super(TestUserDetail, self).tearDown()
def test_gets_200(self):
url = "/{}users/{}/".format(API_BASE, self.user_one._id)
res = self.app.get(url)
assert_equal(res.status_code, 200)
def test_get_correct_pk_user(self):
url = "/{}users/{}/".format(API_BASE, self.user_one._id)
res = self.app.get(url)
user_json = res.json['data']
assert_equal(user_json['fullname'], self.user_one.fullname)
assert_equal(user_json['social_accounts']['twitter'], 'howtopizza')
    def test_get_incorrect_pk_user_not_logged_in(self):
url = "/{}users/{}/".format(API_BASE, self.user_two._id)
res = self.app.get(url)
user_json = res.json['data']
assert_not_equal(user_json['fullname'], self.user_one.fullname)
    def test_get_incorrect_pk_user_logged_in(self):
url = "/{}users/{}/".format(API_BASE, self.user_two._id)
res = self.app.get(url, auth=self.auth_one)
user_json = res.json['data']
assert_not_equal(user_json['fullname'], self.user_one.fullname)
assert_equal(user_json['fullname'], self.user_two.fullname)
class TestUserNodes(ApiTestCase):
def setUp(self):
super(TestUserNodes, self).setUp()
self.user_one = UserFactory.build()
self.user_one.set_password('justapoorboy')
self.user_one.social['twitter'] = 'howtopizza'
self.user_one.save()
self.auth_one = (self.user_one.username, 'justapoorboy')
self.user_two = UserFactory.build()
self.user_two.set_password('justapoorboy')
self.user_two.save()
self.auth_two = (self.user_two.username, 'justapoorboy')
self.public_project_user_one = ProjectFactory(title="Public Project User One",
is_public=True,
creator=self.user_one)
self.private_project_user_one = ProjectFactory(title="Private Project User One",
is_public=False,
creator=self.user_one)
self.public_project_user_two = ProjectFactory(title="Public Project User Two",
is_public=True,
creator=self.user_two)
self.private_project_user_two = ProjectFactory(title="Private Project User Two",
is_public=False,
creator=self.user_two)
self.deleted_project_user_one = FolderFactory(title="Deleted Project User One",
is_public=False,
creator=self.user_one,
is_deleted=True)
self.folder = FolderFactory()
self.deleted_folder = FolderFactory(title="Deleted Folder User One",
is_public=False,
creator=self.user_one,
is_deleted=True)
self.dashboard = DashboardFactory()
def tearDown(self):
super(TestUserNodes, self).tearDown()
def test_authorized_in_gets_200(self):
url = "/{}users/{}/nodes/".format(API_BASE, self.user_one._id)
res = self.app.get(url, auth=self.auth_one)
assert_equal(res.status_code, 200)
def test_anonymous_gets_200(self):
url = "/{}users/{}/nodes/".format(API_BASE, self.user_one._id)
res = self.app.get(url)
assert_equal(res.status_code, 200)
def test_get_projects_logged_in(self):
url = "/{}users/{}/nodes/".format(API_BASE, self.user_one._id)
res = self.app.get(url, auth=self.auth_one)
node_json = res.json['data']
ids = [each['id'] for each in node_json]
assert_in(self.public_project_user_one._id, ids)
assert_in(self.private_project_user_one._id, ids)
assert_not_in(self.public_project_user_two._id, ids)
assert_not_in(self.private_project_user_two._id, ids)
assert_not_in(self.folder._id, ids)
assert_not_in(self.deleted_folder._id, ids)
assert_not_in(self.deleted_project_user_one._id, ids)
def test_get_projects_not_logged_in(self):
url = "/{}users/{}/nodes/".format(API_BASE, self.user_one._id)
res = self.app.get(url)
node_json = res.json['data']
ids = [each['id'] for each in node_json]
assert_in(self.public_project_user_one._id, ids)
assert_not_in(self.private_project_user_one._id, ids)
assert_not_in(self.public_project_user_two._id, ids)
assert_not_in(self.private_project_user_two._id, ids)
assert_not_in(self.folder._id, ids)
assert_not_in(self.deleted_project_user_one._id, ids)
def test_get_projects_logged_in_as_different_user(self):
url = "/{}users/{}/nodes/".format(API_BASE, self.user_two._id)
res = self.app.get(url, auth=self.auth_one)
node_json = res.json['data']
ids = [each['id'] for each in node_json]
assert_in(self.public_project_user_two._id, ids)
assert_not_in(self.public_project_user_one._id, ids)
assert_not_in(self.private_project_user_one._id, ids)
assert_not_in(self.private_project_user_two._id, ids)
assert_not_in(self.folder._id, ids)
assert_not_in(self.deleted_project_user_one._id, ids)
class TestUserRoutesNodeRoutes(ApiTestCase):
def setUp(self):
super(TestUserRoutesNodeRoutes, self).setUp()
self.user_one = UserFactory.build()
self.user_one.set_password('justapoorboy')
self.user_one.social['twitter'] = 'howtopizza'
self.user_one.save()
self.auth_one = (self.user_one.username, 'justapoorboy')
self.user_two = UserFactory.build()
self.user_two.set_password('justapoorboy')
self.user_two.save()
self.auth_two = (self.user_two.username, 'justapoorboy')
self.public_project_user_one = ProjectFactory(title="Public Project User One", is_public=True, creator=self.user_one)
self.private_project_user_one = ProjectFactory(title="Private Project User One", is_public=False, creator=self.user_one)
self.public_project_user_two = ProjectFactory(title="Public Project User Two", is_public=True, creator=self.user_two)
self.private_project_user_two = ProjectFactory(title="Private Project User Two", is_public=False, creator=self.user_two)
self.deleted_project_user_one = FolderFactory(title="Deleted Project User One", is_public=False, creator=self.user_one, is_deleted=True)
self.folder = FolderFactory()
self.deleted_folder = FolderFactory(title="Deleted Folder User One", is_public=False, creator=self.user_one, is_deleted=True)
self.dashboard = DashboardFactory()
def tearDown(self):
super(TestUserRoutesNodeRoutes, self).tearDown()
Node.remove()
def test_path_Users_User_id_Nodes_user_not_logged_in(self): #~WORK
url = "/{}users/{}/nodes/".format(API_BASE, self.user_one._id)
res = self.app.get(url)
node_json = res.json['data']
ids = [each['id'] for each in node_json]
assert_in(self.public_project_user_one._id, ids)
assert_not_in(self.private_project_user_one._id, ids)
assert_not_in(self.public_project_user_two._id, ids)
assert_not_in(self.private_project_user_two._id, ids)
assert_not_in(self.folder._id, ids)
assert_not_in(self.deleted_project_user_one._id, ids)
def test_get_404_path_users_user_id_user_not_logged_in(self):
url = "/{}users/{}/".format(API_BASE, self.private_project_user_two._id)
res = self.app.get(url, auth=self.auth_one, expect_errors=True)
assert_equal(res.status_code, 404)
def test_get_404_path_nodes_me_user_logged_in(self):
url = "/{}nodes/me/".format(API_BASE, self.user_one._id)
res = self.app.get(url, auth=self.auth_one, expect_errors=True)
assert_equal(res.status_code, 404)
def test_get_200_path_users_user_id_user_logged_in(self):
url = "/{}users/{}/".format(API_BASE, self.user_one._id)
res = self.app.get(url, auth=self.auth_one)
assert_equal(res.status_code, 200)
def test_get_200_path_users_me_user_logged_in(self):
url = "/{}users/me/".format(API_BASE, self.user_one._id)
res = self.app.get(url, auth=self.auth_one)
assert_equal(res.status_code, 200)
def test_path_users_me_nodes_user_logged_in(self):
url = "/{}users/me/nodes/".format(API_BASE, self.user_one._id)
res = self.app.get(url, auth=self.auth_one)
assert_equal(res.status_code, 200)
ids = {each['id'] for each in res.json['data']}
assert_in(self.public_project_user_one._id, ids)
assert_in(self.private_project_user_one._id, ids)
assert_not_in(self.public_project_user_two._id, ids)
assert_not_in(self.private_project_user_two._id, ids)
assert_not_in(self.folder._id, ids)
assert_not_in(self.deleted_folder._id, ids)
assert_not_in(self.deleted_project_user_one._id, ids)
def test_path_users_user_id_nodes_user_logged_in(self):
url = "/{}users/{}/nodes/".format(API_BASE, self.user_one._id)
res = self.app.get(url, auth=self.auth_one)
assert_equal(res.status_code, 200)
node_json = res.json['data']
ids = [each['id'] for each in node_json]
assert_in(self.public_project_user_one._id, ids)
assert_in(self.private_project_user_one._id, ids)
assert_not_in(self.public_project_user_two._id, ids)
assert_not_in(self.private_project_user_two._id, ids)
assert_not_in(self.folder._id, ids)
assert_not_in(self.deleted_folder._id, ids)
assert_not_in(self.deleted_project_user_one._id, ids)
def test_get_404_path_users_user_id_me_user_logged_in(self):
url = "/{}users/{}/me/".format(API_BASE, self.user_one._id)
res = self.app.get(url, auth=self.auth_one, expect_errors=True)
assert_equal(res.status_code, 404)
def test_get_404_path_users_user_id_me_user_not_logged_in(self):
url = "/{}users/{}/me/".format(API_BASE, self.user_one._id)
res = self.app.get(url, auth=self.auth_two, expect_errors=True)
assert_equal(res.status_code, 404)
def test_get_404_path_users_user_id_nodes_me_user_logged_in(self):
url = "/{}users/{}/nodes/me/".format(API_BASE, self.user_one._id)
res = self.app.get(url, auth=self.auth_one, expect_errors=True)
assert_equal(res.status_code, 404)
def test_get_404_path_users_user_id_nodes_me_user_not_logged_in(self):
url = "/{}users/{}/nodes/me/".format(API_BASE, self.user_one._id)
res = self.app.get(url, auth=self.auth_two, expect_errors=True)
assert_equal(res.status_code, 404)
def test_get_404_path_nodes_user_id_user_logged_in(self):
url = "/{}nodes/{}/".format(API_BASE, self.user_one._id)
res = self.app.get(url, auth=self.auth_one, expect_errors=True)
assert_equal(res.status_code, 404)
def test_get_404_path_nodes_user_id_user_not_logged_in(self):
url = "/{}nodes/{}/".format(API_BASE, self.user_one._id)
res = self.app.get(url, auth=self.auth_two, expect_errors=True)
assert_equal(res.status_code, 404)
def test_get_404_path_nodes_me_user_not_logged_in(self):
url = "/{}nodes/me/".format(API_BASE, self.user_one._id)
res = self.app.get(url, auth=self.auth_two, expect_errors=True)
assert_equal(res.status_code, 404)
def test_get_404_path_users_me_no_user(self):
url = "/users/me/".format(API_BASE)
res = self.app.get(url, expect_errors=True)
assert_equal(res.status_code, 404)
def test_get_400_path_nodes_me_no_user(self):
url = "/nodes/me/".format(API_BASE)
res = self.app.get(url, auth=self.auth_one, expect_errors=True)
assert_equal(res.status_code, 404)
| {
"content_hash": "7531ede793782af352dc00c3655347bb",
"timestamp": "",
"source": "github",
"line_count": 337,
"max_line_length": 144,
"avg_line_length": 44.596439169139465,
"alnum_prop": 0.6011045312396034,
"repo_name": "cldershem/osf.io",
"id": "ab1fe3bef7ed9a30b3baac2214ee52316938c012",
"size": "15053",
"binary": false,
"copies": "4",
"ref": "refs/heads/develop",
"path": "tests/api_tests/users/test_views.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "113099"
},
{
"name": "HTML",
"bytes": "31536"
},
{
"name": "JavaScript",
"bytes": "1086775"
},
{
"name": "Mako",
"bytes": "499971"
},
{
"name": "Python",
"bytes": "3074126"
},
{
"name": "Shell",
"bytes": "1735"
}
],
"symlink_target": ""
} |
""" common utilities """
import itertools
import numpy as np
from pandas import (
DataFrame,
MultiIndex,
Series,
date_range,
)
import pandas._testing as tm
from pandas.core.api import (
Float64Index,
UInt64Index,
)
def _mklbl(prefix, n):
return [f"{prefix}{i}" for i in range(n)]
def _axify(obj, key, axis):
# create a tuple accessor
axes = [slice(None)] * obj.ndim
axes[axis] = key
return tuple(axes)
class Base:
"""indexing comprehensive base class"""
_kinds = {"series", "frame"}
_typs = {
"ints",
"uints",
"labels",
"mixed",
"ts",
"floats",
"empty",
"ts_rev",
"multi",
}
def setup_method(self, method):
self.series_ints = Series(np.random.rand(4), index=np.arange(0, 8, 2))
self.frame_ints = DataFrame(
np.random.randn(4, 4), index=np.arange(0, 8, 2), columns=np.arange(0, 12, 3)
)
self.series_uints = Series(
np.random.rand(4), index=UInt64Index(np.arange(0, 8, 2))
)
self.frame_uints = DataFrame(
np.random.randn(4, 4),
index=UInt64Index(range(0, 8, 2)),
columns=UInt64Index(range(0, 12, 3)),
)
self.series_floats = Series(
np.random.rand(4), index=Float64Index(range(0, 8, 2))
)
self.frame_floats = DataFrame(
np.random.randn(4, 4),
index=Float64Index(range(0, 8, 2)),
columns=Float64Index(range(0, 12, 3)),
)
m_idces = [
MultiIndex.from_product([[1, 2], [3, 4]]),
MultiIndex.from_product([[5, 6], [7, 8]]),
MultiIndex.from_product([[9, 10], [11, 12]]),
]
self.series_multi = Series(np.random.rand(4), index=m_idces[0])
self.frame_multi = DataFrame(
np.random.randn(4, 4), index=m_idces[0], columns=m_idces[1]
)
self.series_labels = Series(np.random.randn(4), index=list("abcd"))
self.frame_labels = DataFrame(
np.random.randn(4, 4), index=list("abcd"), columns=list("ABCD")
)
self.series_mixed = Series(np.random.randn(4), index=[2, 4, "null", 8])
self.frame_mixed = DataFrame(np.random.randn(4, 4), index=[2, 4, "null", 8])
self.series_ts = Series(
np.random.randn(4), index=date_range("20130101", periods=4)
)
self.frame_ts = DataFrame(
np.random.randn(4, 4), index=date_range("20130101", periods=4)
)
dates_rev = date_range("20130101", periods=4).sort_values(ascending=False)
self.series_ts_rev = Series(np.random.randn(4), index=dates_rev)
self.frame_ts_rev = DataFrame(np.random.randn(4, 4), index=dates_rev)
self.frame_empty = DataFrame()
self.series_empty = Series(dtype=object)
# form agglomerates
for kind in self._kinds:
d = {}
for typ in self._typs:
d[typ] = getattr(self, f"{kind}_{typ}")
setattr(self, kind, d)
def generate_indices(self, f, values=False):
"""
        Generate the indices.
        If values is True, use the axis values;
        if False, use the range.
"""
axes = f.axes
if values:
axes = (list(range(len(ax))) for ax in axes)
return itertools.product(*axes)
def get_value(self, name, f, i, values=False):
"""return the value for the location i"""
# check against values
if values:
return f.values[i]
elif name == "iat":
return f.iloc[i]
else:
assert name == "at"
return f.loc[i]
def check_values(self, f, func, values=False):
if f is None:
return
axes = f.axes
indices = itertools.product(*axes)
for i in indices:
result = getattr(f, func)[i]
# check against values
if values:
expected = f.values[i]
else:
expected = f
for a in reversed(i):
expected = expected.__getitem__(a)
tm.assert_almost_equal(result, expected)
def check_result(self, method, key, typs=None, axes=None, fails=None):
def _eq(axis, obj, key):
"""compare equal for these 2 keys"""
axified = _axify(obj, key, axis)
try:
getattr(obj, method).__getitem__(axified)
except (IndexError, TypeError, KeyError) as detail:
# if we are in fails, the ok, otherwise raise it
if fails is not None:
if isinstance(detail, fails):
return
raise
if typs is None:
typs = self._typs
if axes is None:
axes = [0, 1]
else:
assert axes in [0, 1]
axes = [axes]
# check
for kind in self._kinds:
d = getattr(self, kind)
for ax in axes:
for typ in typs:
assert typ in self._typs
obj = d[typ]
if ax < obj.ndim:
_eq(axis=ax, obj=obj, key=key)
| {
"content_hash": "1c786eea921178c1c3a109e19f7e37c8",
"timestamp": "",
"source": "github",
"line_count": 190,
"max_line_length": 88,
"avg_line_length": 27.88421052631579,
"alnum_prop": 0.5096262740656852,
"repo_name": "dsm054/pandas",
"id": "f8db005583bd8b177f252b898e0a52f7fde893a4",
"size": "5298",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "pandas/tests/indexing/common.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "4912"
},
{
"name": "C",
"bytes": "405762"
},
{
"name": "C++",
"bytes": "17194"
},
{
"name": "HTML",
"bytes": "551714"
},
{
"name": "Makefile",
"bytes": "574"
},
{
"name": "Python",
"bytes": "14362074"
},
{
"name": "Shell",
"bytes": "29904"
},
{
"name": "Smarty",
"bytes": "2069"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import
import inspect
import os
import sys
import lit.Test
import lit.formats
import lit.TestingConfig
import lit.util
# LitConfig must be a new style class for properties to work
class LitConfig(object):
"""LitConfig - Configuration data for a 'lit' test runner instance, shared
across all tests.
The LitConfig object is also used to communicate with client configuration
files, it is always passed in as the global variable 'lit' so that
configuration files can access common functionality and internal components
easily.
"""
def __init__(self, progname, path, quiet,
useValgrind, valgrindLeakCheck, valgrindArgs,
noExecute, debug, isWindows, singleProcess,
params, config_prefix = None,
maxIndividualTestTime = 0,
maxFailures = None,
parallelism_groups = {},
echo_all_commands = False):
# The name of the test runner.
self.progname = progname
# The items to add to the PATH environment variable.
self.path = [str(p) for p in path]
self.quiet = bool(quiet)
self.useValgrind = bool(useValgrind)
self.valgrindLeakCheck = bool(valgrindLeakCheck)
self.valgrindUserArgs = list(valgrindArgs)
self.noExecute = noExecute
self.debug = debug
self.singleProcess = singleProcess
self.isWindows = bool(isWindows)
self.params = dict(params)
self.bashPath = None
# Configuration files to look for when discovering test suites.
self.config_prefix = config_prefix or 'lit'
self.suffixes = ['cfg.py', 'cfg']
self.config_names = ['%s.%s' % (self.config_prefix,x) for x in self.suffixes]
self.site_config_names = ['%s.site.%s' % (self.config_prefix,x) for x in self.suffixes]
self.local_config_names = ['%s.local.%s' % (self.config_prefix,x) for x in self.suffixes]
self.numErrors = 0
self.numWarnings = 0
self.valgrindArgs = []
if self.useValgrind:
self.valgrindArgs = ['valgrind', '-q', '--run-libc-freeres=no',
'--tool=memcheck', '--trace-children=yes',
'--error-exitcode=123']
if self.valgrindLeakCheck:
self.valgrindArgs.append('--leak-check=full')
else:
# The default is 'summary'.
self.valgrindArgs.append('--leak-check=no')
self.valgrindArgs.extend(self.valgrindUserArgs)
self.maxIndividualTestTime = maxIndividualTestTime
self.maxFailures = maxFailures
self.parallelism_groups = parallelism_groups
self.echo_all_commands = echo_all_commands
@property
def maxIndividualTestTime(self):
"""
Interface for getting maximum time to spend executing
a single test
"""
return self._maxIndividualTestTime
@maxIndividualTestTime.setter
def maxIndividualTestTime(self, value):
"""
Interface for setting maximum time to spend executing
a single test
"""
if not isinstance(value, int):
            self.fatal('maxIndividualTestTime must be set to a value of type int.')
self._maxIndividualTestTime = value
if self.maxIndividualTestTime > 0:
# The current implementation needs psutil to set
# a timeout per test. Check it's available.
# See lit.util.killProcessAndChildren()
try:
import psutil # noqa: F401
except ImportError:
self.fatal("Setting a timeout per test requires the"
" Python psutil module but it could not be"
" found. Try installing it via pip or via"
" your operating system's package manager.")
elif self.maxIndividualTestTime < 0:
self.fatal('The timeout per test must be >= 0 seconds')
def load_config(self, config, path):
"""load_config(config, path) - Load a config object from an alternate
path."""
if self.debug:
self.note('load_config from %r' % path)
config.load_from_path(path, self)
return config
def getBashPath(self):
"""getBashPath - Get the path to 'bash'"""
if self.bashPath is not None:
return self.bashPath
self.bashPath = lit.util.which('bash', os.pathsep.join(self.path))
if self.bashPath is None:
self.bashPath = lit.util.which('bash')
if self.bashPath is None:
self.bashPath = ''
return self.bashPath
def getToolsPath(self, dir, paths, tools):
if dir is not None and os.path.isabs(dir) and os.path.isdir(dir):
if not lit.util.checkToolsPath(dir, tools):
return None
else:
dir = lit.util.whichTools(tools, paths)
# bash
self.bashPath = lit.util.which('bash', dir)
if self.bashPath is None:
self.bashPath = ''
return dir
def _write_message(self, kind, message):
# Get the file/line where this message was generated.
f = inspect.currentframe()
# Step out of _write_message, and then out of wrapper.
f = f.f_back.f_back
file,line,_,_,_ = inspect.getframeinfo(f)
location = '%s:%d' % (file, line)
sys.stderr.write('%s: %s: %s: %s\n' % (self.progname, location,
kind, message))
def note(self, message):
self._write_message('note', message)
def warning(self, message):
self._write_message('warning', message)
self.numWarnings += 1
def error(self, message):
self._write_message('error', message)
self.numErrors += 1
def fatal(self, message):
self._write_message('fatal', message)
sys.exit(2)
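# --- Illustrative sketch (editor's addition, not part of lit itself) ---
# Shows the minimal set of arguments LitConfig requires; every value below is a
# placeholder chosen for this example, not something lit mandates.
if __name__ == '__main__':
    litConfig = LitConfig(progname='lit', path=[], quiet=False,
                          useValgrind=False, valgrindLeakCheck=False,
                          valgrindArgs=[], noExecute=False, debug=False,
                          isWindows=(sys.platform == 'win32'),
                          singleProcess=False, params={})
    litConfig.note('constructed LitConfig with placeholder settings')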
| {
"content_hash": "fbd270a77ca83165ce152b3fab462897",
"timestamp": "",
"source": "github",
"line_count": 163,
"max_line_length": 97,
"avg_line_length": 37.03680981595092,
"alnum_prop": 0.589862514493954,
"repo_name": "endlessm/chromium-browser",
"id": "e8fb1533a86147f75806a419a2f83fda9bdb6668",
"size": "6037",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "third_party/swiftshader/third_party/llvm-7.0/llvm/utils/lit/lit/LitConfig.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
"""HMAC (Keyed-Hashing for Message Authentication) Python module.
Implements the HMAC algorithm as described by RFC 2104.
"""
import warnings as _warnings
from _operator import _compare_digest as compare_digest
try:
import _hashlib as _hashopenssl
except ImportError:
_hashopenssl = None
_openssl_md_meths = None
else:
_openssl_md_meths = frozenset(_hashopenssl.openssl_md_meth_names)
import hashlib as _hashlib
trans_5C = bytes((x ^ 0x5C) for x in range(256))
trans_36 = bytes((x ^ 0x36) for x in range(256))
# The size of the digests returned by HMAC depends on the underlying
# hashing module used. Use digest_size from the instance of HMAC instead.
digest_size = None
class HMAC:
"""RFC 2104 HMAC class. Also complies with RFC 4231.
This supports the API for Cryptographic Hash Functions (PEP 247).
"""
blocksize = 64 # 512-bit HMAC; can be changed in subclasses.
def __init__(self, key, msg = None, digestmod = None):
"""Create a new HMAC object.
key: key for the keyed hash object.
msg: Initial input for the hash, if provided.
digestmod: A module supporting PEP 247. *OR*
A hashlib constructor returning a new hash object. *OR*
A hash name suitable for hashlib.new().
Defaults to hashlib.md5.
Implicit default to hashlib.md5 is deprecated since Python
3.4 and will be removed in Python 3.8.
Note: key and msg must be a bytes or bytearray objects.
"""
if not isinstance(key, (bytes, bytearray)):
raise TypeError("key: expected bytes or bytearray, but got %r" % type(key).__name__)
if digestmod is None:
_warnings.warn("HMAC() without an explicit digestmod argument "
"is deprecated since Python 3.4, and will be removed "
"in 3.8",
DeprecationWarning, 2)
digestmod = _hashlib.md5
if callable(digestmod):
self.digest_cons = digestmod
elif isinstance(digestmod, str):
self.digest_cons = lambda d=b'': _hashlib.new(digestmod, d)
else:
self.digest_cons = lambda d=b'': digestmod.new(d)
self.outer = self.digest_cons()
self.inner = self.digest_cons()
self.digest_size = self.inner.digest_size
if hasattr(self.inner, 'block_size'):
blocksize = self.inner.block_size
if blocksize < 16:
_warnings.warn('block_size of %d seems too small; using our '
'default of %d.' % (blocksize, self.blocksize),
RuntimeWarning, 2)
blocksize = self.blocksize
else:
_warnings.warn('No block_size attribute on given digest object; '
'Assuming %d.' % (self.blocksize),
RuntimeWarning, 2)
blocksize = self.blocksize
# self.blocksize is the default blocksize. self.block_size is
# effective block size as well as the public API attribute.
self.block_size = blocksize
if len(key) > blocksize:
key = self.digest_cons(key).digest()
key = key.ljust(blocksize, b'\0')
self.outer.update(key.translate(trans_5C))
self.inner.update(key.translate(trans_36))
if msg is not None:
self.update(msg)
@property
def name(self):
return "hmac-" + self.inner.name
def update(self, msg):
"""Update this hashing object with the string msg.
"""
self.inner.update(msg)
def copy(self):
"""Return a separate copy of this hashing object.
An update to this copy won't affect the original object.
"""
# Call __new__ directly to avoid the expensive __init__.
other = self.__class__.__new__(self.__class__)
other.digest_cons = self.digest_cons
other.digest_size = self.digest_size
other.inner = self.inner.copy()
other.outer = self.outer.copy()
return other
def _current(self):
"""Return a hash object for the current state.
To be used only internally with digest() and hexdigest().
"""
h = self.outer.copy()
h.update(self.inner.digest())
return h
def digest(self):
"""Return the hash value of this hashing object.
This returns a string containing 8-bit data. The object is
not altered in any way by this function; you can continue
updating the object after calling this function.
"""
h = self._current()
return h.digest()
def hexdigest(self):
"""Like digest(), but returns a string of hexadecimal digits instead.
"""
h = self._current()
return h.hexdigest()
def new(key, msg = None, digestmod = None):
"""Create a new hashing object and return it.
key: The starting key for the hash.
msg: if available, will immediately be hashed into the object's starting
state.
You can now feed arbitrary strings into the object using its update()
method, and can ask for the hash value at any time by calling its digest()
method.
"""
return HMAC(key, msg, digestmod)
def digest(key, msg, digest):
"""Fast inline implementation of HMAC
key: key for the keyed hash object.
msg: input message
digest: A hash name suitable for hashlib.new() for best performance. *OR*
A hashlib constructor returning a new hash object. *OR*
A module supporting PEP 247.
Note: key and msg must be a bytes or bytearray objects.
"""
if (_hashopenssl is not None and
isinstance(digest, str) and digest in _openssl_md_meths):
return _hashopenssl.hmac_digest(key, msg, digest)
if callable(digest):
digest_cons = digest
elif isinstance(digest, str):
digest_cons = lambda d=b'': _hashlib.new(digest, d)
else:
digest_cons = lambda d=b'': digest.new(d)
inner = digest_cons()
outer = digest_cons()
blocksize = getattr(inner, 'block_size', 64)
if len(key) > blocksize:
key = digest_cons(key).digest()
key = key + b'\x00' * (blocksize - len(key))
inner.update(key.translate(trans_36))
outer.update(key.translate(trans_5C))
inner.update(msg)
outer.update(inner.digest())
return outer.digest()
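# --- Illustrative usage sketch (editor's addition, not part of the module) ---
# Checks that the incremental HMAC class and the one-shot digest() helper agree
# for the same key, message and hash function.
if __name__ == '__main__':
    _key, _msg = b'secret-key', b'attack at dawn'
    _mac = new(_key, _msg, _hashlib.sha256)
    assert _mac.digest() == digest(_key, _msg, 'sha256')
    print(_mac.hexdigest())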
| {
"content_hash": "78459280db799b0ec049b2925422df6e",
"timestamp": "",
"source": "github",
"line_count": 188,
"max_line_length": 96,
"avg_line_length": 34.66489361702128,
"alnum_prop": 0.60196409390824,
"repo_name": "zooba/PTVS",
"id": "43b7212976372c4945b1718f16a0c6de00b88a83",
"size": "6517",
"binary": false,
"copies": "14",
"ref": "refs/heads/master",
"path": "Python/Product/Miniconda/Miniconda3-x64/Lib/hmac.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ASP",
"bytes": "109"
},
{
"name": "Batchfile",
"bytes": "10898"
},
{
"name": "C",
"bytes": "23236"
},
{
"name": "C#",
"bytes": "12390821"
},
{
"name": "C++",
"bytes": "209386"
},
{
"name": "CSS",
"bytes": "7025"
},
{
"name": "HTML",
"bytes": "34251"
},
{
"name": "JavaScript",
"bytes": "87257"
},
{
"name": "PowerShell",
"bytes": "25220"
},
{
"name": "Python",
"bytes": "888412"
},
{
"name": "Rich Text Format",
"bytes": "260880"
},
{
"name": "Smarty",
"bytes": "8156"
},
{
"name": "Tcl",
"bytes": "24968"
}
],
"symlink_target": ""
} |
__author__ = 'tonycastronova'
class POSTGRESQL():
def __init__(self):
pass
def map_data_type(self,type):
type = self._fixTypeNames(type.lower())
type = self._mapPostgresDataTypes(type.lower())
return type
def _fixTypeNames(self, type):
fixNames = {
'int4' : 'integer',
'int' : 'integer',
'bool' : 'boolean',
'float8' : 'double precision',
'int8' : 'bigint',
'serial8' : 'bigserial',
'serial4' : 'serial',
'float4' : 'real',
'int2' : 'smallint',
'character varying' : 'varchar',
'datetime' : 'timestamp',
'uniqueid' : 'uuid'
}
if type in fixNames:
return fixNames[type]
return type
def _mapPostgresDataTypes(self, type):
dtype_equiv = {
'bigint':'bigint',
'binary':'bytea',
'bit':'bit',
'blob':'bytea',
'boolean':'boolean',
            'char':'character',
            'date':'date',
            'datetime':'timestamp',
            'decimal':'real',
'double':'double precision',
'float':'double precision',
'integer':'integer',
'mediumint':'integer',
'money':'money',
'number':'real',
'numeric':'numeric',
'real':'real',
'smallint':'smallint',
'text':'text',
'time':'time',
'timestamp':'timestamp',
'tinyint':'smallint',
'uniqueid':'uuid',
'uuid':'uuid',
'varbinary':'bit',
'varchar':'varchar',
'varchar2':'varchar',
}
if type in dtype_equiv:
return dtype_equiv[type]
return type
def mapAutoIncrement(self, type):
dtype_equiv = {
"smallint" :"serial",
"integer" :"serial",
"bigint" :"bigserial",
"decimal" :"serial",
"numeric" :"serial",
"real" :"serial",
"double precision" :"bigserial",
}
if type in dtype_equiv:
return dtype_equiv[type]
return type
class MYSQL():
def __init__(self):
pass
def map_data_type(self,type):
type = self._mapMySQLDataTypes(type.lower())
return type
def _mapMySQLDataTypes(self, type):
dtype_equiv = {
'bigint':'BIGINT',
'binary':'BINARY',
'bit':'BIT',
'blob':'BLOB',
'boolean':'TINYINT(1)',
            'char':'CHAR',
            'date':'DATE',
            'datetime':'DATETIME',
            'decimal':'DECIMAL',
'double':'DOUBLE',
'float':'FLOAT',
'integer':'INT',
'mediumint':'MEDIUMINT',
'money':'NUMERIC',
'number':'NUMERIC',
'numeric':'NUMERIC',
'real':'DECIMAL',
'smallint':'SMALLINT',
'text':'TEXT',
'time':'TIME',
'timestamp':'TIMESTAMP',
'tinyint':'TINYINT',
'uniqueid':'VARCHAR(36)',
'uuid':'BINARY',
'varbinary':'VARBINARY',
'varchar':'VARCHAR',
'varchar2':'VARCHAR',
}
if type in dtype_equiv:
return dtype_equiv[type]
return type
class SQLITE():
def __init__(self):
pass
def map_data_type(self,type):
type = self._mapSQLiteDataTypes(type.lower())
return type
def _mapSQLiteDataTypes(self, type):
dtype_equiv = {
'bigint':'INTEGER',
'binary':'BINARY',
'bit':'BIT',
'blob':'BLOB',
'boolean':'BIT',
            'char':'CHAR',
            'date':'DATE',
            'datetime':'DATETIME',
            'decimal':'DECIMAL',
'double':'DOUBLE',
'float':'FLOAT',
'integer':'INTEGER',
'mediumint':'MEDIUMINT',
'money':'NUMERIC',
'number':'NUMERIC',
'numeric':'NUMERIC',
'real':'DECIMAL',
'smallint':'SMALLINT',
'text':'TEXT',
'time':'TIME',
'timestamp':'TIMESTAMP',
'tinyint':'TINYINT',
'uniqueid':'VARCHAR(36)',
'uuid':'BINARY',
'varbinary':'VARBINARY',
'varchar':'VARCHAR',
'varchar2':'VARCHAR',
}
if type in dtype_equiv:
return dtype_equiv[type]
return type
class MSSQL():
def __init__(self):
pass
def map_data_type(self,type):
type = self._mapMsSQLDataTypes(type.lower())
return type
def _mapMsSQLDataTypes(self, type):
dtype_equiv = {
'bigint':'bigint',
'binary':'binary',
'bit':'bit',
'blob':'binary',
'boolean':'bit',
            'char':'char',
            'date':'date',
            'datetime':'datetime',
            'decimal':'decimal',
'double':'float',
'float':'float',
'integer':'int',
'mediumint':'int',
'money':'money',
'number':'numeric',
'numeric':'numeric',
'real':'real',
'smallint':'smallint',
'text':'text',
'time':'time',
'timestamp':'timestamp',
'tinyint':'tinyint',
'uniqueid':'uniqueidentifier',
'uuid':'uniqueidentifier',
'varbinary':'varbinary',
'varchar':'varchar',
'varchar2':'varchar',
}
if type in dtype_equiv:
# print '%s -> %s'% (type,dtype_equiv[type])
return dtype_equiv[type]
return type
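# --- Illustrative usage sketch (editor's addition, not part of the original module) ---
# Translates one generic column type with each backend mapper defined above.
if __name__ == '__main__':
    for backend in (POSTGRESQL(), MYSQL(), SQLITE(), MSSQL()):
        print('%s: datetime -> %s' % (backend.__class__.__name__,
                                      backend.map_data_type('datetime')))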
| {
"content_hash": "cd81b51f8f4e9666f27a470e7337d491",
"timestamp": "",
"source": "github",
"line_count": 226,
"max_line_length": 56,
"avg_line_length": 26.053097345132745,
"alnum_prop": 0.43766983695652173,
"repo_name": "miguelcleon/ODM2",
"id": "5c0bc8d146b208d70e22ba0ef4a98627ebbc7d95",
"size": "5888",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/build_schemas/data_mapping.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "84315"
},
{
"name": "SQLPL",
"bytes": "73553"
}
],
"symlink_target": ""
} |
from google.cloud import pubsub # pylint:disable=import-error
def test_pubsub_is_blank(pubsub_client: pubsub.Client) -> None:
topic = pubsub_client.topic('topic')
assert not topic.exists()
topic.create()
sub = topic.subscription('my_sub')
assert not sub.exists()
sub.create()
assert sub.pull(return_immediately=True) == []
topic.publish(b'test message', foo='bar')
pulled = sub.pull()
for ack_id, message in pulled:
assert message.data == b'test message'
assert message.attributes['foo'] == 'bar'
sub.acknowledge([ack_id])
def test_pubsub_is_still_blank(pubsub_client: pubsub.Client) -> None:
topic = pubsub_client.topic('topic')
assert not topic.exists()
topic.create()
sub = topic.subscription('my_sub')
assert not sub.exists()
sub.create()
assert sub.pull(return_immediately=True) == []
topic.publish(b'test message', foo='bar')
pulled = sub.pull()
for ack_id, message in pulled:
assert message.data == b'test message'
assert message.attributes['foo'] == 'bar'
sub.acknowledge([ack_id])
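# --- Illustrative sketch (editor's addition, hypothetical conftest.py content) ---
# The tests above expect a `pubsub_client` pytest fixture. Assuming the legacy
# google-cloud-pubsub client API used here and a running Pub/Sub emulator
# (PUBSUB_EMULATOR_HOST set), one plausible fixture definition is:
#
#   import pytest
#   from google.cloud import pubsub
#
#   @pytest.fixture
#   def pubsub_client() -> pubsub.Client:
#       return pubsub.Client(project='test-project')
#
# Both the fixture body and the emulator setup are assumptions, not taken from
# this repository.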
| {
"content_hash": "fcfb309cddfdf2459633c0f7cba36bdd",
"timestamp": "",
"source": "github",
"line_count": 39,
"max_line_length": 69,
"avg_line_length": 28.94871794871795,
"alnum_prop": 0.6430469441984057,
"repo_name": "JasonMWhite/gitlawca-runner",
"id": "c9efd45bfbfcda5506d562129b6c51e410d6a5d3",
"size": "1129",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/scraper/test_pubsub.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "3929780"
},
{
"name": "Makefile",
"bytes": "439"
},
{
"name": "Python",
"bytes": "45254"
},
{
"name": "Shell",
"bytes": "1057"
}
],
"symlink_target": ""
} |
"""
This module provides Highcharts class, which is a thin wrapper around
Highcharts JS library.
"""
from json import dumps as json
from collections import defaultdict
from collections.abc import MutableMapping, Mapping, Set, Sequence, Iterable
from os.path import join, dirname
from urllib.parse import urljoin
from urllib.request import pathname2url
import numpy as np
from PyQt4.QtCore import QUrl, QObject, pyqtProperty, pyqtSlot, QEventLoop
from PyQt4.QtGui import qApp, QColor
from Orange.widgets.webview import WebView
def _Autotree():
return defaultdict(_Autotree)
def _to_primitive_types(d):
# pylint: disable=too-many-return-statements
if isinstance(d, np.integer):
return int(d)
if isinstance(d, (float, np.floating)):
return float(d) if not np.isnan(d) else None
if isinstance(d, (str, int, bool)):
return d
if isinstance(d, np.ndarray):
# Highcharts chokes on NaN values. Instead it prefers 'null' for
# points it is not intended to show.
new = d.astype(object)
new[np.isnan(d)] = None
return new.tolist()
if isinstance(d, Mapping):
return {k: _to_primitive_types(d[k]) for k in d}
if isinstance(d, Set):
return {k: 1 for k in d}
if isinstance(d, (Sequence, Iterable)):
return [_to_primitive_types(i) for i in d]
if d is None:
return None
if isinstance(d, QColor):
return d.name()
raise TypeError
def _merge_dicts(master, update):
"""Merge dicts recursively in place (``master`` is modified)"""
for k, v in master.items():
if k in update:
if isinstance(v, MutableMapping) and isinstance(update[k], MutableMapping):
update[k] = _merge_dicts(v, update[k])
master.update(update)
return master
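# A quick behavioral sketch of _merge_dicts (editor's addition): nested mappings
# are merged key by key, and values from `update` win on conflicts, e.g.
#
# >>> _merge_dicts({'a': {'b': 1, 'c': 2}}, {'a': {'c': 3}, 'd': 4}) == \
# ...     {'a': {'b': 1, 'c': 3}, 'd': 4}
# True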
def _kwargs_options(kwargs):
"""Transforma a dict into a hierarchical dict.
Example
-------
>>> (_kwargs_options(dict(a_b_c=1, a_d_e=2, x=3)) ==
... dict(a=dict(b=dict(c=1), d=dict(e=2)), x=3))
True
"""
kwoptions = _Autotree()
for kws, val in kwargs.items():
cur = kwoptions
kws = kws.split('_')
for kw in kws[:-1]:
cur = cur[kw]
cur[kws[-1]] = val
return kwoptions
class Highchart(WebView):
"""Create a Highcharts webview widget.
Parameters
----------
parent: QObject
Qt parent object, if any.
bridge: QObject
Exposed as ``window.pybridge`` in JavaScript.
options: dict
Default options for this chart. See Highcharts docs. Some
options are already set in the default theme.
highchart: str
One of `Chart`, `StockChart`, or `Map` Highcharts JS types.
enable_zoom: bool
Enables scroll wheel zooming and right-click zoom reset.
enable_select: str
If '+', allow series' points to be selected by clicking
on the markers, bars or pie slices. Can also be one of
'x', 'y', or 'xy' (all of which can also end with '+' for the
above), in which case it indicates the axes on which
to enable rectangle selection. The list of selected points
for each input series (i.e. a list of arrays) is
passed to the ``selection_callback``.
Each selected point is represented as its index in the series.
If the selection is empty, the callback parameter is a single
empty list.
javascript: str
Additional JavaScript code to evaluate beforehand. If you
need something exposed in the global namespace,
assign it as an attribute to the ``window`` object.
debug: bool
Enables right-click context menu and inspector tools.
**kwargs:
The additional options. The underscores in argument names imply
hierarchy, e.g., keyword argument such as ``chart_type='area'``
results in the following object, in JavaScript::
{
chart: {
type: 'area'
}
}
The original `options` argument is updated with options from
these kwargs-derived objects.
"""
_HIGHCHARTS_HTML = urljoin(
'file:', pathname2url(join(join(dirname(__file__), '_highcharts'), 'chart.html')))
def __init__(self,
parent=None,
bridge=None,
options=None,
*,
highchart='Chart',
enable_zoom=False,
enable_select=False,
selection_callback=None,
javascript='',
debug=False,
**kwargs):
options = (options or {}).copy()
enable_select = enable_select or ''
if not isinstance(options, dict):
raise ValueError('options must be dict')
if enable_select not in ('', '+', 'x', 'y', 'xy', 'x+', 'y+', 'xy+'):
raise ValueError("enable_select must be '+', 'x', 'y', or 'xy'")
if enable_select and not selection_callback:
raise ValueError('enable_select requires selection_callback')
super().__init__(parent, bridge,
debug=debug,
url=QUrl(self._HIGHCHARTS_HTML))
self.debug = debug
self.highchart = highchart
self.enable_zoom = enable_zoom
enable_point_select = '+' in enable_select
enable_rect_select = enable_select.replace('+', '')
if enable_zoom:
_merge_dicts(options, _kwargs_options(dict(
mapNavigation_enableMouseWheelZoom=True,
mapNavigation_enableButtons=False)))
if enable_select:
self._selection_callback = selection_callback
self.frame.addToJavaScriptWindowObject('__highchart', self)
_merge_dicts(options, _kwargs_options(dict(
chart_events_click='/**/unselectAllPoints/**/')))
if enable_point_select:
_merge_dicts(options, _kwargs_options(dict(
plotOptions_series_allowPointSelect=True,
plotOptions_series_point_events_click='/**/clickedPointSelect/**/')))
if enable_rect_select:
_merge_dicts(options, _kwargs_options(dict(
chart_zoomType=enable_rect_select,
chart_events_selection='/**/rectSelectPoints/**/')))
if kwargs:
_merge_dicts(options, _kwargs_options(kwargs))
super_evalJS = super().evalJS
def evalOptions():
super_evalJS(javascript)
self.evalJS('''
var options = {options};
fixupOptionsObject(options);
Highcharts.setOptions(options);
'''.format(options=json(options)))
self.frame.loadFinished.connect(evalOptions)
# Give above scripts time to load
qApp.processEvents(QEventLoop.ExcludeUserInputEvents)
qApp.processEvents(QEventLoop.ExcludeUserInputEvents)
def contextMenuEvent(self, event):
""" Zoom out on right click. Also disable context menu."""
if self.enable_zoom:
self.evalJS('chart.zoomOut();')
if self.debug:
super().contextMenuEvent(event)
@staticmethod
def _JSObject_factory(obj):
pyqt_type = type(obj).__mro__[-2]
if isinstance(obj, (list, np.ndarray)):
pyqt_type = 'QVariantList'
elif isinstance(obj, Mapping):
pyqt_type = 'QVariantMap'
else:
raise TypeError("Can't expose object of type {}. Too easy. Use "
"evalJS method instead.".format(type(obj)))
class _JSObject(QObject):
""" This class hopefully prevent options data from being marshalled
into a string-like dumb (JSON) object when passed into JavaScript. """
def __init__(self, parent, obj):
super().__init__(parent)
self._obj = obj
@pyqtProperty(pyqt_type)
def _options(self):
return self._obj
return _JSObject
def exposeObject(self, name, obj):
"""Expose the object `obj` as ``window.<name>`` in JavaScript.
If the object contains any string values that start and end with
literal ``/**/``, those are evaluated as JS expressions the result
value replaces the string in the object.
The exposure, as defined here, represents a snapshot of object at
the time of execution. Any future changes on the original Python
object are not (necessarily) visible in its JavaScript counterpart.
Parameters
----------
name: str
The global name the object is exposed as.
obj: object
The object to expose. Must contain only primitive types, such as:
int, float, str, bool, list, dict, set, numpy.ndarray.
"""
try:
obj = _to_primitive_types(obj)
except TypeError:
raise TypeError(
'object must consist of primitive types '
'(allowed: int, float, str, bool, list, '
'dict, set, numpy.ndarray, ...)') from None
pydata = self._JSObject_factory(obj)(self, obj)
self.frame.addToJavaScriptWindowObject('_' + name, pydata)
self.evalJS('''
window.{0} = window._{0}._options;
fixupOptionsObject({0});
'''.format(name))
def chart(self, options=None, *,
highchart=None, javascript='', javascript_after='', **kwargs):
""" Populate the webview with a new Highcharts JS chart.
Parameters
----------
options, highchart, javascript, **kwargs:
The parameters are the same as for the object constructor.
javascript_after: str
Same as `javascript`, except that the code is evaluated
after the chart, available as ``window.chart``, is created.
Notes
-----
Passing ``{ series: [{ data: some_data }] }``, if ``some_data`` is
a numpy array, it is **more efficient** to leave it as numpy array
instead of converting it ``some_data.tolist()``, which is done
implicitly.
"""
# Give default options some time to apply
qApp.processEvents(QEventLoop.ExcludeUserInputEvents)
options = (options or {}).copy()
if not isinstance(options, MutableMapping):
raise ValueError('options must be dict')
if kwargs:
_merge_dicts(options, _kwargs_options(kwargs))
self.exposeObject('pydata', options)
highchart = highchart or self.highchart or 'Chart'
self.evalJS('''
{javascript};
window.chart = new Highcharts.{highchart}(pydata);
{javascript_after};
'''.format(javascript=javascript,
javascript_after=javascript_after,
highchart=highchart,))
def evalJS(self, javascript):
""" Asynchronously evaluate JavaScript code. """
# Why do we need this async? I don't know. But performance of
# loading/evaluating any JS code is greatly improved this way.
_ASYNC = 'setTimeout(function() { %s; }, 10);'
super().evalJS(_ASYNC % javascript)
def clear(self):
"""Remove all series from the chart"""
self.evalJS('''
if (window.chart) {
while(chart.series.length > 0) {
chart.series[0].remove(false);
}
chart.redraw();
}
''')
@pyqtSlot('QVariantList')
def _on_selected_points(self, points):
self._selection_callback([np.sort(selected).astype(int)
for selected in points])
def main():
""" A simple test. """
from PyQt4.QtGui import QApplication
app = QApplication([])
def _on_selected_points(points):
print(len(points), points)
w = Highchart(enable_zoom=True, enable_select='xy+',
selection_callback=_on_selected_points,
debug=True)
w.chart(dict(series=[dict(data=np.random.random((100, 2)))]),
credits_text='BTYB Yours Truly',
title_text='Foo plot',
chart_type='scatter')
w.show()
app.exec()
if __name__ == '__main__':
main()
| {
"content_hash": "c85c8e02e21ceb5480ee58a28583dd8b",
"timestamp": "",
"source": "github",
"line_count": 346,
"max_line_length": 90,
"avg_line_length": 35.77167630057804,
"alnum_prop": 0.5818857558374404,
"repo_name": "qPCR4vir/orange3",
"id": "0f0942a2d829ce63b1f0ac7e7610fafecee19bd9",
"size": "12377",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Orange/widgets/highcharts.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "C",
"bytes": "20412"
},
{
"name": "C++",
"bytes": "1992"
},
{
"name": "GLSL",
"bytes": "75"
},
{
"name": "HTML",
"bytes": "3503"
},
{
"name": "JavaScript",
"bytes": "12007"
},
{
"name": "Jupyter Notebook",
"bytes": "6662"
},
{
"name": "NSIS",
"bytes": "20281"
},
{
"name": "Python",
"bytes": "4205054"
},
{
"name": "Shell",
"bytes": "48335"
}
],
"symlink_target": ""
} |
# -*- coding: utf-8 -*-
import colander
import mock
import uuid
import unittest
from pyramid.response import Response
from kinto.core.views.batch import BatchPayloadSchema, batch as batch_service
from kinto.core.testing import DummyRequest
from kinto.core.utils import json
from .support import BaseWebTest
class BatchViewTest(BaseWebTest, unittest.TestCase):
def test_does_not_require_authentication(self):
body = {'requests': []}
self.app.post_json('/batch', body)
def test_returns_400_if_body_has_missing_requests(self):
self.app.post('/batch', {}, headers=self.headers, status=400)
def test_returns_responses_if_schema_is_valid(self):
body = {'requests': []}
resp = self.app.post_json('/batch', body, headers=self.headers)
self.assertIn('responses', resp.json)
def test_defaults_are_applied_to_requests(self):
request = {'path': '/v0/'}
defaults = {'method': 'POST'}
result = self.app.post_json('/batch',
{'requests': [request],
'defaults': defaults})
self.assertEqual(result.json['responses'][0]['status'], 405)
def test_only_post_is_allowed(self):
self.app.get('/batch', headers=self.headers, status=405)
self.app.put('/batch', headers=self.headers, status=405)
self.app.patch('/batch', headers=self.headers, status=405)
self.app.delete('/batch', headers=self.headers, status=405)
def test_batch_adds_missing_api_with_prefix(self):
request = {'path': '/v0/'}
body = {'requests': [request]}
resp = self.app.post_json('/batch', body, headers=self.headers)
hello = resp.json['responses'][0]
self.assertEqual(hello['path'], '/v0/')
self.assertEqual(hello['status'], 200)
self.assertEqual(hello['body']['project_name'], 'myapp')
self.assertIn('application/json', hello['headers']['Content-Type'])
def test_empty_response_body_with_head(self):
request = {'path': '/v0/', 'method': 'HEAD'}
body = {'requests': [request]}
resp = self.app.post_json('/batch', body, headers=self.headers)
head = resp.json['responses'][0]
self.assertEqual(head['body'], '')
self.assertNotEqual(len(head['headers']), 0)
def test_api_errors_are_json_formatted(self):
request = {'path': '/unknown'}
body = {'requests': [request]}
resp = self.app.post_json('/batch', body, headers=self.headers)
error = resp.json['responses'][0]
self.assertEqual(error['body']['code'], 404)
def test_internal_errors_makes_the_batch_fail(self):
request = {'path': '/v0/'}
body = {'requests': [request]}
with mock.patch('kinto.core.views.hello.get_eos') as mocked:
mocked.side_effect = AttributeError
self.app.post_json('/batch', body, headers=self.headers,
status=500)
def test_errors_handled_by_view_does_not_make_the_batch_fail(self):
from requests.exceptions import HTTPError
request = {'path': '/v0/'}
body = {'requests': [request]}
with mock.patch('kinto.core.views.hello.get_eos') as mocked:
response = mock.MagicMock(status_code=404)
mocked.side_effect = HTTPError(response=response)
resp = self.app.post_json('/batch', body, headers=self.headers,
status=200)
subresponse = resp.json['responses'][0]['body']
self.assertEqual(subresponse, {
'errno': 999,
'code': 404,
'error': 'Not Found'
})
def test_batch_cannot_be_recursive(self):
requests = {'requests': [{'path': '/v0/'}]}
request = {'method': 'POST', 'path': '/v0/batch', 'body': requests}
body = {'requests': [request]}
resp = self.app.post_json('/batch', body, status=400)
self.assertIn('Recursive', resp.json['message'])
def test_responses_are_resolved_with_api_with_prefix(self):
request = {'path': '/'}
body = {'requests': [request]}
resp = self.app.post_json('/batch', body, headers=self.headers)
hello = resp.json['responses'][0]
self.assertEqual(hello['path'], '/v0/')
self.assertEqual(hello['status'], 200)
self.assertEqual(hello['body']['project_name'], 'myapp')
self.assertIn('application/json', hello['headers']['Content-Type'])
def test_redirect_responses_are_followed(self):
request = {'path': '/mushrooms/'} # trailing slash
body = {'requests': [request]}
resp = self.app.post_json('/batch', body, headers=self.headers)
collection = resp.json['responses'][0]
self.assertEqual(collection['status'], 200)
self.assertEqual(collection['path'], '/v0/mushrooms')
self.assertEqual(collection['body'], {'data': []})
def test_body_is_transmitted_during_redirect(self):
request = {
'method': 'PUT',
'path': '/mushrooms/%s/' % str(uuid.uuid4()),
'body': {'data': {'name': 'Trompette de la mort'}}
}
body = {'requests': [request]}
resp = self.app.post_json('/batch', body, headers=self.headers)
response = resp.json['responses'][0]
self.assertEqual(response['status'], 201)
record = response['body']['data']
self.assertEqual(record['name'], 'Trompette de la mort')
def test_400_error_message_is_forwarded(self):
headers = self.headers.copy()
headers['If-Match'] = '"*"'
request = {
'method': 'PUT',
'path': '/mushrooms/%s' % str(uuid.uuid4()),
'body': {'data': {'name': 'Trompette de la mort'}},
'headers': headers
}
body = {'requests': [request, request]}
resp = self.app.post_json('/batch', body, status=200)
self.assertEqual(resp.json['responses'][1]['status'], 400)
self.assertEqual(resp.json['responses'][1]['body']['message'],
('headers: Invalid value for If-Match. The value '
'should be integer between double quotes.'))
def test_412_errors_are_forwarded(self):
headers = self.headers.copy()
headers['If-None-Match'] = '*'
request = {
'method': 'PUT',
'path': '/mushrooms/%s' % str(uuid.uuid4()),
'body': {'data': {'name': 'Trompette de la mort'}},
'headers': headers
}
body = {'requests': [request, request]}
resp = self.app.post_json('/batch', body, status=200)
self.assertEqual(resp.json['responses'][0]['status'], 201)
self.assertEqual(resp.json['responses'][1]['status'], 412)
class BatchSchemaTest(unittest.TestCase):
def setUp(self):
self.schema = BatchPayloadSchema()
def assertInvalid(self, payload):
self.assertRaises(colander.Invalid, self.schema.deserialize, payload)
def test_requests_is_mandatory(self):
self.assertInvalid({})
def test_unknown_attributes_are_dropped(self):
deserialized = self.schema.deserialize({'requests': [], 'unknown': 42})
self.assertNotIn('unknown', deserialized)
def test_list_of_requests_can_be_empty(self):
self.schema.deserialize({'requests': []})
def test_list_of_requests_must_be_a_list(self):
self.assertInvalid({'requests': {}})
def test_list_of_requests_must_be_dicts(self):
request = 42
self.assertInvalid({'requests': [request]})
def test_request_path_must_start_with_slash(self):
request = {'path': 'http://localhost'}
self.assertInvalid({'requests': [request]})
def test_request_path_is_mandatory(self):
request = {'method': 'HEAD'}
self.assertInvalid({'requests': [request]})
def test_request_method_must_be_known_uppercase_word(self):
request = {'path': '/', 'method': 'get'}
self.assertInvalid({'requests': [request]})
#
# headers
#
def test_request_headers_should_be_strings(self):
headers = {'Accept': 3.14}
request = {'path': '/', 'headers': headers}
self.assertInvalid({'requests': [request]})
def test_request_headers_cannot_be_recursive(self):
headers = {'Accept': {'sub': 'dict'}}
request = {'path': '/', 'headers': headers}
self.assertInvalid({'requests': [request]})
def test_request_headers_are_preserved(self):
headers = {'Accept': 'audio/*'}
request = {'path': '/', 'headers': headers}
deserialized = self.schema.deserialize({'requests': [request]})
self.assertEqual(deserialized['requests'][0]['headers']['Accept'],
'audio/*')
#
# body
#
def test_body_is_an_arbitrary_mapping(self):
payload = {"json": "payload"}
request = {'path': '/', 'body': payload}
deserialized = self.schema.deserialize({'requests': [request]})
self.assertEqual(deserialized['requests'][0]['body'], payload)
#
# defaults
#
def test_defaults_must_be_a_mapping_if_specified(self):
request = {'path': '/'}
batch_payload = {'requests': [request], 'defaults': 42}
self.assertInvalid(batch_payload)
def test_defaults_must_be_a_request_schema_if_specified(self):
request = {'path': '/'}
defaults = {'body': 3}
batch_payload = {'requests': [request], 'defaults': defaults}
self.assertInvalid(batch_payload)
def test_unknown_defaults_are_ignored_silently(self):
request = {'path': '/'}
defaults = {'foo': 'bar'}
batch_payload = {'requests': [request], 'defaults': defaults}
result = self.schema.deserialize(self.schema.unflatten(batch_payload))
self.assertNotIn('foo', result['requests'][0])
def test_defaults_can_be_specified_empty(self):
request = {'path': '/'}
defaults = {}
batch_payload = {'requests': [request], 'defaults': defaults}
self.schema.deserialize(self.schema.unflatten(batch_payload))
def test_defaults_path_is_applied_to_requests(self):
request = {'method': 'GET'}
defaults = {'path': '/'}
batch_payload = {'requests': [request], 'defaults': defaults}
result = self.schema.deserialize(self.schema.unflatten(batch_payload))
self.assertEqual(result['requests'][0]['path'], '/')
def test_defaults_body_is_applied_to_requests(self):
request = {'path': '/'}
defaults = {'body': {'json': 'payload'}}
batch_payload = {'requests': [request], 'defaults': defaults}
result = self.schema.deserialize(self.schema.unflatten(batch_payload))
self.assertEqual(result['requests'][0]['body'], {'json': 'payload'})
def test_defaults_headers_are_applied_to_requests(self):
request = {'path': '/'}
defaults = {'headers': {'Content-Type': 'text/html'}}
batch_payload = {'requests': [request], 'defaults': defaults}
result = self.schema.deserialize(self.schema.unflatten(batch_payload))
self.assertEqual(result['requests'][0]['headers']['Content-Type'],
'text/html')
def test_defaults_values_do_not_overwrite_requests_values(self):
request = {'path': '/', 'headers': {'Authorization': 'me'}}
defaults = {'headers': {'Authorization': 'you', 'Accept': '*/*'}}
batch_payload = {'requests': [request], 'defaults': defaults}
result = self.schema.deserialize(self.schema.unflatten(batch_payload))
self.assertEqual(result['requests'][0]['headers'],
{'Authorization': 'me', 'Accept': '*/*'})
def test_defaults_values_for_path_must_start_with_slash(self):
request = {}
defaults = {'path': 'http://localhost'}
batch_payload = {'requests': [request], 'defaults': defaults}
self.assertInvalid(batch_payload)
class BatchServiceTest(unittest.TestCase):
def setUp(self):
self.method, self.view, self.options = batch_service.definitions[0]
self.request = DummyRequest()
def post(self, validated):
self.request.validated = validated
return self.view(self.request)
def test_returns_empty_list_of_responses_if_requests_empty(self):
result = self.post({'requests': []})
self.assertEqual(result['responses'], [])
def test_returns_one_response_per_request(self):
requests = [{'path': '/'}]
result = self.post({'requests': requests})
self.assertEqual(len(result['responses']), len(requests))
def test_relies_on_pyramid_invoke_subrequest(self):
self.post({'requests': [{'path': '/'}]})
self.assertTrue(self.request.invoke_subrequest.called)
def test_returns_requests_path_in_responses(self):
result = self.post({'requests': [{'path': '/'}]})
self.assertEqual(result['responses'][0]['path'], '/v0/')
def test_subrequests_have_parent_attribute(self):
self.request.path = '/batch'
self.post({'requests': [{'path': '/'}]})
subrequest, = self.request.invoke_subrequest.call_args[0]
self.assertEqual(subrequest.parent.path, '/batch')
def test_subrequests_are_GET_by_default(self):
self.post({'requests': [{'path': '/'}]})
subrequest, = self.request.invoke_subrequest.call_args[0]
self.assertEqual(subrequest.method, 'GET')
def test_original_request_headers_are_passed_to_subrequests(self):
self.request.headers['Authorization'] = 'Basic ertyfghjkl'
self.post({'requests': [{'path': '/'}]})
subrequest, = self.request.invoke_subrequest.call_args[0]
self.assertIn('Basic', subrequest.headers['Authorization'])
def test_subrequests_body_are_json_serialized(self):
request = {'path': '/', 'body': {'json': 'payload'}}
self.post({'requests': [request]})
wanted = {"json": "payload"}
subrequest, = self.request.invoke_subrequest.call_args[0]
self.assertEqual(subrequest.body.decode('utf8'),
json.dumps(wanted))
def test_subrequests_body_have_json_content_type(self):
self.request.headers['Content-Type'] = 'text/xml'
request = {'path': '/', 'body': {'json': 'payload'}}
self.post({'requests': [request]})
subrequest, = self.request.invoke_subrequest.call_args[0]
self.assertIn('application/json',
subrequest.headers['Content-Type'])
def test_subrequests_body_have_utf8_charset(self):
request = {'path': '/', 'body': {'json': u"😂"}}
self.post({'requests': [request]})
subrequest, = self.request.invoke_subrequest.call_args[0]
self.assertIn('charset=utf-8', subrequest.headers['Content-Type'])
wanted = {"json": u"😂"}
self.assertEqual(subrequest.body.decode('utf8'),
json.dumps(wanted))
def test_subrequests_paths_are_url_encoded(self):
request = {'path': u'/test?param=©'}
self.post({'requests': [request]})
subrequest, = self.request.invoke_subrequest.call_args[0]
self.assertEqual(subrequest.path, u'/v0/test')
self.assertEqual(subrequest.GET['param'], u'©')
def test_subrequests_responses_paths_are_url_decoded(self):
request = {'path': u'/test?param=©'}
resp = self.post({'requests': [request]})
path = resp['responses'][0]['path']
self.assertEqual(path, u'/v0/test')
def test_response_body_is_string_if_remote_response_is_not_json(self):
response = Response(body='Internal Error')
self.request.invoke_subrequest.return_value = response
request = {'path': u'/test'}
resp = self.post({'requests': [request]})
body = resp['responses'][0]['body'].decode('utf-8')
self.assertEqual(body, 'Internal Error')
def test_number_of_requests_is_not_limited_when_settings_set_to_none(self):
self.request.registry.settings['batch_max_requests'] = None
requests = {}
for i in range(30):
requests.setdefault('requests', []).append({'path': '/'})
self.post(requests)
def test_number_of_requests_is_limited_to_25_by_default(self):
requests = {}
for i in range(26):
requests.setdefault('requests', []).append({'path': '/'})
result = self.post(requests)
self.assertEqual(self.request.errors[0]['description'],
'Number of requests is limited to 25')
self.assertIsNone(result) # rest of view not executed
def test_return_400_if_number_of_requests_is_greater_than_settings(self):
self.request.registry.settings['batch_max_requests'] = 22
requests = {}
for i in range(23):
requests.setdefault('requests', []).append({'path': '/'})
result = self.post(requests)
self.assertEqual(self.request.errors[0]['description'],
'Number of requests is limited to 22')
self.assertIsNone(result) # rest of view not executed
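# Illustrative payload shape exercised by the tests above (comment only, inferred
# from the assertions rather than taken from the schema module; paths and header
# values are hypothetical):
#   {"requests": [{"path": "/articles", "method": "GET",
#                  "headers": {"Accept": "application/json"},
#                  "body": {"key": "value"}}],
#    "defaults": {"headers": {"Authorization": "Basic ..."}}}
# "defaults" values fill in whatever a request omits and never overwrite values
# the request sets explicitly.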
#!/usr/bin/python2
#
# Copyright 2017 The ANGLE Project Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
#
# generate_entry_points.py:
# Generates the OpenGL bindings and entry point layers for ANGLE.
# NOTE: don't run this script directly. Run scripts/run_code_generation.py.
import sys, os, pprint, json
from datetime import date
import registry_xml
# List of GLES1 extensions for which we don't need to add Context.h decls.
gles1_no_context_decl_extensions = [
"GL_OES_framebuffer_object",
]
# This is a list of exceptions for entry points which don't want to have
# the EVENT macro. This is required for some debug marker entry points.
no_event_marker_exceptions_list = sorted([
"glPushGroupMarkerEXT",
"glPopGroupMarkerEXT",
"glInsertEventMarkerEXT",
])
# Strip these suffixes from Context entry point names. NV is excluded (for now).
strip_suffixes = ["ANGLE", "EXT", "KHR", "OES", "CHROMIUM", "OVR"]
template_entry_point_header = """// GENERATED FILE - DO NOT EDIT.
// Generated by {script_name} using data from {data_source_name}.
//
// Copyright {year} The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// entry_points_{annotation_lower}_autogen.h:
// Defines the {comment} entry points.
#ifndef {lib}_ENTRY_POINTS_{annotation_upper}_AUTOGEN_H_
#define {lib}_ENTRY_POINTS_{annotation_upper}_AUTOGEN_H_
{includes}
namespace gl
{{
{entry_points}
}} // namespace gl
#endif // {lib}_ENTRY_POINTS_{annotation_upper}_AUTOGEN_H_
"""
template_entry_point_source = """// GENERATED FILE - DO NOT EDIT.
// Generated by {script_name} using data from {data_source_name}.
//
// Copyright {year} The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// entry_points_{annotation_lower}_autogen.cpp:
// Defines the {comment} entry points.
{includes}
namespace gl
{{
{entry_points}}} // namespace gl
"""
template_entry_points_enum_header = """// GENERATED FILE - DO NOT EDIT.
// Generated by {script_name} using data from {data_source_name}.
//
// Copyright {year} The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// entry_points_enum_autogen.h:
// Defines the {lib} entry points enumeration.
#ifndef LIBANGLE_ENTRYPOINTSENUM_AUTOGEN_H_
#define LIBANGLE_ENTRYPOINTSENUM_AUTOGEN_H_
namespace gl
{{
enum class EntryPoint
{{
{entry_points_list}
}};
const char *GetEntryPointName(EntryPoint ep);
}} // namespace gl
#endif // LIBANGLE_ENTRYPOINTSENUM_AUTOGEN_H_
"""
template_entry_points_name_case = """ case EntryPoint::{enum}:
return "gl{enum}";"""
template_entry_points_enum_source = """// GENERATED FILE - DO NOT EDIT.
// Generated by {script_name} using data from {data_source_name}.
//
// Copyright {year} The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// entry_points_enum_autogen.cpp:
// Helper methods for the {lib} entry points enumeration.
#include "libANGLE/entry_points_enum_autogen.h"
#include "common/debug.h"
namespace gl
{{
const char *GetEntryPointName(EntryPoint ep)
{{
switch (ep)
{{
{entry_points_name_cases}
default:
UNREACHABLE();
return "error";
}}
}}
}} // namespace gl
"""
template_lib_entry_point_source = """// GENERATED FILE - DO NOT EDIT.
// Generated by {script_name} using data from {data_source_name}.
//
// Copyright {year} The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// {lib_name}.cpp: Implements the exported {lib_description} functions.
{includes}
extern "C" {{
{entry_points}
}} // extern "C"
"""
template_entry_point_decl = """ANGLE_EXPORT {return_type}GL_APIENTRY {name}{explicit_context_suffix}({explicit_context_param}{explicit_context_comma}{params});"""
template_entry_point_no_return = """void GL_APIENTRY {name}{explicit_context_suffix}({explicit_context_param}{explicit_context_comma}{params})
{{
Context *context = {context_getter};
{event_comment}EVENT("gl{name}", "context = %d{comma_if_needed}{format_params}", CID(context){comma_if_needed}{pass_params});
if (context)
{{{assert_explicit_context}{packed_gl_enum_conversions}
std::unique_lock<std::mutex> shareContextLock = GetShareGroupLock(context);
bool isCallValid = (context->skipValidation() || Validate{name}({validate_params}));
if (isCallValid)
{{
context->{name_lower_no_suffix}({internal_params});
}}
ANGLE_CAPTURE({name}, isCallValid, {validate_params});
}}
}}
"""
template_entry_point_with_return = """{return_type}GL_APIENTRY {name}{explicit_context_suffix}({explicit_context_param}{explicit_context_comma}{params})
{{
Context *context = {context_getter};
{event_comment}EVENT("gl{name}", "context = %d{comma_if_needed}{format_params}", CID(context){comma_if_needed}{pass_params});
{return_type} returnValue;
if (context)
{{{assert_explicit_context}{packed_gl_enum_conversions}
std::unique_lock<std::mutex> shareContextLock = GetShareGroupLock(context);
bool isCallValid = (context->skipValidation() || Validate{name}({validate_params}));
if (isCallValid)
{{
returnValue = context->{name_lower_no_suffix}({internal_params});
}}
else
{{
returnValue = GetDefaultReturnValue<EntryPoint::{name}, {return_type}>();
}}
ANGLE_CAPTURE({name}, isCallValid, {validate_params}, returnValue);
}}
else
{{
returnValue = GetDefaultReturnValue<EntryPoint::{name}, {return_type}>();
}}
return returnValue;
}}
"""
context_header = """// GENERATED FILE - DO NOT EDIT.
// Generated by {script_name} using data from {data_source_name}.
//
// Copyright {year} The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// Context_{annotation_lower}_autogen.h: Creates a macro for interfaces in Context.
#ifndef ANGLE_CONTEXT_{annotation_upper}_AUTOGEN_H_
#define ANGLE_CONTEXT_{annotation_upper}_AUTOGEN_H_
#define ANGLE_{annotation_upper}_CONTEXT_API \\
{interface}
#endif // ANGLE_CONTEXT_{annotation_upper}_AUTOGEN_H_
"""
context_decl_format = """ {return_type} {name_lower_no_suffix}({internal_params}); \\"""
libgles_entry_point_def = """{return_type}GL_APIENTRY gl{name}{explicit_context_suffix}({explicit_context_param}{explicit_context_comma}{params})
{{
return gl::{name}{explicit_context_suffix}({explicit_context_internal_param}{explicit_context_comma}{internal_params});
}}
"""
template_glext_explicit_context_inc = """// GENERATED FILE - DO NOT EDIT.
// Generated by {script_name} using data from {data_source_name}.
//
// Copyright {year} The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// gl{version}ext_explicit_context_autogen.inc:
// Function declarations for the EGL_ANGLE_explicit_context extension
{function_pointers}
#ifdef GL_GLEXT_PROTOTYPES
{function_prototypes}
#endif
"""
template_glext_function_pointer = """typedef {return_type}(GL_APIENTRYP PFN{name_upper}{explicit_context_suffix_upper}PROC)({explicit_context_param}{explicit_context_comma}{params});"""
template_glext_function_prototype = """{apicall} {return_type}GL_APIENTRY {name}{explicit_context_suffix}({explicit_context_param}{explicit_context_comma}{params});"""
template_validation_header = """// GENERATED FILE - DO NOT EDIT.
// Generated by {script_name} using data from {data_source_name}.
//
// Copyright {year} The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// validation{annotation}_autogen.h:
// Validation functions for the OpenGL {comment} entry points.
#ifndef LIBANGLE_VALIDATION_{annotation}_AUTOGEN_H_
#define LIBANGLE_VALIDATION_{annotation}_AUTOGEN_H_
#include "common/PackedEnums.h"
namespace gl
{{
class Context;
{prototypes}
}} // namespace gl
#endif // LIBANGLE_VALIDATION_{annotation}_AUTOGEN_H_
"""
template_capture_header = """// GENERATED FILE - DO NOT EDIT.
// Generated by {script_name} using data from {data_source_name}.
//
// Copyright {year} The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// capture_gles_{annotation}_autogen.h:
// Capture functions for the OpenGL ES {comment} entry points.
#ifndef LIBANGLE_CAPTURE_GLES_{annotation}_AUTOGEN_H_
#define LIBANGLE_CAPTURE_GLES_{annotation}_AUTOGEN_H_
#include "common/PackedEnums.h"
#include "libANGLE/FrameCapture.h"
namespace gl
{{
{prototypes}
}} // namespace gl
#endif // LIBANGLE_CAPTURE_GLES_{annotation}_AUTOGEN_H_
"""
template_capture_source = """// GENERATED FILE - DO NOT EDIT.
// Generated by {script_name} using data from {data_source_name}.
//
// Copyright {year} The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// capture_gles_{annotation_with_dash}_autogen.cpp:
// Capture functions for the OpenGL ES {comment} entry points.
#include "libANGLE/capture_gles_{annotation_with_dash}_autogen.h"
#include "libANGLE/Context.h"
#include "libANGLE/FrameCapture.h"
#include "libANGLE/gl_enum_utils.h"
#include "libANGLE/validation{annotation_no_dash}.h"
using namespace angle;
namespace gl
{{
{capture_methods}
}} // namespace gl
"""
template_capture_method_with_return_value = """
CallCapture Capture{short_name}({params_with_type}, {return_value_type_original} returnValue)
{{
ParamBuffer paramBuffer;
{parameter_captures}
ParamCapture returnValueCapture("returnValue", ParamType::T{return_value_type_custom});
InitParamValue(ParamType::T{return_value_type_custom}, returnValue, &returnValueCapture.value);
paramBuffer.addReturnValue(std::move(returnValueCapture));
return CallCapture(gl::EntryPoint::{short_name}, std::move(paramBuffer));
}}
"""
template_capture_method_no_return_value = """
CallCapture Capture{short_name}({params_with_type})
{{
ParamBuffer paramBuffer;
{parameter_captures}
return CallCapture(gl::EntryPoint::{short_name}, std::move(paramBuffer));
}}
"""
template_parameter_capture_value = """paramBuffer.addValueParam("{name}", ParamType::T{type}, {name});"""
template_parameter_capture_gl_enum = """paramBuffer.addEnumParam("{name}", GLenumGroup::{group}, ParamType::T{type}, {name});"""
template_parameter_capture_pointer = """
ParamCapture {name}Param("{name}", ParamType::T{type});
InitParamValue(ParamType::T{type}, {name}, &{name}Param.value);
{capture_name}({params}, &{name}Param);
paramBuffer.addParam(std::move({name}Param));
"""
template_parameter_capture_pointer_func = """void {name}({params});"""
template_capture_replay_source = """// GENERATED FILE - DO NOT EDIT.
// Generated by {script_name} using data from {data_source_name}.
//
// Copyright {year} The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// frame_capture_replay_autogen.cpp:
// Util function to dispatch captured GL calls through Context and replay them.
#include "angle_gl.h"
#include "common/debug.h"
#include "common/debug.h"
#include "libANGLE/Context.h"
#include "libANGLE/Context.inl.h"
#include "libANGLE/FrameCapture.h"
using namespace gl;
namespace angle
{{
void FrameCapture::ReplayCall(gl::Context *context,
ReplayContext *replayContext,
const CallCapture &call)
{{
const ParamBuffer &params = call.params;
switch (call.entryPoint)
{{
{call_replay_cases}
default:
UNREACHABLE();
}}
}}
}} // namespace angle
"""
template_capture_replay_call_case = """case gl::EntryPoint::{entry_point}:
context->{context_call}({param_value_access});break;"""
static_cast_to_dict = {
"GLintptr": "unsigned long long",
"GLsizeiptr": "unsigned long long",
"GLuint64": "unsigned long long",
}
reinterpret_cast_to_dict = {
"GLsync": "uintptr_t",
"GLDEBUGPROC": "uintptr_t",
"GLDEBUGPROCKHR": "uintptr_t",
"GLeglImageOES": "uintptr_t",
}
format_dict = {
"GLbitfield": "%s",
"GLboolean": "%s",
"GLbyte": "%d",
"GLclampx": "0x%X",
"GLDEBUGPROC": "0x%016\" PRIxPTR \"",
"GLDEBUGPROCKHR": "0x%016\" PRIxPTR \"",
"GLdouble": "%f",
"GLeglImageOES": "0x%016\" PRIxPTR \"",
"GLenum": "%s",
"GLfixed": "0x%X",
"GLfloat": "%f",
"GLint": "%d",
"GLintptr": "%llu",
"GLshort": "%d",
"GLsizei": "%d",
"GLsizeiptr": "%llu",
"GLsync": "0x%016\" PRIxPTR \"",
"GLubyte": "%d",
"GLuint": "%u",
"GLuint64": "%llu",
"GLushort": "%u",
"int": "%d",
# WGL specific types
"BOOL": "%u",
"DWORD": "0x%016\" PRIxPTR \"",
"FLOAT": "%f",
"HDC": "0x%016\" PRIxPTR \"",
"HENHMETAFILE": "0x%016\" PRIxPTR \"",
"HGLRC": "0x%016\" PRIxPTR \"",
"LPCSTR": "0x%016\" PRIxPTR \"",
"LPGLYPHMETRICSFLOAT": "0x%016\" PRIxPTR \"",
"UINT": "%u",
}
template_header_includes = """#include <GLES{major}/gl{major}{minor}.h>
#include <export.h>"""
template_sources_includes = """#include "libGLESv2/entry_points_{header_version}_autogen.h"
#include "libANGLE/Context.h"
#include "libANGLE/Context.inl.h"
#include "libANGLE/capture_{header_version}_autogen.h"
#include "libANGLE/gl_enum_utils.h"
#include "libANGLE/validation{validation_header_version}.h"
#include "libANGLE/entry_points_utils.h"
#include "libGLESv2/global_state.h"
"""
template_header_includes_gl32 = """#include <export.h>
#include "angle_gl.h"
"""
template_sources_includes_gl32 = """#include "libGL/entry_points_{}_autogen.h"
#include "libANGLE/Context.h"
#include "libANGLE/Context.inl.h"
#include "libANGLE/gl_enum_utils.h"
#include "libANGLE/validationEGL.h"
#include "libANGLE/validationES.h"
#include "libANGLE/validationES1.h"
#include "libANGLE/validationES2.h"
#include "libANGLE/validationES3.h"
#include "libANGLE/validationES31.h"
#include "libANGLE/validationES32.h"
#include "libANGLE/validationESEXT.h"
#include "libANGLE/validationGL{}{}_autogen.h"
#include "libANGLE/entry_points_utils.h"
#include "libGLESv2/global_state.h"
"""
template_event_comment = """// Don't run the EVENT() macro on the EXT_debug_marker entry points.
// It can interfere with the debug events being set by the caller.
// """
template_capture_proto = "angle::CallCapture Capture%s(%s);"
template_validation_proto = "bool Validate%s(%s);"
template_windows_def_file = """; GENERATED FILE - DO NOT EDIT.
; Generated by {script_name} using data from {data_source_name}.
;
; Copyright {year} The ANGLE Project Authors. All rights reserved.
; Use of this source code is governed by a BSD-style license that can be
; found in the LICENSE file.
LIBRARY {lib}
EXPORTS
{exports}
"""
template_frame_capture_utils_header = """// GENERATED FILE - DO NOT EDIT.
// Generated by {script_name} using data from {data_source_name}.
//
// Copyright {year} The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// frame_capture_utils_autogen.h:
// ANGLE Frame capture types and helper functions.
#ifndef LIBANGLE_FRAME_CAPTURE_UTILS_AUTOGEN_H_
#define LIBANGLE_FRAME_CAPTURE_UTILS_AUTOGEN_H_
#include "common/PackedEnums.h"
namespace angle
{{
enum class ParamType
{{
{param_types}
}};
constexpr uint32_t kParamTypeCount = {param_type_count};
union ParamValue
{{
{param_union_values}
}};
template <ParamType PType, typename T>
T GetParamVal(const ParamValue &value);
{get_param_val_specializations}
template <ParamType PType, typename T>
T GetParamVal(const ParamValue &value)
{{
UNREACHABLE();
return T();
}}
template <typename T>
T AccessParamValue(ParamType paramType, const ParamValue &value)
{{
switch (paramType)
{{
{access_param_value_cases}
}}
}}
template <ParamType PType, typename T>
void SetParamVal(T valueIn, ParamValue *valueOut);
{set_param_val_specializations}
template <ParamType PType, typename T>
void SetParamVal(T valueIn, ParamValue *valueOut)
{{
UNREACHABLE();
}}
template <typename T>
void InitParamValue(ParamType paramType, T valueIn, ParamValue *valueOut)
{{
switch (paramType)
{{
{init_param_value_cases}
}}
}}
void WriteParamTypeToStream(std::ostream &os, ParamType paramType, const ParamValue& paramValue);
const char *ParamTypeToString(ParamType paramType);
enum class ResourceIDType
{{
{resource_id_types}
}};
ResourceIDType GetResourceIDTypeFromParamType(ParamType paramType);
const char *GetResourceIDTypeName(ResourceIDType resourceIDType);
}} // namespace angle
#endif // LIBANGLE_FRAME_CAPTURE_UTILS_AUTOGEN_H_
"""
template_frame_capture_utils_source = """// GENERATED FILE - DO NOT EDIT.
// Generated by {script_name} using data from {data_source_name}.
//
// Copyright {year} The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// frame_capture_utils_autogen.cpp:
// ANGLE Frame capture types and helper functions.
#include "libANGLE/frame_capture_utils_autogen.h"
#include "libANGLE/FrameCapture.h"
namespace angle
{{
void WriteParamTypeToStream(std::ostream &os, ParamType paramType, const ParamValue& paramValue)
{{
switch (paramType)
{{
{write_param_type_to_stream_cases}
default:
os << "unknown";
break;
}}
}}
const char *ParamTypeToString(ParamType paramType)
{{
switch (paramType)
{{
{param_type_to_string_cases}
default:
UNREACHABLE();
return "unknown";
}}
}}
ResourceIDType GetResourceIDTypeFromParamType(ParamType paramType)
{{
switch (paramType)
{{
{param_type_resource_id_cases}
default:
return ResourceIDType::InvalidEnum;
}}
}}
const char *GetResourceIDTypeName(ResourceIDType resourceIDType)
{{
switch (resourceIDType)
{{
{resource_id_type_name_cases}
default:
UNREACHABLE();
return "GetResourceIDTypeName error";
}}
}}
}} // namespace angle
"""
template_get_param_val_specialization = """template <>
inline {type} GetParamVal<ParamType::T{enum}, {type}>(const ParamValue &value)
{{
return value.{union_name};
}}"""
template_access_param_value_case = """ case ParamType::T{enum}:
return GetParamVal<ParamType::T{enum}, T>(value);"""
template_set_param_val_specialization = """template <>
inline void SetParamVal<ParamType::T{enum}>({type} valueIn, ParamValue *valueOut)
{{
valueOut->{union_name} = valueIn;
}}"""
template_init_param_value_case = """ case ParamType::T{enum}:
SetParamVal<ParamType::T{enum}>(valueIn, valueOut);
break;"""
template_write_param_type_to_stream_case = """ case ParamType::T{enum}:
WriteParamValueToStream<ParamType::T{enum}>(os, paramValue.{union_name});
break;"""
template_param_type_to_string_case = """ case ParamType::T{enum}:
return "{type}";"""
template_param_type_to_resource_id_type_case = """ case ParamType::T{enum}:
return ResourceIDType::{resource_id_type};"""
template_resource_id_type_name_case = """ case ResourceIDType::{resource_id_type}:
return "{resource_id_type}";"""
def script_relative(path):
return os.path.join(os.path.dirname(sys.argv[0]), path)
def format_entry_point_decl(cmd_name, proto, params, is_explicit_context):
comma_if_needed = ", " if len(params) > 0 else ""
return template_entry_point_decl.format(
name=cmd_name[2:],
return_type=proto[:-len(cmd_name)],
params=", ".join(params),
comma_if_needed=comma_if_needed,
explicit_context_suffix="ContextANGLE" if is_explicit_context else "",
explicit_context_param="GLeglContext ctx" if is_explicit_context else "",
explicit_context_comma=", " if is_explicit_context and len(params) > 0 else "")
def type_name_sep_index(param):
space = param.rfind(" ")
pointer = param.rfind("*")
return max(space, pointer)
def just_the_type(param):
if "*" in param:
return param[:type_name_sep_index(param) + 1]
return param[:type_name_sep_index(param)]
def just_the_name(param):
return param[type_name_sep_index(param) + 1:]
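# Illustrative examples (comment only): these helpers split a C parameter
# declaration at the last space or '*', e.g.
#   just_the_type("const GLchar *name") -> "const GLchar *"
#   just_the_name("const GLchar *name") -> "name"
#   just_the_type("GLenum mode")        -> "GLenum"
#   just_the_name("GLenum mode")        -> "mode"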
def make_param(param_type, param_name):
return param_type + " " + param_name
def just_the_type_packed(param, entry):
name = just_the_name(param)
if name in entry:
return entry[name]
else:
return just_the_type(param)
def just_the_name_packed(param, reserved_set):
name = just_the_name(param)
if name in reserved_set:
return name + 'Packed'
else:
return name
def param_print_argument(command_node, param):
name_only = just_the_name(param)
type_only = just_the_type(param)
if "*" in param:
return "(uintptr_t)" + name_only
if type_only in reinterpret_cast_to_dict:
return "(" + reinterpret_cast_to_dict[type_only] + ")" + name_only
if type_only in static_cast_to_dict:
return "static_cast<" + static_cast_to_dict[type_only] + ">(" + name_only + ")"
if type_only == "GLboolean":
return "GLbooleanToString(%s)" % (name_only,)
if type_only == "GLbitfield":
group_name = find_gl_enum_group_in_command(command_node, name_only)
return "GLbitfieldToString(GLenumGroup::%s, %s).c_str()" % (group_name, name_only)
if type_only == "GLenum":
group_name = find_gl_enum_group_in_command(command_node, name_only)
return "GLenumToString(GLenumGroup::%s, %s)" % (group_name, name_only)
return name_only
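# Illustrative (comment only): for logging, pointer parameters are printed as
# "(uintptr_t)name", GLboolean/GLbitfield/GLenum values go through the
# GL*ToString helpers, wide integer and handle types are cast per the
# static_cast_to_dict / reinterpret_cast_to_dict tables above, and everything
# else is passed through unchanged.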
def param_format_string(param):
if "*" in param:
return param + " = 0x%016\" PRIxPTR \""
else:
type_only = just_the_type(param)
if type_only not in format_dict:
raise Exception(type_only + " is not a known type in 'format_dict'")
return param + " = " + format_dict[type_only]
def default_return_value(cmd_name, return_type):
if return_type == "void":
return ""
return "GetDefaultReturnValue<EntryPoint::" + cmd_name[2:] + ", " + return_type + ">()"
def get_context_getter_function(cmd_name, is_explicit_context):
if is_explicit_context:
return "static_cast<gl::Context *>(ctx)"
lost_context_acceptable_cmds = [
"glGetError",
"glGetSync",
"glGetQueryObjecti",
"glGetProgramiv",
"glGetGraphicsResetStatus",
"glGetShaderiv",
]
for context_lost_entry_point in lost_context_acceptable_cmds:
if cmd_name.startswith(context_lost_entry_point):
return "GetGlobalContext()"
return "GetValidGlobalContext()"
def strip_suffix(name):
for suffix in strip_suffixes:
if name.endswith(suffix):
name = name[0:-len(suffix)]
return name
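# Illustrative (comment only): removes a trailing vendor suffix from a name,
# e.g. a hypothetical "glFooEXT" -> "glFoo"; names without a listed suffix are
# returned unchanged.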
def find_gl_enum_group_in_command(command_node, param_name):
group_name = None
for param_node in command_node.findall('./param'):
if param_node.find('./name').text == param_name:
group_name = param_node.attrib.get('group', None)
break
if group_name is None or group_name in registry_xml.unsupported_enum_group_names:
group_name = registry_xml.default_enum_group_name
return group_name
def get_packed_enums(cmd_packed_gl_enums, cmd_name):
# Always strip the suffix when querying packed enums.
return cmd_packed_gl_enums.get(strip_suffix(cmd_name), {})
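# Illustrative (comment only, assumed shape of entry_point_packed_gl_enums.json):
# cmd_packed_gl_enums maps a suffix-stripped command name to {param name: packed
# type}, e.g. {"glBindBuffer": {"target": "BufferBinding"}}; commands without an
# entry get an empty dict.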
def format_entry_point_def(command_node, cmd_name, proto, params, is_explicit_context,
cmd_packed_gl_enums):
packed_gl_enums = get_packed_enums(cmd_packed_gl_enums, cmd_name)
internal_params = [just_the_name_packed(param, packed_gl_enums) for param in params]
packed_gl_enum_conversions = []
for param in params:
name = just_the_name(param)
if name in packed_gl_enums:
internal_name = name + "Packed"
internal_type = packed_gl_enums[name]
packed_gl_enum_conversions += [
"\n " + internal_type + " " + internal_name + " = FromGL<" + internal_type +
">(" + name + ");"
]
pass_params = [param_print_argument(command_node, param) for param in params]
format_params = [param_format_string(param) for param in params]
return_type = proto[:-len(cmd_name)]
default_return = default_return_value(cmd_name, return_type.strip())
event_comment = template_event_comment if cmd_name in no_event_marker_exceptions_list else ""
name_lower_no_suffix = strip_suffix(cmd_name[2:3].lower() + cmd_name[3:])
format_params = {
"name":
cmd_name[2:],
"name_lower_no_suffix":
name_lower_no_suffix,
"return_type":
return_type,
"params":
", ".join(params),
"internal_params":
", ".join(internal_params),
"packed_gl_enum_conversions":
"".join(packed_gl_enum_conversions),
"pass_params":
", ".join(pass_params),
"comma_if_needed":
", " if len(params) > 0 else "",
"validate_params":
", ".join(["context"] + internal_params),
"format_params":
", ".join(format_params),
"context_getter":
get_context_getter_function(cmd_name, is_explicit_context),
"event_comment":
event_comment,
"explicit_context_suffix":
"ContextANGLE" if is_explicit_context else "",
"explicit_context_param":
"GLeglContext ctx" if is_explicit_context else "",
"explicit_context_comma":
", " if is_explicit_context and len(params) > 0 else "",
"assert_explicit_context":
"\nASSERT(context == GetValidGlobalContext());" if is_explicit_context else ""
}
if return_type.strip() == "void":
return template_entry_point_no_return.format(**format_params)
else:
return template_entry_point_with_return.format(**format_params)
def get_capture_param_type_name(param_type):
pointer_count = param_type.count("*")
is_const = "const" in param_type.split()
param_type = param_type.replace("*", "").strip()
param_type = " ".join([param for param in param_type.split() if param != "const"])
if is_const:
param_type += "Const"
for x in range(pointer_count):
param_type += "Pointer"
return param_type
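# Illustrative mappings (comment only):
#   "GLuint"               -> "GLuint"
#   "const GLchar *"       -> "GLcharConstPointer"
#   "const GLchar *const*" -> "GLcharConstPointerPointer"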
def format_capture_method(command, cmd_name, proto, params, all_param_types, capture_pointer_funcs,
cmd_packed_gl_enums):
packed_gl_enums = get_packed_enums(cmd_packed_gl_enums, cmd_name)
params_with_type = get_internal_params(
cmd_name, ["const State &glState", "bool isCallValid"] + params, cmd_packed_gl_enums)
params_just_name = ", ".join(
["glState", "isCallValid"] +
[just_the_name_packed(param, packed_gl_enums) for param in params])
parameter_captures = []
for param in params:
param_name = just_the_name_packed(param, packed_gl_enums)
param_type = just_the_type_packed(param, packed_gl_enums).strip()
pointer_count = param_type.count("*")
param_type = get_capture_param_type_name(param_type)
if pointer_count > 0:
params = params_just_name
capture_name = "Capture%s_%s" % (cmd_name[2:], param_name)
capture = template_parameter_capture_pointer.format(
name=param_name, type=param_type, capture_name=capture_name, params=params)
capture_pointer_func = template_parameter_capture_pointer_func.format(
name=capture_name, params=params_with_type + ", angle::ParamCapture *paramCapture")
capture_pointer_funcs += [capture_pointer_func]
elif param_type in ('GLenum', 'GLbitfield'):
gl_enum_group = find_gl_enum_group_in_command(command, param_name)
capture = template_parameter_capture_gl_enum.format(
name=param_name, type=param_type, group=gl_enum_group)
else:
capture = template_parameter_capture_value.format(name=param_name, type=param_type)
all_param_types.add(param_type)
parameter_captures += [capture]
return_type = proto[:-len(cmd_name)].strip()
format_args = {
"full_name": cmd_name,
"short_name": cmd_name[2:],
"params_with_type": params_with_type,
"params_just_name": params_just_name,
"parameter_captures": "\n ".join(parameter_captures),
"return_value_type_original": return_type,
"return_value_type_custom": get_capture_param_type_name(return_type)
}
if return_type == "void":
return template_capture_method_no_return_value.format(**format_args)
else:
return template_capture_method_with_return_value.format(**format_args)
def get_internal_params(cmd_name, params, cmd_packed_gl_enums):
packed_gl_enums = get_packed_enums(cmd_packed_gl_enums, cmd_name)
return ", ".join([
make_param(
just_the_type_packed(param, packed_gl_enums),
just_the_name_packed(param, packed_gl_enums)) for param in params
])
def format_context_decl(cmd_name, proto, params, template, cmd_packed_gl_enums):
internal_params = get_internal_params(cmd_name, params, cmd_packed_gl_enums)
return_type = proto[:-len(cmd_name)]
name_lower_no_suffix = cmd_name[2:3].lower() + cmd_name[3:]
name_lower_no_suffix = strip_suffix(name_lower_no_suffix)
return template.format(
return_type=return_type,
name_lower_no_suffix=name_lower_no_suffix,
internal_params=internal_params)
def format_libgles_entry_point_def(cmd_name, proto, params, is_explicit_context):
internal_params = [just_the_name(param) for param in params]
return_type = proto[:-len(cmd_name)]
return libgles_entry_point_def.format(
name=cmd_name[2:],
return_type=return_type,
params=", ".join(params),
internal_params=", ".join(internal_params),
explicit_context_suffix="ContextANGLE" if is_explicit_context else "",
explicit_context_param="GLeglContext ctx" if is_explicit_context else "",
explicit_context_comma=", " if is_explicit_context and len(params) > 0 else "",
explicit_context_internal_param="ctx" if is_explicit_context else "")
def format_validation_proto(cmd_name, params, cmd_packed_gl_enums):
internal_params = get_internal_params(cmd_name, ["Context *context"] + params,
cmd_packed_gl_enums)
return template_validation_proto % (cmd_name[2:], internal_params)
def format_capture_proto(cmd_name, proto, params, cmd_packed_gl_enums):
internal_params = get_internal_params(
cmd_name, ["const State &glState", "bool isCallValid"] + params, cmd_packed_gl_enums)
return_type = proto[:-len(cmd_name)].strip()
if return_type != "void":
internal_params += ", %s returnValue" % return_type
return template_capture_proto % (cmd_name[2:], internal_params)
def path_to(folder, file):
return os.path.join(script_relative(".."), "src", folder, file)
def get_entry_points(all_commands, commands, is_explicit_context, is_wgl, all_param_types,
cmd_packed_gl_enums):
decls = []
defs = []
export_defs = []
validation_protos = []
capture_protos = []
capture_methods = []
capture_pointer_funcs = []
for command in all_commands:
proto = command.find('proto')
cmd_name = proto.find('name').text
if is_wgl:
cmd_name = cmd_name if cmd_name[:3] == 'wgl' else 'wgl' + cmd_name
if cmd_name not in commands:
continue
param_text = ["".join(param.itertext()) for param in command.findall('param')]
proto_text = "".join(proto.itertext())
decls.append(
format_entry_point_decl(cmd_name, proto_text, param_text, is_explicit_context))
defs.append(
format_entry_point_def(command, cmd_name, proto_text, param_text, is_explicit_context,
cmd_packed_gl_enums))
export_defs.append(
format_libgles_entry_point_def(cmd_name, proto_text, param_text, is_explicit_context))
validation_protos.append(
format_validation_proto(cmd_name, param_text, cmd_packed_gl_enums))
capture_protos.append(
format_capture_proto(cmd_name, proto_text, param_text, cmd_packed_gl_enums))
capture_methods.append(
format_capture_method(command, cmd_name, proto_text, param_text, all_param_types,
capture_pointer_funcs, cmd_packed_gl_enums))
return decls, defs, export_defs, validation_protos, capture_protos, capture_methods, capture_pointer_funcs
def get_decls(formatter, all_commands, gles_commands, already_included, cmd_packed_gl_enums):
decls = []
for command in all_commands:
proto = command.find('proto')
cmd_name = proto.find('name').text
if cmd_name not in gles_commands:
continue
name_no_suffix = strip_suffix(cmd_name)
if name_no_suffix in already_included:
continue
param_text = ["".join(param.itertext()) for param in command.findall('param')]
proto_text = "".join(proto.itertext())
decls.append(
format_context_decl(cmd_name, proto_text, param_text, formatter, cmd_packed_gl_enums))
return decls
def get_glext_decls(all_commands, gles_commands, version, is_explicit_context):
glext_ptrs = []
glext_protos = []
is_gles1 = False
if (version == ""):
is_gles1 = True
for command in all_commands:
proto = command.find('proto')
cmd_name = proto.find('name').text
if cmd_name not in gles_commands:
continue
param_text = ["".join(param.itertext()) for param in command.findall('param')]
proto_text = "".join(proto.itertext())
return_type = proto_text[:-len(cmd_name)]
params = ", ".join(param_text)
format_params = {
"apicall": "GL_API" if is_gles1 else "GL_APICALL",
"name": cmd_name,
"name_upper": cmd_name.upper(),
"return_type": return_type,
"params": params,
"explicit_context_comma": ", " if is_explicit_context and len(params) > 0 else "",
"explicit_context_suffix": "ContextANGLE" if is_explicit_context else "",
"explicit_context_suffix_upper": "CONTEXTANGLE" if is_explicit_context else "",
"explicit_context_param": "GLeglContext ctx" if is_explicit_context else ""
}
glext_ptrs.append(template_glext_function_pointer.format(**format_params))
glext_protos.append(template_glext_function_prototype.format(**format_params))
return glext_ptrs, glext_protos
def write_file(annotation, comment, template, entry_points, suffix, includes, lib, file):
content = template.format(
script_name=os.path.basename(sys.argv[0]),
data_source_name=file,
year=date.today().year,
annotation_lower=annotation.lower(),
annotation_upper=annotation.upper(),
comment=comment,
lib=lib.upper(),
includes=includes,
entry_points=entry_points)
path = path_to(lib, "entry_points_{}_autogen.{}".format(annotation.lower(), suffix))
with open(path, "w") as out:
out.write(content)
out.close()
def write_export_files(entry_points, includes, source, lib_name, lib_description):
content = template_lib_entry_point_source.format(
script_name=os.path.basename(sys.argv[0]),
data_source_name=source,
year=date.today().year,
lib_name=lib_name,
lib_description=lib_description,
includes=includes,
entry_points=entry_points)
path = path_to(lib_name, "{}_autogen.cpp".format(lib_name))
with open(path, "w") as out:
out.write(content)
out.close()
def write_context_api_decls(template, decls, api):
for ver in decls['core'].keys():
interface_lines = []
for i in decls['core'][ver]:
interface_lines.append(i)
annotation = '{}_{}_{}'.format(api, ver[0], ver[1])
version = '{}_{}'.format(ver[0], ver[1])
content = template.format(
annotation_lower=annotation.lower(),
annotation_upper=annotation.upper(),
script_name=os.path.basename(sys.argv[0]),
data_source_name="gl.xml",
year=date.today().year,
version=version,
interface="\n".join(interface_lines))
path = path_to("libANGLE", "Context_%s_autogen.h" % annotation.lower())
with open(path, "w") as out:
out.write(content)
out.close()
if 'exts' in decls.keys():
interface_lines = []
for annotation in decls['exts'].keys():
interface_lines.append("\\\n /* " + annotation + " */ \\\n\\")
for extname in sorted(decls['exts'][annotation].keys()):
interface_lines.append(" /* " + extname + " */ \\")
interface_lines.extend(decls['exts'][annotation][extname])
content = template.format(
annotation_lower='gles_ext',
annotation_upper='GLES_EXT',
script_name=os.path.basename(sys.argv[0]),
data_source_name="gl.xml",
year=date.today().year,
version='EXT',
interface="\n".join(interface_lines))
path = path_to("libANGLE", "Context_gles_ext_autogen.h")
with open(path, "w") as out:
out.write(content)
out.close()
def write_glext_explicit_context_inc(version, ptrs, protos):
possible_versions = ["31", "32"]
folder_version = version if version not in possible_versions else "3"
content = template_glext_explicit_context_inc.format(
script_name=os.path.basename(sys.argv[0]),
data_source_name="gl.xml and gl_angle_ext.xml",
year=date.today().year,
version=version,
function_pointers=ptrs,
function_prototypes=protos)
path = os.path.join(
script_relative(".."), "include", "GLES{}".format(folder_version),
"gl{}ext_explicit_context_autogen.inc".format(version))
with open(path, "w") as out:
out.write(content)
out.close()
def write_validation_header(annotation, comment, protos, source):
content = template_validation_header.format(
script_name=os.path.basename(sys.argv[0]),
data_source_name=source,
year=date.today().year,
annotation=annotation,
comment=comment,
prototypes="\n".join(protos))
path = path_to("libANGLE", "validation%s_autogen.h" % annotation)
with open(path, "w") as out:
out.write(content)
out.close()
def write_capture_header(annotation, comment, protos, capture_pointer_funcs):
content = template_capture_header.format(
script_name=os.path.basename(sys.argv[0]),
data_source_name="gl.xml and gl_angle_ext.xml",
year=date.today().year,
annotation=annotation,
comment=comment,
prototypes="\n".join(["\n// Method Captures\n"] + protos + ["\n// Parameter Captures\n"] +
capture_pointer_funcs))
path = path_to("libANGLE", "capture_gles_%s_autogen.h" % annotation)
with open(path, "w") as out:
out.write(content)
out.close()
def write_capture_source(annotation_with_dash, annotation_no_dash, comment, capture_methods):
content = template_capture_source.format(
script_name=os.path.basename(sys.argv[0]),
data_source_name="gl.xml and gl_angle_ext.xml",
year=date.today().year,
annotation_with_dash=annotation_with_dash,
annotation_no_dash=annotation_no_dash,
comment=comment,
capture_methods="\n".join(capture_methods))
path = path_to("libANGLE", "capture_gles_%s_autogen.cpp" % annotation_with_dash)
with open(path, "w") as out:
out.write(content)
out.close()
def is_packed_enum_param_type(param_type):
return param_type[0:2] != "GL" and "void" not in param_type
def get_gl_pointer_type(param_type):
if "ConstPointerPointer" in param_type:
return "const " + param_type.replace("ConstPointerPointer", "") + " * const *"
if "ConstPointer" in param_type:
return "const " + param_type.replace("ConstPointer", "") + " *"
if "PointerPointer" in param_type:
return param_type.replace("PointerPointer", "") + " **"
if "Pointer" in param_type:
return param_type.replace("Pointer", "") + " *"
return param_type
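# Illustrative inverse mapping for the pointer suffixes (comment only):
#   "GLcharConstPointer"        -> "const GLchar *"
#   "GLcharConstPointerPointer" -> "const GLchar * const *"
#   "GLuint"                    -> "GLuint"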
def get_param_type_type(param_type):
if is_packed_enum_param_type(param_type):
param_type = "gl::" + param_type
return get_gl_pointer_type(param_type)
def get_gl_param_type_type(param_type):
if not is_packed_enum_param_type(param_type):
return get_gl_pointer_type(param_type)
else:
base_type = param_type.replace("Pointer", "").replace("Const", "")
if base_type[-2:] == "ID":
replace_type = "GLuint"
else:
replace_type = "GLenum"
param_type = param_type.replace(base_type, replace_type)
return get_gl_pointer_type(param_type)
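# Illustrative (comment only): packed enum types are mapped back to a raw GL
# type for GL-facing signatures, e.g. a hypothetical "BufferID" -> "GLuint" and
# "TextureType" -> "GLenum"; plain GL types only get pointer expansion.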
def get_param_type_union_name(param_type):
return param_type + "Val"
def format_param_type_union_type(param_type):
return "%s %s;" % (get_param_type_type(param_type), get_param_type_union_name(param_type))
def format_get_param_val_specialization(param_type):
return template_get_param_val_specialization.format(
enum=param_type,
type=get_param_type_type(param_type),
union_name=get_param_type_union_name(param_type))
def format_access_param_value_case(param_type):
return template_access_param_value_case.format(enum=param_type)
def format_set_param_val_specialization(param_type):
return template_set_param_val_specialization.format(
enum=param_type,
type=get_param_type_type(param_type),
union_name=get_param_type_union_name(param_type))
def format_init_param_value_case(param_type):
return template_init_param_value_case.format(enum=param_type)
def format_write_param_type_to_stream_case(param_type):
return template_write_param_type_to_stream_case.format(
enum=param_type, union_name=get_param_type_union_name(param_type))
def get_resource_id_types(all_param_types):
return [t[:-2] for t in filter(lambda t: t.endswith("ID"), all_param_types)]
def format_resource_id_types(all_param_types):
resource_id_types = get_resource_id_types(all_param_types)
resource_id_types += ["EnumCount", "InvalidEnum = EnumCount"]
resource_id_types = ",\n ".join(resource_id_types)
return resource_id_types
def write_capture_helper_header(all_param_types):
param_types = "\n ".join(["T%s," % t for t in all_param_types])
param_union_values = "\n ".join([format_param_type_union_type(t) for t in all_param_types])
get_param_val_specializations = "\n\n".join(
[format_get_param_val_specialization(t) for t in all_param_types])
access_param_value_cases = "\n".join(
[format_access_param_value_case(t) for t in all_param_types])
set_param_val_specializations = "\n\n".join(
[format_set_param_val_specialization(t) for t in all_param_types])
init_param_value_cases = "\n".join([format_init_param_value_case(t) for t in all_param_types])
resource_id_types = format_resource_id_types(all_param_types)
content = template_frame_capture_utils_header.format(
script_name=os.path.basename(sys.argv[0]),
data_source_name="gl.xml and gl_angle_ext.xml",
year=date.today().year,
param_types=param_types,
param_type_count=len(all_param_types),
param_union_values=param_union_values,
get_param_val_specializations=get_param_val_specializations,
access_param_value_cases=access_param_value_cases,
set_param_val_specializations=set_param_val_specializations,
init_param_value_cases=init_param_value_cases,
resource_id_types=resource_id_types)
path = path_to("libANGLE", "frame_capture_utils_autogen.h")
with open(path, "w") as out:
out.write(content)
out.close()
def format_param_type_to_string_case(param_type):
return template_param_type_to_string_case.format(
enum=param_type, type=get_gl_param_type_type(param_type))
def get_resource_id_type_from_param_type(param_type):
if param_type.endswith("ConstPointer"):
return param_type.replace("ConstPointer", "")[:-2]
if param_type.endswith("Pointer"):
return param_type.replace("Pointer", "")[:-2]
return param_type[:-2]
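# Illustrative (comment only): "BufferID" -> "Buffer",
# "BufferIDConstPointer" -> "Buffer", "TextureIDPointer" -> "Texture".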
def format_param_type_to_resource_id_type_case(param_type):
return template_param_type_to_resource_id_type_case.format(
enum=param_type, resource_id_type=get_resource_id_type_from_param_type(param_type))
def format_param_type_resource_id_cases(all_param_types):
id_types = filter(
lambda t: t.endswith("ID") or t.endswith("IDConstPointer") or t.endswith("IDPointer"),
all_param_types)
return "\n".join([format_param_type_to_resource_id_type_case(t) for t in id_types])
def format_resource_id_type_name_case(resource_id_type):
return template_resource_id_type_name_case.format(resource_id_type=resource_id_type)
def write_capture_helper_source(all_param_types):
write_param_type_to_stream_cases = "\n".join(
[format_write_param_type_to_stream_case(t) for t in all_param_types])
param_type_to_string_cases = "\n".join(
[format_param_type_to_string_case(t) for t in all_param_types])
param_type_resource_id_cases = format_param_type_resource_id_cases(all_param_types)
resource_id_types = get_resource_id_types(all_param_types)
resource_id_type_name_cases = "\n".join(
[format_resource_id_type_name_case(t) for t in resource_id_types])
content = template_frame_capture_utils_source.format(
script_name=os.path.basename(sys.argv[0]),
data_source_name="gl.xml and gl_angle_ext.xml",
year=date.today().year,
write_param_type_to_stream_cases=write_param_type_to_stream_cases,
param_type_to_string_cases=param_type_to_string_cases,
param_type_resource_id_cases=param_type_resource_id_cases,
resource_id_type_name_cases=resource_id_type_name_cases)
path = path_to("libANGLE", "frame_capture_utils_autogen.cpp")
with open(path, "w") as out:
out.write(content)
out.close()
def get_command_params_text(command_node, cmd_name):
param_text_list = list()
for param_node in command_node.findall('param'):
param_text_list.append("".join(param_node.itertext()))
return param_text_list
def is_get_pointer_command(command_name):
return command_name.endswith('Pointerv') and command_name.startswith('glGet')
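# Illustrative (comment only): True for names like "glGetPointerv" or
# "glGetVertexAttribPointerv"; False for everything else.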
def format_capture_replay_param_access(command_name, param_text_list, cmd_packed_gl_enums):
param_access_strs = list()
cmd_packed_enums = get_packed_enums(cmd_packed_gl_enums, command_name)
for i, param_text in enumerate(param_text_list):
param_type = just_the_type_packed(param_text, cmd_packed_enums)
param_name = just_the_name_packed(param_text, cmd_packed_enums)
pointer_count = param_type.count('*')
is_const = 'const' in param_type
if pointer_count == 0:
param_template = 'params.getParam("{name}", ParamType::T{enum_type}, {index}).value.{enum_type}Val'
elif pointer_count == 1 and is_const:
param_template = 'replayContext->getAsConstPointer<{type}>(params.getParam("{name}", ParamType::T{enum_type}, {index}))'
elif pointer_count == 2 and is_const:
param_template = 'replayContext->getAsPointerConstPointer<{type}>(params.getParam("{name}", ParamType::T{enum_type}, {index}))'
elif pointer_count == 1 or (pointer_count == 2 and is_get_pointer_command(command_name)):
param_template = 'replayContext->getReadBufferPointer<{type}>(params.getParam("{name}", ParamType::T{enum_type}, {index}))'
else:
assert False, "Not supported param type %s" % param_type
param_access_strs.append(
param_template.format(
index=i,
name=param_name,
type=param_type,
enum_type=get_capture_param_type_name(param_type)))
return ",".join(param_access_strs)
def format_capture_replay_call_case(command_to_param_types_mapping, cmd_packed_gl_enums):
call_str_list = list()
for command_name, cmd_param_texts in sorted(command_to_param_types_mapping.items()):
entry_point_name = command_name[2:] # strip the 'gl' prefix
call_str_list.append(
template_capture_replay_call_case.format(
entry_point=entry_point_name,
param_value_access=format_capture_replay_param_access(
command_name, cmd_param_texts, cmd_packed_gl_enums),
context_call=entry_point_name[0].lower() + entry_point_name[1:],
))
return '\n'.join(call_str_list)
def write_capture_replay_source(all_commands_nodes, gles_command_names, cmd_packed_gl_enums):
all_commands_names = set(gles_command_names)
command_to_param_types_mapping = dict()
for command_node in all_commands_nodes:
command_name = command_node.find('proto').find('name').text
if command_name not in all_commands_names:
continue
command_to_param_types_mapping[command_name] = get_command_params_text(
command_node, command_name)
call_replay_cases = format_capture_replay_call_case(command_to_param_types_mapping,
cmd_packed_gl_enums)
source_content = template_capture_replay_source.format(
script_name=os.path.basename(sys.argv[0]),
data_source_name="gl.xml and gl_angle_ext.xml",
year=date.today().year,
call_replay_cases=call_replay_cases,
)
source_file_path = registry_xml.script_relative(
"../src/libANGLE/frame_capture_replay_autogen.cpp")
with open(source_file_path, 'w') as f:
f.write(source_content)
def write_windows_def_file(data_source_name, lib, libexport, folder, exports):
content = template_windows_def_file.format(
script_name=os.path.basename(sys.argv[0]),
data_source_name=data_source_name,
exports="\n".join(exports),
year=date.today().year,
lib=libexport)
path = path_to(folder, "%s_autogen.def" % lib)
with open(path, "w") as out:
out.write(content)
out.close()
def get_exports(commands, fmt=None):
if fmt:
return [" %s" % fmt(cmd) for cmd in sorted(commands)]
else:
return [" %s" % cmd for cmd in sorted(commands)]
# Get EGL exports
def get_egl_exports():
egl = registry_xml.RegistryXML('egl.xml', 'egl_angle_ext.xml')
exports = []
capser = lambda fn: "EGL_" + fn[3:]
for major, minor in [[1, 0], [1, 1], [1, 2], [1, 3], [1, 4], [1, 5]]:
annotation = "{}_{}".format(major, minor)
name_prefix = "EGL_VERSION_"
feature_name = "{}{}".format(name_prefix, annotation)
egl.AddCommands(feature_name, annotation)
commands = egl.commands[annotation]
if len(commands) == 0:
continue
exports.append("\n ; EGL %d.%d" % (major, minor))
exports += get_exports(commands, capser)
egl.AddExtensionCommands(registry_xml.supported_egl_extensions, ['egl'])
for extension_name, ext_cmd_names in sorted(egl.ext_data.iteritems()):
if len(ext_cmd_names) == 0:
continue
exports.append("\n ; %s" % extension_name)
exports += get_exports(ext_cmd_names, capser)
return exports
def main():
# auto_script parameters.
if len(sys.argv) > 1:
inputs = ['entry_point_packed_gl_enums.json'] + registry_xml.xml_inputs
outputs = [
'../src/libANGLE/Context_gl_1_0_autogen.h',
'../src/libANGLE/Context_gl_1_1_autogen.h',
'../src/libANGLE/Context_gl_1_2_autogen.h',
'../src/libANGLE/Context_gl_1_3_autogen.h',
'../src/libANGLE/Context_gl_1_4_autogen.h',
'../src/libANGLE/Context_gl_1_5_autogen.h',
'../src/libANGLE/Context_gl_2_0_autogen.h',
'../src/libANGLE/Context_gl_2_1_autogen.h',
'../src/libANGLE/Context_gl_3_0_autogen.h',
'../src/libANGLE/Context_gl_3_1_autogen.h',
'../src/libANGLE/Context_gl_3_2_autogen.h',
'../src/libANGLE/Context_gl_3_3_autogen.h',
'../src/libANGLE/Context_gl_4_0_autogen.h',
'../src/libANGLE/Context_gl_4_1_autogen.h',
'../src/libANGLE/Context_gl_4_2_autogen.h',
'../src/libANGLE/Context_gl_4_3_autogen.h',
'../src/libANGLE/Context_gl_4_4_autogen.h',
'../src/libANGLE/Context_gl_4_5_autogen.h',
'../src/libANGLE/Context_gl_4_6_autogen.h',
'../src/libANGLE/Context_gles_1_0_autogen.h',
'../src/libANGLE/Context_gles_2_0_autogen.h',
'../src/libANGLE/Context_gles_3_0_autogen.h',
'../src/libANGLE/Context_gles_3_1_autogen.h',
'../src/libANGLE/Context_gles_3_2_autogen.h',
'../src/libANGLE/Context_gles_ext_autogen.h',
'../src/libANGLE/capture_gles_1_0_autogen.cpp',
'../src/libANGLE/capture_gles_1_0_autogen.h',
'../src/libANGLE/capture_gles_2_0_autogen.cpp',
'../src/libANGLE/capture_gles_2_0_autogen.h',
'../src/libANGLE/capture_gles_3_0_autogen.cpp',
'../src/libANGLE/capture_gles_3_0_autogen.h',
'../src/libANGLE/capture_gles_3_1_autogen.cpp',
'../src/libANGLE/capture_gles_3_1_autogen.h',
'../src/libANGLE/capture_gles_3_2_autogen.cpp',
'../src/libANGLE/capture_gles_3_2_autogen.h',
'../src/libANGLE/capture_gles_ext_autogen.cpp',
'../src/libANGLE/capture_gles_ext_autogen.h',
'../src/libANGLE/frame_capture_replay_autogen.cpp',
'../src/libANGLE/frame_capture_utils_autogen.cpp',
'../src/libANGLE/frame_capture_utils_autogen.h',
'../src/libANGLE/entry_points_enum_autogen.cpp',
'../src/libANGLE/entry_points_enum_autogen.h',
'../src/libANGLE/validationES1_autogen.h',
'../src/libANGLE/validationES2_autogen.h',
'../src/libANGLE/validationES31_autogen.h',
'../src/libANGLE/validationES32_autogen.h',
'../src/libANGLE/validationES3_autogen.h',
'../src/libANGLE/validationESEXT_autogen.h',
'../src/libANGLE/validationGL1_autogen.h',
'../src/libANGLE/validationGL2_autogen.h',
'../src/libANGLE/validationGL3_autogen.h',
'../src/libANGLE/validationGL4_autogen.h',
'../src/libANGLE/validationGL11_autogen.h',
'../src/libANGLE/validationGL12_autogen.h',
'../src/libANGLE/validationGL13_autogen.h',
'../src/libANGLE/validationGL14_autogen.h',
'../src/libANGLE/validationGL15_autogen.h',
'../src/libANGLE/validationGL21_autogen.h',
'../src/libANGLE/validationGL31_autogen.h',
'../src/libANGLE/validationGL32_autogen.h',
'../src/libANGLE/validationGL33_autogen.h',
'../src/libANGLE/validationGL41_autogen.h',
'../src/libANGLE/validationGL42_autogen.h',
'../src/libANGLE/validationGL43_autogen.h',
'../src/libANGLE/validationGL44_autogen.h',
'../src/libANGLE/validationGL45_autogen.h',
'../src/libANGLE/validationGL46_autogen.h',
'../src/libGLESv2/entry_points_gles_1_0_autogen.cpp',
'../src/libGLESv2/entry_points_gles_1_0_autogen.h',
'../src/libGLESv2/entry_points_gles_2_0_autogen.cpp',
'../src/libGLESv2/entry_points_gles_2_0_autogen.h',
'../src/libGLESv2/entry_points_gles_3_0_autogen.cpp',
'../src/libGLESv2/entry_points_gles_3_0_autogen.h',
'../src/libGLESv2/entry_points_gles_3_1_autogen.cpp',
'../src/libGLESv2/entry_points_gles_3_1_autogen.h',
'../src/libGLESv2/entry_points_gles_3_2_autogen.cpp',
'../src/libGLESv2/entry_points_gles_3_2_autogen.h',
'../src/libGLESv2/entry_points_gles_ext_autogen.cpp',
'../src/libGLESv2/entry_points_gles_ext_autogen.h',
'../src/libGLESv2/libGLESv2_autogen.cpp',
'../src/libGLESv2/libGLESv2_autogen.def',
'../src/libGLESv2/libGLESv2_no_capture_autogen.def',
'../src/libGLESv2/libGLESv2_with_capture_autogen.def',
'../src/libGL/entry_points_gl_1_0_autogen.cpp',
'../src/libGL/entry_points_gl_1_0_autogen.h',
'../src/libGL/entry_points_gl_1_1_autogen.cpp',
'../src/libGL/entry_points_gl_1_1_autogen.h',
'../src/libGL/entry_points_gl_1_2_autogen.cpp',
'../src/libGL/entry_points_gl_1_2_autogen.h',
'../src/libGL/entry_points_gl_1_3_autogen.cpp',
'../src/libGL/entry_points_gl_1_3_autogen.h',
'../src/libGL/entry_points_gl_1_4_autogen.cpp',
'../src/libGL/entry_points_gl_1_4_autogen.h',
'../src/libGL/entry_points_gl_1_5_autogen.cpp',
'../src/libGL/entry_points_gl_1_5_autogen.h',
'../src/libGL/entry_points_gl_2_0_autogen.cpp',
'../src/libGL/entry_points_gl_2_0_autogen.h',
'../src/libGL/entry_points_gl_2_1_autogen.cpp',
'../src/libGL/entry_points_gl_2_1_autogen.h',
'../src/libGL/entry_points_gl_3_0_autogen.cpp',
'../src/libGL/entry_points_gl_3_0_autogen.h',
'../src/libGL/entry_points_gl_3_1_autogen.cpp',
'../src/libGL/entry_points_gl_3_1_autogen.h',
'../src/libGL/entry_points_gl_3_2_autogen.cpp',
'../src/libGL/entry_points_gl_3_2_autogen.h',
'../src/libGL/entry_points_gl_3_3_autogen.cpp',
'../src/libGL/entry_points_gl_3_3_autogen.h',
'../src/libGL/entry_points_gl_4_0_autogen.cpp',
'../src/libGL/entry_points_gl_4_0_autogen.h',
'../src/libGL/entry_points_gl_4_1_autogen.cpp',
'../src/libGL/entry_points_gl_4_1_autogen.h',
'../src/libGL/entry_points_gl_4_2_autogen.cpp',
'../src/libGL/entry_points_gl_4_2_autogen.h',
'../src/libGL/entry_points_gl_4_3_autogen.cpp',
'../src/libGL/entry_points_gl_4_3_autogen.h',
'../src/libGL/entry_points_gl_4_4_autogen.cpp',
'../src/libGL/entry_points_gl_4_4_autogen.h',
'../src/libGL/entry_points_gl_4_5_autogen.cpp',
'../src/libGL/entry_points_gl_4_5_autogen.h',
'../src/libGL/entry_points_gl_4_6_autogen.cpp',
'../src/libGL/entry_points_gl_4_6_autogen.h',
'../src/libGL/libGL_autogen.cpp',
'../src/libGL/libGL_autogen.def',
]
if sys.argv[1] == 'inputs':
print ','.join(inputs)
elif sys.argv[1] == 'outputs':
print ','.join(outputs)
else:
print('Invalid script parameters')
return 1
return 0
with open(script_relative('entry_point_packed_gl_enums.json')) as f:
cmd_packed_gl_enums = json.loads(f.read())
glesdecls = {}
glesdecls['core'] = {}
glesdecls['exts'] = {}
for ver in [(1, 0), (2, 0), (3, 0), (3, 1), (3, 2)]:
glesdecls['core'][ver] = []
for ver in ['GLES1 Extensions', 'GLES2+ Extensions', 'ANGLE Extensions']:
glesdecls['exts'][ver] = {}
libgles_ep_defs = []
libgles_ep_exports = []
xml = registry_xml.RegistryXML('gl.xml', 'gl_angle_ext.xml')
# Stores core commands to keep track of duplicates
all_commands_no_suffix = []
all_commands_with_suffix = []
all_gles_param_types = set()
# First run through the main GLES entry points. Since ES2+ is the primary use
# case, we go through those first and then add ES1-only APIs at the end.
versions = [[2, 0], [3, 0], [3, 1], [3, 2], [1, 0]]
for major_version, minor_version in versions:
version = "{}_{}".format(major_version, minor_version)
annotation = "GLES_{}".format(version)
name_prefix = "GL_ES_VERSION_"
if major_version == 1:
name_prefix = "GL_VERSION_ES_CM_"
comment = version.replace("_", ".")
feature_name = "{}{}".format(name_prefix, version)
xml.AddCommands(feature_name, version)
gles_commands = xml.commands[version]
all_commands = xml.all_commands
all_commands_no_suffix.extend(xml.commands[version])
all_commands_with_suffix.extend(xml.commands[version])
decls, defs, libgles_defs, validation_protos, capture_protos, capture_methods, capture_pointer_funcs = get_entry_points(
all_commands, gles_commands, False, False, all_gles_param_types, cmd_packed_gl_enums)
# Write the version as a comment before the first EP.
libgles_defs.insert(0, "\n// OpenGL ES %s" % comment)
libgles_ep_exports.append("\n ; OpenGL ES %s" % comment)
libgles_ep_defs += libgles_defs
libgles_ep_exports += get_exports(gles_commands)
major_if_not_one = major_version if major_version != 1 else ""
minor_if_not_zero = minor_version if minor_version != 0 else ""
header_includes = template_header_includes.format(
major=major_if_not_one, minor=minor_if_not_zero)
# We include the platform.h header since it undefines the conflicting MemoryBarrier macro.
if major_version == 3 and minor_version == 1:
header_includes += "\n#include \"common/platform.h\"\n"
version_annotation = "%s%s" % (major_version, minor_if_not_zero)
source_includes = template_sources_includes.format(
header_version=annotation.lower(), validation_header_version="ES" + version_annotation)
write_file(annotation, "GLES " + comment, template_entry_point_header, "\n".join(decls),
"h", header_includes, "libGLESv2", "gl.xml")
write_file(annotation, "GLES " + comment, template_entry_point_source, "\n".join(defs),
"cpp", source_includes, "libGLESv2", "gl.xml")
glesdecls['core'][(major_version, minor_version)] = get_decls(
context_decl_format, all_commands, gles_commands, [], cmd_packed_gl_enums)
validation_annotation = "ES%s%s" % (major_version, minor_if_not_zero)
write_validation_header(validation_annotation, "ES %s" % comment, validation_protos,
"gl.xml and gl_angle_ext.xml")
write_capture_header(version, comment, capture_protos, capture_pointer_funcs)
write_capture_source(version, validation_annotation, comment, capture_methods)
# After we finish with the main entry points, we process the extensions.
extension_defs = []
extension_decls = []
extension_commands = []
# Accumulated validation prototypes.
ext_validation_protos = []
ext_capture_protos = []
ext_capture_methods = []
ext_capture_param_funcs = []
for gles1ext in registry_xml.gles1_extensions:
glesdecls['exts']['GLES1 Extensions'][gles1ext] = []
for glesext in registry_xml.gles_extensions:
glesdecls['exts']['GLES2+ Extensions'][glesext] = []
for angle_ext in registry_xml.angle_extensions:
glesdecls['exts']['ANGLE Extensions'][angle_ext] = []
xml.AddExtensionCommands(registry_xml.supported_extensions, ['gles2', 'gles1'])
for extension_name, ext_cmd_names in sorted(xml.ext_data.iteritems()):
extension_commands.extend(xml.ext_data[extension_name])
# Detect and filter duplicate extensions.
decls, defs, libgles_defs, validation_protos, capture_protos, capture_methods, capture_param_funcs = get_entry_points(
xml.all_commands, ext_cmd_names, False, False, all_gles_param_types,
cmd_packed_gl_enums)
# Avoid writing out entry points defined by a prior extension.
for dupe in xml.ext_dupes[extension_name]:
msg = "// {} is already defined.\n".format(dupe[2:])
defs.append(msg)
# Write the extension name as a comment before the first EP.
comment = "\n// {}".format(extension_name)
defs.insert(0, comment)
decls.insert(0, comment)
libgles_defs.insert(0, comment)
libgles_ep_exports.append("\n ; %s" % extension_name)
extension_defs += defs
extension_decls += decls
ext_validation_protos += [comment] + validation_protos
ext_capture_protos += [comment] + capture_protos
ext_capture_methods += capture_methods
ext_capture_param_funcs += capture_param_funcs
libgles_ep_defs += libgles_defs
libgles_ep_exports += get_exports(ext_cmd_names)
if (extension_name in registry_xml.gles1_extensions and
extension_name not in gles1_no_context_decl_extensions):
glesdecls['exts']['GLES1 Extensions'][extension_name] = get_decls(
context_decl_format, all_commands, ext_cmd_names, all_commands_no_suffix,
cmd_packed_gl_enums)
if extension_name in registry_xml.gles_extensions:
glesdecls['exts']['GLES2+ Extensions'][extension_name] = get_decls(
context_decl_format, all_commands, ext_cmd_names, all_commands_no_suffix,
cmd_packed_gl_enums)
if extension_name in registry_xml.angle_extensions:
glesdecls['exts']['ANGLE Extensions'][extension_name] = get_decls(
context_decl_format, all_commands, ext_cmd_names, all_commands_no_suffix,
cmd_packed_gl_enums)
for name in extension_commands:
all_commands_with_suffix.append(name)
all_commands_no_suffix.append(strip_suffix(name))
# Special handling for EGL_ANGLE_explicit_context extension
if registry_xml.support_EGL_ANGLE_explicit_context:
comment = "\n// EGL_ANGLE_explicit_context"
extension_defs.append(comment)
extension_decls.append(comment)
libgles_ep_defs.append(comment)
cmds = xml.all_cmd_names.get_all_commands()
# Get the explicit context entry points
decls, defs, libgles_defs, validation_protos, capture_protos, capture_methods, capture_param_funcs = get_entry_points(
xml.all_commands, cmds, True, False, all_gles_param_types, cmd_packed_gl_enums)
# Append the explicit context entry points
extension_decls += decls
extension_defs += defs
libgles_ep_defs += libgles_defs
libgles_ep_exports.append("\n ; EGL_ANGLE_explicit_context")
libgles_ep_exports += get_exports(cmds, lambda x: "%sContextANGLE" % x)
# Generate .inc files for extension function pointers and declarations
for major, minor in versions:
annotation = "{}_{}".format(major, minor)
major_if_not_one = major if major != 1 else ""
minor_if_not_zero = minor if minor != 0 else ""
version = "{}{}".format(major_if_not_one, minor_if_not_zero)
glext_ptrs, glext_protos = get_glext_decls(all_commands,
xml.all_cmd_names.get_commands(annotation),
version, True)
glext_ext_ptrs = []
glext_ext_protos = []
# Append extensions for 1.0 and 2.0
if (annotation == "1_0"):
glext_ext_ptrs, glext_ext_protos = get_glext_decls(
all_commands, xml.all_cmd_names.get_commands("glext"), version, True)
elif (annotation == "2_0"):
glext_ext_ptrs, glext_ext_protos = get_glext_decls(
all_commands, xml.all_cmd_names.get_commands("gl2ext"), version, True)
glext_ptrs += glext_ext_ptrs
glext_protos += glext_ext_protos
write_glext_explicit_context_inc(version, "\n".join(glext_ptrs),
"\n".join(glext_protos))
# Now we generate entry points for the desktop implementation
gldecls = {}
gldecls['core'] = {}
for ver in [(1, 0), (1, 1), (1, 2), (1, 3), (1, 4), (1, 5), (2, 0), (2, 1), (3, 0), (3, 1),
(3, 2), (3, 3), (4, 0), (4, 1), (4, 2), (4, 3), (4, 4), (4, 5), (4, 6)]:
gldecls['core'][ver] = []
libgl_ep_defs = []
libgl_ep_exports = []
glxml = registry_xml.RegistryXML('gl.xml')
for major_version, minor_version in [[1, 0], [1, 1], [1, 2], [1, 3], [1, 4], [1, 5], [2, 0],
[2, 1], [3, 0], [3, 1], [3, 2], [3, 3], [4, 0], [4, 1],
[4, 2], [4, 3], [4, 4], [4, 5], [4, 6]]:
version = "{}_{}".format(major_version, minor_version)
annotation = "GL_{}".format(version)
name_prefix = "GL_VERSION_"
comment = version.replace("_", ".")
feature_name = "{}{}".format(name_prefix, version)
glxml.AddCommands(feature_name, version)
all_libgl_commands = glxml.commands[version]
just_libgl_commands = [
cmd for cmd in all_libgl_commands if cmd not in all_commands_no_suffix
]
just_libgl_commands_suffix = [
cmd for cmd in all_libgl_commands if cmd not in all_commands_with_suffix
]
all_commands32 = glxml.all_commands
# Validation duplicates handled with suffix
_, _, _, validation_protos32, _, _, _ = get_entry_points(
all_commands32, just_libgl_commands_suffix, False, False, all_gles_param_types,
cmd_packed_gl_enums)
decls_gl, defs_gl, libgl_defs, _, _, _, _ = get_entry_points(
all_commands32, all_libgl_commands, False, False, all_gles_param_types,
cmd_packed_gl_enums)
# Write the version as a comment before the first EP.
libgl_defs.insert(0, "\n// GL %s" % comment)
libgl_ep_exports.append("\n ; GL %s" % comment)
libgl_ep_defs += libgl_defs
libgl_ep_exports += get_exports(all_libgl_commands)
minor_if_not_zero = minor_version if minor_version != 0 else ""
header_includes = template_header_includes_gl32
source_includes = template_sources_includes_gl32.format(annotation.lower(), major_version,
minor_if_not_zero)
# Entry point files
write_file(annotation, "GL " + comment, template_entry_point_header, "\n".join(decls_gl),
"h", header_includes, "libGL", "gl.xml")
write_file(annotation, "GL " + comment, template_entry_point_source, "\n".join(defs_gl),
"cpp", source_includes, "libGL", "gl.xml")
gldecls['core'][(major_version, minor_version)] = get_decls(
context_decl_format, all_commands32, just_libgl_commands, all_commands_no_suffix,
cmd_packed_gl_enums)
# Validation files
validation_annotation = "GL%s%s" % (major_version, minor_if_not_zero)
write_validation_header(validation_annotation, "%s" % comment, validation_protos32,
"gl.xml and wgl.xml")
# WGL
wglxml = registry_xml.RegistryXML('wgl.xml')
name_prefix = "WGL_VERSION_"
version = "1_0"
comment = version.replace("_", ".")
feature_name = "{}{}".format(name_prefix, version)
wglxml.AddCommands(feature_name, version)
wgl_commands = wglxml.commands[version]
all_commands32.extend(wglxml.all_commands)
wgl_commands = [cmd if cmd[:3] == 'wgl' else 'wgl' + cmd for cmd in wgl_commands]
wgl_param_types = set()
decls_wgl, defs_wgl, wgl_defs, validation_protos_wgl, _, _, _ = get_entry_points(
all_commands32, wgl_commands, False, True, wgl_param_types, {})
# Write the version as a comment before the first EP.
libgl_ep_exports.append("\n ; WGL %s" % comment)
# Other versions of these functions are used
wgl_commands.remove("wglUseFontBitmaps")
wgl_commands.remove("wglUseFontOutlines")
libgl_ep_exports += get_exports(wgl_commands)
header_includes = template_header_includes.format(major="", minor="")
header_includes += """
#include <GLES/glext.h>
#include <GLES2/gl2.h>
#include <GLES2/gl2ext.h>
#include <GLES3/gl32.h>
"""
source_includes = template_sources_includes.format(
header_version="gles_ext", validation_header_version="ESEXT")
source_includes += """
#include "libANGLE/capture_gles_1_0_autogen.h"
#include "libANGLE/capture_gles_2_0_autogen.h"
#include "libANGLE/capture_gles_3_0_autogen.h"
#include "libANGLE/capture_gles_3_1_autogen.h"
#include "libANGLE/capture_gles_3_2_autogen.h"
#include "libANGLE/validationES1.h"
#include "libANGLE/validationES2.h"
#include "libANGLE/validationES3.h"
#include "libANGLE/validationES31.h"
#include "libANGLE/validationES32.h"
"""
write_file("gles_ext", "GLES extension", template_entry_point_header,
"\n".join([item for item in extension_decls]), "h", header_includes, "libGLESv2",
"gl.xml and gl_angle_ext.xml")
write_file("gles_ext", "GLES extension", template_entry_point_source,
"\n".join([item for item in extension_defs]), "cpp", source_includes, "libGLESv2",
"gl.xml and gl_angle_ext.xml")
write_validation_header("ESEXT", "ES extension", ext_validation_protos,
"gl.xml and gl_angle_ext.xml")
write_capture_header("ext", "extension", ext_capture_protos, ext_capture_param_funcs)
write_capture_source("ext", "ESEXT", "extension", ext_capture_methods)
write_context_api_decls(context_header, glesdecls, "gles")
write_context_api_decls(context_header, gldecls, "gl")
# Entry point enum
cmd_names = ["Invalid"] + [cmd[2:] for cmd in xml.all_cmd_names.get_all_commands()]
gl_cmd_names = [cmd[2:] for cmd in glxml.all_cmd_names.get_all_commands()]
cmd_names.extend([cmd for cmd in gl_cmd_names if cmd not in cmd_names])
sorted_cmd_names = sorted(cmd_names)
entry_points_enum_header = template_entry_points_enum_header.format(
script_name=os.path.basename(sys.argv[0]),
data_source_name="gl.xml and gl_angle_ext.xml",
year=date.today().year,
lib="GL/GLES",
entry_points_list=",\n".join([" " + cmd for cmd in sorted_cmd_names]))
entry_points_enum_header_path = path_to("libANGLE", "entry_points_enum_autogen.h")
with open(entry_points_enum_header_path, "w") as out:
out.write(entry_points_enum_header)
out.close()
entry_points_cases = [
template_entry_points_name_case.format(enum=cmd) for cmd in sorted_cmd_names
]
entry_points_enum_source = template_entry_points_enum_source.format(
script_name=os.path.basename(sys.argv[0]),
data_source_name="gl.xml and gl_angle_ext.xml",
year=date.today().year,
lib="GL/GLES",
entry_points_name_cases="\n".join(entry_points_cases))
entry_points_enum_source_path = path_to("libANGLE", "entry_points_enum_autogen.cpp")
with open(entry_points_enum_source_path, "w") as out:
out.write(entry_points_enum_source)
out.close()
source_includes = """
#include "angle_gl.h"
#include "libGLESv2/entry_points_gles_1_0_autogen.h"
#include "libGLESv2/entry_points_gles_2_0_autogen.h"
#include "libGLESv2/entry_points_gles_3_0_autogen.h"
#include "libGLESv2/entry_points_gles_3_1_autogen.h"
#include "libGLESv2/entry_points_gles_3_2_autogen.h"
#include "libGLESv2/entry_points_gles_ext_autogen.h"
#include "common/event_tracer.h"
"""
write_export_files("\n".join([item for item in libgles_ep_defs]), source_includes,
"gl.xml and gl_angle_ext.xml", "libGLESv2", "OpenGL ES")
source_includes = """
#include "angle_gl.h"
#include "libGL/entry_points_gl_1_0_autogen.h"
#include "libGL/entry_points_gl_1_1_autogen.h"
#include "libGL/entry_points_gl_1_2_autogen.h"
#include "libGL/entry_points_gl_1_3_autogen.h"
#include "libGL/entry_points_gl_1_4_autogen.h"
#include "libGL/entry_points_gl_1_5_autogen.h"
#include "libGL/entry_points_gl_2_0_autogen.h"
#include "libGL/entry_points_gl_2_1_autogen.h"
#include "libGL/entry_points_gl_3_0_autogen.h"
#include "libGL/entry_points_gl_3_1_autogen.h"
#include "libGL/entry_points_gl_3_2_autogen.h"
#include "libGL/entry_points_gl_3_3_autogen.h"
#include "libGL/entry_points_gl_4_0_autogen.h"
#include "libGL/entry_points_gl_4_1_autogen.h"
#include "libGL/entry_points_gl_4_2_autogen.h"
#include "libGL/entry_points_gl_4_3_autogen.h"
#include "libGL/entry_points_gl_4_4_autogen.h"
#include "libGL/entry_points_gl_4_5_autogen.h"
#include "libGL/entry_points_gl_4_6_autogen.h"
#include "common/event_tracer.h"
"""
write_export_files("\n".join([item for item in libgl_ep_defs]), source_includes,
"gl.xml and wgl.xml", "libGL", "Windows GL")
libgles_ep_exports += get_egl_exports()
everything = "Khronos and ANGLE XML files"
for lib in ["libGLESv2" + suffix for suffix in ["", "_no_capture", "_with_capture"]]:
write_windows_def_file(everything, lib, lib, "libGLESv2", libgles_ep_exports)
write_windows_def_file(everything, "libGL", "openGL32", "libGL", libgl_ep_exports)
all_gles_param_types = sorted(all_gles_param_types)
write_capture_helper_header(all_gles_param_types)
write_capture_helper_source(all_gles_param_types)
write_capture_replay_source(xml.all_commands, all_commands_no_suffix, cmd_packed_gl_enums)
if __name__ == '__main__':
sys.exit(main())
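# Usage sketch (illustrative): the 'inputs'/'outputs' modes handled above let the
# build system query which files this generator reads and writes, e.g.
#   python scripts/generate_entry_points.py inputs
#   python scripts/generate_entry_points.py outputs
# The script path is an assumption; invoke it from wherever the ANGLE scripts
# live. The rest of main() above performs the actual regeneration of the
# autogenerated sources listed in the outputs list.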
| {
"content_hash": "36f89f5514fff27ecb4baeded89ae64d",
"timestamp": "",
"source": "github",
"line_count": 2156,
"max_line_length": 185,
"avg_line_length": 36.833487940630796,
"alnum_prop": 0.6323272008361351,
"repo_name": "youtube/cobalt",
"id": "8fdffd74d24cf250f5bf2bf1a49ce218304d036f",
"size": "79413",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "third_party/angle/scripts/generate_entry_points.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
"""
requests.adapters
~~~~~~~~~~~~~~~~~
This module contains the transport adapters that Requests uses to define
and maintain connections.
"""
import os.path
import socket # noqa: F401
from urllib3.exceptions import ClosedPoolError, ConnectTimeoutError
from urllib3.exceptions import HTTPError as _HTTPError
from urllib3.exceptions import InvalidHeader as _InvalidHeader
from urllib3.exceptions import (
LocationValueError,
MaxRetryError,
NewConnectionError,
ProtocolError,
)
from urllib3.exceptions import ProxyError as _ProxyError
from urllib3.exceptions import ReadTimeoutError, ResponseError
from urllib3.exceptions import SSLError as _SSLError
from urllib3.poolmanager import PoolManager, proxy_from_url
from urllib3.response import HTTPResponse
from urllib3.util import Timeout as TimeoutSauce
from urllib3.util import parse_url
from urllib3.util.retry import Retry
from .auth import _basic_auth_str
from .compat import basestring, urlparse
from .cookies import extract_cookies_to_jar
from .exceptions import (
ConnectionError,
ConnectTimeout,
InvalidHeader,
InvalidProxyURL,
InvalidSchema,
InvalidURL,
ProxyError,
ReadTimeout,
RetryError,
SSLError,
)
from .models import Response
from .structures import CaseInsensitiveDict
from .utils import (
DEFAULT_CA_BUNDLE_PATH,
extract_zipped_paths,
get_auth_from_url,
get_encoding_from_headers,
prepend_scheme_if_needed,
select_proxy,
urldefragauth,
)
try:
from urllib3.contrib.socks import SOCKSProxyManager
except ImportError:
def SOCKSProxyManager(*args, **kwargs):
raise InvalidSchema("Missing dependencies for SOCKS support.")
DEFAULT_POOLBLOCK = False
DEFAULT_POOLSIZE = 10
DEFAULT_RETRIES = 0
DEFAULT_POOL_TIMEOUT = None
class BaseAdapter:
"""The Base Transport Adapter"""
def __init__(self):
super().__init__()
def send(
self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None
):
"""Sends PreparedRequest object. Returns Response object.
:param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
:param stream: (optional) Whether to stream the request content.
:param timeout: (optional) How long to wait for the server to send
data before giving up, as a float, or a :ref:`(connect timeout,
read timeout) <timeouts>` tuple.
:type timeout: float or tuple
:param verify: (optional) Either a boolean, in which case it controls whether we verify
the server's TLS certificate, or a string, in which case it must be a path
to a CA bundle to use
:param cert: (optional) Any user-provided SSL certificate to be trusted.
:param proxies: (optional) The proxies dictionary to apply to the request.
"""
raise NotImplementedError
def close(self):
"""Cleans up adapter specific items."""
raise NotImplementedError
class HTTPAdapter(BaseAdapter):
"""The built-in HTTP Adapter for urllib3.
Provides a general-case interface for Requests sessions to contact HTTP and
HTTPS urls by implementing the Transport Adapter interface. This class will
usually be created by the :class:`Session <Session>` class under the
covers.
:param pool_connections: The number of urllib3 connection pools to cache.
:param pool_maxsize: The maximum number of connections to save in the pool.
:param max_retries: The maximum number of retries each connection
should attempt. Note, this applies only to failed DNS lookups, socket
connections and connection timeouts, never to requests where data has
made it to the server. By default, Requests does not retry failed
connections. If you need granular control over the conditions under
which we retry a request, import urllib3's ``Retry`` class and pass
that instead.
:param pool_block: Whether the connection pool should block for connections.
Usage::
>>> import requests
>>> s = requests.Session()
>>> a = requests.adapters.HTTPAdapter(max_retries=3)
>>> s.mount('http://', a)
"""
__attrs__ = [
"max_retries",
"config",
"_pool_connections",
"_pool_maxsize",
"_pool_block",
]
def __init__(
self,
pool_connections=DEFAULT_POOLSIZE,
pool_maxsize=DEFAULT_POOLSIZE,
max_retries=DEFAULT_RETRIES,
pool_block=DEFAULT_POOLBLOCK,
):
if max_retries == DEFAULT_RETRIES:
self.max_retries = Retry(0, read=False)
else:
self.max_retries = Retry.from_int(max_retries)
self.config = {}
self.proxy_manager = {}
super().__init__()
self._pool_connections = pool_connections
self._pool_maxsize = pool_maxsize
self._pool_block = pool_block
self.init_poolmanager(pool_connections, pool_maxsize, block=pool_block)
def __getstate__(self):
return {attr: getattr(self, attr, None) for attr in self.__attrs__}
def __setstate__(self, state):
# Can't handle by adding 'proxy_manager' to self.__attrs__ because
# self.poolmanager uses a lambda function, which isn't pickleable.
self.proxy_manager = {}
self.config = {}
for attr, value in state.items():
setattr(self, attr, value)
self.init_poolmanager(
self._pool_connections, self._pool_maxsize, block=self._pool_block
)
def init_poolmanager(
self, connections, maxsize, block=DEFAULT_POOLBLOCK, **pool_kwargs
):
"""Initializes a urllib3 PoolManager.
This method should not be called from user code, and is only
exposed for use when subclassing the
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
:param connections: The number of urllib3 connection pools to cache.
:param maxsize: The maximum number of connections to save in the pool.
:param block: Block when no free connections are available.
:param pool_kwargs: Extra keyword arguments used to initialize the Pool Manager.
"""
# save these values for pickling
self._pool_connections = connections
self._pool_maxsize = maxsize
self._pool_block = block
self.poolmanager = PoolManager(
num_pools=connections,
maxsize=maxsize,
block=block,
strict=True,
**pool_kwargs,
)
def proxy_manager_for(self, proxy, **proxy_kwargs):
"""Return urllib3 ProxyManager for the given proxy.
This method should not be called from user code, and is only
exposed for use when subclassing the
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
:param proxy: The proxy to return a urllib3 ProxyManager for.
:param proxy_kwargs: Extra keyword arguments used to configure the Proxy Manager.
:returns: ProxyManager
:rtype: urllib3.ProxyManager
"""
if proxy in self.proxy_manager:
manager = self.proxy_manager[proxy]
elif proxy.lower().startswith("socks"):
username, password = get_auth_from_url(proxy)
manager = self.proxy_manager[proxy] = SOCKSProxyManager(
proxy,
username=username,
password=password,
num_pools=self._pool_connections,
maxsize=self._pool_maxsize,
block=self._pool_block,
**proxy_kwargs,
)
else:
proxy_headers = self.proxy_headers(proxy)
manager = self.proxy_manager[proxy] = proxy_from_url(
proxy,
proxy_headers=proxy_headers,
num_pools=self._pool_connections,
maxsize=self._pool_maxsize,
block=self._pool_block,
**proxy_kwargs,
)
return manager
def cert_verify(self, conn, url, verify, cert):
"""Verify a SSL certificate. This method should not be called from user
code, and is only exposed for use when subclassing the
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
:param conn: The urllib3 connection object associated with the cert.
:param url: The requested URL.
:param verify: Either a boolean, in which case it controls whether we verify
the server's TLS certificate, or a string, in which case it must be a path
to a CA bundle to use
:param cert: The SSL certificate to verify.
"""
if url.lower().startswith("https") and verify:
cert_loc = None
# Allow self-specified cert location.
if verify is not True:
cert_loc = verify
if not cert_loc:
cert_loc = extract_zipped_paths(DEFAULT_CA_BUNDLE_PATH)
if not cert_loc or not os.path.exists(cert_loc):
raise OSError(
f"Could not find a suitable TLS CA certificate bundle, "
f"invalid path: {cert_loc}"
)
conn.cert_reqs = "CERT_REQUIRED"
if not os.path.isdir(cert_loc):
conn.ca_certs = cert_loc
else:
conn.ca_cert_dir = cert_loc
else:
conn.cert_reqs = "CERT_NONE"
conn.ca_certs = None
conn.ca_cert_dir = None
if cert:
if not isinstance(cert, basestring):
conn.cert_file = cert[0]
conn.key_file = cert[1]
else:
conn.cert_file = cert
conn.key_file = None
if conn.cert_file and not os.path.exists(conn.cert_file):
raise OSError(
f"Could not find the TLS certificate file, "
f"invalid path: {conn.cert_file}"
)
if conn.key_file and not os.path.exists(conn.key_file):
raise OSError(
f"Could not find the TLS key file, invalid path: {conn.key_file}"
)
def build_response(self, req, resp):
"""Builds a :class:`Response <requests.Response>` object from a urllib3
response. This should not be called from user code, and is only exposed
for use when subclassing the
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`
:param req: The :class:`PreparedRequest <PreparedRequest>` used to generate the response.
:param resp: The urllib3 response object.
:rtype: requests.Response
"""
response = Response()
# Fallback to None if there's no status_code, for whatever reason.
response.status_code = getattr(resp, "status", None)
# Make headers case-insensitive.
response.headers = CaseInsensitiveDict(getattr(resp, "headers", {}))
# Set encoding.
response.encoding = get_encoding_from_headers(response.headers)
response.raw = resp
response.reason = response.raw.reason
if isinstance(req.url, bytes):
response.url = req.url.decode("utf-8")
else:
response.url = req.url
# Add new cookies from the server.
extract_cookies_to_jar(response.cookies, req, resp)
# Give the Response some context.
response.request = req
response.connection = self
return response
def get_connection(self, url, proxies=None):
"""Returns a urllib3 connection for the given URL. This should not be
called from user code, and is only exposed for use when subclassing the
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
:param url: The URL to connect to.
:param proxies: (optional) A Requests-style dictionary of proxies used on this request.
:rtype: urllib3.ConnectionPool
"""
proxy = select_proxy(url, proxies)
if proxy:
proxy = prepend_scheme_if_needed(proxy, "http")
proxy_url = parse_url(proxy)
if not proxy_url.host:
raise InvalidProxyURL(
"Please check proxy URL. It is malformed "
"and could be missing the host."
)
proxy_manager = self.proxy_manager_for(proxy)
conn = proxy_manager.connection_from_url(url)
else:
# Only scheme should be lower case
parsed = urlparse(url)
url = parsed.geturl()
conn = self.poolmanager.connection_from_url(url)
return conn
def close(self):
"""Disposes of any internal state.
Currently, this closes the PoolManager and any active ProxyManager,
which closes any pooled connections.
"""
self.poolmanager.clear()
for proxy in self.proxy_manager.values():
proxy.clear()
def request_url(self, request, proxies):
"""Obtain the url to use when making the final request.
If the message is being sent through an HTTP proxy, the full URL has to
be used. Otherwise, we should only use the path portion of the URL.
This should not be called from user code, and is only exposed for use
when subclassing the
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
:param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
:param proxies: A dictionary of schemes or schemes and hosts to proxy URLs.
:rtype: str
"""
proxy = select_proxy(request.url, proxies)
scheme = urlparse(request.url).scheme
is_proxied_http_request = proxy and scheme != "https"
using_socks_proxy = False
if proxy:
proxy_scheme = urlparse(proxy).scheme.lower()
using_socks_proxy = proxy_scheme.startswith("socks")
url = request.path_url
if is_proxied_http_request and not using_socks_proxy:
url = urldefragauth(request.url)
return url
def add_headers(self, request, **kwargs):
"""Add any headers needed by the connection. As of v2.0 this does
nothing by default, but is left for overriding by users that subclass
the :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
This should not be called from user code, and is only exposed for use
when subclassing the
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
:param request: The :class:`PreparedRequest <PreparedRequest>` to add headers to.
:param kwargs: The keyword arguments from the call to send().
"""
pass
def proxy_headers(self, proxy):
"""Returns a dictionary of the headers to add to any request sent
through a proxy. This works with urllib3 magic to ensure that they are
correctly sent to the proxy, rather than in a tunnelled request if
CONNECT is being used.
This should not be called from user code, and is only exposed for use
when subclassing the
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
:param proxy: The url of the proxy being used for this request.
:rtype: dict
"""
headers = {}
username, password = get_auth_from_url(proxy)
if username:
headers["Proxy-Authorization"] = _basic_auth_str(username, password)
return headers
def send(
self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None
):
"""Sends PreparedRequest object. Returns Response object.
:param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
:param stream: (optional) Whether to stream the request content.
:param timeout: (optional) How long to wait for the server to send
data before giving up, as a float, or a :ref:`(connect timeout,
read timeout) <timeouts>` tuple.
:type timeout: float or tuple or urllib3 Timeout object
:param verify: (optional) Either a boolean, in which case it controls whether
we verify the server's TLS certificate, or a string, in which case it
must be a path to a CA bundle to use
:param cert: (optional) Any user-provided SSL certificate to be trusted.
:param proxies: (optional) The proxies dictionary to apply to the request.
:rtype: requests.Response
"""
try:
conn = self.get_connection(request.url, proxies)
except LocationValueError as e:
raise InvalidURL(e, request=request)
self.cert_verify(conn, request.url, verify, cert)
url = self.request_url(request, proxies)
self.add_headers(
request,
stream=stream,
timeout=timeout,
verify=verify,
cert=cert,
proxies=proxies,
)
chunked = not (request.body is None or "Content-Length" in request.headers)
if isinstance(timeout, tuple):
try:
connect, read = timeout
timeout = TimeoutSauce(connect=connect, read=read)
except ValueError:
raise ValueError(
f"Invalid timeout {timeout}. Pass a (connect, read) timeout tuple, "
f"or a single float to set both timeouts to the same value."
)
elif isinstance(timeout, TimeoutSauce):
pass
else:
timeout = TimeoutSauce(connect=timeout, read=timeout)
try:
if not chunked:
resp = conn.urlopen(
method=request.method,
url=url,
body=request.body,
headers=request.headers,
redirect=False,
assert_same_host=False,
preload_content=False,
decode_content=False,
retries=self.max_retries,
timeout=timeout,
)
# Send the request.
else:
if hasattr(conn, "proxy_pool"):
conn = conn.proxy_pool
low_conn = conn._get_conn(timeout=DEFAULT_POOL_TIMEOUT)
try:
skip_host = "Host" in request.headers
low_conn.putrequest(
request.method,
url,
skip_accept_encoding=True,
skip_host=skip_host,
)
for header, value in request.headers.items():
low_conn.putheader(header, value)
low_conn.endheaders()
for i in request.body:
low_conn.send(hex(len(i))[2:].encode("utf-8"))
low_conn.send(b"\r\n")
low_conn.send(i)
low_conn.send(b"\r\n")
low_conn.send(b"0\r\n\r\n")
# Receive the response from the server
r = low_conn.getresponse()
resp = HTTPResponse.from_httplib(
r,
pool=conn,
connection=low_conn,
preload_content=False,
decode_content=False,
)
except Exception:
# If we hit any problems here, clean up the connection.
# Then, raise so that we can handle the actual exception.
low_conn.close()
raise
except (ProtocolError, OSError) as err:
raise ConnectionError(err, request=request)
except MaxRetryError as e:
if isinstance(e.reason, ConnectTimeoutError):
# TODO: Remove this in 3.0.0: see #2811
if not isinstance(e.reason, NewConnectionError):
raise ConnectTimeout(e, request=request)
if isinstance(e.reason, ResponseError):
raise RetryError(e, request=request)
if isinstance(e.reason, _ProxyError):
raise ProxyError(e, request=request)
if isinstance(e.reason, _SSLError):
# This branch is for urllib3 v1.22 and later.
raise SSLError(e, request=request)
raise ConnectionError(e, request=request)
except ClosedPoolError as e:
raise ConnectionError(e, request=request)
except _ProxyError as e:
raise ProxyError(e)
except (_SSLError, _HTTPError) as e:
if isinstance(e, _SSLError):
# This branch is for urllib3 versions earlier than v1.22
raise SSLError(e, request=request)
elif isinstance(e, ReadTimeoutError):
raise ReadTimeout(e, request=request)
elif isinstance(e, _InvalidHeader):
raise InvalidHeader(e, request=request)
else:
raise
return self.build_response(request, resp)
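# Minimal usage sketch (not part of the requests API surface): mounting a
# customized adapter on a Session. The pool/retry values are illustrative only.
def _example_mount_adapter():
    import requests
    session = requests.Session()
    adapter = HTTPAdapter(pool_connections=20, pool_maxsize=20, max_retries=3)
    # The most specific mounted prefix wins, so both schemes are mounted here.
    session.mount("https://", adapter)
    session.mount("http://", adapter)
    return session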
| {
"content_hash": "fb21f603a8cfc80f560cbe16e02c2ec7",
"timestamp": "",
"source": "github",
"line_count": 584,
"max_line_length": 97,
"avg_line_length": 36.45034246575342,
"alnum_prop": 0.5967961666744962,
"repo_name": "psf/requests",
"id": "d3b2d5bb1e56f1293f79f70af4df2d429a347a21",
"size": "21287",
"binary": false,
"copies": "4",
"ref": "refs/heads/main",
"path": "requests/adapters.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Makefile",
"bytes": "796"
},
{
"name": "Python",
"bytes": "340075"
}
],
"symlink_target": ""
} |
import unittest
import jenkins
from airflow.contrib.operators.jenkins_job_trigger_operator \
import JenkinsJobTriggerOperator
from airflow.contrib.hooks.jenkins_hook import JenkinsHook
from airflow.exceptions import AirflowException
try:
from unittest import mock
except ImportError:
try:
import mock
except ImportError:
mock = None
class JenkinsOperatorTestCase(unittest.TestCase):
@unittest.skipIf(mock is None, 'mock package not present')
def test_execute(self):
jenkins_mock = mock.Mock(spec=jenkins.Jenkins, auth='secret')
jenkins_mock.get_build_info.return_value = \
{'result': 'SUCCESS',
'url': 'http://aaa.fake-url.com/congratulation/its-a-job'}
jenkins_mock.build_job_url.return_value = \
'http://www.jenkins.url/somewhere/in/the/universe'
hook_mock = mock.Mock(spec=JenkinsHook)
hook_mock.get_jenkins_server.return_value = jenkins_mock
the_parameters = {'a_param': 'blip', 'another_param': '42'}
with mock.patch.object(JenkinsJobTriggerOperator, "get_hook") as get_hook_mocked,\
mock.patch('airflow.contrib.operators'
'.jenkins_job_trigger_operator.jenkins_request_with_headers') \
as mock_make_request:
mock_make_request.side_effect = \
[{'body': '', 'headers': {'Location': 'http://what-a-strange.url/18'}},
{'body': '{"executable":{"number":"1"}}', 'headers': {}}]
get_hook_mocked.return_value = hook_mock
operator = JenkinsJobTriggerOperator(
dag=None,
jenkins_connection_id="fake_jenkins_connection",
# The hook is mocked, this connection won't be used
task_id="operator_test",
job_name="a_job_on_jenkins",
parameters=the_parameters,
sleep_time=1)
operator.execute(None)
self.assertEquals(jenkins_mock.get_build_info.call_count, 1)
jenkins_mock.get_build_info.assert_called_with(name='a_job_on_jenkins',
number='1')
@unittest.skipIf(mock is None, 'mock package not present')
def test_execute_job_polling_loop(self):
jenkins_mock = mock.Mock(spec=jenkins.Jenkins, auth='secret')
jenkins_mock.get_job_info.return_value = {'nextBuildNumber': '1'}
jenkins_mock.get_build_info.side_effect = \
[{'result': None},
{'result': 'SUCCESS',
'url': 'http://aaa.fake-url.com/congratulation/its-a-job'}]
jenkins_mock.build_job_url.return_value = \
'http://www.jenkins.url/somewhere/in/the/universe'
hook_mock = mock.Mock(spec=JenkinsHook)
hook_mock.get_jenkins_server.return_value = jenkins_mock
the_parameters = {'a_param': 'blip', 'another_param': '42'}
with mock.patch.object(JenkinsJobTriggerOperator, "get_hook") as get_hook_mocked,\
mock.patch('airflow.contrib.operators.jenkins_job_trigger_operator'
'.jenkins_request_with_headers') as mock_make_request:
mock_make_request.side_effect = \
[{'body': '', 'headers': {'Location': 'http://what-a-strange.url/18'}},
{'body': '{"executable":{"number":"1"}}', 'headers': {}}]
get_hook_mocked.return_value = hook_mock
operator = JenkinsJobTriggerOperator(
dag=None,
task_id="operator_test",
job_name="a_job_on_jenkins",
jenkins_connection_id="fake_jenkins_connection",
# The hook is mocked, this connection won't be used
parameters=the_parameters,
sleep_time=1)
operator.execute(None)
self.assertEquals(jenkins_mock.get_build_info.call_count, 2)
@unittest.skipIf(mock is None, 'mock package not present')
def test_execute_job_failure(self):
jenkins_mock = mock.Mock(spec=jenkins.Jenkins, auth='secret')
jenkins_mock.get_job_info.return_value = {'nextBuildNumber': '1'}
jenkins_mock.get_build_info.return_value = {
'result': 'FAILURE',
'url': 'http://aaa.fake-url.com/congratulation/its-a-job'}
jenkins_mock.build_job_url.return_value = \
'http://www.jenkins.url/somewhere/in/the/universe'
hook_mock = mock.Mock(spec=JenkinsHook)
hook_mock.get_jenkins_server.return_value = jenkins_mock
the_parameters = {'a_param': 'blip', 'another_param': '42'}
with mock.patch.object(JenkinsJobTriggerOperator, "get_hook") as get_hook_mocked,\
mock.patch('airflow.contrib.operators.'
'jenkins_job_trigger_operator.jenkins_request_with_headers') \
as mock_make_request:
mock_make_request.side_effect = \
[{'body': '', 'headers': {'Location': 'http://what-a-strange.url/18'}},
{'body': '{"executable":{"number":"1"}}', 'headers': {}}]
get_hook_mocked.return_value = hook_mock
operator = JenkinsJobTriggerOperator(
dag=None,
task_id="operator_test",
job_name="a_job_on_jenkins",
parameters=the_parameters,
jenkins_connection_id="fake_jenkins_connection",
# The hook is mocked, this connection won't be used
sleep_time=1)
self.assertRaises(AirflowException, operator.execute, None)
if __name__ == "__main__":
unittest.main()
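# Note on the pattern above (descriptive only): each test builds a mock.Mock with
# spec=jenkins.Jenkins so that misspelled attributes fail fast, stubs
# get_build_info/build_job_url, and patches both JenkinsJobTriggerOperator.get_hook
# and the module-level jenkins_request_with_headers helper, so no real Jenkins
# server is contacted during the test run.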
| {
"content_hash": "07e1d5cd572cbd5a5cadca0413803f43",
"timestamp": "",
"source": "github",
"line_count": 129,
"max_line_length": 90,
"avg_line_length": 43.91472868217054,
"alnum_prop": 0.5862312444836717,
"repo_name": "adamhaney/airflow",
"id": "23e0bcb66fd975a075257ad6404f1495e53bc300",
"size": "6477",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "tests/contrib/operators/test_jenkins_operator.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "12126"
},
{
"name": "Dockerfile",
"bytes": "3602"
},
{
"name": "HTML",
"bytes": "129737"
},
{
"name": "JavaScript",
"bytes": "22091"
},
{
"name": "Mako",
"bytes": "1284"
},
{
"name": "Python",
"bytes": "5635329"
},
{
"name": "Shell",
"bytes": "41790"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('eventlog', '0010_auto_20170619_1404'),
]
operations = [
migrations.AlterField(
model_name='fieldsightlog',
name='type',
field=models.IntegerField(default=0, choices=[(0, b'USER'), (1, b'FORM'), (2, b'SUBMISSION'), (3, b'Site'), (4, b'Project'), (5, b'Organization'), (6, b'Role'), (7, b'XFORM'), (8, b'SUBMISSION_STATUS')]),
),
]
| {
"content_hash": "6e8b93614db4229b402b1d4a2a572a05",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 216,
"avg_line_length": 30.555555555555557,
"alnum_prop": 0.5818181818181818,
"repo_name": "awemulya/fieldsight-kobocat",
"id": "4e46349111c50c63210fad57032528e9b0123dde",
"size": "574",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "onadata/apps/eventlog/migrations/0011_auto_20170807_1337.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "70153"
},
{
"name": "Dockerfile",
"bytes": "2462"
},
{
"name": "HTML",
"bytes": "1488442"
},
{
"name": "JavaScript",
"bytes": "674757"
},
{
"name": "Makefile",
"bytes": "2286"
},
{
"name": "Python",
"bytes": "5340355"
},
{
"name": "Shell",
"bytes": "16493"
}
],
"symlink_target": ""
} |
import math, time
from twisted.protocols import basic
class CollectingLineReceiver(basic.LineReceiver):
def __init__(self):
self.lines = []
self.lineReceived = self.lines.append
def deliver(proto, chunks):
map(proto.dataReceived, chunks)
def benchmark(chunkSize, lineLength, numLines):
bytes = ('x' * lineLength + '\r\n') * numLines
chunkCount = len(bytes) / chunkSize + 1
chunks = []
for n in xrange(chunkCount):
chunks.append(bytes[n*chunkSize:(n+1)*chunkSize])
assert ''.join(chunks) == bytes, (chunks, bytes)
p = CollectingLineReceiver()
before = time.clock()
deliver(p, chunks)
after = time.clock()
assert bytes.splitlines() == p.lines, (bytes.splitlines(), p.lines)
print 'chunkSize:', chunkSize,
print 'lineLength:', lineLength,
print 'numLines:', numLines,
print 'CPU Time: ', after - before
def main():
for numLines in 100, 1000:
for lineLength in (10, 100, 1000):
for chunkSize in (1, 500, 5000):
benchmark(chunkSize, lineLength, numLines)
for numLines in 10000, 50000:
for lineLength in (1000, 2000):
for chunkSize in (51, 500, 5000):
benchmark(chunkSize, lineLength, numLines)
if __name__ == '__main__':
main()
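# Running this benchmark (sketch): the module is Python 2 code (print statements,
# xrange, time.clock), so it is typically executed as
#   python2 linereceiver.py
# which prints one "CPU Time" line per (chunkSize, lineLength, numLines)
# combination exercised by main().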
| {
"content_hash": "0a75f8cd018d6f91941abc110c406c00",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 71,
"avg_line_length": 27.80851063829787,
"alnum_prop": 0.6212700841622035,
"repo_name": "movmov/cc",
"id": "7f552919e375878cf742da037a7950596c8852d2",
"size": "1307",
"binary": false,
"copies": "22",
"ref": "refs/heads/master",
"path": "vendor/Twisted-10.0.0/doc/core/benchmarks/linereceiver.py",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
import grpc
import kv_pb2 as kv__pb2
class KVStub(object):
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.Get = channel.unary_unary(
'/proto.KV/Get',
request_serializer=kv__pb2.GetRequest.SerializeToString,
response_deserializer=kv__pb2.GetResponse.FromString,
)
self.Put = channel.unary_unary(
'/proto.KV/Put',
request_serializer=kv__pb2.PutRequest.SerializeToString,
response_deserializer=kv__pb2.Empty.FromString,
)
class KVServicer(object):
def Get(self, request, context):
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def Put(self, request, context):
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def add_KVServicer_to_server(servicer, server):
rpc_method_handlers = {
'Get': grpc.unary_unary_rpc_method_handler(
servicer.Get,
request_deserializer=kv__pb2.GetRequest.FromString,
response_serializer=kv__pb2.GetResponse.SerializeToString,
),
'Put': grpc.unary_unary_rpc_method_handler(
servicer.Put,
request_deserializer=kv__pb2.PutRequest.FromString,
response_serializer=kv__pb2.Empty.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
'proto.KV', rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
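# Usage sketch (illustrative; assumes a KV server is listening on localhost:1234
# and that kv.proto defines PutRequest(key, value) / GetRequest(key) /
# GetResponse(value) as in the go-plugin example):
def _example_kv_roundtrip():
    channel = grpc.insecure_channel('localhost:1234')
    stub = KVStub(channel)
    stub.Put(kv__pb2.PutRequest(key='hello', value=b'world'))
    return stub.Get(kv__pb2.GetRequest(key='hello')).value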
| {
"content_hash": "5e4c242dac2ec9c94ef589943c7ccc95",
"timestamp": "",
"source": "github",
"line_count": 54,
"max_line_length": 68,
"avg_line_length": 30.12962962962963,
"alnum_prop": 0.6760909649661955,
"repo_name": "mattatcha/terraform-provider-convox",
"id": "cc331c85651ca2c721cca4872720bd98fd1cc1b5",
"size": "1697",
"binary": false,
"copies": "8",
"ref": "refs/heads/master",
"path": "vendor/github.com/hashicorp/go-plugin/examples/grpc/plugin-python/kv_pb2_grpc.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Go",
"bytes": "40281"
},
{
"name": "HCL",
"bytes": "1520"
},
{
"name": "Makefile",
"bytes": "1338"
}
],
"symlink_target": ""
} |
"""Expression Intrinsics and math functions in TVM."""
# pylint: disable=redefined-builtin
from __future__ import absolute_import as _abs
from ._ffi.function import register_func as _register_func
from . import make as _make
from .api import convert, const
from .expr import Call as _Call
from .schedule import Buffer as _Buffer
def _pack_buffer(buf):
"""Build intrinsics that packs the buffer.
"""
assert buf.shape
shape = _make.Call("handle", "tvm_stack_make_shape", buf.shape,
_Call.Intrinsic, None, 0)
strides = _make.Call("handle", "tvm_stack_make_shape", buf.strides,
_Call.Intrinsic, None, 0) if buf.strides else 0
pack_args = [buf.data,
shape,
strides,
len(buf.shape),
const(0, dtype=buf.dtype),
buf.elem_offset]
return _make.Call("handle", "tvm_stack_make_array",
pack_args, _Call.Intrinsic, None, 0)
def call_packed(*args):
"""Build expression by call an external packed function.
The argument to packed function can be Expr or Buffer.
The argument is the corresponding POD type when Expr is presented.
When the argument is Buffer, the corresponding PackedFunc
will recieve an TVMArrayHandle whose content is valid during the callback period.
If the PackedFunc is a python callback, then the corresponding argument is NDArray.
Parameters
----------
args : list of Expr or Buffer.
Positional arguments.
Returns
-------
call : Expr
The call expression.
See Also
--------
tvm.extern : Create tensor with extern function call.
"""
call_args = [_pack_buffer(x) if isinstance(x, _Buffer) else x for x in args]
return _make.Call(
"int32", "tvm_call_packed", call_args, _Call.Intrinsic, None, 0)
def call_pure_intrin(dtype, func_name, *args):
"""Build expression by calling a pure intrinsic function.
Intrinsics can be overloaded with multiple data types via
the intrinsic translation rule.
Parameters
----------
dtype : str
The data type of the result.
func_name: str
The intrinsic function name.
args : list
Positional arguments.
Returns
-------
call : Expr
The call expression.
"""
args = convert(args)
return _make.Call(
dtype, func_name, convert(args), _Call.PureIntrinsic, None, 0)
def call_intrin(dtype, func_name, *args):
"""Build expression by calling an intrinsic function.
Intrinsics can be overloaded with multiple data types via
the intrinsic translation rule.
Parameters
----------
dtype : str
The data type of the result.
func_name: str
The intrinsic function name.
args : list
Positional arguments.
Returns
-------
call : Expr
The call expression.
"""
args = convert(args)
return _make.Call(
dtype, func_name, convert(args), _Call.Intrinsic, None, 0)
def call_pure_extern(dtype, func_name, *args):
"""Build expression by calling a pure extern function.
Parameters
----------
dtype : str
The data type of the result.
func_name: str
The extern function name.
args : list
Positional arguments.
Returns
-------
call : Expr
The call expression.
"""
return _make.Call(
dtype, func_name, convert(args), _Call.PureExtern, None, 0)
def call_extern(dtype, func_name, *args):
"""Build expression by calling a extern function.
Parameters
----------
dtype : str
The data type of the result.
func_name: str
The extern function name.
args : list
Positional arguments.
Returns
-------
call : Expr
The call expression.
"""
return _make.Call(
dtype, func_name, convert(args), _Call.Extern, None, 0)
def call_llvm_intrin(dtype, name, *args):
"""Build expression by calling an llvm intrinsic function
Parameters
----------
dtype : str
The data type of the result.
name : str
The name of the LLVM intrinsic function.
args : list
Positional arguments.
Returns
-------
call : Expr
The call expression.
"""
import tvm
llvm_id = tvm.codegen.llvm_lookup_intrinsic_id(name)
assert llvm_id != 0, "%s is not an LLVM intrinsic" % name
return call_pure_intrin(dtype, 'llvm_intrin', tvm.const(llvm_id, 'uint32'), *args)
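# Usage sketch (hedged): callers of call_llvm_intrin in this codebase commonly
# pass the argument count first as a uint32 constant, followed by the operands.
# Both the intrinsic name and that calling convention are assumptions here;
# check the LLVM codegen rules before relying on them.
def _example_llvm_popcount(x):
    import tvm  # local import, mirroring call_llvm_intrin above
    return call_llvm_intrin(x.dtype, "llvm.ctpop", tvm.const(1, "uint32"), x)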
def exp(x):
"""Take exponetial of input x.
Parameters
----------
x : Expr
Input argument.
Returns
-------
y : Expr
The result.
"""
return call_pure_intrin(x.dtype, "exp", x)
def tanh(x):
"""Take hyperbolic tanh of input x.
Parameters
----------
x : Expr
Input argument.
Returns
-------
y : Expr
The result.
"""
return call_pure_intrin(x.dtype, "tanh", x)
def sigmoid(x):
"""Quick function to get sigmoid
Parameters
----------
x : Expr
Input argument.
Returns
-------
y : Expr
The result.
"""
return call_pure_intrin(x.dtype, "sigmoid", x)
def log(x):
"""Take log of input x.
Parameters
----------
x : Expr
Input argument.
Returns
-------
y : Expr
The result.
"""
return call_pure_intrin(x.dtype, "log", x)
def sqrt(x):
"""Take square root of input x.
Parameters
----------
x : Expr
Input argument.
Returns
-------
y : Expr
The result.
"""
return call_pure_intrin(x.dtype, "sqrt", x)
def rsqrt(x):
"""Take reciprocal of square root of input x.
Parameters
----------
x : Expr
Input argument.
Returns
-------
y : Expr
The result.
"""
return call_pure_intrin(x.dtype, "rsqrt", x)
def floor(x):
"""Take floor of float input x.
Parameters
----------
x : Expr
Input argument.
Returns
-------
y : Expr
The result.
"""
return _make.floor(x)
def ceil(x):
"""Take ceil of float input x.
Parameters
----------
x : Expr
Input argument.
Returns
-------
y : Expr
The result.
"""
return _make.ceil(x)
def trunc(x):
"""Get truncated value of the input.
The truncated value of the scalar x is the
nearest integer i which is closer to zero than x is.
Parameters
----------
x : Expr
Input argument.
Returns
-------
y : Expr
The result.
"""
return _make.trunc(x)
def abs(x):
"""Get absolute value of the input element-wise.
Parameters
----------
x : Expr
Input argument.
Returns
-------
y : Expr
The result.
"""
return _make.abs(x)
def round(x):
"""Round elements of the array to the nearest integer.
Parameters
----------
x : Expr
Input argument.
Returns
-------
y : Expr
The result.
"""
return _make.round(x)
def power(x, y):
"""x power y
Parameters
----------
x : Expr
Input argument.
y : Expr
The exponent
Returns
-------
z : Expr
The result.
"""
return call_pure_intrin(x.dtype, "pow", x, y)
def popcount(x):
"""Count the number of set bits in input x.
Parameters
----------
x : Expr
Input argument.
Returns
-------
y : Expr
The result.
"""
return call_pure_intrin(x.dtype, "popcount", x)
def fmod(x, y):
"""Return the remainder of x divided by y with the same sign as x.
Parameters
----------
x : Expr
Input argument.
y : Expr
Input argument.
Returns
-------
z : Expr
The result.
"""
return call_pure_intrin(x.dtype, "fmod", x, y)
def if_then_else(cond, t, f):
"""Conditional selection expression.
Parameters
----------
cond : Expr
The condition
t : Expr
The result expression if cond is true.
f : Expr
The result expression if cond is false.
Returns
-------
result : Node
The result of conditional expression.
Note
----
Unlike Select, if_then_else will not execute
the branch that does not satisfy the condition.
You can use it to guard against out of bound access.
Unlike Select, if_then_else cannot be vectorized
if some lanes in the vector have different conditions.
"""
t = convert(t)
f = convert(f)
cond = convert(cond)
if cond.dtype != "bool":
raise TypeError("The condition's data type has to be bool")
return call_pure_intrin(t.dtype, "tvm_if_then_else", cond, t, f)
# Intrinsic rule related code
def register_intrin_rule(target, intrin, f=None, override=False):
"""Register an intrinsic function generation rule.
Intrinsic generation rules are callback functions for
code generator to get device specific calls.
This function simply translates to:
:code:`register_func("tvm.intrin.rule.%s.%s" % (target, intrin), f, override)`
TVM may already pre-register intrinsic rules in the backend.
However, user can use this function to change the intrinsic translation
behavior or add new intrinsic rules during runtime.
Parameters
----------
target : str
The name of the codegen target.
intrin : str
The name of the intrinsic.
f : function, optional
The function to be registered.
override : boolean, optional
Whether override existing entry.
Returns
-------
fregister : function
Register function if f is not specified.
Examples
--------
The following code registers exp expansion rule for opencl.
.. code-block:: python
register_intrin_rule("opencl", "exp", my_exp_rule, override=True)
"""
return _register_func("tvm.intrin.rule.%s.%s" % (target, intrin), f, override)
def _rule_float_suffix(op):
"""Intrinsic rule: Add float suffix if it is float32.
This is an example intrinsic generation rule.
Parameters
----------
op : Expr
The call expression of original intrinsic.
Returns
-------
ret : Expr
The translated intrinsic rule.
Return same op if no translation is possible.
See Also
--------
register_intrin_rule : The registration function for intrin rules.
"""
if op.dtype == "float32":
return call_pure_extern(op.dtype, "%sf" % op.name, *op.args)
if op.dtype == "float64":
return call_pure_extern(op.dtype, op.name, *op.args)
return op
def _rule_float_direct(op):
"""Intrinsic rule: Directly call pure extern function for floats.
This is an example intrinsic generation rule.
Parameters
----------
op : Expr
The call expression of original intrinsic.
Returns
-------
ret : Expr
The translated intrinsic rule.
Return same op if no translation is possible.
See Also
--------
register_intrin_rule : The registration function for intrin rules.
"""
if str(op.dtype).startswith("float"):
return call_pure_extern(op.dtype, op.name, *op.args)
return None
@_register_func("tvm.default_trace_action")
def _tvm_default_trace_action(*args):
print(list(args))
def trace(args, trace_action="tvm.default_trace_action"):
"""Trace tensor data at the runtime.
The trace function allows tracing a specific tensor at
runtime. The value to be traced should come as the last argument.
The trace action should be specified; by default
tvm.default_trace_action is used.
Parameters
----------
args : list of Expr or Buffers.
Positional arguments.
trace_action : str.
The name of the trace action.
Returns
-------
call : Expr
The call expression.
See Also
--------
tvm.call_packed : Creates packed function.
"""
if not isinstance(args, list):
raise Exception("tvm.trace consumes the args as list type")
call_args = [_pack_buffer(x) if isinstance(x, _Buffer) else x for x in args]
call_args.insert(0, trace_action)
return _make.Call(
args[-1].dtype, "tvm_call_trace_packed", call_args, _Call.Intrinsic, None, 0)
# opencl pattern for exp
register_intrin_rule("opencl", "exp", _rule_float_direct, override=True)
# default pattern for exp
register_intrin_rule("default", "exp", _rule_float_suffix, override=True)
| {
"content_hash": "38819f1ab76c5670dd690f34c8ad1a01",
"timestamp": "",
"source": "github",
"line_count": 582,
"max_line_length": 87,
"avg_line_length": 21.774914089347078,
"alnum_prop": 0.5873116073542176,
"repo_name": "mlperf/training_results_v0.7",
"id": "df854e270e9de1ce3c104c0d1249f01935ae711d",
"size": "13458",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Fujitsu/benchmarks/resnet/implementations/implementation_open/mxnet/3rdparty/tvm/python/tvm/intrin.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ANTLR",
"bytes": "1731"
},
{
"name": "Awk",
"bytes": "14530"
},
{
"name": "Batchfile",
"bytes": "13130"
},
{
"name": "C",
"bytes": "172914"
},
{
"name": "C++",
"bytes": "13037795"
},
{
"name": "CMake",
"bytes": "113458"
},
{
"name": "CSS",
"bytes": "70255"
},
{
"name": "Clojure",
"bytes": "622652"
},
{
"name": "Cuda",
"bytes": "1974745"
},
{
"name": "Dockerfile",
"bytes": "149523"
},
{
"name": "Groovy",
"bytes": "160449"
},
{
"name": "HTML",
"bytes": "171537"
},
{
"name": "Java",
"bytes": "189275"
},
{
"name": "JavaScript",
"bytes": "98224"
},
{
"name": "Julia",
"bytes": "430755"
},
{
"name": "Jupyter Notebook",
"bytes": "11091342"
},
{
"name": "Lua",
"bytes": "17720"
},
{
"name": "MATLAB",
"bytes": "34903"
},
{
"name": "Makefile",
"bytes": "215967"
},
{
"name": "Perl",
"bytes": "1551186"
},
{
"name": "PowerShell",
"bytes": "13906"
},
{
"name": "Python",
"bytes": "36943114"
},
{
"name": "R",
"bytes": "134921"
},
{
"name": "Raku",
"bytes": "7280"
},
{
"name": "Ruby",
"bytes": "4930"
},
{
"name": "SWIG",
"bytes": "140111"
},
{
"name": "Scala",
"bytes": "1304960"
},
{
"name": "Shell",
"bytes": "1312832"
},
{
"name": "Smalltalk",
"bytes": "3497"
},
{
"name": "Starlark",
"bytes": "69877"
},
{
"name": "TypeScript",
"bytes": "243012"
}
],
"symlink_target": ""
} |
from __future__ import print_function
import os
import sys
v = sys.version_info
if v[:2] < (3,3):
error = "ERROR: Jupyter Hub requires Python version 3.3 or above."
print(error, file=sys.stderr)
sys.exit(1)
if os.name in ('nt', 'dos'):
error = "ERROR: Windows is not supported"
print(error, file=sys.stderr)
# At least we're on the python version we need, move on.
from distutils.core import setup
pjoin = os.path.join
here = os.path.abspath(os.path.dirname(__file__))
# Get the current package version.
version_ns = {}
with open(pjoin(here, 'version.py')) as f:
exec(f.read(), {}, version_ns)
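# Note (illustrative, not part of the original file): version.py is expected to
# define __version__, e.g. `__version__ = "0.1.0"`; exec() writes it into
# version_ns so setup() below can read it without importing the package.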
setup_args = dict(
name = 'dockerspawner',
packages = ['dockerspawner'],
version = version_ns['__version__'],
description = """Dockerspawner: A custom spawner for Jupyterhub.""",
long_description = "",
author = "Jupyter Development Team",
author_email = "[email protected]",
url = "http://jupyter.org",
license = "BSD",
platforms = "Linux, Mac OS X",
keywords = ['Interactive', 'Interpreter', 'Shell', 'Web'],
classifiers = [
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
],
)
# setuptools requirements
if 'setuptools' in sys.modules:
setup_args['install_requires'] = install_requires = []
with open('requirements.txt') as f:
for line in f.readlines():
req = line.strip()
if not req or req.startswith(('-e', '#')):
continue
install_requires.append(req)
def main():
setup(**setup_args)
if __name__ == '__main__':
main()
| {
"content_hash": "a1a9a07d056cdb4708cee76bbb1c4fe1",
"timestamp": "",
"source": "github",
"line_count": 67,
"max_line_length": 80,
"avg_line_length": 29.074626865671643,
"alnum_prop": 0.5728952772073922,
"repo_name": "anaderi/dockerspawner",
"id": "83423376567f5d631a9ae3e4955b61dfb750b5c5",
"size": "2311",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "26430"
},
{
"name": "Shell",
"bytes": "927"
}
],
"symlink_target": ""
} |
from traitsui.view import View
# ============= enthought library imports =======================
# ============= standard library imports ========================
# ============= local library imports ==========================
from pychron.core.helpers.strtools import ps
from pychron.experiment.conditional.conditional import (
ActionConditional,
TruncationConditional,
TerminationConditional,
CancelationConditional,
)
from pychron.experiment.conditional.conditionals_edit_view import (
ConditionalsViewable,
ConditionalGroup,
)
from pychron.experiment.conditional.groups import PostRunGroup, PreRunGroup
from pychron.pychron_constants import ACTION, TERMINATION, CANCELATION, TRUNCATION
ADD_CONDITIONALS = (
(ps(ACTION), ConditionalGroup, ActionConditional),
(ps(TRUNCATION), ConditionalGroup, TruncationConditional),
(ps(CANCELATION), ConditionalGroup, CancelationConditional),
(ps(TERMINATION), ConditionalGroup, TerminationConditional),
)
class ConditionalsView(ConditionalsViewable):
title = "Active Conditionals"
def add_post_run_terminations(self, items):
self._add_pre_post("PostRunTerminations", items, PostRunGroup)
def add_pre_run_terminations(self, items):
self._add_pre_post("PreRunTerminations", items, PreRunGroup)
def _add_pre_post(self, label, items, klass):
if not items:
items = []
grp = next((gi for gi in self.groups if gi.label == label), None)
if not grp:
self._group_factory(items, klass, auto_select=False, label=label)
else:
grp.conditionals.extend(items)
def add_system_conditionals(self, ditems):
if ditems:
for name, klass, cklass in ADD_CONDITIONALS:
items = ditems.get(name, [])
self._group_factory(
items,
klass,
conditional_klass=cklass,
auto_select=False,
label=name.capitalize(),
)
def add_conditionals(self, ditems, **kw):
if ditems:
for name, klass, cklass in ADD_CONDITIONALS:
items = ditems.get(name, [])
grp = next(
(gi for gi in self.groups if gi.label == name.capitalize()), None
)
if not grp:
self._group_factory(
items, klass, auto_select=False, label=name.capitalize(), **kw
)
else:
grp.conditionals.extend(items)
def traits_view(self):
v = View(self._view_tabs(), buttons=["OK"], title=self.title, width=800)
return v
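    # Usage sketch (hypothetical names, for illustration only): a caller would
    # typically populate the view from parsed conditional dictionaries and open it:
    #
    #   view = ConditionalsView()
    #   view.add_system_conditionals(system_dict)      # dict keyed by action/truncation/... names
    #   view.add_pre_run_terminations(pre_run_items)
    #   view.edit_traits()   # assumed to be inherited from HasTraits via ConditionalsViewable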
# ============= EOF =============================================
| {
"content_hash": "57c2f0aba0396dd862a8a6c9ca936cbe",
"timestamp": "",
"source": "github",
"line_count": 79,
"max_line_length": 86,
"avg_line_length": 35.29113924050633,
"alnum_prop": 0.5749641319942611,
"repo_name": "NMGRL/pychron",
"id": "0b310da5cd135674c6648aaa6e46b1e6868327f0",
"size": "3521",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "pychron/experiment/conditional/conditionals_view.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "128"
},
{
"name": "C++",
"bytes": "3706"
},
{
"name": "CSS",
"bytes": "263"
},
{
"name": "Cython",
"bytes": "1692"
},
{
"name": "Fortran",
"bytes": "455875"
},
{
"name": "HTML",
"bytes": "46796"
},
{
"name": "Mako",
"bytes": "412"
},
{
"name": "Processing",
"bytes": "11421"
},
{
"name": "Python",
"bytes": "10773692"
},
{
"name": "Shell",
"bytes": "1003"
}
],
"symlink_target": ""
} |
"""Controllers for the admin view."""
import logging
import jinja2
from core import jobs
from core import jobs_registry
from core.controllers import base
from core.controllers import editor
from core.domain import collection_services
from core.domain import config_domain
from core.domain import config_services
from core.domain import exp_services
from core.domain import recommendations_services
from core.domain import rights_manager
from core.domain import rte_component_registry
from core.platform import models
import feconf
import utils
current_user_services = models.Registry.import_current_user_services()
def require_super_admin(handler):
"""Decorator that checks if the current user is a super admin."""
def test_super_admin(self, **kwargs):
"""Checks if the user is logged in and is a super admin."""
if not self.user_id:
self.redirect(
current_user_services.create_login_url(self.request.uri))
return
if not current_user_services.is_current_user_super_admin():
            raise self.UnauthorizedUserException(
                '%s is not a super admin of this application' % self.user_id)
return handler(self, **kwargs)
return test_super_admin
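# Usage sketch (illustrative, not part of the original module): the decorator wraps
# individual handler methods, e.g.
#
#   class SomeAdminOnlyHandler(base.BaseHandler):
#       @require_super_admin
#       def get(self):
#           self.render_json({'status': 'ok'})
#
# Non-admin users are redirected to the login URL or get UnauthorizedUserException.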
class AdminPage(base.BaseHandler):
"""Admin page shown in the App Engine admin console."""
@require_super_admin
def get(self):
"""Handles GET requests."""
demo_exploration_ids = feconf.DEMO_EXPLORATIONS.keys()
recent_job_data = jobs.get_data_for_recent_jobs()
unfinished_job_data = jobs.get_data_for_unfinished_jobs()
for job in unfinished_job_data:
job['can_be_canceled'] = job['is_cancelable'] and any([
klass.__name__ == job['job_type']
for klass in jobs_registry.ONE_OFF_JOB_MANAGERS])
queued_or_running_job_types = set([
job['job_type'] for job in unfinished_job_data])
one_off_job_specs = [{
'job_type': klass.__name__,
'is_queued_or_running': (
klass.__name__ in queued_or_running_job_types)
} for klass in jobs_registry.ONE_OFF_JOB_MANAGERS]
continuous_computations_data = jobs.get_continuous_computations_info(
jobs_registry.ALL_CONTINUOUS_COMPUTATION_MANAGERS)
for computation in continuous_computations_data:
if computation['last_started_msec']:
computation['human_readable_last_started'] = (
utils.get_human_readable_time_string(
computation['last_started_msec']))
if computation['last_stopped_msec']:
computation['human_readable_last_stopped'] = (
utils.get_human_readable_time_string(
computation['last_stopped_msec']))
if computation['last_finished_msec']:
computation['human_readable_last_finished'] = (
utils.get_human_readable_time_string(
computation['last_finished_msec']))
self.values.update({
'continuous_computations_data': continuous_computations_data,
'demo_collections': sorted(feconf.DEMO_COLLECTIONS.iteritems()),
'demo_explorations': sorted(feconf.DEMO_EXPLORATIONS.iteritems()),
'demo_exploration_ids': demo_exploration_ids,
'human_readable_current_time': (
utils.get_human_readable_time_string(
utils.get_current_time_in_millisecs())),
'one_off_job_specs': one_off_job_specs,
'recent_job_data': recent_job_data,
'rte_components_html': jinja2.utils.Markup(
rte_component_registry.Registry.get_html_for_all_components()),
'unfinished_job_data': unfinished_job_data,
'value_generators_js': jinja2.utils.Markup(
editor.get_value_generators_js()),
})
self.render_template('pages/admin/admin.html')
class AdminHandler(base.BaseHandler):
"""Handler for the admin page."""
GET_HANDLER_ERROR_RETURN_TYPE = feconf.HANDLER_TYPE_JSON
@require_super_admin
def get(self):
"""Handles GET requests."""
self.render_json({
'config_properties': (
config_domain.Registry.get_config_property_schemas()),
})
@require_super_admin
def post(self):
"""Handles POST requests."""
try:
if self.payload.get('action') == 'reload_exploration':
exploration_id = self.payload.get('exploration_id')
self._reload_exploration(exploration_id)
elif self.payload.get('action') == 'reload_collection':
collection_id = self.payload.get('collection_id')
self._reload_collection(collection_id)
elif self.payload.get('action') == 'clear_search_index':
exp_services.clear_search_index()
elif self.payload.get('action') == 'save_config_properties':
new_config_property_values = self.payload.get(
'new_config_property_values')
logging.info('[ADMIN] %s saved config property values: %s' %
(self.user_id, new_config_property_values))
for (name, value) in new_config_property_values.iteritems():
config_services.set_property(self.user_id, name, value)
elif self.payload.get('action') == 'revert_config_property':
config_property_id = self.payload.get('config_property_id')
logging.info('[ADMIN] %s reverted config property: %s' %
(self.user_id, config_property_id))
config_services.revert_property(
self.user_id, config_property_id)
elif self.payload.get('action') == 'start_new_job':
for klass in jobs_registry.ONE_OFF_JOB_MANAGERS:
if klass.__name__ == self.payload.get('job_type'):
klass.enqueue(klass.create_new())
break
elif self.payload.get('action') == 'cancel_job':
job_id = self.payload.get('job_id')
job_type = self.payload.get('job_type')
for klass in jobs_registry.ONE_OFF_JOB_MANAGERS:
if klass.__name__ == job_type:
klass.cancel(job_id, self.user_id)
break
elif self.payload.get('action') == 'start_computation':
computation_type = self.payload.get('computation_type')
for klass in jobs_registry.ALL_CONTINUOUS_COMPUTATION_MANAGERS:
if klass.__name__ == computation_type:
klass.start_computation()
break
elif self.payload.get('action') == 'stop_computation':
computation_type = self.payload.get('computation_type')
for klass in jobs_registry.ALL_CONTINUOUS_COMPUTATION_MANAGERS:
if klass.__name__ == computation_type:
klass.stop_computation(self.user_id)
break
elif self.payload.get('action') == 'upload_topic_similarities':
data = self.payload.get('data')
recommendations_services.update_topic_similarities(data)
self.render_json({})
except Exception as e:
self.render_json({'error': unicode(e)})
raise
def _reload_exploration(self, exploration_id):
if feconf.DEV_MODE:
logging.info(
'[ADMIN] %s reloaded exploration %s' %
(self.user_id, exploration_id))
exp_services.load_demo(unicode(exploration_id))
rights_manager.release_ownership_of_exploration(
feconf.SYSTEM_COMMITTER_ID, unicode(exploration_id))
else:
raise Exception('Cannot reload an exploration in production.')
def _reload_collection(self, collection_id):
if feconf.DEV_MODE:
logging.info(
'[ADMIN] %s reloaded collection %s' %
(self.user_id, collection_id))
collection_services.load_demo(unicode(collection_id))
rights_manager.release_ownership_of_collection(
feconf.SYSTEM_COMMITTER_ID, unicode(collection_id))
else:
raise Exception('Cannot reload a collection in production.')
class AdminJobOutput(base.BaseHandler):
"""Retrieves job output to show on the admin page."""
GET_HANDLER_ERROR_RETURN_TYPE = feconf.HANDLER_TYPE_JSON
@require_super_admin
def get(self):
"""Handles GET requests."""
job_id = self.request.get('job_id')
self.render_json({
'output': jobs.get_job_output(job_id)
})
class AdminTopicsCsvDownloadHandler(base.BaseHandler):
"""Retrieves topic similarity data for download."""
@require_super_admin
def get(self):
self.response.headers['Content-Type'] = 'text/csv'
self.response.headers['Content-Disposition'] = (
'attachment; filename=topic_similarities.csv')
self.response.write(
recommendations_services.get_topic_similarities_as_csv())
| {
"content_hash": "734ac99c40717eaf4c74cf29a9286a97",
"timestamp": "",
"source": "github",
"line_count": 217,
"max_line_length": 79,
"avg_line_length": 43.08755760368663,
"alnum_prop": 0.5922994652406417,
"repo_name": "michaelWagner/oppia",
"id": "dad98ed9454d92e2a28af7f8983c7119a70348e3",
"size": "9955",
"binary": false,
"copies": "4",
"ref": "refs/heads/develop",
"path": "core/controllers/admin.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "363"
},
{
"name": "CSS",
"bytes": "62765"
},
{
"name": "HTML",
"bytes": "358956"
},
{
"name": "JavaScript",
"bytes": "1572205"
},
{
"name": "Python",
"bytes": "1969372"
},
{
"name": "Shell",
"bytes": "31108"
}
],
"symlink_target": ""
} |
"""Generic module is the parent module of all other module"""
import os
import shutil
import io
import base64
from jinja2 import Environment, PackageLoader
# from reports import HTMLTable
from sequana.utils import config
import colorlog
logger = colorlog.getLogger(__name__)
__all__ = ["SequanaBaseModule"]
class SequanaBaseModule(object):
"""Generic Module to write HTML reports.
# to add a TOC, add this code::
<div id="tocDiv">
<ul id="tocList"> </ul>
</div>
"""
def __init__(self, template_fn="standard.html", required_dir=None):
if required_dir is None:
self.required_dir = ("css", "js", "images")
else:
self.required_dir = required_dir
self.output_dir = config.output_dir
self.path = "./"
# Initiate jinja template
env = Environment(loader=PackageLoader("sequana", "resources/templates/"))
self.template = env.get_template(template_fn)
self._init_report()
self._fotorama_js_added = False
def _init_report(self):
"""Create the report directory. All necessary directories are copied
in working directory.
"""
# Be aware of #465 issue. We need to check that the target file is
# valid, in which case there is no need to copy the files.
# Create report directory
if os.path.isdir(config.output_dir) is False:
os.mkdir(self.output_dir)
for directory in self.required_dir:
complete_directory = os.sep.join([self.output_dir, directory])
if os.path.isdir(complete_directory) is False:
os.mkdir(complete_directory)
# Copy css/js necessary files
for filename in config.css_list:
target = os.sep.join([self.output_dir, "css"])
if os.path.isfile(target) is False:
shutil.copy(filename, target)
for filename in config.js_list:
target = os.sep.join([self.output_dir, "js"])
if os.path.isfile(target) is False:
shutil.copy(filename, target)
def create_html(self, output_filename):
"""Create HTML file with Jinja2.
:param str output_filename: HTML output filename
"""
if output_filename is None:
return
report_output = self.template.render(config=config, module=self)
with open(os.sep.join([config.output_dir, output_filename]), "w") as fp:
print(report_output, file=fp)
def create_link(self, name, target, newtab=True, download=False):
"""Create an HTML hyperlink with name and target.
:param str target: the target url.
:param bool newtab: open html page in a new tab.
:param bool download: download the target.
Return as string the HTML hyperlink to the target.
"""
link = '<a href="{0}" '
if newtab:
link += 'target="_blank" '
if download:
link += 'download="{0}" '
link += ">{1}</a>"
return link.format(target, name)
def create_hide_section(self, html_id, name, content, hide=False):
"""Create an hideable section.
:param str html_id: add short id to connect all elements.
:param str name: name of the hyperlink to hide or display the content.
:param str content: hideable HTML content.
:param bool hide: set if the first state is hiding or not.
Return tuple that contains HTML hyperlink and hideable section.
"""
link = "<a href='#1' class='show_hide{0}'>{1}</a>".format(html_id, name)
content = "<div class='slidingDiv{0}'>\n{1}\n</div>".format(html_id, content)
hidden = ""
if hide:
hidden = '\n$(".slidingDiv{0}").hide();'.format(html_id)
js = """
<script type="text/javascript">
$(document).ready(function(){{{1}
$(".show_hide{0}").click(function(){{
$(".slidingDiv{0}").slideToggle();
}});
}});
</script>
""".format(
html_id, hidden
)
content = js + content
return link, content
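    # Usage sketch (illustrative, not part of the original module):
    #
    #   link, section = self.create_hide_section("cov", "Show coverage", "<p>...</p>", hide=True)
    #   html = link + section   # clicking the link toggles the section via the generated jQuery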
def copy_file(self, filename, target_dir):
"""Copy a file to a target directory in report dir. Return the
relative path of your file.
:param str filename: file to copy.
:param str target_dir: directory where to copy.
Return relative path of the new file location.
"""
directory = config.output_dir + os.sep + target_dir
try:
os.makedirs(directory)
except FileExistsError:
if os.path.isdir(directory):
pass
else:
msg = "{0} exist and it is not a directory".format(directory)
logger.error(msg)
raise FileExistsError
try:
shutil.copy(filename, directory)
except FileNotFoundError:
msg = "{0} doesn't exist".format(filename)
raise FileNotFoundError(msg)
return target_dir + os.sep + os.path.basename(filename)
def add_float_right(self, content):
"""Align a content to right."""
return '<div style="float:right">{0}</div>'.format(content)
def add_code_section(self, content, language):
"""Add code in your html."""
html = '<div class="code"><pre><code class="{0}">{1}' "</code></pre></div>"
return html.format(language, content)
def include_svg_image(self, filename, alt="undefined"):
"""Include SVG image in the html."""
html = (
'<object data="{0}" type="image/svg+xml">\n'
'<img src="{0}" alt={1}></object>'
)
return html.format(filename, alt)
def png_to_embedded_png(self, png, style=None, alt="", title=""):
"""Include a PNG file as embedded file."""
import base64
with open(png, "rb") as fp:
png = base64.b64encode(fp.read()).decode()
if style:
html = '<img style="{0}" alt="{1}" title="{2}"'.format(style, alt, title)
else:
html = '<img alt="{}" title="{}"'.format(alt, title)
return '{0} src="data:image/png;base64,{1}">'.format(html, png)
def create_embedded_png(self, plot_function, input_arg, style=None, **kwargs):
"""Take as a plot function as input and create a html embedded png
image. You must set the arguments name for the output to connect
buffer.
"""
buf = io.BytesIO()
# add buffer as output of the plot function
kwargs = dict({input_arg: buf}, **kwargs)
try:
plot_function(**kwargs)
html = "<img "
if style:
html += 'style="{0}"'.format(style)
html += 'src="data:image/png;base64,{0}"/>'.format(
base64.b64encode(buf.getvalue()).decode("utf-8")
)
buf.close()
except Exception as err:
print(err)
html = "image not created"
return html
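    # Usage sketch (illustrative): any callable with a keyword argument naming its
    # output target works here, e.g. a matplotlib figure's savefig, whose output
    # argument is "fname":
    #
    #   html = self.create_embedded_png(fig.savefig, "fname", style="width:50%", format="png")
    #
    # The BytesIO buffer is passed as fname=<buffer>, so the PNG never touches disk.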
def create_combobox(self, path_list, html_id, newtab=True):
"""Create a dropdown menu with QueryJS.
:param list path_list: list of links.
return html div and js script as string.
"""
option_list = (
"<li>{0}</li>\n".format(
self.create_link(os.path.basename(path), path, newtab)
)
for path in path_list
)
html = """
<div id="jq-dropdown-{1}" class="jq-dropdown jq-dropdown-tip jq-dropdown-scroll">
<ul class="jq-dropdown-menu">
{0}
</ul>
</div>
<a href="#" data-jq-dropdown="#jq-dropdown-{1}">Subchromosome</a>
""".format(
"\n".join(option_list), html_id
)
return html
def add_fotorama(
self,
files,
width=600,
height=800,
loop=True,
thumbnails=True,
file_thumbnails=None,
captions=None,
):
if self._fotorama_js_added is False:
script = """
<!-- jQuery 1.8 or later, 33 KB -->
<!--<script src="https://ajax.googleapis.com/ajax/libs/jquery/1.11.1/jquery.min.js"></script>-->
<!-- Fotorama from CDNJS, 19 KB -->
<link href="https://cdnjs.cloudflare.com/ajax/libs/fotorama/4.6.4/fotorama.css" rel="stylesheet">
<script src="https://cdnjs.cloudflare.com/ajax/libs/fotorama/4.6.4/fotorama.js"></script>
"""
self._fotorama_js_added = True
else:
script = ""
if captions:
if len(files) != len(captions):
raise ValueError(
"captions and files must be of same length with 1-to-1 mapping"
)
else:
captions = [filename.split("/")[-1] for filename in files]
script += '<div class="fotorama" fzyz-keyboard="true" '
if thumbnails is True:
script += ' data-nav="thumbs"'
if loop is True:
script += ' data-loop="true"'
script += ' data-width="{}" data-height="{}"'.format(width, height)
script += ">"
for filename, caption in zip(files, captions):
script += '<img src="{}" data-caption="{}">'.format(filename, caption)
script += "</div>"
return script
| {
"content_hash": "4af8f19ca5e95054d68b613cf923fdf3",
"timestamp": "",
"source": "github",
"line_count": 273,
"max_line_length": 106,
"avg_line_length": 34.16483516483517,
"alnum_prop": 0.5588077624102069,
"repo_name": "sequana/sequana",
"id": "ed24533d8f5b1fa527a8f18a0f61f60762fae006",
"size": "9884",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "sequana/modules_report/base_module.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "6314"
},
{
"name": "Dockerfile",
"bytes": "1693"
},
{
"name": "HTML",
"bytes": "5379"
},
{
"name": "JavaScript",
"bytes": "686"
},
{
"name": "Jupyter Notebook",
"bytes": "1990042"
},
{
"name": "Python",
"bytes": "1509148"
},
{
"name": "R",
"bytes": "60806"
},
{
"name": "Shell",
"bytes": "2553"
},
{
"name": "Singularity",
"bytes": "4235"
}
],
"symlink_target": ""
} |
'''
'''
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
Test.Summary = '''
Test tls server certificate verification options
'''
# Define default ATS
ts = Test.MakeATSProcess("ts", select_ports=True, enable_tls=True)
server_foo = Test.MakeOriginServer("server_foo", ssl=True, options = {"--key": "{0}/signed-foo.key".format(Test.RunDirectory), "--cert": "{0}/signed-foo.pem".format(Test.RunDirectory)})
server_bar = Test.MakeOriginServer("server_bar", ssl=True, options = {"--key": "{0}/signed-bar.key".format(Test.RunDirectory), "--cert": "{0}/signed-bar.pem".format(Test.RunDirectory)})
server = Test.MakeOriginServer("server", ssl=True)
request_foo_header = {"headers": "GET / HTTP/1.1\r\nHost: foo.com\r\n\r\n", "timestamp": "1469733493.993", "body": ""}
request_bad_foo_header = {"headers": "GET / HTTP/1.1\r\nHost: bad_foo.com\r\n\r\n", "timestamp": "1469733493.993", "body": ""}
request_bar_header = {"headers": "GET / HTTP/1.1\r\nHost: bar.com\r\n\r\n", "timestamp": "1469733493.993", "body": ""}
request_bad_bar_header = {"headers": "GET / HTTP/1.1\r\nHost: bad_bar.com\r\n\r\n", "timestamp": "1469733493.993", "body": ""}
response_header = {"headers": "HTTP/1.1 200 OK\r\nConnection: close\r\n\r\n", "timestamp": "1469733493.993", "body": ""}
server_foo.addResponse("sessionlog.json", request_foo_header, response_header)
server_foo.addResponse("sessionlog.json", request_bad_foo_header, response_header)
server_bar.addResponse("sessionlog.json", request_bar_header, response_header)
server_bar.addResponse("sessionlog.json", request_bad_bar_header, response_header)
# add ssl materials like key, certificates for the server
ts.addSSLfile("ssl/signed-foo.pem")
ts.addSSLfile("ssl/signed-foo.key")
ts.addSSLfile("ssl/signed-bar.pem")
ts.addSSLfile("ssl/signed-bar.key")
ts.addSSLfile("ssl/server.pem")
ts.addSSLfile("ssl/server.key")
ts.addSSLfile("ssl/signer.pem")
ts.addSSLfile("ssl/signer.key")
ts.Disk.remap_config.AddLine(
'map https://foo.com/ https://127.0.0.1:{0}'.format(server_foo.Variables.SSL_Port))
ts.Disk.remap_config.AddLine(
'map https://bad_foo.com/ https://127.0.0.1:{0}'.format(server_foo.Variables.SSL_Port))
ts.Disk.remap_config.AddLine(
'map https://bar.com/ https://127.0.0.1:{0}'.format(server_bar.Variables.SSL_Port))
ts.Disk.remap_config.AddLine(
'map https://bad_bar.com/ https://127.0.0.1:{0}'.format(server_bar.Variables.SSL_Port))
ts.Disk.remap_config.AddLine(
'map / https://127.0.0.1:{0}'.format(server.Variables.SSL_Port))
ts.Disk.ssl_multicert_config.AddLine(
'dest_ip=* ssl_cert_name=server.pem ssl_key_name=server.key'
)
# Case 1, global config policy=enforced properties=all
# overrides: bar.com/bad_bar.com policy=permissive properties=signature, random.com policy=disabled
ts.Disk.records_config.update({
'proxy.config.ssl.server.cert.path': '{0}'.format(ts.Variables.SSLDir),
'proxy.config.ssl.server.private_key.path': '{0}'.format(ts.Variables.SSLDir),
'proxy.config.ssl.server.cipher_suite': 'ECDHE-RSA-AES128-GCM-SHA256:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-RSA-AES128-SHA256:ECDHE-RSA-AES256-SHA384:AES128-GCM-SHA256:AES256-GCM-SHA384:ECDHE-RSA-RC4-SHA:ECDHE-RSA-AES128-SHA:ECDHE-RSA-AES256-SHA:RC4-SHA:RC4-MD5:AES128-SHA:AES256-SHA:DES-CBC3-SHA!SRP:!DSS:!PSK:!aNULL:!eNULL:!SSLv2',
# set global policy
'proxy.config.ssl.client.verify.server.policy': 'ENFORCED',
'proxy.config.ssl.client.verify.server.properties': 'ALL',
'proxy.config.ssl.client.CA.cert.path': '{0}'.format(ts.Variables.SSLDir),
'proxy.config.ssl.client.CA.cert.filename': 'signer.pem',
'proxy.config.exec_thread.autoconfig.scale': 1.0,
'proxy.config.url_remap.pristine_host_hdr': 1
})
ts.Disk.sni_yaml.AddLine(
'sni:')
ts.Disk.sni_yaml.AddLine(
'- fqdn: bar.com')
ts.Disk.sni_yaml.AddLine(
' verify_server_policy: PERMISSIVE')
ts.Disk.sni_yaml.AddLine(
' verify_server_properties: SIGNATURE')
ts.Disk.sni_yaml.AddLine(
'- fqdn: bad_bar.com')
ts.Disk.sni_yaml.AddLine(
' verify_server_policy: PERMISSIVE')
ts.Disk.sni_yaml.AddLine(
' verify_server_properties: SIGNATURE')
ts.Disk.sni_yaml.AddLine(
'- fqdn: random.com')
ts.Disk.sni_yaml.AddLine(
' verify_server_policy: DISABLED')
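# For reference (reconstructed from the AddLine calls above, not a separate file),
# the generated sni.yaml is equivalent to:
#
#   sni:
#   - fqdn: bar.com
#     verify_server_policy: PERMISSIVE
#     verify_server_properties: SIGNATURE
#   - fqdn: bad_bar.com
#     verify_server_policy: PERMISSIVE
#     verify_server_properties: SIGNATURE
#   - fqdn: random.com
#     verify_server_policy: DISABLED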
tr = Test.AddTestRun("default-enforce")
tr.Setup.Copy("ssl/signed-foo.key")
tr.Setup.Copy("ssl/signed-foo.pem")
tr.Setup.Copy("ssl/signed-bar.key")
tr.Setup.Copy("ssl/signed-bar.pem")
tr.Processes.Default.Command = "curl -k -H \"host: foo.com\" https://127.0.0.1:{0}".format(ts.Variables.ssl_port)
tr.ReturnCode = 0
tr.Processes.Default.StartBefore(server_foo)
tr.Processes.Default.StartBefore(server_bar)
tr.Processes.Default.StartBefore(server)
tr.Processes.Default.StartBefore(Test.Processes.ts)
tr.StillRunningAfter = server
tr.StillRunningAfter = ts
tr.Processes.Default.Streams.stdout = Testers.ExcludesExpression("Could Not Connect", "Curl attempt should have succeeded")
tr2 = Test.AddTestRun("override-disabled")
tr2.Processes.Default.Command = "curl -k -H \"host: random.com\" https://127.0.0.1:{0}".format(ts.Variables.ssl_port)
tr2.ReturnCode = 0
tr2.StillRunningAfter = server
tr2.StillRunningAfter = ts
tr2.Processes.Default.Streams.stdout = Testers.ExcludesExpression("Could Not Connect", "Curl attempt should have succeeded")
tr3 = Test.AddTestRun("override-permissive")
tr3.Processes.Default.Command = "curl -k -H \"host: bar.com\" https://127.0.0.1:{0}".format(ts.Variables.ssl_port)
tr3.ReturnCode = 0
tr3.StillRunningAfter = server
tr3.StillRunningAfter = ts
tr3.Processes.Default.Streams.stdout = Testers.ExcludesExpression("Could Not Connect", "Curl attempt should have succeeded")
tr4 = Test.AddTestRun("override-permissive-bad-name")
tr4.Processes.Default.Command = "curl -k -H \"host: bad_bar.com\" https://127.0.0.1:{0}".format(ts.Variables.ssl_port)
tr4.ReturnCode = 0
tr4.StillRunningAfter = server
tr4.StillRunningAfter = ts
tr4.Processes.Default.Streams.stdout = Testers.ExcludesExpression("Could Not Connect", "Curl attempt should have succeeded")
tr5 = Test.AddTestRun("default-enforce-bad-sig")
tr5.Processes.Default.Command = "curl -k -H \"host: random2.com\" https://127.0.0.1:{0}".format(ts.Variables.ssl_port)
tr5.ReturnCode = 0
tr5.Processes.Default.Streams.stdout = Testers.ContainsExpression("Could Not Connect", "Curl attempt should have failed")
tr5.StillRunningAfter = server
tr5.StillRunningAfter = ts
tr6 = Test.AddTestRun("default-enforce-fail")
tr6.Processes.Default.Command = "curl -k -H \"host: bad_foo.com\" https://127.0.0.1:{0}".format(ts.Variables.ssl_port)
tr6.ReturnCode = 0
tr6.Processes.Default.Streams.stdout = Testers.ContainsExpression("Could Not Connect", "Curl attempt should have failed")
tr6.StillRunningAfter = server
tr6.StillRunningAfter = ts
# No name checking for the sig-only permissive override for bad_bar
ts.Disk.diags_log.Content += Testers.ExcludesExpression("WARNING: SNI \(bad_bar.com\) not in certificate", "bad_bar name checked should be skipped.")
ts.Disk.diags_log.Content = Testers.ExcludesExpression("WARNING: SNI \(foo.com\) not in certificate", "foo name checked should be skipped.")
# No checking for the self-signed on random.com. No messages
ts.Disk.diags_log.Content += Testers.ExcludesExpression("WARNING: Core server certificate verification failed for \(random.com\)", "signature check for random.com should be skipped")
ts.Disk.diags_log.Content += Testers.ContainsExpression("WARNING: Core server certificate verification failed for \(random2.com\)", "signature check for random.com should fail'")
ts.Disk.diags_log.Content += Testers.ContainsExpression("WARNING: SNI \(bad_foo.com\) not in certificate", "bad_foo name checked should be checked.")
| {
"content_hash": "29a97eeb3927dbcf6d1dcbf1e7c6874a",
"timestamp": "",
"source": "github",
"line_count": 156,
"max_line_length": 332,
"avg_line_length": 53.71153846153846,
"alnum_prop": 0.7373194891991884,
"repo_name": "dyrock/trafficserver",
"id": "e141fd44d06017d9128974e35032d85e888517e8",
"size": "8379",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/gold_tests/tls/tls_verify2.test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "1350784"
},
{
"name": "C++",
"bytes": "12710340"
},
{
"name": "CMake",
"bytes": "18461"
},
{
"name": "Dockerfile",
"bytes": "3324"
},
{
"name": "Java",
"bytes": "9881"
},
{
"name": "JavaScript",
"bytes": "1609"
},
{
"name": "Lua",
"bytes": "46851"
},
{
"name": "M4",
"bytes": "186665"
},
{
"name": "Makefile",
"bytes": "196543"
},
{
"name": "Objective-C",
"bytes": "15182"
},
{
"name": "Perl",
"bytes": "119190"
},
{
"name": "Python",
"bytes": "711895"
},
{
"name": "Shell",
"bytes": "124230"
},
{
"name": "TSQL",
"bytes": "5188"
},
{
"name": "Vim script",
"bytes": "192"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from six import binary_type, text_type
def to_unicode(s):
if isinstance(s, binary_type):
return s.decode('ascii')
return s
def to_bytes(s, charset='ascii'):
if isinstance(s, text_type):
return s.encode(charset)
return s
| {
"content_hash": "4ecc61add009e56a02195924b2632bd5",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 39,
"avg_line_length": 19.8,
"alnum_prop": 0.6531986531986532,
"repo_name": "bechtoldt/imapclient",
"id": "923961cd5172def706d51e7dd8d98990c43a9996",
"size": "429",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "imapclient/util.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "232540"
},
{
"name": "Shell",
"bytes": "605"
}
],
"symlink_target": ""
} |
'''
Created on Jul 2, 2014
@author: yury
'''
from bboxcoverage import BBoxCoverage
from interfaces.adb_interface import *
from interfaces.apktool_interface import *
from interfaces.dex2jar_interface import *
from interfaces.dx_interface import *
from interfaces.emma_interface import *
from utils.android_manifest import *
def main():
bboxcoverage = BBoxCoverage()
#bboxcoverage.instrumentApkForCoverage(pathToOrigApk="/home/yury/TMP/BBoxTester/Notepad.apk", resultsDir="/home/yury/TMP/BBoxTester/results/", tmpDir="/home/yury/TMP/BBoxTester/tmp", overwriteExisting=True, removeApkTmpDirAfterInstr=False, copyApkToRes=True)
#bboxcoverage.installApkOnDevice()
#bboxcoverage.startTesting()
#time.sleep(30)
#localReport = bboxcoverage.stopTesting()
#bboxcoverage.generateReport([localReport], EMMA_REPORT.XML)
#bboxcoverage.instrumentApkForCoverage(pathToOrigApk="/home/yury/PROJECTS/BBOXTESTING2/app/com.markuspage.android.atimetracker.apk", resultsDir="/home/yury/PROJECTS/BBOXTESTING2/app/results", tmpDir="/home/yury/TMP/BBoxTester/tmp", removeApkTmpDirAfterInstr=False, copyApkToRes=True)
#bboxcoverage.installApkOnDevice()
#bboxcoverage.startTesting()
#time.sleep(30)
#localReport = bboxcoverage.stopTesting()
#bboxcoverage.generateReport([localReport], EMMA_REPORT.XML)
# bboxcoverage.instrumentApkForCoverage(pathToOrigApk="/home/yury/PROJECTS/BBOXTESTING2/app/com.markuspage.android.atimetracker_17.apk", resultsDir="/home/yury/PROJECTS/BBOXTESTING2/app/results_packed", tmpDir="/home/yury/TMP/BBoxTester/tmp", removeApkTmpDirAfterInstr=False, copyApkToRes=True)
# bboxcoverage.installApkOnDevice()
# bboxcoverage.startTesting()
# time.sleep(30)
# localReport = bboxcoverage.stopTesting()
bboxcoverage._signApk(bboxcoverage.bboxInstrumenter, "/home/yury/PROJECTS/BBOXTESTING2/app/com.markuspage.android.atimetracker_17_aligned.apk", "/home/yury/PROJECTS/BBOXTESTING2/app/com.markuspage.android.atimetracker_17_aligned_signed.apk")
bboxcoverage.initAlreadyInstrApkEnv(pathToInstrApk="/home/yury/PROJECTS/BBOXTESTING2/app/com.markuspage.android.atimetracker_17_aligned_signed.apk", resultsDir="/home/yury/PROJECTS/BBOXTESTING2/app/results_packed/com.markuspage.android.atimetracker_17/")
bboxcoverage.installApkOnDevice()
bboxcoverage.startTesting()
time.sleep(30)
localReport = bboxcoverage.stopTesting()
# bboxcoverage2 = BBoxCoverage()
# bboxcoverage2.initAlreadyInstrApkEnv(pathToInstrApk="/home/yury/TMP/BBoxTester/Notepad_instr_final.apk", resultsDir="/home/yury/TMP/BBoxTester/results/Notepad")
# bboxcoverage2.startTesting()
# time.sleep(20)
# lRep = bboxcoverage2.stopTesting()
# bboxcoverage2.generateReport([lRep], EMMA_REPORT.XML)
if __name__ == '__main__':
main() | {
"content_hash": "c99401c6dceb6603e8d95c9a090428f7",
"timestamp": "",
"source": "github",
"line_count": 55,
"max_line_length": 298,
"avg_line_length": 51.54545454545455,
"alnum_prop": 0.7721340388007054,
"repo_name": "zyrikby/BBoxTester",
"id": "7f91bcca376c2116de3f953adcbfdcabd7f11bc6",
"size": "2835",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "BBoxTester/bboxtester.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "11776"
},
{
"name": "Java",
"bytes": "11973"
},
{
"name": "Python",
"bytes": "179016"
},
{
"name": "Shell",
"bytes": "15666"
}
],
"symlink_target": ""
} |
"""
Dummy node for bee.segments.modifier in the GUI
"""
class modifier(object):
guiparams = dict(
__beename__="modifier",
antennas=dict(
trigger=("push", "trigger"),
),
outputs=dict(
on_trigger=("push", "trigger"),
pre_trigger=("push", "trigger"),
),
parameters={"code": "pythoncode"},
)
| {
"content_hash": "993069b8fccc5afb245e67e4234316a9",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 47,
"avg_line_length": 21.27777777777778,
"alnum_prop": 0.5013054830287206,
"repo_name": "agoose77/hivesystem",
"id": "22c7d9420450e4e6282164a98e98450d33a93509",
"size": "383",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "hiveguilib/segments/modifier.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "2491478"
},
{
"name": "Shell",
"bytes": "1164"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import
import os
import nox
LOCAL_DEPS = (
os.path.join('..', 'api_core'),
)
def default(session):
"""Default unit test session.
This is intended to be run **without** an interpreter set, so
that the current ``python`` (on the ``PATH``) or the version of
Python corresponding to the ``nox`` binary on the ``PATH`` can
run the tests.
"""
session.install('mock', 'pytest', 'pytest-cov')
for local_dep in LOCAL_DEPS:
session.install('-e', local_dep)
session.install('-e', '.')
# Run py.test against the unit tests.
session.run(
'py.test',
'--quiet',
'--cov=google.cloud.container',
'--cov=google.cloud.container_v1',
'--cov=tests.unit',
'--cov-append',
'--cov-config=.coveragerc',
'--cov-report=',
os.path.join('tests', 'unit'),
*session.posargs
)
@nox.session(python=['2.7', '3.5', '3.6', '3.7'])
def unit(session):
"""Default unit test session."""
# Install all test dependencies, then install local packages in-place.
default(session)
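# Invocation sketch (assumes nox is installed; selector syntax may vary by nox
# version): `nox -s unit-3.6` runs only the unit session on Python 3.6, while a
# bare `nox` runs every session defined in this file.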
@nox.session(python=['2.7', '3.6'])
def system(session):
"""Run the system test suite."""
# Sanity check: Only run system tests if the environment variable is set.
if not os.environ.get('GOOGLE_APPLICATION_CREDENTIALS', ''):
session.skip('Credentials must be set via environment variable.')
# Use pre-release gRPC for system tests.
session.install('--pre', 'grpcio')
# Install all test dependencies, then install this package into the
# virtualenv's dist-packages.
session.install('mock', 'pytest')
for local_dep in LOCAL_DEPS:
session.install('-e', local_dep)
session.install('-e', '../test_utils/')
session.install('-e', '.')
# Run py.test against the system tests.
session.run('py.test', '--quiet', 'tests/system/')
@nox.session(python='3.6')
def lint_setup_py(session):
"""Verify that setup.py is valid (including RST check)."""
session.install('docutils', 'pygments')
session.run('python', 'setup.py', 'check', '--restructuredtext',
'--strict')
| {
"content_hash": "cd0166bf687ef22b30f795320b25d7f8",
"timestamp": "",
"source": "github",
"line_count": 72,
"max_line_length": 77,
"avg_line_length": 30.26388888888889,
"alnum_prop": 0.6126663607159247,
"repo_name": "jonparrott/gcloud-python",
"id": "37ad9c7cbad17cd33607e8cd6a17d7709fb324aa",
"size": "2776",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "container/noxfile.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "3366"
},
{
"name": "PowerShell",
"bytes": "7195"
},
{
"name": "Protocol Buffer",
"bytes": "62009"
},
{
"name": "Python",
"bytes": "3459300"
},
{
"name": "Shell",
"bytes": "7548"
}
],
"symlink_target": ""
} |
"""
Tests for L{twisted.conch.checkers}.
"""
try:
import crypt
except ImportError:
cryptSkip = 'cannot run without crypt module'
else:
cryptSkip = None
import os, base64
from twisted.python import util
from twisted.python.failure import Failure
from twisted.trial.unittest import TestCase
from twisted.python.filepath import FilePath
from twisted.cred.checkers import InMemoryUsernamePasswordDatabaseDontUse
from twisted.cred.credentials import UsernamePassword, IUsernamePassword, \
SSHPrivateKey, ISSHPrivateKey
from twisted.cred.error import UnhandledCredentials, UnauthorizedLogin
from twisted.python.fakepwd import UserDatabase, ShadowDatabase
from twisted.test.test_process import MockOS
try:
import Crypto.Cipher.DES3
import pyasn1
except ImportError:
dependencySkip = "can't run without Crypto and PyASN1"
else:
dependencySkip = None
from twisted.conch.ssh import keys
from twisted.conch import checkers
from twisted.conch.error import NotEnoughAuthentication, ValidPublicKey
from twisted.conch.test import keydata
if getattr(os, 'geteuid', None) is None:
euidSkip = "Cannot run without effective UIDs (questionable)"
else:
euidSkip = None
class HelperTests(TestCase):
"""
Tests for helper functions L{verifyCryptedPassword}, L{_pwdGetByName} and
L{_shadowGetByName}.
"""
skip = cryptSkip or dependencySkip
def setUp(self):
self.mockos = MockOS()
def test_verifyCryptedPassword(self):
"""
L{verifyCryptedPassword} returns C{True} if the plaintext password
passed to it matches the encrypted password passed to it.
"""
password = 'secret string'
salt = 'salty'
crypted = crypt.crypt(password, salt)
self.assertTrue(
checkers.verifyCryptedPassword(crypted, password),
'%r supposed to be valid encrypted password for %r' % (
crypted, password))
def test_verifyCryptedPasswordMD5(self):
"""
L{verifyCryptedPassword} returns True if the provided cleartext password
matches the provided MD5 password hash.
"""
password = 'password'
salt = '$1$salt'
crypted = crypt.crypt(password, salt)
self.assertTrue(
checkers.verifyCryptedPassword(crypted, password),
'%r supposed to be valid encrypted password for %s' % (
crypted, password))
def test_refuteCryptedPassword(self):
"""
L{verifyCryptedPassword} returns C{False} if the plaintext password
passed to it does not match the encrypted password passed to it.
"""
password = 'string secret'
wrong = 'secret string'
crypted = crypt.crypt(password, password)
self.assertFalse(
checkers.verifyCryptedPassword(crypted, wrong),
'%r not supposed to be valid encrypted password for %s' % (
crypted, wrong))
def test_pwdGetByName(self):
"""
L{_pwdGetByName} returns a tuple of items from the UNIX /etc/passwd
database if the L{pwd} module is present.
"""
userdb = UserDatabase()
userdb.addUser(
'alice', 'secrit', 1, 2, 'first last', '/foo', '/bin/sh')
self.patch(checkers, 'pwd', userdb)
self.assertEquals(
checkers._pwdGetByName('alice'), userdb.getpwnam('alice'))
def test_pwdGetByNameWithoutPwd(self):
"""
If the C{pwd} module isn't present, L{_pwdGetByName} returns C{None}.
"""
self.patch(checkers, 'pwd', None)
self.assertIdentical(checkers._pwdGetByName('alice'), None)
def test_shadowGetByName(self):
"""
L{_shadowGetByName} returns a tuple of items from the UNIX /etc/shadow
        database if the L{spwd} module is present.
"""
userdb = ShadowDatabase()
userdb.addUser('bob', 'passphrase', 1, 2, 3, 4, 5, 6, 7)
self.patch(checkers, 'spwd', userdb)
self.mockos.euid = 2345
self.mockos.egid = 1234
self.patch(checkers, 'os', self.mockos)
self.patch(util, 'os', self.mockos)
self.assertEquals(
checkers._shadowGetByName('bob'), userdb.getspnam('bob'))
self.assertEquals(self.mockos.seteuidCalls, [0, 2345])
self.assertEquals(self.mockos.setegidCalls, [0, 1234])
def test_shadowGetByNameWithoutSpwd(self):
"""
L{_shadowGetByName} uses the C{shadow} module to return a tuple of items
from the UNIX /etc/shadow database if the C{spwd} module is not present
and the C{shadow} module is.
"""
userdb = ShadowDatabase()
userdb.addUser('bob', 'passphrase', 1, 2, 3, 4, 5, 6, 7)
self.patch(checkers, 'spwd', None)
self.patch(checkers, 'shadow', userdb)
self.patch(checkers, 'os', self.mockos)
self.patch(util, 'os', self.mockos)
self.mockos.euid = 2345
self.mockos.egid = 1234
self.assertEquals(
checkers._shadowGetByName('bob'), userdb.getspnam('bob'))
self.assertEquals(self.mockos.seteuidCalls, [0, 2345])
self.assertEquals(self.mockos.setegidCalls, [0, 1234])
def test_shadowGetByNameWithoutEither(self):
"""
L{_shadowGetByName} returns C{None} if neither C{spwd} nor C{shadow} is
present.
"""
self.patch(checkers, 'spwd', None)
self.patch(checkers, 'shadow', None)
self.patch(checkers, 'os', self.mockos)
self.assertIdentical(checkers._shadowGetByName('bob'), None)
self.assertEquals(self.mockos.seteuidCalls, [])
self.assertEquals(self.mockos.setegidCalls, [])
class SSHPublicKeyDatabaseTestCase(TestCase):
"""
Tests for L{SSHPublicKeyDatabase}.
"""
skip = euidSkip or dependencySkip
def setUp(self):
self.checker = checkers.SSHPublicKeyDatabase()
self.key1 = base64.encodestring("foobar")
self.key2 = base64.encodestring("eggspam")
self.content = "t1 %s foo\nt2 %s egg\n" % (self.key1, self.key2)
self.mockos = MockOS()
self.mockos.path = FilePath(self.mktemp())
self.mockos.path.makedirs()
self.patch(checkers, 'os', self.mockos)
self.patch(util, 'os', self.mockos)
self.sshDir = self.mockos.path.child('.ssh')
self.sshDir.makedirs()
userdb = UserDatabase()
userdb.addUser(
'user', 'password', 1, 2, 'first last',
self.mockos.path.path, '/bin/shell')
self.checker._userdb = userdb
def _testCheckKey(self, filename):
self.sshDir.child(filename).setContent(self.content)
user = UsernamePassword("user", "password")
user.blob = "foobar"
self.assertTrue(self.checker.checkKey(user))
user.blob = "eggspam"
self.assertTrue(self.checker.checkKey(user))
user.blob = "notallowed"
self.assertFalse(self.checker.checkKey(user))
def test_checkKey(self):
"""
L{SSHPublicKeyDatabase.checkKey} should retrieve the content of the
authorized_keys file and check the keys against that file.
"""
self._testCheckKey("authorized_keys")
self.assertEqual(self.mockos.seteuidCalls, [])
self.assertEqual(self.mockos.setegidCalls, [])
def test_checkKey2(self):
"""
L{SSHPublicKeyDatabase.checkKey} should retrieve the content of the
authorized_keys2 file and check the keys against that file.
"""
self._testCheckKey("authorized_keys2")
self.assertEqual(self.mockos.seteuidCalls, [])
self.assertEqual(self.mockos.setegidCalls, [])
def test_checkKeyAsRoot(self):
"""
If the key file is readable, L{SSHPublicKeyDatabase.checkKey} should
switch its uid/gid to the ones of the authenticated user.
"""
keyFile = self.sshDir.child("authorized_keys")
keyFile.setContent(self.content)
# Fake permission error by changing the mode
keyFile.chmod(0000)
self.addCleanup(keyFile.chmod, 0777)
# And restore the right mode when seteuid is called
savedSeteuid = self.mockos.seteuid
def seteuid(euid):
keyFile.chmod(0777)
return savedSeteuid(euid)
self.mockos.euid = 2345
self.mockos.egid = 1234
self.patch(self.mockos, "seteuid", seteuid)
self.patch(checkers, 'os', self.mockos)
self.patch(util, 'os', self.mockos)
user = UsernamePassword("user", "password")
user.blob = "foobar"
self.assertTrue(self.checker.checkKey(user))
self.assertEqual(self.mockos.seteuidCalls, [0, 1, 0, 2345])
self.assertEqual(self.mockos.setegidCalls, [2, 1234])
def test_requestAvatarId(self):
"""
L{SSHPublicKeyDatabase.requestAvatarId} should return the avatar id
passed in if its C{_checkKey} method returns True.
"""
def _checkKey(ignored):
return True
self.patch(self.checker, 'checkKey', _checkKey)
credentials = SSHPrivateKey(
'test', 'ssh-rsa', keydata.publicRSA_openssh, 'foo',
keys.Key.fromString(keydata.privateRSA_openssh).sign('foo'))
d = self.checker.requestAvatarId(credentials)
def _verify(avatarId):
self.assertEqual(avatarId, 'test')
return d.addCallback(_verify)
def test_requestAvatarIdWithoutSignature(self):
"""
L{SSHPublicKeyDatabase.requestAvatarId} should raise L{ValidPublicKey}
if the credentials represent a valid key without a signature. This
tells the user that the key is valid for login, but does not actually
allow that user to do so without a signature.
"""
def _checkKey(ignored):
return True
self.patch(self.checker, 'checkKey', _checkKey)
credentials = SSHPrivateKey(
'test', 'ssh-rsa', keydata.publicRSA_openssh, None, None)
d = self.checker.requestAvatarId(credentials)
return self.assertFailure(d, ValidPublicKey)
def test_requestAvatarIdInvalidKey(self):
"""
If L{SSHPublicKeyDatabase.checkKey} returns False,
C{_cbRequestAvatarId} should raise L{UnauthorizedLogin}.
"""
def _checkKey(ignored):
return False
self.patch(self.checker, 'checkKey', _checkKey)
        d = self.checker.requestAvatarId(None)
return self.assertFailure(d, UnauthorizedLogin)
def test_requestAvatarIdInvalidSignature(self):
"""
Valid keys with invalid signatures should cause
        L{SSHPublicKeyDatabase.requestAvatarId} to return an L{UnauthorizedLogin}
        failure.
"""
def _checkKey(ignored):
return True
self.patch(self.checker, 'checkKey', _checkKey)
credentials = SSHPrivateKey(
'test', 'ssh-rsa', keydata.publicRSA_openssh, 'foo',
keys.Key.fromString(keydata.privateDSA_openssh).sign('foo'))
d = self.checker.requestAvatarId(credentials)
return self.assertFailure(d, UnauthorizedLogin)
def test_requestAvatarIdNormalizeException(self):
"""
Exceptions raised while verifying the key should be normalized into an
C{UnauthorizedLogin} failure.
"""
def _checkKey(ignored):
return True
self.patch(self.checker, 'checkKey', _checkKey)
credentials = SSHPrivateKey('test', None, 'blob', 'sigData', 'sig')
d = self.checker.requestAvatarId(credentials)
def _verifyLoggedException(failure):
errors = self.flushLoggedErrors(keys.BadKeyError)
self.assertEqual(len(errors), 1)
return failure
d.addErrback(_verifyLoggedException)
return self.assertFailure(d, UnauthorizedLogin)
class SSHProtocolCheckerTestCase(TestCase):
"""
Tests for L{SSHProtocolChecker}.
"""
skip = dependencySkip
def test_registerChecker(self):
"""
        L{SSHProtocolChecker.registerChecker} should add the given checker to
the list of registered checkers.
"""
checker = checkers.SSHProtocolChecker()
self.assertEqual(checker.credentialInterfaces, [])
checker.registerChecker(checkers.SSHPublicKeyDatabase(), )
self.assertEqual(checker.credentialInterfaces, [ISSHPrivateKey])
self.assertIsInstance(checker.checkers[ISSHPrivateKey],
checkers.SSHPublicKeyDatabase)
def test_registerCheckerWithInterface(self):
"""
        If a specific interface is passed into
        L{SSHProtocolChecker.registerChecker}, that interface should be
        registered instead of what the checker specifies in
        credentialInterfaces.
"""
checker = checkers.SSHProtocolChecker()
self.assertEqual(checker.credentialInterfaces, [])
checker.registerChecker(checkers.SSHPublicKeyDatabase(),
IUsernamePassword)
self.assertEqual(checker.credentialInterfaces, [IUsernamePassword])
self.assertIsInstance(checker.checkers[IUsernamePassword],
checkers.SSHPublicKeyDatabase)
def test_requestAvatarId(self):
"""
        L{SSHProtocolChecker.requestAvatarId} should defer to one of its
registered checkers to authenticate a user.
"""
checker = checkers.SSHProtocolChecker()
passwordDatabase = InMemoryUsernamePasswordDatabaseDontUse()
passwordDatabase.addUser('test', 'test')
checker.registerChecker(passwordDatabase)
d = checker.requestAvatarId(UsernamePassword('test', 'test'))
def _callback(avatarId):
self.assertEqual(avatarId, 'test')
return d.addCallback(_callback)
def test_requestAvatarIdWithNotEnoughAuthentication(self):
"""
If the client indicates that it is never satisfied, by always returning
False from _areDone, then L{SSHProtocolChecker} should raise
L{NotEnoughAuthentication}.
"""
checker = checkers.SSHProtocolChecker()
def _areDone(avatarId):
return False
self.patch(checker, 'areDone', _areDone)
passwordDatabase = InMemoryUsernamePasswordDatabaseDontUse()
passwordDatabase.addUser('test', 'test')
checker.registerChecker(passwordDatabase)
d = checker.requestAvatarId(UsernamePassword('test', 'test'))
return self.assertFailure(d, NotEnoughAuthentication)
def test_requestAvatarIdInvalidCredential(self):
"""
If the passed credentials aren't handled by any registered checker,
L{SSHProtocolChecker} should raise L{UnhandledCredentials}.
"""
checker = checkers.SSHProtocolChecker()
d = checker.requestAvatarId(UsernamePassword('test', 'test'))
return self.assertFailure(d, UnhandledCredentials)
def test_areDone(self):
"""
        The default L{SSHProtocolChecker.areDone} should simply return True.
"""
self.assertEquals(checkers.SSHProtocolChecker().areDone(None), True)
class UNIXPasswordDatabaseTests(TestCase):
"""
Tests for L{UNIXPasswordDatabase}.
"""
skip = cryptSkip or dependencySkip
def assertLoggedIn(self, d, username):
"""
Assert that the L{Deferred} passed in is called back with the value
'username'. This represents a valid login for this TestCase.
NOTE: To work, this method's return value must be returned from the
test method, or otherwise hooked up to the test machinery.
@param d: a L{Deferred} from an L{IChecker.requestAvatarId} method.
@type d: L{Deferred}
@rtype: L{Deferred}
"""
result = []
d.addBoth(result.append)
self.assertEquals(len(result), 1, "login incomplete")
if isinstance(result[0], Failure):
result[0].raiseException()
self.assertEquals(result[0], username)
def test_defaultCheckers(self):
"""
        L{UNIXPasswordDatabase} with no arguments checks the C{pwd} database
and then the C{spwd} database.
"""
checker = checkers.UNIXPasswordDatabase()
def crypted(username, password):
salt = crypt.crypt(password, username)
crypted = crypt.crypt(password, '$1$' + salt)
return crypted
pwd = UserDatabase()
pwd.addUser('alice', crypted('alice', 'password'),
1, 2, 'foo', '/foo', '/bin/sh')
# x and * are convention for "look elsewhere for the password"
pwd.addUser('bob', 'x', 1, 2, 'bar', '/bar', '/bin/sh')
spwd = ShadowDatabase()
spwd.addUser('alice', 'wrong', 1, 2, 3, 4, 5, 6, 7)
spwd.addUser('bob', crypted('bob', 'password'),
8, 9, 10, 11, 12, 13, 14)
self.patch(checkers, 'pwd', pwd)
self.patch(checkers, 'spwd', spwd)
mockos = MockOS()
self.patch(checkers, 'os', mockos)
self.patch(util, 'os', mockos)
mockos.euid = 2345
mockos.egid = 1234
cred = UsernamePassword("alice", "password")
self.assertLoggedIn(checker.requestAvatarId(cred), 'alice')
self.assertEquals(mockos.seteuidCalls, [])
self.assertEquals(mockos.setegidCalls, [])
cred.username = "bob"
self.assertLoggedIn(checker.requestAvatarId(cred), 'bob')
self.assertEquals(mockos.seteuidCalls, [0, 2345])
self.assertEquals(mockos.setegidCalls, [0, 1234])
def assertUnauthorizedLogin(self, d):
"""
Asserts that the L{Deferred} passed in is erred back with an
        L{UnauthorizedLogin} L{Failure}. This represents an invalid login for
this TestCase.
NOTE: To work, this method's return value must be returned from the
test method, or otherwise hooked up to the test machinery.
@param d: a L{Deferred} from an L{IChecker.requestAvatarId} method.
@type d: L{Deferred}
@rtype: L{None}
"""
self.assertRaises(
checkers.UnauthorizedLogin, self.assertLoggedIn, d, 'bogus value')
def test_passInCheckers(self):
"""
L{UNIXPasswordDatabase} takes a list of functions to check for UNIX
user information.
"""
password = crypt.crypt('secret', 'secret')
userdb = UserDatabase()
userdb.addUser('anybody', password, 1, 2, 'foo', '/bar', '/bin/sh')
checker = checkers.UNIXPasswordDatabase([userdb.getpwnam])
self.assertLoggedIn(
checker.requestAvatarId(UsernamePassword('anybody', 'secret')),
'anybody')
def test_verifyPassword(self):
"""
If the encrypted password provided by the getpwnam function is valid
(verified by the L{verifyCryptedPassword} function), we callback the
C{requestAvatarId} L{Deferred} with the username.
"""
def verifyCryptedPassword(crypted, pw):
return crypted == pw
def getpwnam(username):
return [username, username]
self.patch(checkers, 'verifyCryptedPassword', verifyCryptedPassword)
checker = checkers.UNIXPasswordDatabase([getpwnam])
credential = UsernamePassword('username', 'username')
self.assertLoggedIn(checker.requestAvatarId(credential), 'username')
def test_failOnKeyError(self):
"""
If the getpwnam function raises a KeyError, the login fails with an
L{UnauthorizedLogin} exception.
"""
def getpwnam(username):
raise KeyError(username)
checker = checkers.UNIXPasswordDatabase([getpwnam])
credential = UsernamePassword('username', 'username')
self.assertUnauthorizedLogin(checker.requestAvatarId(credential))
def test_failOnBadPassword(self):
"""
If the verifyCryptedPassword function doesn't verify the password, the
login fails with an L{UnauthorizedLogin} exception.
"""
def verifyCryptedPassword(crypted, pw):
return False
def getpwnam(username):
return [username, username]
self.patch(checkers, 'verifyCryptedPassword', verifyCryptedPassword)
checker = checkers.UNIXPasswordDatabase([getpwnam])
credential = UsernamePassword('username', 'username')
self.assertUnauthorizedLogin(checker.requestAvatarId(credential))
def test_loopThroughFunctions(self):
"""
UNIXPasswordDatabase.requestAvatarId loops through each getpwnam
function associated with it and returns a L{Deferred} which fires with
        the avatar id from the first function whose result verifies the
        password; later functions are only consulted if earlier ones do not
        verify the password.
"""
def verifyCryptedPassword(crypted, pw):
return crypted == pw
def getpwnam1(username):
return [username, 'not the password']
def getpwnam2(username):
return [username, username]
self.patch(checkers, 'verifyCryptedPassword', verifyCryptedPassword)
checker = checkers.UNIXPasswordDatabase([getpwnam1, getpwnam2])
credential = UsernamePassword('username', 'username')
self.assertLoggedIn(checker.requestAvatarId(credential), 'username')
def test_failOnSpecial(self):
"""
If the password returned by any function is C{""}, C{"x"}, or C{"*"} it
is not compared against the supplied password. Instead it is skipped.
"""
pwd = UserDatabase()
pwd.addUser('alice', '', 1, 2, '', 'foo', 'bar')
pwd.addUser('bob', 'x', 1, 2, '', 'foo', 'bar')
pwd.addUser('carol', '*', 1, 2, '', 'foo', 'bar')
self.patch(checkers, 'pwd', pwd)
checker = checkers.UNIXPasswordDatabase([checkers._pwdGetByName])
cred = UsernamePassword('alice', '')
self.assertUnauthorizedLogin(checker.requestAvatarId(cred))
cred = UsernamePassword('bob', 'x')
self.assertUnauthorizedLogin(checker.requestAvatarId(cred))
cred = UsernamePassword('carol', '*')
self.assertUnauthorizedLogin(checker.requestAvatarId(cred))
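# Illustrative sketch (not part of the test suite): outside these tests a
# checker built the same way is queried with a credential, and the resulting
# Deferred fires with the avatar id on success.
def _example_checker_usage():
    checker = checkers.UNIXPasswordDatabase([checkers._pwdGetByName])
    return checker.requestAvatarId(UsernamePassword('alice', 'secret'))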
| {
"content_hash": "ba651c928810ee4207616fd93ef7711c",
"timestamp": "",
"source": "github",
"line_count": 606,
"max_line_length": 80,
"avg_line_length": 36.83993399339934,
"alnum_prop": 0.6388801791713326,
"repo_name": "nlloyd/SubliminalCollaborator",
"id": "9c85050bc137c592915377822417069cff8a996f",
"size": "22398",
"binary": false,
"copies": "11",
"ref": "refs/heads/master",
"path": "libs/twisted/conch/test/test_checkers.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "510300"
},
{
"name": "Puppet",
"bytes": "6275"
},
{
"name": "Python",
"bytes": "10991491"
},
{
"name": "Shell",
"bytes": "2433"
}
],
"symlink_target": ""
} |
print ("project Euler problem 2: find the sum of the even valued terms of the fibonacci sequence")
total=1
dunnoyet=2
m=1
n=2
while total < 4000000:
total=m+n
m= n
n= total
#print ("fibonnaci", total)
if n%2==0:
dunnoyet= dunnoyet + n
#print ("sum", dunnoyet)
continue
else: continue
print (dunnoyet)
| {
"content_hash": "9c847ef2940002e94b6649b15b3c6a33",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 98,
"avg_line_length": 17.88888888888889,
"alnum_prop": 0.6770186335403726,
"repo_name": "Greh/Project-Euler",
"id": "92f2979dfb4bdcdf07fc4748e8495fa44f93ef82",
"size": "322",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "euler2.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "8717"
}
],
"symlink_target": ""
} |
import zlib
import datetime
import calendar
import os
import logging
try:
# py2
from cStringIO import StringIO
except ImportError:
try:
# py2 on some platform without cStringIO
from StringIO import StringIO
except ImportError:
# py3k
from io import StringIO
class classproperty(property):
def __get__(self, cls, owner):
return self.fget.__get__(None, owner)()
def split_by(iterable, chunk_size):
return (iterable[pos:pos+chunk_size] for pos in xrange(0, len(iterable), chunk_size))
def crc32(data):
return zlib.crc32(data) & 0xFFFFFFFF
def unpack_key(key):
return bytearray(key.strip().decode('base64'))
def timestamp_to_datetime(ts):
return datetime.datetime.utcfromtimestamp(float(ts))
def datetime_to_timestamp(dt):
return int(calendar.timegm(dt.utctimetuple()))
def pad_null_string(s, size):
return s + ('\x00' * (size - len(s)))
def trim_null_string(s):
return s.rstrip('\x00')
# NullHandler was added in py2.7
if hasattr(logging, 'NullHandler'):
NullHandler = logging.NullHandler
else:
class NullHandler(logging.Handler):
def handle(self, record):
pass
def emit(self, record):
pass
def createLock(self):
self.lock = None
LOG_FORMAT = logging.Formatter('[%(asctime)s] %(levelname)8s - %(name)s: %(message)s')
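# Illustrative sketch (not used by the package): how the small helpers above
# compose. Note that split_by() relies on xrange and therefore on Python 2.
def _util_example():
    chunks = list(split_by('abcdef', 2))    # ['ab', 'cd', 'ef']
    checksum = crc32('abcdef')              # unsigned 32-bit CRC of the data
    padded = pad_null_string('abc', 6)      # 'abc\x00\x00\x00'
    return chunks, checksum, trim_null_string(padded)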
| {
"content_hash": "56544a7d982a36831ff4b684ae2de1e5",
"timestamp": "",
"source": "github",
"line_count": 59,
"max_line_length": 89,
"avg_line_length": 23.47457627118644,
"alnum_prop": 0.6584837545126354,
"repo_name": "aheadley/python-naabal",
"id": "dd0efba8bd41db979942ed3d1cce3f03680a56be",
"size": "2558",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "naabal/util/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "125096"
}
],
"symlink_target": ""
} |
from tincan.serializable_base import SerializableBase
from tincan.version import Version
from tincan.extensions import Extensions
class About(SerializableBase):
"""Stores info about this installation of `tincan`.
:param version: The versions supported. This attribute is required.
:type version: list of unicode
:param extensions: Custom user data. This attribute is optional.
:type extensions: :class:`tincan.Extensions`
"""
_props_req = [
'version',
]
_props = [
'extensions',
]
_props.extend(_props_req)
def __init__(self, *args, **kwargs):
self._version = None
self._extensions = None
super(About, self).__init__(*args, **kwargs)
@property
def version(self):
"""Version for About
:setter: Sets the version. If None is provided, defaults to
`[tincan.Version.latest]`. If a string is provided,
makes a 1-element list containing the string.
:setter type: list | tuple | str | unicode | None
:rtype: list
"""
return self._version
@version.setter
def version(self, value):
def check_version(version):
"""Checks a single version string for validity. Raises
if invalid.
:param version: the version string to check
            :type version: str | unicode
:raises: ValueError
"""
if version in ['1.0.3', '1.0.2', '1.0.1', '1.0.0', '0.95', '0.9']:
return
# Construct the error message
if isinstance(value, (list, tuple)):
value_str = repr(version) + ' in ' + repr(value)
else:
value_str = repr(version)
msg = (
f"Tried to set property 'version' in a 'tincan.{self.__class__.__name__}' object "
f"with an invalid value: {value_str}\n"
f"Allowed versions are: {', '.join(map(repr, Version.supported))}"
)
raise ValueError(msg)
if value is None:
self._version = [Version.latest]
elif isinstance(value, str):
check_version(value)
self._version = [value]
elif isinstance(value, (list, tuple)):
for v in value:
check_version(v)
self._version = list(value)
else:
raise TypeError(
f"Property 'version' in a 'tincan.{self.__class__.__name__}' object must be set with a "
f"list, tuple, str, unicode or None. Tried to set it with: {repr(value)}"
)
@property
def extensions(self):
"""Extensions for About
:setter: Tries to convert to :class:`tincan.Extensions`. If None is provided,
sets to an empty :class:`tincan.Extensions` dict.
:setter type: :class:`tincan.Extensions` | dict | None
:rtype: :class:`tincan.Extensions`
"""
return self._extensions
@extensions.setter
def extensions(self, value):
if isinstance(value, Extensions):
self._extensions = value
elif value is None:
self._extensions = Extensions()
else:
try:
self._extensions = Extensions(value)
except Exception as e:
msg = (
f"Property 'extensions' in a 'tincan.{self.__class__.__name__} object must be set with a "
f"tincan.Extensions, dict, or None.\n\n"
)
msg += repr(e)
raise TypeError(msg)
@extensions.deleter
def extensions(self):
del self._extensions
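# Illustrative sketch (not part of the library), assuming the SerializableBase
# constructor routes keyword arguments through the property setters as the
# class docstring implies: the version setter normalizes its input.
def _about_version_example():
    about = About(version='1.0.3')      # stored as ['1.0.3']
    about.version = ['1.0.2', '1.0.1']  # lists/tuples are validated element-wise
    about.version = None                # falls back to [Version.latest]
    return about.version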
| {
"content_hash": "1e58e69456eb05012d754a7578207787",
"timestamp": "",
"source": "github",
"line_count": 113,
"max_line_length": 110,
"avg_line_length": 32.94690265486726,
"alnum_prop": 0.5508998119795864,
"repo_name": "RusticiSoftware/TinCanPython",
"id": "6f3c0154b55e4f3031cf0407be2e42e40f194412",
"size": "4327",
"binary": false,
"copies": "1",
"ref": "refs/heads/3.x",
"path": "tincan/about.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "380360"
}
],
"symlink_target": ""
} |
from django.conf.urls import patterns, include, url
from django.views.generic import TemplateView, ListView
from django.views.generic.dates import ArchiveIndexView
# BLOG
from .rssfeeds import LatestBlogFeed
from .models import BlogItem, Theme
from .views import (
blog_category_view,
BlogItemDetailView,
BlogItemCreateView,
BlogItemUpdateView,
ThemeCreateView,
ThemeDetailView,
ThemeUpdateView,
FrontPageView
)
blog_archive_view = ArchiveIndexView.as_view(
queryset=BlogItem.objects.filter(display=True),
paginate_by=10,
allow_empty=True,
date_field="created",
template_name='blog/index.html')
theme_list_view = ListView.as_view(
queryset=Theme.objects.filter(display=True),
paginate_by=10,
template_name='themes/index.html')
urlpatterns = patterns('',
# blog feed
(r'^feed/$', LatestBlogFeed()),
# blog categories
url(r'^blog/category/(?P<slug>\w+)/$', view=blog_category_view,
name='blog_category'),
url(r'^blog/category/(?P<slug>\w+)/page/(?P<page>\d+)/$', view=blog_category_view,
name='blog_category_paginated'),
# blog items
url(r'^blog/create/$',
view=BlogItemCreateView.as_view(), name='create_blog_item'),
url(r'^blog/(?P<pk>\d+)/$',
view=BlogItemDetailView.as_view(), name='blog_item'),
url(r'^blog/update/(?P<pk>\d+)/$',
view=BlogItemUpdateView.as_view(), name='update_blog_item'),
url(r'^blog/$', view=blog_archive_view, name='current_blog'),
url(r'^blog/page/(?P<page>\d+)/$', view=blog_archive_view,
name='current_blog_paginated'),
# themes
url(r'^theme/create/$',
view=ThemeCreateView.as_view(), name='create_theme'),
url(r'^theme/(?P<pk>\d+)/$',
view=ThemeDetailView.as_view(), name='theme'),
url(r'^theme/update/(?P<pk>\d+)/$',
view=ThemeUpdateView.as_view(), name='update_theme'),
url(r'^themes/$', view=theme_list_view, name='theme_index'),
url(r'^themes/page/(?P<page>\d+)/$', view=theme_list_view,
name='theme_index_paginated'),
# front
url(r'^$', FrontPageView.as_view()),
)
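# Illustrative only (not part of the URLconf): the named patterns above are
# typically resolved elsewhere with reverse(), for example:
def _example_reverse_urls():
    from django.core.urlresolvers import reverse  # import path for this Django era
    return reverse('blog_item', kwargs={'pk': 1}), reverse('theme_index')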
| {
"content_hash": "9b013c6a658e9003769ff92e7360e2be",
"timestamp": "",
"source": "github",
"line_count": 61,
"max_line_length": 86,
"avg_line_length": 37.9344262295082,
"alnum_prop": 0.5933448573898013,
"repo_name": "valuesandvalue/valuesandvalue",
"id": "2b9e6824965fb8bd9e2de8c6e49e169a5661dfd3",
"size": "2336",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "vavs_project/blog/urls.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "21480"
},
{
"name": "JavaScript",
"bytes": "80469"
},
{
"name": "Python",
"bytes": "315797"
}
],
"symlink_target": ""
} |
import tornado.escape
import tornado.ioloop
import tornado.options
import tornado.web
import tornado.websocket
import tornado.autoreload  # needed for the reload hooks registered in main()
import os.path
import torndb, redis
from tornado.options import define, options
import pickle
from session import RedisRecommendationSessionHandler
from engine import LinRelRecommender, QueryBasedRecommender, Recommender
from base_handlers import BaseHandler
from data import kw2doc_matrix, KwDocData
from util import get_weights, iter_summary
from model import Document
from filters import make_threshold_filter
define("port", default=8000, help="run on the given port", type=int)
define("mysql_port", default=3306, help="db's port", type=int)
define("mysql_host", default="193.167.138.8", help="db database host")
define("mysql_user", default="hxiao", help="db database user")
define("mysql_password", default="xh24206688", help="db database password")
define("mysql_database", default="archive", help="db database name")
define("redis_port", default=6379, help="redis' port", type=int)
define("redis_host", default="ugluk", help="key-value cache host")
define("redis_db", default="scinet3", help="key-value db")
define("table", default='john', help="db table to be used")
define("refresh_pickle", default=False, help="refresh pickle or not")
define("recom_kw_num", default=5, help="recommended keyword number at each iter")
define("recom_doc_num", default=10, help="recommended document number at each iter")
define("samp_kw_num", default=5, help="sampled keyword number from documents")
define("samp_doc_num", default=5, help="extra document number apart from the recommended ones")
define("linrel_kw_mu", default=1, help="Value for \mu in the linrel algorithm for keyword")
define("linrel_kw_c", default=0.2, help="Value for c in the linrel algorithm for keyword")
define("linrel_doc_mu", default=1, help="Value for \mu in the linrel algorithm for document")
define("linrel_doc_c", default=0.2, help="Value for c in the linrel algorithm for document")
define("kw_fb_threshold", default= 0.01, help="The feedback threshold used when filtering keywords")
define("kw_fb_from_docs_threshold", default= 0.01, help="The feedback(from documents) threshold used when filtering keywords")
define("doc_fb_threshold", default= 0.01, help="The feedback threshold used when filtering documents")
define("doc_fb_from_kws_threshold", default= 0.01, help="The feedback(from keywords) threshold used when filtering documents")
ERR_INVALID_POST_DATA = 1001
class Application(tornado.web.Application):
def __init__(self):
handlers = [
(r"/", MainHandler),
(r"/api/1.0/recommend",RecommandHandler)
]
settings = dict(
cookie_secret = "Put in your secret cookie here! (using the generator)",
template_path = os.path.join(os.path.dirname(__file__), "templates"),
static_path = os.path.join(os.path.dirname(__file__), "static"),
xsrf_cookies = False,
debug = True,
)
tornado.web.Application.__init__(self, handlers, **settings)
self.db = torndb.Connection("%s:%s" % (options.mysql_host, options.mysql_port), options.mysql_database, options.mysql_user, options.mysql_password)
self.redis = redis.StrictRedis(host=options.redis_host, port=options.redis_port, db=options.redis_db)
self.kwdoc_data = kw2doc_matrix(table=options.table, keyword_field_name = 'keywords', refresh = options.refresh_pickle)
#config LinRel recommender
Recommender.init(self.db, options.table, **self.kwdoc_data.__dict__)
class RecommandHandler(BaseHandler):
def post(self):
try:
data = tornado.escape.json_decode(self.request.body)
except:
data = {}
self.session_id = data.get('session_id', '')
query = data.get('query', '')
kw_fb = dict([(fb['id'], fb['score']) for fb in data.get('kw_fb', [])])
doc_fb = dict([(fb['id'], fb['score']) for fb in data.get('doc_fb', [])])
session = RedisRecommendationSessionHandler.get_session(self.redis, self.session_id)
if not self.session_id: #if no session id, start a new one
print 'start a session..', session.session_id
print 'Query: ', query
engine = QueryBasedRecommender()
rec_docs = engine.recommend_documents(query, options.recom_doc_num)
rec_kws = engine.recommend_keywords(rec_docs, options.recom_kw_num, options.samp_kw_num)
extra_docs = engine.associated_documents_by_keywords([kw #only those extra keywords
for kw in rec_kws
if not kw['recommended']],
options.recom_doc_num - options.samp_doc_num)
rec_docs = rec_docs + extra_docs
else:#else we are in a session
print 'continue the session..', session.session_id
if not kw_fb or not doc_fb:
self.json_fail(ERR_INVALID_POST_DATA, 'Since you are in a session, please give the feedbacks for both keywords and documents')
engine = LinRelRecommender(session)
fb_filter = make_threshold_filter(lambda o: o.fb(session), options.kw_fb_threshold)
fb_from_kws_filter = make_threshold_filter(lambda o: o.fb_from_kws(session), options.kw_fb_threshold)
fb_from_docs_filter = make_threshold_filter(lambda o: o.fb_from_docs(session), options.kw_fb_threshold)
rec_kws = engine.recommend_keywords(options.recom_kw_num,
options.linrel_kw_mu, options.linrel_kw_c,
filters = [fb_filter, fb_from_docs_filter],
feedbacks = kw_fb)
rec_docs = engine.recommend_documents(options.recom_doc_num,
options.linrel_doc_mu, options.linrel_doc_c,
filters = [fb_filter, fb_from_kws_filter],
feedbacks = doc_fb)
#add the display flag for kws
print rec_kws
for rec_kw in rec_kws: #they are displayed
rec_kw['display'] = True
#fill in the weights for both kws and docs
self._fill_doc_weight(rec_docs)
self._fill_kw_weight(rec_kws, rec_docs)
#get associated keywords
extra_kws = list(set([kw
for doc in rec_docs
for kw in doc['keywords']
if kw not in rec_kws]))
for kw in extra_kws:
kw['display'] = False
kw['score'] = 0
self._fill_kw_weight(extra_kws, rec_docs)
# kws = dict([(kw, kw) for kw in self.kwdoc_data._kw_ind.keys()])
# doc_dict = dict([(doc_id, Document(self._get_doc(doc_id))) for doc_id in self.kwdoc_data._doc_ind.keys()])
# # print the summary
# iter_summary(kw_dict = kw_dict,
# doc_dict = doc_dict,
# **session.data)
print "Recommended documents:"
for doc in rec_docs:
print doc
print "Recommended keywords:"
for kw in rec_kws:
print kw,
print
print 'extra_kws:', extra_kws
print 'all keywords:\n', [kw.dict for kw in (rec_kws + extra_kws)]
print 'all documents:\n', [doc.dict for doc in rec_docs]
self.json_ok({'session_id': session.session_id,
'kws': [kw.dict for kw in (rec_kws + extra_kws)],
'docs': [doc.dict for doc in rec_docs]})
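# Illustrative request payload for POST /api/1.0/recommend (a sketch only; the
# keys mirror what RecommandHandler.post() reads above). On the first call
# 'session_id' is empty and only 'query' is used; subsequent calls must carry
# the keyword and document feedback lists.
EXAMPLE_RECOMMEND_PAYLOAD = {
    'session_id': '',
    'query': 'reinforcement learning',
    'kw_fb': [{'id': 'some-keyword-id', 'score': 0.8}],
    'doc_fb': [{'id': 'some-doc-id', 'score': 0.5}],
}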
class MainHandler(BaseHandler):
def get(self):
self.render("index.html")
def main():
tornado.options.parse_command_line()
app = Application()
app.listen(options.port)
tornado.autoreload.add_reload_hook(main)
tornado.autoreload.start()
tornado.ioloop.IOLoop.instance().start()
if __name__ == "__main__":
main()
| {
"content_hash": "f27ad7013cc99dc1933b1e3d732000d0",
"timestamp": "",
"source": "github",
"line_count": 184,
"max_line_length": 155,
"avg_line_length": 45.20652173913044,
"alnum_prop": 0.5973791776869439,
"repo_name": "xiaohan2012/rl-search",
"id": "1fb1fab3d7d1ced7d8872501386ce0be0587f89f",
"size": "8340",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "main.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "272"
},
{
"name": "Java",
"bytes": "2014"
},
{
"name": "JavaScript",
"bytes": "36144"
},
{
"name": "Python",
"bytes": "175298"
},
{
"name": "Shell",
"bytes": "576"
},
{
"name": "TeX",
"bytes": "11128"
}
],
"symlink_target": ""
} |
"""
Swaggy Jenkins
Jenkins API clients generated from Swagger / Open API specification # noqa: E501
The version of the OpenAPI document: 1.5.1-pre.0
Contact: [email protected]
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from swaggyjenkins.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
OpenApiModel
)
from swaggyjenkins.exceptions import ApiAttributeError
def lazy_import():
from swaggyjenkins.model.free_style_build import FreeStyleBuild
from swaggyjenkins.model.free_style_projectactions import FreeStyleProjectactions
from swaggyjenkins.model.free_style_projecthealth_report import FreeStyleProjecthealthReport
from swaggyjenkins.model.null_scm import NullSCM
globals()['FreeStyleBuild'] = FreeStyleBuild
globals()['FreeStyleProjectactions'] = FreeStyleProjectactions
globals()['FreeStyleProjecthealthReport'] = FreeStyleProjecthealthReport
globals()['NullSCM'] = NullSCM
class FreeStyleProject(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
}
validations = {
}
@cached_property
def additional_properties_type():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
"""
lazy_import()
return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501
_nullable = False
@cached_property
def openapi_types():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
lazy_import()
return {
'_class': (str,), # noqa: E501
'name': (str,), # noqa: E501
'url': (str,), # noqa: E501
'color': (str,), # noqa: E501
'actions': ([FreeStyleProjectactions],), # noqa: E501
'description': (str,), # noqa: E501
'display_name': (str,), # noqa: E501
'display_name_or_null': (str,), # noqa: E501
'full_display_name': (str,), # noqa: E501
'full_name': (str,), # noqa: E501
'buildable': (bool,), # noqa: E501
'builds': ([FreeStyleBuild],), # noqa: E501
'first_build': (FreeStyleBuild,), # noqa: E501
'health_report': ([FreeStyleProjecthealthReport],), # noqa: E501
'in_queue': (bool,), # noqa: E501
'keep_dependencies': (bool,), # noqa: E501
'last_build': (FreeStyleBuild,), # noqa: E501
'last_completed_build': (FreeStyleBuild,), # noqa: E501
'last_failed_build': (str,), # noqa: E501
'last_stable_build': (FreeStyleBuild,), # noqa: E501
'last_successful_build': (FreeStyleBuild,), # noqa: E501
'last_unstable_build': (str,), # noqa: E501
'last_unsuccessful_build': (str,), # noqa: E501
'next_build_number': (int,), # noqa: E501
'queue_item': (str,), # noqa: E501
'concurrent_build': (bool,), # noqa: E501
'scm': (NullSCM,), # noqa: E501
}
@cached_property
def discriminator():
return None
attribute_map = {
'_class': '_class', # noqa: E501
'name': 'name', # noqa: E501
'url': 'url', # noqa: E501
'color': 'color', # noqa: E501
'actions': 'actions', # noqa: E501
'description': 'description', # noqa: E501
'display_name': 'displayName', # noqa: E501
'display_name_or_null': 'displayNameOrNull', # noqa: E501
'full_display_name': 'fullDisplayName', # noqa: E501
'full_name': 'fullName', # noqa: E501
'buildable': 'buildable', # noqa: E501
'builds': 'builds', # noqa: E501
'first_build': 'firstBuild', # noqa: E501
'health_report': 'healthReport', # noqa: E501
'in_queue': 'inQueue', # noqa: E501
'keep_dependencies': 'keepDependencies', # noqa: E501
'last_build': 'lastBuild', # noqa: E501
'last_completed_build': 'lastCompletedBuild', # noqa: E501
'last_failed_build': 'lastFailedBuild', # noqa: E501
'last_stable_build': 'lastStableBuild', # noqa: E501
'last_successful_build': 'lastSuccessfulBuild', # noqa: E501
'last_unstable_build': 'lastUnstableBuild', # noqa: E501
'last_unsuccessful_build': 'lastUnsuccessfulBuild', # noqa: E501
'next_build_number': 'nextBuildNumber', # noqa: E501
'queue_item': 'queueItem', # noqa: E501
'concurrent_build': 'concurrentBuild', # noqa: E501
'scm': 'scm', # noqa: E501
}
read_only_vars = {
}
_composed_schemas = {}
@classmethod
@convert_js_args_to_python_args
def _from_openapi_data(cls, *args, **kwargs): # noqa: E501
"""FreeStyleProject - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
                                    composed schema that is traveled through
                                    is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
_class (str): [optional] # noqa: E501
name (str): [optional] # noqa: E501
url (str): [optional] # noqa: E501
color (str): [optional] # noqa: E501
actions ([FreeStyleProjectactions]): [optional] # noqa: E501
description (str): [optional] # noqa: E501
display_name (str): [optional] # noqa: E501
display_name_or_null (str): [optional] # noqa: E501
full_display_name (str): [optional] # noqa: E501
full_name (str): [optional] # noqa: E501
buildable (bool): [optional] # noqa: E501
builds ([FreeStyleBuild]): [optional] # noqa: E501
first_build (FreeStyleBuild): [optional] # noqa: E501
health_report ([FreeStyleProjecthealthReport]): [optional] # noqa: E501
in_queue (bool): [optional] # noqa: E501
keep_dependencies (bool): [optional] # noqa: E501
last_build (FreeStyleBuild): [optional] # noqa: E501
last_completed_build (FreeStyleBuild): [optional] # noqa: E501
last_failed_build (str): [optional] # noqa: E501
last_stable_build (FreeStyleBuild): [optional] # noqa: E501
last_successful_build (FreeStyleBuild): [optional] # noqa: E501
last_unstable_build (str): [optional] # noqa: E501
last_unsuccessful_build (str): [optional] # noqa: E501
next_build_number (int): [optional] # noqa: E501
queue_item (str): [optional] # noqa: E501
concurrent_build (bool): [optional] # noqa: E501
scm (NullSCM): [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', True)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
self = super(OpenApiModel, cls).__new__(cls)
if args:
for arg in args:
if isinstance(arg, dict):
kwargs.update(arg)
else:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
return self
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
])
@convert_js_args_to_python_args
def __init__(self, *args, **kwargs): # noqa: E501
"""FreeStyleProject - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
                                    composed schema that is traveled through
                                    is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
_class (str): [optional] # noqa: E501
name (str): [optional] # noqa: E501
url (str): [optional] # noqa: E501
color (str): [optional] # noqa: E501
actions ([FreeStyleProjectactions]): [optional] # noqa: E501
description (str): [optional] # noqa: E501
display_name (str): [optional] # noqa: E501
display_name_or_null (str): [optional] # noqa: E501
full_display_name (str): [optional] # noqa: E501
full_name (str): [optional] # noqa: E501
buildable (bool): [optional] # noqa: E501
builds ([FreeStyleBuild]): [optional] # noqa: E501
first_build (FreeStyleBuild): [optional] # noqa: E501
health_report ([FreeStyleProjecthealthReport]): [optional] # noqa: E501
in_queue (bool): [optional] # noqa: E501
keep_dependencies (bool): [optional] # noqa: E501
last_build (FreeStyleBuild): [optional] # noqa: E501
last_completed_build (FreeStyleBuild): [optional] # noqa: E501
last_failed_build (str): [optional] # noqa: E501
last_stable_build (FreeStyleBuild): [optional] # noqa: E501
last_successful_build (FreeStyleBuild): [optional] # noqa: E501
last_unstable_build (str): [optional] # noqa: E501
last_unsuccessful_build (str): [optional] # noqa: E501
next_build_number (int): [optional] # noqa: E501
queue_item (str): [optional] # noqa: E501
concurrent_build (bool): [optional] # noqa: E501
scm (NullSCM): [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
for arg in args:
if isinstance(arg, dict):
kwargs.update(arg)
else:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
if var_name in self.read_only_vars:
raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
f"class with read only attributes.")
| {
"content_hash": "8cc6668a1c718d81696c12ba6149d84c",
"timestamp": "",
"source": "github",
"line_count": 380,
"max_line_length": 121,
"avg_line_length": 47.69736842105263,
"alnum_prop": 0.5451586206896551,
"repo_name": "cliffano/swaggy-jenkins",
"id": "d1caac7c11645c2f2ff1bf3e8b402bcd5e790d88",
"size": "18125",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "clients/python/generated/swaggyjenkins/model/free_style_project.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Ada",
"bytes": "569823"
},
{
"name": "Apex",
"bytes": "741346"
},
{
"name": "Batchfile",
"bytes": "14792"
},
{
"name": "C",
"bytes": "971274"
},
{
"name": "C#",
"bytes": "5131336"
},
{
"name": "C++",
"bytes": "7799032"
},
{
"name": "CMake",
"bytes": "20609"
},
{
"name": "CSS",
"bytes": "4873"
},
{
"name": "Clojure",
"bytes": "129018"
},
{
"name": "Crystal",
"bytes": "864941"
},
{
"name": "Dart",
"bytes": "876777"
},
{
"name": "Dockerfile",
"bytes": "7385"
},
{
"name": "Eiffel",
"bytes": "424642"
},
{
"name": "Elixir",
"bytes": "139252"
},
{
"name": "Elm",
"bytes": "187067"
},
{
"name": "Emacs Lisp",
"bytes": "191"
},
{
"name": "Erlang",
"bytes": "373074"
},
{
"name": "F#",
"bytes": "556012"
},
{
"name": "Gherkin",
"bytes": "951"
},
{
"name": "Go",
"bytes": "345227"
},
{
"name": "Groovy",
"bytes": "89524"
},
{
"name": "HTML",
"bytes": "2367424"
},
{
"name": "Haskell",
"bytes": "680841"
},
{
"name": "Java",
"bytes": "12164874"
},
{
"name": "JavaScript",
"bytes": "1959006"
},
{
"name": "Kotlin",
"bytes": "1280953"
},
{
"name": "Lua",
"bytes": "322316"
},
{
"name": "Makefile",
"bytes": "11882"
},
{
"name": "Nim",
"bytes": "65818"
},
{
"name": "OCaml",
"bytes": "94665"
},
{
"name": "Objective-C",
"bytes": "464903"
},
{
"name": "PHP",
"bytes": "4383673"
},
{
"name": "Perl",
"bytes": "743304"
},
{
"name": "PowerShell",
"bytes": "678274"
},
{
"name": "Python",
"bytes": "5529523"
},
{
"name": "QMake",
"bytes": "6915"
},
{
"name": "R",
"bytes": "840841"
},
{
"name": "Raku",
"bytes": "10945"
},
{
"name": "Ruby",
"bytes": "328360"
},
{
"name": "Rust",
"bytes": "1735375"
},
{
"name": "Scala",
"bytes": "1387368"
},
{
"name": "Shell",
"bytes": "407167"
},
{
"name": "Swift",
"bytes": "342562"
},
{
"name": "TypeScript",
"bytes": "3060093"
}
],
"symlink_target": ""
} |
from .test import test_name
from .collect import collect_tests
from .run import TestDispatcher
from .report import generate_known_failures, load_known_failures
from .prepare import prepare
| {
"content_hash": "5ea7f1d2aa771a5f229e4ee67084f3bd",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 64,
"avg_line_length": 37.8,
"alnum_prop": 0.8253968253968254,
"repo_name": "Sean-Der/thrift",
"id": "584cc070fbd3eda766f1e121b3ed1b103d6666e5",
"size": "975",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "test/crossrunner/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ActionScript",
"bytes": "75532"
},
{
"name": "Batchfile",
"bytes": "5757"
},
{
"name": "C",
"bytes": "679286"
},
{
"name": "C#",
"bytes": "388334"
},
{
"name": "C++",
"bytes": "3844871"
},
{
"name": "CMake",
"bytes": "97888"
},
{
"name": "CSS",
"bytes": "1070"
},
{
"name": "D",
"bytes": "645069"
},
{
"name": "Dart",
"bytes": "146402"
},
{
"name": "Emacs Lisp",
"bytes": "5361"
},
{
"name": "Erlang",
"bytes": "310672"
},
{
"name": "Go",
"bytes": "450769"
},
{
"name": "HTML",
"bytes": "23089"
},
{
"name": "Haskell",
"bytes": "122881"
},
{
"name": "Haxe",
"bytes": "304443"
},
{
"name": "Java",
"bytes": "963544"
},
{
"name": "JavaScript",
"bytes": "352234"
},
{
"name": "LLVM",
"bytes": "16087"
},
{
"name": "Lua",
"bytes": "48477"
},
{
"name": "Makefile",
"bytes": "15010"
},
{
"name": "OCaml",
"bytes": "39241"
},
{
"name": "Objective-C",
"bytes": "153651"
},
{
"name": "PHP",
"bytes": "314576"
},
{
"name": "Pascal",
"bytes": "387563"
},
{
"name": "Perl",
"bytes": "119756"
},
{
"name": "Python",
"bytes": "325958"
},
{
"name": "Ruby",
"bytes": "389649"
},
{
"name": "Shell",
"bytes": "28816"
},
{
"name": "Smalltalk",
"bytes": "22944"
},
{
"name": "Swift",
"bytes": "28538"
},
{
"name": "Thrift",
"bytes": "310999"
},
{
"name": "VimL",
"bytes": "2846"
},
{
"name": "Yacc",
"bytes": "26807"
}
],
"symlink_target": ""
} |
"""For reading and writing TFRecords files."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python import pywrap_tensorflow
from tensorflow.python.framework import errors
from tensorflow.python.util import compat
from tensorflow.python.util import deprecation
from tensorflow.python.util.tf_export import tf_export
@tf_export(
"io.TFRecordCompressionType",
v1=["io.TFRecordCompressionType", "python_io.TFRecordCompressionType"])
@deprecation.deprecated_endpoints("python_io.TFRecordCompressionType")
class TFRecordCompressionType(object):
"""The type of compression for the record."""
NONE = 0
ZLIB = 1
GZIP = 2
@tf_export(
"io.TFRecordOptions",
v1=["io.TFRecordOptions", "python_io.TFRecordOptions"])
@deprecation.deprecated_endpoints("python_io.TFRecordOptions")
class TFRecordOptions(object):
"""Options used for manipulating TFRecord files."""
compression_type_map = {
TFRecordCompressionType.ZLIB: "ZLIB",
TFRecordCompressionType.GZIP: "GZIP",
TFRecordCompressionType.NONE: ""
}
def __init__(self,
compression_type=None,
flush_mode=None,
input_buffer_size=None,
output_buffer_size=None,
window_bits=None,
compression_level=None,
compression_method=None,
mem_level=None,
compression_strategy=None):
# pylint: disable=line-too-long
"""Creates a `TFRecordOptions` instance.
    Options only affect TFRecordWriter when compression_type is not `None`.
Documentation, details, and defaults can be found in
[`zlib_compression_options.h`](https://www.tensorflow.org/code/tensorflow/core/lib/io/zlib_compression_options.h)
and in the [zlib manual](http://www.zlib.net/manual.html).
Leaving an option as `None` allows C++ to set a reasonable default.
Args:
compression_type: `TFRecordCompressionType` or `None`.
flush_mode: flush mode or `None`, Default: Z_NO_FLUSH.
input_buffer_size: int or `None`.
output_buffer_size: int or `None`.
window_bits: int or `None`.
compression_level: 0 to 9, or `None`.
compression_method: compression method or `None`.
mem_level: 1 to 9, or `None`.
compression_strategy: strategy or `None`. Default: Z_DEFAULT_STRATEGY.
Returns:
A `TFRecordOptions` object.
Raises:
ValueError: If compression_type is invalid.
"""
# pylint: enable=line-too-long
# Check compression_type is valid, but for backwards compatibility don't
# immediately convert to a string.
self.get_compression_type_string(compression_type)
self.compression_type = compression_type
self.flush_mode = flush_mode
self.input_buffer_size = input_buffer_size
self.output_buffer_size = output_buffer_size
self.window_bits = window_bits
self.compression_level = compression_level
self.compression_method = compression_method
self.mem_level = mem_level
self.compression_strategy = compression_strategy
@classmethod
def get_compression_type_string(cls, options):
"""Convert various option types to a unified string.
Args:
options: `TFRecordOption`, `TFRecordCompressionType`, or string.
Returns:
Compression type as string (e.g. `'ZLIB'`, `'GZIP'`, or `''`).
Raises:
ValueError: If compression_type is invalid.
"""
if not options:
return ""
elif isinstance(options, TFRecordOptions):
return cls.get_compression_type_string(options.compression_type)
elif isinstance(options, TFRecordCompressionType):
return cls.compression_type_map[options]
elif options in TFRecordOptions.compression_type_map:
return cls.compression_type_map[options]
elif options in TFRecordOptions.compression_type_map.values():
return options
else:
raise ValueError('Not a valid compression_type: "{}"'.format(options))
def _as_record_writer_options(self):
"""Convert to RecordWriterOptions for use with PyRecordWriter."""
options = pywrap_tensorflow.RecordWriterOptions_CreateRecordWriterOptions(
compat.as_bytes(
self.get_compression_type_string(self.compression_type)))
if self.flush_mode is not None:
options.zlib_options.flush_mode = self.flush_mode
if self.input_buffer_size is not None:
options.zlib_options.input_buffer_size = self.input_buffer_size
if self.output_buffer_size is not None:
options.zlib_options.output_buffer_size = self.output_buffer_size
if self.window_bits is not None:
options.zlib_options.window_bits = self.window_bits
if self.compression_level is not None:
options.zlib_options.compression_level = self.compression_level
if self.compression_method is not None:
options.zlib_options.compression_method = self.compression_method
if self.mem_level is not None:
options.zlib_options.mem_level = self.mem_level
if self.compression_strategy is not None:
options.zlib_options.compression_strategy = self.compression_strategy
return options
@tf_export(v1=["io.tf_record_iterator", "python_io.tf_record_iterator"])
@deprecation.deprecated(
date=None,
instructions=("Use eager execution and: \n"
"`tf.data.TFRecordDataset(path)`"))
def tf_record_iterator(path, options=None):
"""An iterator that read the records from a TFRecords file.
Args:
path: The path to the TFRecords file.
options: (optional) A TFRecordOptions object.
Yields:
Strings.
Raises:
IOError: If `path` cannot be opened for reading.
"""
compression_type = TFRecordOptions.get_compression_type_string(options)
with errors.raise_exception_on_not_ok_status() as status:
reader = pywrap_tensorflow.PyRecordReader_New(
compat.as_bytes(path), 0, compat.as_bytes(compression_type), status)
if reader is None:
raise IOError("Could not open %s." % path)
try:
while True:
try:
reader.GetNext()
except errors.OutOfRangeError:
break
yield reader.record()
finally:
reader.Close()
@tf_export(
"io.TFRecordWriter", v1=["io.TFRecordWriter", "python_io.TFRecordWriter"])
@deprecation.deprecated_endpoints("python_io.TFRecordWriter")
class TFRecordWriter(object):
"""A class to write records to a TFRecords file.
This class implements `__enter__` and `__exit__`, and can be used
in `with` blocks like a normal file.
"""
# TODO(josh11b): Support appending?
def __init__(self, path, options=None):
"""Opens file `path` and creates a `TFRecordWriter` writing to it.
Args:
path: The path to the TFRecords file.
options: (optional) String specifying compression type,
`TFRecordCompressionType`, or `TFRecordOptions` object.
Raises:
IOError: If `path` cannot be opened for writing.
ValueError: If valid compression_type can't be determined from `options`.
"""
if not isinstance(options, TFRecordOptions):
options = TFRecordOptions(compression_type=options)
with errors.raise_exception_on_not_ok_status() as status:
# pylint: disable=protected-access
self._writer = pywrap_tensorflow.PyRecordWriter_New(
compat.as_bytes(path), options._as_record_writer_options(), status)
# pylint: enable=protected-access
def __enter__(self):
"""Enter a `with` block."""
return self
def __exit__(self, unused_type, unused_value, unused_traceback):
"""Exit a `with` block, closing the file."""
self.close()
def write(self, record):
"""Write a string record to the file.
Args:
record: str
"""
with errors.raise_exception_on_not_ok_status() as status:
self._writer.WriteRecord(record, status)
def flush(self):
"""Flush the file."""
with errors.raise_exception_on_not_ok_status() as status:
self._writer.Flush(status)
def close(self):
"""Close the file."""
with errors.raise_exception_on_not_ok_status() as status:
self._writer.Close(status)
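# Usage sketch (illustrative only; the path is a placeholder): write a few
# GZIP-compressed records with TFRecordWriter and read them back with the
# tf_record_iterator defined above.
def _tf_record_round_trip_example(path="/tmp/example.tfrecord"):
  options = TFRecordOptions(compression_type=TFRecordCompressionType.GZIP)
  with TFRecordWriter(path, options) as writer:
    for i in range(3):
      writer.write(("record-%d" % i).encode("utf-8"))
  return list(tf_record_iterator(path, options))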
| {
"content_hash": "69759c411d23e8ccb92a24c7a12b4f2d",
"timestamp": "",
"source": "github",
"line_count": 231,
"max_line_length": 117,
"avg_line_length": 35.16017316017316,
"alnum_prop": 0.688869736518099,
"repo_name": "asimshankar/tensorflow",
"id": "43086ab18d7774f54be2b393deccec6be180801f",
"size": "8812",
"binary": false,
"copies": "9",
"ref": "refs/heads/master",
"path": "tensorflow/python/lib/io/tf_record.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "4882"
},
{
"name": "Batchfile",
"bytes": "10132"
},
{
"name": "C",
"bytes": "490070"
},
{
"name": "C#",
"bytes": "8446"
},
{
"name": "C++",
"bytes": "52677142"
},
{
"name": "CMake",
"bytes": "207176"
},
{
"name": "Dockerfile",
"bytes": "39454"
},
{
"name": "Go",
"bytes": "1290930"
},
{
"name": "HTML",
"bytes": "4680032"
},
{
"name": "Java",
"bytes": "890529"
},
{
"name": "Jupyter Notebook",
"bytes": "2618412"
},
{
"name": "LLVM",
"bytes": "6536"
},
{
"name": "Makefile",
"bytes": "68402"
},
{
"name": "Objective-C",
"bytes": "16140"
},
{
"name": "Objective-C++",
"bytes": "102518"
},
{
"name": "PHP",
"bytes": "5172"
},
{
"name": "Pascal",
"bytes": "221"
},
{
"name": "Perl",
"bytes": "7536"
},
{
"name": "PureBasic",
"bytes": "25356"
},
{
"name": "Python",
"bytes": "43038983"
},
{
"name": "RobotFramework",
"bytes": "891"
},
{
"name": "Ruby",
"bytes": "838"
},
{
"name": "Shell",
"bytes": "497659"
},
{
"name": "Smarty",
"bytes": "6976"
}
],
"symlink_target": ""
} |
from migrate.changeset import UniqueConstraint
from migrate import ForeignKeyConstraint
from oslo_log import log as logging
from sqlalchemy import Boolean, BigInteger, Column, DateTime, Enum, Float
from sqlalchemy import dialects
from sqlalchemy import ForeignKey, Index, Integer, MetaData, String, Table
from sqlalchemy import Text
from sqlalchemy.types import NullType
from jacket.i18n import _LE
LOG = logging.getLogger(__name__)
# Note on the autoincrement flag: this is defaulted for primary key columns
# of integral type, so is no longer set explicitly in such cases.
# NOTE(dprince): This wrapper allows us to easily match the Folsom MySQL
# Schema. In Folsom we created tables as latin1 and converted them to utf8
# later. This conversion causes some of the Text columns on MySQL to get
# created as mediumtext instead of just text.
def MediumText():
return Text().with_variant(dialects.mysql.MEDIUMTEXT(), 'mysql')
def Inet():
return String(length=43).with_variant(dialects.postgresql.INET(),
'postgresql')
def InetSmall():
return String(length=39).with_variant(dialects.postgresql.INET(),
'postgresql')
def _create_shadow_tables(migrate_engine):
meta = MetaData(migrate_engine)
meta.reflect(migrate_engine)
table_names = list(meta.tables.keys())
meta.bind = migrate_engine
for table_name in table_names:
table = Table(table_name, meta, autoload=True)
columns = []
for column in table.columns:
column_copy = None
# NOTE(boris-42): BigInteger is not supported by sqlite, so
# after copy it will have NullType, other
# types that are used in Nova are supported by
# sqlite.
if isinstance(column.type, NullType):
column_copy = Column(column.name, BigInteger(), default=0)
if table_name == 'instances' and column.name == 'locked_by':
enum = Enum('owner', 'admin',
name='shadow_instances0locked_by')
column_copy = Column(column.name, enum)
else:
column_copy = column.copy()
columns.append(column_copy)
shadow_table_name = 'shadow_' + table_name
shadow_table = Table(shadow_table_name, meta, *columns,
mysql_engine='InnoDB')
try:
shadow_table.create()
except Exception:
LOG.info(repr(shadow_table))
LOG.exception(_LE('Exception while creating table.'))
raise
def _populate_instance_types(instance_types_table):
default_inst_types = {
'm1.tiny': dict(mem=512, vcpus=1, root_gb=1, eph_gb=0, flavid=1),
'm1.small': dict(mem=2048, vcpus=1, root_gb=20, eph_gb=0, flavid=2),
'm1.medium': dict(mem=4096, vcpus=2, root_gb=40, eph_gb=0, flavid=3),
'm1.large': dict(mem=8192, vcpus=4, root_gb=80, eph_gb=0, flavid=4),
'm1.xlarge': dict(mem=16384, vcpus=8, root_gb=160, eph_gb=0, flavid=5)
}
try:
i = instance_types_table.insert()
for name, values in default_inst_types.items():
i.execute({'name': name, 'memory_mb': values["mem"],
'vcpus': values["vcpus"], 'deleted': 0,
'root_gb': values["root_gb"],
'ephemeral_gb': values["eph_gb"],
'rxtx_factor': 1,
'swap': 0,
'flavorid': values["flavid"],
'disabled': False,
'is_public': True})
except Exception:
LOG.info(repr(instance_types_table))
LOG.exception(_LE('Exception while seeding instance_types table'))
raise
# NOTE(dprince): we add these here so our schema contains dump tables
# which were added in migration 209 (in Havana). We can drop these in
# Icehouse: https://bugs.launchpad.net/nova/+bug/1266538
def _create_dump_tables(migrate_engine):
meta = MetaData(migrate_engine)
meta.reflect(migrate_engine)
table_names = ['compute_node_stats', 'compute_nodes', 'instance_actions',
'instance_actions_events', 'instance_faults', 'migrations']
for table_name in table_names:
table = Table(table_name, meta, autoload=True)
dump_table_name = 'dump_' + table.name
columns = []
for column in table.columns:
# NOTE(dprince): The dump_ tables were originally created from an
# earlier schema version so we don't want to add the pci_stats
# column so that schema diffs are exactly the same.
if column.name == 'pci_stats':
continue
else:
columns.append(column.copy())
table_dump = Table(dump_table_name, meta, *columns,
mysql_engine='InnoDB')
table_dump.create()
def upgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
agent_builds = Table('agent_builds', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('hypervisor', String(length=255)),
Column('os', String(length=255)),
Column('architecture', String(length=255)),
Column('version', String(length=255)),
Column('url', String(length=255)),
Column('md5hash', String(length=255)),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
aggregate_hosts = Table('aggregate_hosts', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('host', String(length=255)),
Column('aggregate_id', Integer, ForeignKey('aggregates.id'),
nullable=False),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
aggregate_metadata = Table('aggregate_metadata', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('aggregate_id', Integer, ForeignKey('aggregates.id'),
nullable=False),
Column('key', String(length=255), nullable=False),
Column('value', String(length=255), nullable=False),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
aggregates = Table('aggregates', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('name', String(length=255)),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
block_device_mapping = Table('block_device_mapping', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('device_name', String(length=255), nullable=True),
Column('delete_on_termination', Boolean),
Column('snapshot_id', String(length=36), nullable=True),
Column('volume_id', String(length=36), nullable=True),
Column('volume_size', Integer),
Column('no_device', Boolean),
Column('connection_info', MediumText()),
Column('instance_uuid', String(length=36)),
Column('deleted', Integer),
Column('source_type', String(length=255), nullable=True),
Column('destination_type', String(length=255), nullable=True),
Column('guest_format', String(length=255), nullable=True),
Column('device_type', String(length=255), nullable=True),
Column('disk_bus', String(length=255), nullable=True),
Column('boot_index', Integer),
Column('image_id', String(length=36), nullable=True),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
bw_usage_cache = Table('bw_usage_cache', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('start_period', DateTime, nullable=False),
Column('last_refreshed', DateTime),
Column('bw_in', BigInteger),
Column('bw_out', BigInteger),
Column('mac', String(length=255)),
Column('uuid', String(length=36)),
Column('last_ctr_in', BigInteger()),
Column('last_ctr_out', BigInteger()),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
cells = Table('cells', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('api_url', String(length=255)),
Column('weight_offset', Float),
Column('weight_scale', Float),
Column('name', String(length=255)),
Column('is_parent', Boolean),
Column('deleted', Integer),
Column('transport_url', String(length=255), nullable=False),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
certificates = Table('certificates', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('user_id', String(length=255)),
Column('project_id', String(length=255)),
Column('file_name', String(length=255)),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
compute_node_stats = Table('compute_node_stats', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('compute_node_id', Integer, nullable=False),
Column('key', String(length=255), nullable=False),
Column('value', String(length=255)),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
compute_nodes = Table('compute_nodes', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('service_id', Integer, nullable=False),
Column('vcpus', Integer, nullable=False),
Column('memory_mb', Integer, nullable=False),
Column('local_gb', Integer, nullable=False),
Column('vcpus_used', Integer, nullable=False),
Column('memory_mb_used', Integer, nullable=False),
Column('local_gb_used', Integer, nullable=False),
Column('hypervisor_type', MediumText(), nullable=False),
Column('hypervisor_version', Integer, nullable=False),
Column('cpu_info', MediumText(), nullable=False),
Column('disk_available_least', Integer),
Column('free_ram_mb', Integer),
Column('free_disk_gb', Integer),
Column('current_workload', Integer),
Column('running_vms', Integer),
Column('hypervisor_hostname', String(length=255)),
Column('deleted', Integer),
Column('host_ip', InetSmall()),
Column('supported_instances', Text),
Column('pci_stats', Text, nullable=True),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
console_pools = Table('console_pools', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('address', InetSmall()),
Column('username', String(length=255)),
Column('password', String(length=255)),
Column('console_type', String(length=255)),
Column('public_hostname', String(length=255)),
Column('host', String(length=255)),
Column('compute_host', String(length=255)),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
# NOTE(mriedem): DB2 can't create the FK since we don't have the unique
# constraint on instances.uuid because it's nullable (so a unique
# constraint isn't created for instances.uuid, only a unique index).
consoles_instance_uuid_column_args = ['instance_uuid', String(length=36)]
if migrate_engine.name != 'ibm_db_sa':
consoles_instance_uuid_column_args.append(
ForeignKey('instances.uuid', name='consoles_instance_uuid_fkey'))
consoles = Table('consoles', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('instance_name', String(length=255)),
Column('password', String(length=255)),
Column('port', Integer),
Column('pool_id', Integer, ForeignKey('console_pools.id')),
Column(*consoles_instance_uuid_column_args),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
dns_domains = Table('dns_domains', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('deleted', Boolean),
Column('domain', String(length=255), primary_key=True, nullable=False),
Column('scope', String(length=255)),
Column('availability_zone', String(length=255)),
Column('project_id', String(length=255)),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
fixed_ips = Table('fixed_ips', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('address', InetSmall()),
Column('network_id', Integer),
Column('allocated', Boolean),
Column('leased', Boolean),
Column('reserved', Boolean),
Column('virtual_interface_id', Integer),
Column('host', String(length=255)),
Column('instance_uuid', String(length=36)),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
floating_ips = Table('floating_ips', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('address', InetSmall()),
Column('fixed_ip_id', Integer),
Column('project_id', String(length=255)),
Column('host', String(length=255)),
Column('auto_assigned', Boolean),
Column('pool', String(length=255)),
Column('interface', String(length=255)),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
instance_faults = Table('instance_faults', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('instance_uuid', String(length=36)),
Column('code', Integer, nullable=False),
Column('message', String(length=255)),
Column('details', MediumText()),
Column('host', String(length=255)),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
instance_id_mappings = Table('instance_id_mappings', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('uuid', String(36), nullable=False),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
instance_info_caches = Table('instance_info_caches', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('network_info', MediumText()),
Column('instance_uuid', String(length=36), nullable=False),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
groups = Table('instance_groups', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('deleted', Integer),
Column('id', Integer, primary_key=True, nullable=False),
Column('user_id', String(length=255)),
Column('project_id', String(length=255)),
Column('uuid', String(length=36), nullable=False),
Column('name', String(length=255)),
UniqueConstraint('uuid', 'deleted',
name='uniq_instance_groups0uuid0deleted'),
mysql_engine='InnoDB',
mysql_charset='utf8',
)
group_metadata = Table('instance_group_metadata', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('deleted', Integer),
Column('id', Integer, primary_key=True, nullable=False),
Column('key', String(length=255)),
Column('value', String(length=255)),
Column('group_id', Integer, ForeignKey('instance_groups.id'),
nullable=False),
mysql_engine='InnoDB',
mysql_charset='utf8',
)
group_policy = Table('instance_group_policy', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('deleted', Integer),
Column('id', Integer, primary_key=True, nullable=False),
Column('policy', String(length=255)),
Column('group_id', Integer, ForeignKey('instance_groups.id'),
nullable=False),
mysql_engine='InnoDB',
mysql_charset='utf8',
)
group_member = Table('instance_group_member', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('deleted', Integer),
Column('id', Integer, primary_key=True, nullable=False),
Column('instance_id', String(length=255)),
Column('group_id', Integer, ForeignKey('instance_groups.id'),
nullable=False),
mysql_engine='InnoDB',
mysql_charset='utf8',
)
instance_metadata = Table('instance_metadata', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('key', String(length=255)),
Column('value', String(length=255)),
Column('instance_uuid', String(length=36), nullable=True),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
instance_system_metadata = Table('instance_system_metadata', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('instance_uuid', String(length=36), nullable=False),
Column('key', String(length=255), nullable=False),
Column('value', String(length=255)),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
instance_type_extra_specs = Table('instance_type_extra_specs', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('instance_type_id', Integer, ForeignKey('instance_types.id'),
nullable=False),
Column('key', String(length=255)),
Column('value', String(length=255)),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
instance_type_projects = Table('instance_type_projects', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('instance_type_id', Integer, nullable=False),
Column('project_id', String(length=255)),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
instance_types = Table('instance_types', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('name', String(length=255)),
Column('id', Integer, primary_key=True, nullable=False),
Column('memory_mb', Integer, nullable=False),
Column('vcpus', Integer, nullable=False),
Column('swap', Integer, nullable=False),
Column('vcpu_weight', Integer),
Column('flavorid', String(length=255)),
Column('rxtx_factor', Float),
Column('root_gb', Integer),
Column('ephemeral_gb', Integer),
Column('disabled', Boolean),
Column('is_public', Boolean),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
inst_lock_enum = Enum('owner', 'admin', name='instances0locked_by')
instances = Table('instances', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('internal_id', Integer),
Column('user_id', String(length=255)),
Column('project_id', String(length=255)),
Column('image_ref', String(length=255)),
Column('kernel_id', String(length=255)),
Column('ramdisk_id', String(length=255)),
Column('launch_index', Integer),
Column('key_name', String(length=255)),
Column('key_data', MediumText()),
Column('power_state', Integer),
Column('vm_state', String(length=255)),
Column('memory_mb', Integer),
Column('vcpus', Integer),
Column('hostname', String(length=255)),
Column('host', String(length=255)),
Column('user_data', MediumText()),
Column('reservation_id', String(length=255)),
Column('scheduled_at', DateTime),
Column('launched_at', DateTime),
Column('terminated_at', DateTime),
Column('display_name', String(length=255)),
Column('display_description', String(length=255)),
Column('availability_zone', String(length=255)),
Column('locked', Boolean),
Column('os_type', String(length=255)),
Column('launched_on', MediumText()),
Column('instance_type_id', Integer),
Column('vm_mode', String(length=255)),
Column('uuid', String(length=36)),
Column('architecture', String(length=255)),
Column('root_device_name', String(length=255)),
Column('access_ip_v4', InetSmall()),
Column('access_ip_v6', InetSmall()),
Column('config_drive', String(length=255)),
Column('task_state', String(length=255)),
Column('default_ephemeral_device', String(length=255)),
Column('default_swap_device', String(length=255)),
Column('progress', Integer),
Column('auto_disk_config', Boolean),
Column('shutdown_terminate', Boolean),
Column('disable_terminate', Boolean),
Column('root_gb', Integer),
Column('ephemeral_gb', Integer),
Column('cell_name', String(length=255)),
Column('node', String(length=255)),
Column('deleted', Integer),
Column('locked_by', inst_lock_enum),
Column('cleaned', Integer, default=0),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
instance_actions = Table('instance_actions', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('action', String(length=255)),
Column('instance_uuid', String(length=36)),
Column('request_id', String(length=255)),
Column('user_id', String(length=255)),
Column('project_id', String(length=255)),
Column('start_time', DateTime),
Column('finish_time', DateTime),
Column('message', String(length=255)),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8',
)
instance_actions_events = Table('instance_actions_events', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('event', String(length=255)),
Column('action_id', Integer, ForeignKey('instance_actions.id')),
Column('start_time', DateTime),
Column('finish_time', DateTime),
Column('result', String(length=255)),
Column('traceback', Text),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8',
)
iscsi_targets = Table('compute_iscsi_targets', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('target_num', Integer),
Column('host', String(length=255)),
Column('volume_id', String(length=36), nullable=True),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
key_pairs = Table('key_pairs', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('name', String(length=255)),
Column('user_id', String(length=255)),
Column('fingerprint', String(length=255)),
Column('public_key', MediumText()),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
migrations = Table('migrations', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('source_compute', String(length=255)),
Column('dest_compute', String(length=255)),
Column('dest_host', String(length=255)),
Column('status', String(length=255)),
Column('instance_uuid', String(length=36)),
Column('old_instance_type_id', Integer),
Column('new_instance_type_id', Integer),
Column('source_node', String(length=255)),
Column('dest_node', String(length=255)),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
networks = Table('networks', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('injected', Boolean),
Column('cidr', Inet()),
Column('netmask', InetSmall()),
Column('bridge', String(length=255)),
Column('gateway', InetSmall()),
Column('broadcast', InetSmall()),
Column('dns1', InetSmall()),
Column('vlan', Integer),
Column('vpn_public_address', InetSmall()),
Column('vpn_public_port', Integer),
Column('vpn_private_address', InetSmall()),
Column('dhcp_start', InetSmall()),
Column('project_id', String(length=255)),
Column('host', String(length=255)),
Column('cidr_v6', Inet()),
Column('gateway_v6', InetSmall()),
Column('label', String(length=255)),
Column('netmask_v6', InetSmall()),
Column('bridge_interface', String(length=255)),
Column('multi_host', Boolean),
Column('dns2', InetSmall()),
Column('uuid', String(length=36)),
Column('priority', Integer),
Column('rxtx_base', Integer),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
pci_devices_uc_name = 'uniq_pci_devices0compute_node_id0address0deleted'
pci_devices = Table('pci_devices', meta,
Column('created_at', DateTime(timezone=False)),
Column('updated_at', DateTime(timezone=False)),
Column('deleted_at', DateTime(timezone=False)),
Column('deleted', Integer, default=0, nullable=False),
Column('id', Integer, primary_key=True),
Column('compute_node_id', Integer, nullable=False),
Column('address', String(12), nullable=False),
Column('product_id', String(4)),
Column('vendor_id', String(4)),
Column('dev_type', String(8)),
Column('dev_id', String(255)),
Column('label', String(255), nullable=False),
Column('status', String(36), nullable=False),
Column('extra_info', Text, nullable=True),
Column('instance_uuid', String(36), nullable=True),
Index('ix_pci_devices_compute_node_id_deleted',
'compute_node_id', 'deleted'),
Index('ix_pci_devices_instance_uuid_deleted',
'instance_uuid', 'deleted'),
UniqueConstraint('compute_node_id',
'address', 'deleted',
name=pci_devices_uc_name),
mysql_engine='InnoDB',
mysql_charset='utf8')
provider_fw_rules = Table('provider_fw_rules', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('protocol', String(length=5)),
Column('from_port', Integer),
Column('to_port', Integer),
Column('cidr', Inet()),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
quota_classes = Table('quota_classes', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('class_name', String(length=255)),
Column('resource', String(length=255)),
Column('hard_limit', Integer),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
quota_usages = Table('quota_usages', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('project_id', String(length=255)),
Column('resource', String(length=255)),
Column('in_use', Integer, nullable=False),
Column('reserved', Integer, nullable=False),
Column('until_refresh', Integer),
Column('deleted', Integer),
Column('user_id', String(length=255)),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
quotas = Table('quotas', meta,
Column('id', Integer, primary_key=True, nullable=False),
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('project_id', String(length=255)),
Column('resource', String(length=255), nullable=False),
Column('hard_limit', Integer),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
uniq_name = "uniq_project_user_quotas0user_id0project_id0resource0deleted"
project_user_quotas = Table('project_user_quotas', meta,
Column('id', Integer, primary_key=True,
nullable=False),
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('deleted', Integer),
Column('user_id',
String(length=255),
nullable=False),
Column('project_id',
String(length=255),
nullable=False),
Column('resource',
String(length=255),
nullable=False),
Column('hard_limit', Integer, nullable=True),
UniqueConstraint('user_id', 'project_id', 'resource',
'deleted', name=uniq_name),
mysql_engine='InnoDB',
mysql_charset='utf8',
)
reservations = Table('reservations', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('uuid', String(length=36), nullable=False),
Column('usage_id', Integer, nullable=False),
Column('project_id', String(length=255)),
Column('resource', String(length=255)),
Column('delta', Integer, nullable=False),
Column('expire', DateTime),
Column('deleted', Integer),
Column('user_id', String(length=255)),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
s3_images = Table('s3_images', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('uuid', String(length=36), nullable=False),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
security_group_instance_association = \
Table('security_group_instance_association', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('security_group_id', Integer),
Column('instance_uuid', String(length=36)),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
security_group_rules = Table('security_group_rules', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('parent_group_id', Integer, ForeignKey('security_groups.id')),
Column('protocol', String(length=255)),
Column('from_port', Integer),
Column('to_port', Integer),
Column('cidr', Inet()),
Column('group_id', Integer, ForeignKey('security_groups.id')),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
security_groups = Table('security_groups', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('name', String(length=255)),
Column('description', String(length=255)),
Column('user_id', String(length=255)),
Column('project_id', String(length=255)),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
security_group_default_rules = Table('security_group_default_rules', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('deleted', Integer, default=0),
Column('id', Integer, primary_key=True, nullable=False),
Column('protocol', String(length=5)),
Column('from_port', Integer),
Column('to_port', Integer),
Column('cidr', Inet()),
mysql_engine='InnoDB',
mysql_charset='utf8',
)
services = Table('services', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('host', String(length=255)),
Column('binary', String(length=255)),
Column('topic', String(length=255)),
Column('report_count', Integer, nullable=False),
Column('disabled', Boolean),
Column('deleted', Integer),
Column('disabled_reason', String(length=255)),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
snapshot_id_mappings = Table('snapshot_id_mappings', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('uuid', String(length=36), nullable=False),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
snapshots = Table('snapshots', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', String(length=36), primary_key=True, nullable=False),
Column('volume_id', String(length=36), nullable=False),
Column('user_id', String(length=255)),
Column('project_id', String(length=255)),
Column('status', String(length=255)),
Column('progress', String(length=255)),
Column('volume_size', Integer),
Column('scheduled_at', DateTime),
Column('display_name', String(length=255)),
Column('display_description', String(length=255)),
Column('deleted', String(length=36)),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
task_log = Table('task_log', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('task_name', String(length=255), nullable=False),
Column('state', String(length=255), nullable=False),
Column('host', String(length=255), nullable=False),
Column('period_beginning', DateTime, nullable=False),
Column('period_ending', DateTime, nullable=False),
Column('message', String(length=255), nullable=False),
Column('task_items', Integer),
Column('errors', Integer),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
virtual_interfaces = Table('virtual_interfaces', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('address', String(length=255)),
Column('network_id', Integer),
Column('uuid', String(length=36)),
Column('instance_uuid', String(length=36), nullable=True),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
volume_id_mappings = Table('volume_id_mappings', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('uuid', String(length=36), nullable=False),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
volumes = Table('compute_volumes', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', String(length=36), primary_key=True, nullable=False),
Column('ec2_id', String(length=255)),
Column('user_id', String(length=255)),
Column('project_id', String(length=255)),
Column('host', String(length=255)),
Column('size', Integer),
Column('availability_zone', String(length=255)),
Column('mountpoint', String(length=255)),
Column('status', String(length=255)),
Column('attach_status', String(length=255)),
Column('scheduled_at', DateTime),
Column('launched_at', DateTime),
Column('terminated_at', DateTime),
Column('display_name', String(length=255)),
Column('display_description', String(length=255)),
Column('provider_location', String(length=256)),
Column('provider_auth', String(length=256)),
Column('snapshot_id', String(length=36)),
Column('volume_type_id', Integer),
Column('instance_uuid', String(length=36)),
Column('attach_time', DateTime),
Column('deleted', String(length=36)),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
volume_usage_cache = Table('volume_usage_cache', meta,
Column('created_at', DateTime(timezone=False)),
Column('updated_at', DateTime(timezone=False)),
Column('deleted_at', DateTime(timezone=False)),
Column('id', Integer(), primary_key=True, nullable=False),
Column('volume_id', String(36), nullable=False),
Column('tot_last_refreshed', DateTime(timezone=False)),
Column('tot_reads', BigInteger(), default=0),
Column('tot_read_bytes', BigInteger(), default=0),
Column('tot_writes', BigInteger(), default=0),
Column('tot_write_bytes', BigInteger(), default=0),
Column('curr_last_refreshed', DateTime(timezone=False)),
Column('curr_reads', BigInteger(), default=0),
Column('curr_read_bytes', BigInteger(), default=0),
Column('curr_writes', BigInteger(), default=0),
Column('curr_write_bytes', BigInteger(), default=0),
Column('deleted', Integer),
Column("instance_uuid", String(length=36)),
Column("project_id", String(length=36)),
Column("user_id", String(length=36)),
Column("availability_zone", String(length=255)),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
instances.create()
Index('project_id', instances.c.project_id).create()
Index('uuid', instances.c.uuid, unique=True).create()
# create all tables
tables = [aggregates, console_pools, instance_types,
security_groups, snapshots, volumes,
# those that are children and others later
agent_builds, aggregate_hosts, aggregate_metadata,
block_device_mapping, bw_usage_cache, cells,
certificates, compute_node_stats, compute_nodes, consoles,
dns_domains, fixed_ips, floating_ips,
instance_faults, instance_id_mappings, instance_info_caches,
instance_metadata, instance_system_metadata,
instance_type_extra_specs, instance_type_projects,
instance_actions, instance_actions_events,
groups, group_metadata, group_policy, group_member,
iscsi_targets, key_pairs, migrations, networks,
pci_devices, provider_fw_rules, quota_classes, quota_usages,
quotas, project_user_quotas,
reservations, s3_images, security_group_instance_association,
security_group_rules, security_group_default_rules,
services, snapshot_id_mappings, task_log,
virtual_interfaces,
volume_id_mappings,
volume_usage_cache]
for table in tables:
try:
table.create()
except Exception:
LOG.info(repr(table))
LOG.exception(_LE('Exception while creating table.'))
raise
# task log unique constraint
task_log_uc = "uniq_task_log0task_name0host0period_beginning0period_ending"
task_log_cols = ('task_name', 'host', 'period_beginning', 'period_ending')
uc = UniqueConstraint(*task_log_cols, table=task_log, name=task_log_uc)
uc.create()
# networks unique constraint
UniqueConstraint('vlan', 'deleted', table=networks,
name='uniq_networks0vlan0deleted').create()
# instance_type_name constraint
UniqueConstraint('name', 'deleted', table=instance_types,
name='uniq_instance_types0name0deleted').create()
# flavorid unique constraint
UniqueConstraint('flavorid', 'deleted', table=instance_types,
name='uniq_instance_types0flavorid0deleted').create()
    # keypair constraint
UniqueConstraint('user_id', 'name', 'deleted', table=key_pairs,
name='uniq_key_pairs0user_id0name0deleted').create()
# instance_type_projects constraint
inst_type_uc_name = 'uniq_instance_type_projects0instance_type_id0' + \
'project_id0deleted'
UniqueConstraint('instance_type_id', 'project_id', 'deleted',
table=instance_type_projects,
name=inst_type_uc_name).create()
# floating_ips unique constraint
UniqueConstraint('address', 'deleted',
table=floating_ips,
name='uniq_floating_ips0address0deleted').create()
# instance_info_caches
UniqueConstraint('instance_uuid',
table=instance_info_caches,
name='uniq_instance_info_caches0instance_uuid').create()
UniqueConstraint('address', 'deleted',
table=virtual_interfaces,
name='uniq_virtual_interfaces0address0deleted').create()
# cells
UniqueConstraint('name', 'deleted',
table=cells,
name='uniq_cells0name0deleted').create()
# security_groups
uc = UniqueConstraint('project_id', 'name', 'deleted',
table=security_groups,
name='uniq_security_groups0project_id0name0deleted')
uc.create()
# quotas
UniqueConstraint('project_id', 'resource', 'deleted',
table=quotas,
name='uniq_quotas0project_id0resource0deleted').create()
# fixed_ips
UniqueConstraint('address', 'deleted',
table=fixed_ips,
name='uniq_fixed_ips0address0deleted').create()
# services
UniqueConstraint('host', 'topic', 'deleted',
table=services,
name='uniq_services0host0topic0deleted').create()
UniqueConstraint('host', 'binary', 'deleted',
table=services,
name='uniq_services0host0binary0deleted').create()
# agent_builds
uc_name = 'uniq_agent_builds0hypervisor0os0architecture0deleted'
UniqueConstraint('hypervisor', 'os', 'architecture', 'deleted',
table=agent_builds,
name=uc_name).create()
uc_name = 'uniq_console_pools0host0console_type0compute_host0deleted'
UniqueConstraint('host', 'console_type', 'compute_host', 'deleted',
table=console_pools,
name=uc_name).create()
uc_name = 'uniq_aggregate_hosts0host0aggregate_id0deleted'
UniqueConstraint('host', 'aggregate_id', 'deleted',
table=aggregate_hosts,
name=uc_name).create()
uc_name = 'uniq_aggregate_metadata0aggregate_id0key0deleted'
UniqueConstraint('aggregate_id', 'key', 'deleted',
table=aggregate_metadata,
name=uc_name).create()
uc_name = 'uniq_instance_type_extra_specs0instance_type_id0key0deleted'
UniqueConstraint('instance_type_id', 'key', 'deleted',
table=instance_type_extra_specs,
name=uc_name).create()
# created first (to preserve ordering for schema diffs)
mysql_pre_indexes = [
Index('instance_type_id', instance_type_projects.c.instance_type_id),
Index('project_id', dns_domains.c.project_id),
Index('fixed_ip_id', floating_ips.c.fixed_ip_id),
Index('network_id', virtual_interfaces.c.network_id),
Index('network_id', fixed_ips.c.network_id),
Index('fixed_ips_virtual_interface_id_fkey',
fixed_ips.c.virtual_interface_id),
Index('address', fixed_ips.c.address),
Index('fixed_ips_instance_uuid_fkey', fixed_ips.c.instance_uuid),
Index('instance_uuid', instance_system_metadata.c.instance_uuid),
Index('iscsi_targets_volume_id_fkey', iscsi_targets.c.volume_id),
Index('snapshot_id', block_device_mapping.c.snapshot_id),
Index('usage_id', reservations.c.usage_id),
Index('virtual_interfaces_instance_uuid_fkey',
virtual_interfaces.c.instance_uuid),
Index('volume_id', block_device_mapping.c.volume_id),
Index('security_group_id',
security_group_instance_association.c.security_group_id),
]
# Common indexes (indexes we apply to all databases)
# NOTE: order specific for MySQL diff support
common_indexes = [
# aggregate_metadata
Index('aggregate_metadata_key_idx', aggregate_metadata.c.key),
# agent_builds
Index('agent_builds_hypervisor_os_arch_idx',
agent_builds.c.hypervisor,
agent_builds.c.os,
agent_builds.c.architecture),
# block_device_mapping
Index('block_device_mapping_instance_uuid_idx',
block_device_mapping.c.instance_uuid),
Index('block_device_mapping_instance_uuid_device_name_idx',
block_device_mapping.c.instance_uuid,
block_device_mapping.c.device_name),
# NOTE(dprince): This is now a duplicate index on MySQL and needs to
# be removed there. We leave it here so the Index ordering
# matches on schema diffs (for MySQL).
# See Havana migration 186_new_bdm_format where we dropped the
# virtual_name column.
# IceHouse fix is here: https://bugs.launchpad.net/compute/+bug/1265839
Index(
'block_device_mapping_instance_uuid_virtual_name_device_name_idx',
block_device_mapping.c.instance_uuid,
block_device_mapping.c.device_name),
Index('block_device_mapping_instance_uuid_volume_id_idx',
block_device_mapping.c.instance_uuid,
block_device_mapping.c.volume_id),
# bw_usage_cache
Index('bw_usage_cache_uuid_start_period_idx',
bw_usage_cache.c.uuid, bw_usage_cache.c.start_period),
Index('certificates_project_id_deleted_idx',
certificates.c.project_id, certificates.c.deleted),
Index('certificates_user_id_deleted_idx', certificates.c.user_id,
certificates.c.deleted),
# compute_node_stats
Index('ix_compute_node_stats_compute_node_id',
compute_node_stats.c.compute_node_id),
Index('compute_node_stats_node_id_and_deleted_idx',
compute_node_stats.c.compute_node_id,
compute_node_stats.c.deleted),
# consoles
Index('consoles_instance_uuid_idx', consoles.c.instance_uuid),
# dns_domains
Index('dns_domains_domain_deleted_idx',
dns_domains.c.domain, dns_domains.c.deleted),
# fixed_ips
Index('fixed_ips_host_idx', fixed_ips.c.host),
Index('fixed_ips_network_id_host_deleted_idx', fixed_ips.c.network_id,
fixed_ips.c.host, fixed_ips.c.deleted),
Index('fixed_ips_address_reserved_network_id_deleted_idx',
fixed_ips.c.address, fixed_ips.c.reserved,
fixed_ips.c.network_id, fixed_ips.c.deleted),
Index('fixed_ips_deleted_allocated_idx', fixed_ips.c.address,
fixed_ips.c.deleted, fixed_ips.c.allocated),
# floating_ips
Index('floating_ips_host_idx', floating_ips.c.host),
Index('floating_ips_project_id_idx', floating_ips.c.project_id),
Index('floating_ips_pool_deleted_fixed_ip_id_project_id_idx',
floating_ips.c.pool, floating_ips.c.deleted,
floating_ips.c.fixed_ip_id, floating_ips.c.project_id),
# group_member
Index('instance_group_member_instance_idx',
group_member.c.instance_id),
# group_metadata
Index('instance_group_metadata_key_idx', group_metadata.c.key),
# group_policy
Index('instance_group_policy_policy_idx', group_policy.c.policy),
# instances
Index('instances_reservation_id_idx',
instances.c.reservation_id),
Index('instances_terminated_at_launched_at_idx',
instances.c.terminated_at,
instances.c.launched_at),
Index('instances_task_state_updated_at_idx',
instances.c.task_state,
instances.c.updated_at),
Index('instances_host_deleted_idx', instances.c.host,
instances.c.deleted),
Index('instances_uuid_deleted_idx', instances.c.uuid,
instances.c.deleted),
Index('instances_host_node_deleted_idx', instances.c.host,
instances.c.node, instances.c.deleted),
Index('instances_host_deleted_cleaned_idx',
instances.c.host, instances.c.deleted,
instances.c.cleaned),
# instance_actions
Index('instance_uuid_idx', instance_actions.c.instance_uuid),
Index('request_id_idx', instance_actions.c.request_id),
# instance_faults
Index('instance_faults_host_idx', instance_faults.c.host),
Index('instance_faults_instance_uuid_deleted_created_at_idx',
instance_faults.c.instance_uuid, instance_faults.c.deleted,
instance_faults.c.created_at),
# instance_id_mappings
Index('ix_instance_id_mappings_uuid', instance_id_mappings.c.uuid),
# instance_metadata
Index('instance_metadata_instance_uuid_idx',
instance_metadata.c.instance_uuid),
# instance_type_extra_specs
Index('instance_type_extra_specs_instance_type_id_key_idx',
instance_type_extra_specs.c.instance_type_id,
instance_type_extra_specs.c.key),
# iscsi_targets
Index('iscsi_targets_host_idx', iscsi_targets.c.host),
Index('iscsi_targets_host_volume_id_deleted_idx',
iscsi_targets.c.host, iscsi_targets.c.volume_id,
iscsi_targets.c.deleted),
# migrations
Index('migrations_by_host_nodes_and_status_idx',
migrations.c.deleted, migrations.c.source_compute,
migrations.c.dest_compute, migrations.c.source_node,
migrations.c.dest_node, migrations.c.status),
Index('migrations_instance_uuid_and_status_idx',
migrations.c.deleted, migrations.c.instance_uuid,
migrations.c.status),
# networks
Index('networks_host_idx', networks.c.host),
Index('networks_cidr_v6_idx', networks.c.cidr_v6),
Index('networks_bridge_deleted_idx', networks.c.bridge,
networks.c.deleted),
Index('networks_project_id_deleted_idx', networks.c.project_id,
networks.c.deleted),
Index('networks_uuid_project_id_deleted_idx',
networks.c.uuid, networks.c.project_id, networks.c.deleted),
Index('networks_vlan_deleted_idx', networks.c.vlan,
networks.c.deleted),
# project_user_quotas
Index('project_user_quotas_project_id_deleted_idx',
project_user_quotas.c.project_id,
project_user_quotas.c.deleted),
Index('project_user_quotas_user_id_deleted_idx',
project_user_quotas.c.user_id, project_user_quotas.c.deleted),
# reservations
Index('ix_reservations_project_id', reservations.c.project_id),
Index('ix_reservations_user_id_deleted',
reservations.c.user_id, reservations.c.deleted),
Index('reservations_uuid_idx', reservations.c.uuid),
# security_group_instance_association
Index('security_group_instance_association_instance_uuid_idx',
security_group_instance_association.c.instance_uuid),
# task_log
Index('ix_task_log_period_beginning', task_log.c.period_beginning),
Index('ix_task_log_host', task_log.c.host),
Index('ix_task_log_period_ending', task_log.c.period_ending),
# quota_classes
Index('ix_quota_classes_class_name', quota_classes.c.class_name),
# quota_usages
Index('ix_quota_usages_project_id', quota_usages.c.project_id),
Index('ix_quota_usages_user_id_deleted',
quota_usages.c.user_id, quota_usages.c.deleted),
# volumes
Index('volumes_instance_uuid_idx', volumes.c.instance_uuid),
]
# MySQL specific indexes
if migrate_engine.name == 'mysql':
for index in mysql_pre_indexes:
index.create(migrate_engine)
# mysql-specific index by leftmost 100 chars. (mysql gets angry if the
# index key length is too long.)
sql = ("create index migrations_by_host_nodes_and_status_idx ON "
"migrations (deleted, source_compute(100), dest_compute(100), "
"source_node(100), dest_node(100), status)")
migrate_engine.execute(sql)
# PostgreSQL specific indexes
if migrate_engine.name == 'postgresql':
Index('address', fixed_ips.c.address).create()
# NOTE(dprince): PostgreSQL doesn't allow duplicate indexes
# so we skip creation of select indexes (so schemas match exactly).
POSTGRES_INDEX_SKIPS = [
# See Havana migration 186_new_bdm_format where we dropped the
# virtual_name column.
# IceHouse fix is here: https://bugs.launchpad.net/compute/+bug/1265839
'block_device_mapping_instance_uuid_virtual_name_device_name_idx'
]
# NOTE(mriedem): DB2 doesn't allow duplicate indexes either.
DB2_INDEX_SKIPS = POSTGRES_INDEX_SKIPS
MYSQL_INDEX_SKIPS = [
# we create this one manually for MySQL above
'migrations_by_host_nodes_and_status_idx'
]
for index in common_indexes:
if ((migrate_engine.name == 'postgresql' and
index.name in POSTGRES_INDEX_SKIPS) or
(migrate_engine.name == 'mysql' and
index.name in MYSQL_INDEX_SKIPS) or
(migrate_engine.name == 'ibm_db_sa' and
index.name in DB2_INDEX_SKIPS)):
continue
else:
index.create(migrate_engine)
    # NOTE: '.drop' below is an attribute reference, not a call, so this
    # statement is effectively a no-op and the MySQL-only 'project_id' index
    # created above is left in place.
    Index('project_id', dns_domains.c.project_id).drop
# Common foreign keys
fkeys = [
[[instance_type_projects.c.instance_type_id],
[instance_types.c.id],
'instance_type_projects_ibfk_1'],
[[iscsi_targets.c.volume_id],
[volumes.c.id],
'iscsi_targets_volume_id_fkey'],
[[reservations.c.usage_id],
[quota_usages.c.id],
'reservations_ibfk_1'],
[[security_group_instance_association.c.security_group_id],
[security_groups.c.id],
'security_group_instance_association_ibfk_1'],
[[compute_node_stats.c.compute_node_id],
[compute_nodes.c.id],
'fk_compute_node_stats_compute_node_id'],
[[compute_nodes.c.service_id],
[services.c.id],
'fk_compute_nodes_service_id'],
]
# NOTE(mriedem): DB2 doesn't support unique constraints on columns that
# are nullable so we can only create foreign keys on unique constraints
# that actually exist, which excludes any FK on instances.uuid.
if migrate_engine.name != 'ibm_db_sa':
secgroup_instance_association_instance_uuid_fkey = (
'security_group_instance_association_instance_uuid_fkey')
fkeys.extend(
[
[[fixed_ips.c.instance_uuid],
[instances.c.uuid],
'fixed_ips_instance_uuid_fkey'],
[[block_device_mapping.c.instance_uuid],
[instances.c.uuid],
'block_device_mapping_instance_uuid_fkey'],
[[instance_info_caches.c.instance_uuid],
[instances.c.uuid],
'instance_info_caches_instance_uuid_fkey'],
[[instance_metadata.c.instance_uuid],
[instances.c.uuid],
'instance_metadata_instance_uuid_fkey'],
[[instance_system_metadata.c.instance_uuid],
[instances.c.uuid],
'instance_system_metadata_ibfk_1'],
[[security_group_instance_association.c.instance_uuid],
[instances.c.uuid],
secgroup_instance_association_instance_uuid_fkey],
[[virtual_interfaces.c.instance_uuid],
[instances.c.uuid],
'virtual_interfaces_instance_uuid_fkey'],
[[instance_actions.c.instance_uuid],
[instances.c.uuid],
'fk_instance_actions_instance_uuid'],
[[instance_faults.c.instance_uuid],
[instances.c.uuid],
'fk_instance_faults_instance_uuid'],
[[migrations.c.instance_uuid],
[instances.c.uuid],
'fk_migrations_instance_uuid']
])
for fkey_pair in fkeys:
if migrate_engine.name in ('mysql', 'ibm_db_sa'):
# For MySQL and DB2 we name our fkeys explicitly
# so they match Havana
fkey = ForeignKeyConstraint(columns=fkey_pair[0],
refcolumns=fkey_pair[1],
name=fkey_pair[2])
fkey.create()
elif migrate_engine.name == 'postgresql':
# PostgreSQL names things like it wants (correct and compatible!)
fkey = ForeignKeyConstraint(columns=fkey_pair[0],
refcolumns=fkey_pair[1])
fkey.create()
if migrate_engine.name == 'mysql':
# In Folsom we explicitly converted migrate_version to UTF8.
migrate_engine.execute(
'ALTER TABLE migrate_version CONVERT TO CHARACTER SET utf8')
# Set default DB charset to UTF8.
migrate_engine.execute(
'ALTER DATABASE %s DEFAULT CHARACTER SET utf8' %
migrate_engine.url.database)
_create_shadow_tables(migrate_engine)
# populate initial instance types
_populate_instance_types(instance_types)
_create_dump_tables(migrate_engine)
import os
import re
from django.http import HttpResponseRedirect, HttpResponse, Http404
from django.contrib.auth.decorators import login_required
from localflavor.us.us_states import US_STATES
from django.contrib import messages
from django.template.loader import get_template as django_get_template
from django.template import Context, RequestContext
from website.utils.httpUtil import HttpRequestProcessor
from django.views.decorators import csrf
from dajax.core import Dajax
from django.conf import settings as django_settings
from django.core.mail.message import EmailMessage
from django.shortcuts import render, render_to_response, redirect
from website.utils.mathUtil import MathUtil
from website.utils.geoHelper import GeoHelper
from website.models import Jurisdiction, Zipcode, UserSearch, Question, AnswerReference, AnswerAttachment, OrganizationMember, QuestionCategory, Comment, UserCommentView, Template, TemplateQuestion, ActionCategory, JurisdictionContributor, Action, UserDetail
from website.models import View, ViewQuestions, ViewOrgs
from website.utils.messageUtil import MessageUtil,add_system_message,get_system_message
from website.utils.miscUtil import UrlUtil
from website.utils.fieldValidationCycleUtil import FieldValidationCycleUtil
from website.utils.datetimeUtil import DatetimeHelper
from django.contrib.auth.models import User
import json
import datetime
import operator
from django.db import connections, transaction
from BeautifulSoup import BeautifulSoup
from website.utils.fileUploader import qqFileUploader
from django.utils.safestring import mark_safe
from website.utils import reporting
JURISDICTION_PAGE_SIZE = 30 #page size for endless scroll
def jurisdiction_comment(request):
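    """AJAX dispatcher for jurisdiction comment actions.

    Handles the 'ajax' parameter values used below (open_jurisdiction_comment,
    create_jurisdiction_comment, comment_create_submit, reply_comment,
    cancel_reply, flag_comment, show_old_comments) and returns the rendered
    Dajax payload as an HttpResponse.
    """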
requestProcessor = HttpRequestProcessor(request)
user = request.user
data = {}
dajax = Dajax()
ajax = requestProcessor.getParameter('ajax')
comments_changed = requestProcessor.getParameter('comments_changed')
if comments_changed == 'yes':
data['comments_changed'] = 'yes'
else:
data['comments_changed'] = 'no'
if (ajax != None):
if ajax == 'open_jurisdiction_comment':
entity_id = requestProcessor.getParameter('entity_id')
entity_name = requestProcessor.getParameter('entity_name')
jid = requestProcessor.getParameter('jurisdiction_id')
try:
jurisdiction = Jurisdiction.objects.get(id = jid)
except:
jurisdiction = None
comments = Comment.objects.filter(jurisdiction = jurisdiction, entity_name = entity_name, entity_id = entity_id, parent_comment__isnull = True).order_by('-create_datetime')
userviews = UserCommentView.objects.filter(jurisdiction = jurisdiction, entity_name = entity_name, entity_id = entity_id, user = user)
temp_comments = Comment.objects.filter(jurisdiction = jurisdiction, entity_name = entity_name, entity_id = entity_id).order_by('-create_datetime')
last_comment = None
if len(temp_comments) > 0 :
last_comment = temp_comments[0]
has_userview = False
if len(userviews) > 0:
userview = userviews[0]
if userview.last_comment != None:
data['userview_last_comment'] = userview.last_comment.id
data['userview'] = userviews[0]
userview.comments_count = Comment.objects.filter(jurisdiction = jurisdiction, entity_name = entity_name, entity_id = entity_id).count()
userview.last_comment = last_comment
userview.view_datetime = datetime.datetime.now()
userview.save()
has_userview = True
if has_userview == False:
userview = None
data['userview'] = userview
data['userview_last_comment'] = 0
userview = UserCommentView()
userview.user = user
userview.jurisdiction = jurisdiction
userview.entity_name = entity_name
userview.entity_id = entity_id
userview.comments_count = Comment.objects.filter(jurisdiction = jurisdiction, entity_name = entity_name, entity_id = entity_id).count()
userview.last_comment = last_comment
userview.view_datetime = datetime.datetime.now()
userview.save()
af = AnswerReference.objects.get(id = entity_id)
validation_util_obj = FieldValidationCycleUtil()
old_data = {}
old_data['jurisdiction_type'] = data['jurisdiction_type'] = af.jurisdiction.get_jurisdiction_type()
old_data['jurisdiction_id'] = data['jurisdiction_id'] = af.jurisdiction.id
old_data['jurisdiction'] = data['jurisdiction'] = af.jurisdiction
old_data['this_question'] = data['this_question'] = af.question
category_name = 'VoteRequirement'
vote_info = validation_util_obj.get_jurisdiction_voting_info_by_category(category_name, af.jurisdiction, af.question.category, af.question)
terminology = validation_util_obj.get_terminology(af.question)
#question_content = validation_util_obj.get_AHJ_question_data(request, update.jurisdiction, update.question, data)
question_content = validation_util_obj.get_authenticated_displayed_content(request, af.jurisdiction, af.question, vote_info, [af], terminology)
for key in question_content.keys():
data[key] = question_content.get(key)
data['answer'] = af
data['answer_id'] = af.id
#data['answer_text'] = aa.get_formatted_value(af.value, af.question)
answer_text = requestProcessor.decode_jinga_template(request,'website/blocks/display_answer.html', data, '')
data['answer_text'] = mark_safe(answer_text)
data['jurisdiction'] = jurisdiction
label = af.question.question
if len(af.question.question) > 75:
label = af.question.question[:78]+ '...'
data['label'] = label
data['comments'] = comments
others_afs = AnswerReference.objects.filter(jurisdiction = jurisdiction, question = af.question, approval_status='A').exclude(id = entity_id).order_by('-create_datetime')
if len(others_afs) > 0 :
old_answer = others_afs[0]
if old_answer.id < af.id:
data['old_answer'] = old_answer
old_question_content = validation_util_obj.get_authenticated_displayed_content(request, old_answer.jurisdiction, old_answer.question, vote_info, [old_answer], terminology)
for key in old_question_content.keys():
old_data[key] = old_question_content.get(key)
#data['old_answer_text'] = aa.get_formatted_value(old_answer.value, old_answer.question)
old_answer_text = requestProcessor.decode_jinga_template(request,'website/blocks/display_answer.html', old_data, '')
data['old_answer_text'] = mark_safe(old_answer_text)
else:
data['old_answer'] = None
data['old_answer_text'] = ''
else:
data['old_answer'] = None
data['old_answer_text'] = ''
body = requestProcessor.decode_jinga_template(request,'website/jurisdictions/jurisdiction_comment.html', data, '')
dajax.assign('#fancyboxformDiv','innerHTML', body)
script = requestProcessor.decode_jinga_template(request,'website/jurisdictions/jurisdiction_comment.js' , data, '')
dajax.script(script)
script = requestProcessor.decode_jinga_template(request,'website/blocks/comments_list.js' , data, '')
dajax.script(script)
dajax.script('controller.showModalDialog("#fancyboxformDiv");')
dajax.script('controller.updateUrlAnchor("#view_comment");')
if ajax =='create_jurisdiction_comment':
if not request.user.is_authenticated():
return HttpResponse(status=403)
answer_id = requestProcessor.getParameter('answer_id')
jid = requestProcessor.getParameter('jurisdiction_id')
comment_type = 'JC'
data['answer_id'] = answer_id
data['jurisdiction_id'] = jid
data['comment_type'] = comment_type
data['parent_comment'] = ''
body = requestProcessor.decode_jinga_template(request,'website/jurisdictions/create_comment.html', data, '')
script = requestProcessor.decode_jinga_template(request,'website/jurisdictions/create_comment.js' , data, '')
dajax.assign('#secondDialogDiv','innerHTML', body)
dajax.script(script)
dajax.script('controller.showSecondDialog("#secondDialogDiv");')
if ajax =='comment_create_submit':
if not request.user.is_authenticated():
return HttpResponse(status=403)
entity_id = requestProcessor.getParameter('entity_id')
entity_name = requestProcessor.getParameter('entity_name')
jurisdiction_id = requestProcessor.getParameter('jurisdiction_id')
comment_type = requestProcessor.getParameter('comment_type')
comment_text = requestProcessor.getParameter('comment')
parent_comment = requestProcessor.getParameter('parent_comment')
try:
jurisdiction = Jurisdiction.objects.get(id = jurisdiction_id)
except:
jurisdiction = None
comment = Comment()
comment.jurisdiction = jurisdiction
if entity_name != 'None':
comment.entity_name = entity_name
else:
entity_name = None
if entity_id != 'None':
comment.entity_id = entity_id
else:
entity_id = None
comment.user = user
comment.comment_type = comment_type
comment.comment = comment_text
if parent_comment != '':
parent = Comment.objects.get(id = parent_comment)
comment.parent_comment = parent
comment.save()
userviews = UserCommentView.objects.filter(user = user, jurisdiction = jurisdiction, entity_name = entity_name, entity_id = entity_id)
if userviews:
userview = userviews[0]
userview.last_comment = comment
userview.comments_count = Comment.objects.filter(jurisdiction = jurisdiction, entity_name = entity_name, entity_id = entity_id).count()
userview.view_datetime = datetime.datetime.now()
userview.save()
dajax.script('controller.closeSecondDialog();')
dajax.script('controller.postRequest("/jurisdiction_comment/", {ajax: "open_jurisdiction_comment", jurisdiction_id:'+str(jurisdiction_id)+', entity_id: "'+str(entity_id)+'", entity_name: "'+str(entity_name)+'", comments_changed: "yes"});')
data = {}
answers = AnswerReference.objects.filter(id=entity_id)
data['answers_comments'] = get_answers_comments( jurisdiction, answers, user)
dajax.add_data(data, 'process_ahj_comments')
dajax.script('controller.updateUrlAnchor("#add_comment");')
if ajax =='reply_comment':
if not request.user.is_authenticated():
return HttpResponse(status=403)
cid = requestProcessor.getParameter('cid')
comment = Comment.objects.get(id = cid)
data['comment'] = comment
body = requestProcessor.decode_jinga_template(request,'website/blocks/reply_comment_form.html', data, '')
script = requestProcessor.decode_jinga_template(request,'website/blocks/reply_comment_form.js' , data, '')
dajax.assign('#button-div-'+str(cid),'innerHTML', body)
dajax.script(script)
if ajax == 'cancel_reply':
if not request.user.is_authenticated():
return HttpResponse(status=403)
cid = requestProcessor.getParameter('cid')
body = '<a class="smallbutton commentReplayBtn" data-cid="'+cid+'" href="#">Reply</a><a class="smallbutton commentFlagBtn" data-cid="'+cid+'" href="#">Flag</a>'
dajax.assign('#button-div-'+str(cid),'innerHTML', body)
script = requestProcessor.decode_jinga_template(request,'website/blocks/comments_list.js' , data, '')
dajax.script(script)
if ajax == 'flag_comment':
cid = requestProcessor.getParameter('cid')
comment = Comment.objects.get(id = cid)
comment.approval_status = 'F'
comment.save()
af = AnswerReference.objects.get(id = comment.entity_id)
to_mail = [django_settings.ADMIN_EMAIL_ADDRESS]
data['comment'] = comment
data['user'] = user
data['question'] = af.question.question
data['site_url'] = django_settings.SITE_URL
data['requestProcessor'] = requestProcessor
data['request'] = request
send_email(data, to_mail)
dajax.assign('#comment_'+str(cid), 'innerHTML', '<p>This comment has been flagged as inappropriate and is hidden pending review.</p>')
if ajax == 'show_old_comments':
entity_id = requestProcessor.getParameter('answer_id')
entity_name = 'AnswerReference'
jid = requestProcessor.getParameter('jurisdiction_id')
try:
jurisdiction = Jurisdiction.objects.get(id = jid)
except:
jurisdiction = None
data['jurisdiction'] = jurisdiction
data['answer_id'] = entity_id
comments = Comment.objects.filter(jurisdiction = jurisdiction, entity_name = entity_name, entity_id = entity_id, parent_comment__isnull = True).order_by('-create_datetime')
userviews = UserCommentView.objects.filter(jurisdiction = jurisdiction, entity_name = entity_name, entity_id = entity_id, user = user)
temp_comments = Comment.objects.filter(jurisdiction = jurisdiction, entity_name = entity_name, entity_id = entity_id).order_by('-create_datetime')
last_comment = None
if len(temp_comments) > 0 :
last_comment = temp_comments[0]
if len(userviews) > 0:
userview = userviews[0]
data['userview'] = userview
data['userview_last_comment'] = userview.last_comment.id
userview.comments_count = Comment.objects.filter(jurisdiction = jurisdiction, entity_name = entity_name, entity_id = entity_id).count()
userview.last_comment = last_comment
userview.view_datetime = datetime.datetime.now()
userview.save()
else:
userview = None
data['userview'] = userview
data['userview_last_comment'] = 0
userview = UserCommentView()
userview.user = user
userview.jurisdiction = jurisdiction
userview.entity_name = entity_name
userview.entity_id = entity_id
userview.comments_count = Comment.objects.filter(jurisdiction = jurisdiction, entity_name = entity_name, entity_id = entity_id).count()
userview.last_comment = last_comment
userview.view_datetime = datetime.datetime.now()
userview.save()
data['comments'] = comments
body = requestProcessor.decode_jinga_template(request,'website/blocks/comments_list.html', data, '')
dajax.assign('#old_list ul', 'innerHTML', body)
scripts = requestProcessor.decode_jinga_template(request,'website/blocks/comments_list.js' , data, '')
dajax.script(scripts)
dajax.assign('#show_commnet_div', 'innerHTML', '<a id="id_a_hide" href="#"><img src="/media/images/arrow_down.png" style="vertical-align:middle;" alt="Hide old comments"> Hide old comments </a>')
script = requestProcessor.decode_jinga_template(request,'website/jurisdictions/jurisdiction_comment.js' , data, '')
dajax.script(script)
return HttpResponse(dajax.json())
return
def send_email(data, to_mail, subject='Flag Comment', template='flag_comment.jinja'):
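    """Render 'website/emails/<template>' with `data` and send it as an HTML email.

    `data` is expected to carry the HttpRequestProcessor and request used for
    rendering; the sender address comes from DEFAULT_FROM_EMAIL.
    """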
requestProcessor = data['requestProcessor']
request = data['request']
body = requestProcessor.decode_jinga_template(request, 'website/emails/' + template, data, '')
from_mail = django_settings.DEFAULT_FROM_EMAIL
msg = EmailMessage( subject, body, from_mail, to_mail)
msg.content_subtype = "html"
msg.send()
def view_sc_AHJ(request, jurisdiction):
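    """Render the AHJ page for an 'SC' jurisdiction, listing its SCFO child jurisdictions."""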
requestProcessor = HttpRequestProcessor(request)
data = {}
data['home'] = '/'
data['state'] = jurisdiction.state
data['state_long_name'] = dict(US_STATES)[data['state']]
data['city'] = jurisdiction.city
data['jurisdiction_type'] = jurisdiction.get_jurisdiction_type()
data['jurisdiction_id'] = jurisdiction.id
    data['jurisdiction'] = jurisdiction
scfo_jurisdictions = Jurisdiction.objects.filter(parent=jurisdiction, jurisdiction_type__iexact='SCFO')
data['scfo_jurisdictions'] = scfo_jurisdictions
return requestProcessor.render_to_response(request,'website/jurisdictions/AHJ_sc.html', data, '')
def view_unincorporated_AHJ(request, jurisdiction):
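    """Render the AHJ page for unincorporated jurisdictions and handle its
    comment-related AJAX actions (create, reply, flag, remove)."""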
requestProcessor = HttpRequestProcessor(request)
dajax = Dajax()
ajax = requestProcessor.getParameter('ajax')
user = request.user
data = {}
data['authenticated_page_message'] = ''
data['unauthenticated_page_message']= "Please log in to add or view comments."
data['home'] = '/'
data['state'] = jurisdiction.state
data['state_long_name'] = dict(US_STATES)[data['state']]
data['city'] = jurisdiction.city
data['jurisdiction_type'] = jurisdiction.get_jurisdiction_type()
data['jurisdiction_id'] = jurisdiction.id
data['jurisdiction'] = jurisdiction
data['user'] = user
parent_jurisdiction = None
try:
parent_jurisdiction = jurisdiction.parent
if parent_jurisdiction != None:
if parent_jurisdiction.jurisdiction_type == 'CINP' or parent_jurisdiction.jurisdiction_type == 'CONP':
parent_jurisdiction = parent_jurisdiction.parent
except:
parent_jurisdiction = None
data['parent_jurisdiction'] = parent_jurisdiction
    comments = Comment.objects.filter(jurisdiction = jurisdiction, parent_comment__isnull = True).order_by('-create_datetime')
data['comments'] = comments
data['userview_last_comment'] = 0
if ajax != None:
if ajax == 'create_jurisdiction_ucomment':
if not request.user.is_authenticated():
return HttpResponse(status=403)
data['comment_type'] = 'JC'
data['parent_comment'] = ''
body = requestProcessor.decode_jinga_template(request,'website/blocks/create_ucomment.html', data, '')
dajax.assign('#secondDialogDiv','innerHTML', body)
script = requestProcessor.decode_jinga_template(request,'website/blocks/create_ucomment.js' , data, '')
dajax.script(script)
dajax.script('controller.showSecondDialog("#secondDialogDiv", {top: 185});')
if ajax == 'ucomment_create_submit':
if not request.user.is_authenticated():
return HttpResponse(status=403)
comment_text = requestProcessor.getParameter('comment')
parent_comment = requestProcessor.getParameter('parent_comment')
comment = Comment()
comment.jurisdiction = jurisdiction
comment.user = user
comment.comment_type = 'JC'
comment.comment = comment_text
if parent_comment != '':
parent = Comment.objects.get(id = parent_comment)
comment.parent_comment = parent
comment.save()
dajax.script('controller.closeSecondDialog();')
            comments = Comment.objects.filter(jurisdiction = jurisdiction, parent_comment__isnull = True).order_by('-create_datetime')
data['comments'] = comments
body = requestProcessor.decode_jinga_template(request,'website/blocks/ucomment_list.html', data, '')
script = requestProcessor.decode_jinga_template(request,'website/blocks/ucomment_list.js' , data, '')
dajax.assign('.ul-level-1','innerHTML', body)
dajax.script(script)
if ajax == 'cancel_reply':
if not request.user.is_authenticated():
return HttpResponse(status=403)
cid = requestProcessor.getParameter('cid')
body = '<a class="smallbutton ucommentReplyBtn" data-cid="'+cid+'" href="#">Reply</a><a class="smallbutton ucommentDeleteBtn" data-cid="'+cid+'" href="#">Delete</a><a class="smallbutton ucommentFlagBtn" data-cid="'+cid+'" href="#">Flag</a>'
dajax.assign('#button-div-'+str(cid),'innerHTML', body)
script = requestProcessor.decode_jinga_template(request,'website/blocks/ucomment_list.js' , data, '')
dajax.script(script)
if ajax =='reply_comment':
if not request.user.is_authenticated():
return HttpResponse(status=403)
cid = requestProcessor.getParameter('cid')
comment = Comment.objects.get(id = cid)
data['comment'] = comment
body = requestProcessor.decode_jinga_template(request,'website/blocks/reply_ucomment_form.html', data, '')
script = requestProcessor.decode_jinga_template(request,'website/blocks/reply_ucomment_form.js' , data, '')
dajax.assign('#button-div-'+str(cid),'innerHTML', body)
dajax.script(script)
if ajax == 'flag_comment':
cid = requestProcessor.getParameter('cid')
comment = Comment.objects.get(id = cid)
comment.approval_status = 'F'
comment.save()
to_mail = [django_settings.ADMIN_EMAIL_ADDRESS]
data['comment'] = comment
data['user'] = user
data['site_url'] = django_settings.SITE_URL
data['requestProcessor'] = requestProcessor
data['request'] = request
email_host = [django_settings.EMAIL_HOST]
if email_host:
send_email(data, to_mail, subject='Flag Comment', template='flag_ucomment.html')
            dajax.assign('#comment_'+str(cid), 'innerHTML', '<p>This comment has been flagged as inappropriate and is hidden pending review.</p>')
if ajax == 'remove_comment':
if not request.user.is_authenticated():
return HttpResponse(status=403)
cid = requestProcessor.getParameter('cid')
try:
comment = Comment.objects.get(id = cid)
cid = comment.id
except:
cid = 0
if cid != 0:
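# delete_comment() removes a comment only once it has no replies left, so it is
# invoked repeatedly here to peel away nested replies (up to four levels deep).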
try:
for i in range(0, 4):
delete_comment(comment)
except:
pass
dajax.script('$("#li-'+str(cid)+'").remove();')
dajax.script("controller.showMessage('The comment(s) have been deleted.', 'success');")
return HttpResponse(dajax.json())
return requestProcessor.render_to_response(request,'website/jurisdictions/AHJ_unincorporated.html', data, '')
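# Recursively walk a comment's replies; a comment is only deleted when it has
# no remaining replies, so callers may need to invoke this more than once.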
def delete_comment(comment):
comments = Comment.objects.filter(parent_comment = comment)
if len(comments) > 0:
for c in comments:
delete_comment(c)
else:
comment.delete()
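# Resolve a jurisdiction from either a numeric id or its name_for_url, then
# dispatch: unincorporated/no-permit types go to view_unincorporated_AHJ,
# 'SC' goes to view_sc_AHJ, everything else renders the categorized Q&A view.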
def view_AHJ_by_name(request, name, category='all_info'):
mathUtil = MathUtil()
if mathUtil.is_number(name):
try:
jurisdictions = Jurisdiction.objects.filter(id=name)
except:
raise Http404
else:
jurisdictions = Jurisdiction.objects.filter(name_for_url__iexact=name)
if len(jurisdictions) >= 1:
jurisdiction = jurisdictions[0]
id = jurisdiction.id
if jurisdiction.jurisdiction_type == 'U' or jurisdiction.jurisdiction_type == 'CONP' or jurisdiction.jurisdiction_type == 'CINP':
return view_unincorporated_AHJ(request, jurisdiction)
elif jurisdiction.jurisdiction_type == 'SC':
return view_sc_AHJ(request, jurisdiction)
else:
user = request.user
## ensure the user has access to the view requested via the URL
if category != 'all_info':
question_categories = QuestionCategory.objects.filter(name__iexact=category, accepted=1)
if len(question_categories) == 0: # not a question category; could be favorites, quirks, attachments, or a named view
if category == 'favorite_fields':
if user.is_authenticated():
pass
else:
return redirect('/') # favorite fields require login => redirect home
elif category == 'quirks':
pass
elif category == 'attachments':
pass
else: # views
if user.is_authenticated():
login_user = User.objects.get(id=user.id)
if login_user.is_staff or login_user.is_superuser or ('accessible_views' in request.session and category in request.session['accessible_views']):
pass # logged-in user is staff/superuser, or has access to this view through an organization membership
else:
return redirect('/jurisdiction/'+str(jurisdiction.name_for_url)+'/') # no access to this non-question category: send the user to the jurisdiction's main page
else:
return redirect('/')
requestProcessor = HttpRequestProcessor(request)
layout = requestProcessor.getParameter('layout')
if layout == 'print':
return view_AHJ_cqa_print(request, jurisdiction, category)
else:
return view_AHJ_cqa(request, jurisdiction, category)
else:
raise Http404
return redirect('/')
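# Print-friendly variant of the AHJ page: gathers the same question/answer
# records for the requested category and renders AHJ_cqa_print.html without
# the interactive (AJAX) machinery.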
def view_AHJ_cqa_print(request, jurisdiction, category='all_info'):
data= {}
user = request.user
requestProcessor = HttpRequestProcessor(request)
validation_util_obj = FieldValidationCycleUtil()
show_google_map = False
if category != 'favorite_fields' and category != 'quirks':
if 'empty_data_fields_hidden' not in request.session:
empty_data_fields_hidden = 1
else:
empty_data_fields_hidden = request.session['empty_data_fields_hidden']
else:
empty_data_fields_hidden = 0
(question_ids, view) = get_questions_in_category(user, jurisdiction, category)
data['view'] = view
records = get_ahj_data(jurisdiction, category, empty_data_fields_hidden, user, question_ids)
answers_contents = {}
questions_have_answers = {}
questions_terminology = {}
for rec in records:
if rec['question_id'] not in questions_have_answers:
questions_have_answers[rec['question_id']] = False
if rec['question_id'] not in questions_terminology:
questions_terminology[rec['question_id']] = Question().get_question_terminology(rec['question_id'])
if rec['question_id'] == 4:
show_google_map = True
if rec['id'] != None:
if rec['question_id'] == 16:
fee_info = validation_util_obj.process_fee_structure(json.loads(rec['value']) )
for key in fee_info.keys():
data[key] = fee_info.get(key)
answer_content = json.loads(rec['value'])
answers_contents[rec['id']] = answer_content
questions_have_answers[rec['question_id']] = True
if category == 'all_info' or show_google_map == True:
data['show_google_map'] = show_google_map
################# get the correct address for google map ####################
question = Question.objects.get(id=4)
data['str_address'] = question.get_addresses_for_map(jurisdiction)
data['google_api_key'] = django_settings.GOOGLE_API_KEY
data['cqa'] = records
data['questions_terminology'] = questions_terminology
data['questions_have_answers'] = questions_have_answers
data['answers_contents'] = answers_contents
data['user'] = user
data['jurisdiction'] = jurisdiction
return requestProcessor.render_to_response(request,'website/jurisdictions/AHJ_cqa_print.html', data, '')
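# Main AHJ detail view. The 'ajax' request parameter selects one of many
# partial handlers (headings, actions, messages, votes, add/edit forms,
# suggestion submission, favorites/quirks, validation history, voting);
# with no 'ajax' parameter the full AHJ_cqa.html page is rendered.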
def view_AHJ_cqa(request, jurisdiction, category='all_info'):
dajax = Dajax()
validation_util_obj = FieldValidationCycleUtil()
requestProcessor = HttpRequestProcessor(request)
user = request.user
data = {}
data['time'] = []
data['category'] = category
if category == 'all_info':
data['category_name'] = 'All Categories'
else:
data['category_name'] = category
empty_data_fields_hidden = True
if category != 'favorite_fields' and category != 'quirks':
if jurisdiction.last_contributed_by == None:
empty_data_fields_hidden = False
else:
if 'empty_data_fields_hidden' in request.session:
empty_data_fields_hidden = request.session['empty_data_fields_hidden']
else:
empty_data_fields_hidden = False
param = requestProcessor.getParameter('empty_data_fields_hidden')
if param != None:
empty_data_fields_hidden = param == "1"
data['empty_data_fields_hidden'] = 1 if empty_data_fields_hidden else 0
ajax = requestProcessor.getParameter('ajax')
if ajax != None and ajax != '':
if (ajax == 'get_ahj_answers_headings'):
questions_answers = {}
jurisdiction_templates = get_jurisdiction_templates(jurisdiction)
jurisdiction_questions = get_jurisdiction_questions(jurisdiction, jurisdiction_templates, user, category)
for question in jurisdiction_questions:
questions_answers[question.id] = []
jurisdiction_answers = get_jurisdiction_answers(jurisdiction, jurisdiction_templates, jurisdiction_questions)
for answer in jurisdiction_answers:
questions_answers[answer.question.id].append(answer)
data['answers_headings'] = get_questions_answers_headings(questions_answers, user)
dajax.add_data(data, 'process_ahj_answers_headings')
return HttpResponse(dajax.json())
if (ajax == 'get_ahj_questions_actions'):
if not request.user.is_authenticated():
return HttpResponse(status=403)
data['questions_actions'] = get_ahj_actions( jurisdiction, user)
dajax.add_data(data, 'process_ahj_actions')
return HttpResponse(dajax.json())
if (ajax == 'get_ahj_questions_messages'):
jurisdiction_templates = get_jurisdiction_templates(jurisdiction)
jurisdiction_questions = get_jurisdiction_questions(jurisdiction, jurisdiction_templates, user, category)
jurisdiction_answers = get_jurisdiction_answers(jurisdiction, jurisdiction_templates, jurisdiction_questions)
answer_question_ids = jurisdiction_answers.values_list('question_id').distinct()
questions_with_answers = jurisdiction_questions.filter(id__in=answer_question_ids)
data['questions_messages'] = get_ahj_questions_messages(questions_with_answers, jurisdiction_answers, user)
dajax.add_data(data, 'process_ahj_questions_messages')
return HttpResponse(dajax.json())
if (ajax == 'get_ahj_answers_validation_history_and_comments'):
jurisdiction_templates = get_jurisdiction_templates(jurisdiction)
jurisdiction_questions = get_jurisdiction_questions(jurisdiction, jurisdiction_templates, user, category)
jurisdiction_answers = get_jurisdiction_answers(jurisdiction, jurisdiction_templates, jurisdiction_questions)
data['answers_comments'] = get_answers_comments( jurisdiction, jurisdiction_answers, user)
dajax.add_data(data, 'process_ahj_comments')
return HttpResponse(dajax.json())
if (ajax == 'get_ahj_ahj_top_contributors'):
data_top_contributors = {}
data_top_contributors['top_contributors'] = get_ahj_top_contributors(jurisdiction, category)
data['top_contributors'] = requestProcessor.decode_jinga_template(request,'website/jurisdictions/AHJ_top_contributors.html', data_top_contributors, '')
dajax.add_data(data, 'process_ahj_top_contributors')
return HttpResponse(dajax.json())
if (ajax == 'get_ahj_answers_attachments'):
data['answers_attachments'] = get_ahj_answers_attachments(jurisdiction)
dajax.add_data(data, 'process_ahj_answers_attachments')
return HttpResponse(dajax.json())
if (ajax == 'get_ahj_num_quirks_favorites'):
if not request.user.is_authenticated():
return HttpResponse(status=403)
view_questions_obj = ViewQuestions()
data['user_number_of_favorite_fields'] = 0
user_obj = User.objects.get(id=user.id)
if user_obj != None:
user_favorite_fields = view_questions_obj.get_user_favorite_fields(user_obj)
if 'view_id' in user_favorite_fields:
data['view_id'] = user_favorite_fields['view_id']
data['user_number_of_favorite_fields'] = len(user_favorite_fields['view_questions'])
dajax.add_data(data, 'process_ahj_qirks_user_favorites')
return HttpResponse(dajax.json())
if (ajax == 'get_ahj_answers_votes'):
jurisdiction_templates = get_jurisdiction_templates(jurisdiction)
jurisdiction_questions = get_jurisdiction_questions(jurisdiction, jurisdiction_templates, user, category)
jurisdiction_answers = get_jurisdiction_answers(jurisdiction, jurisdiction_templates, jurisdiction_questions)
category_name = 'VoteRequirement'
answer_ids = []
for answer in jurisdiction_answers:
answer_ids.append(answer.id)
data['answers_votes'] = get_answer_voting_info(category_name, jurisdiction, user, answer_ids)
dajax.add_data(data, 'process_ahj_answers_votes')
return HttpResponse(dajax.json())
if (ajax == 'get_add_form'):
data['mode'] = 'add'
data['user'] = user
data['jurisdiction'] = jurisdiction
question_id = requestProcessor.getParameter('question_id')
data['unique_key'] = data['mode'] + str(question_id)
data['form_field'] = {}
question = Question.objects.get(id=question_id)
form_field_data = validation_util_obj.get_form_field_data(jurisdiction, question)
for key in form_field_data:
data[key] = form_field_data[key]
data['default_values'] = {}
if question.default_value != None and question.default_value != '':
answer = json.loads(question.default_value)
for key in answer:
data[key] = str(answer[key])
data['city'] = jurisdiction.city
data['state'] = jurisdiction.state
if 'question_template' in data and data['question_template'] != None and data['question_template'] != '':
if form_field_data['question_id'] == 16:
data['fee_answer'] = answer
fee_info = validation_util_obj.process_fee_structure(answer)
for key in fee_info.keys():
data[key] = fee_info.get(key)
body = requestProcessor.decode_jinga_template(request,'website/form_fields/'+data['question_template']+'.jinja', data, '')
else:
body = ''
dajax.assign('#qa_'+str(question_id) + '_fields','innerHTML', body)
#if 'js' in data and data['js'] != None and data['js'] != '':
for js in data['js']:
script = "var disable_pre_validation = false;" # pre-validation is enabled by default; each field's js file may override this flag.
script += requestProcessor.decode_jinga_template(request, "website/form_fields/"+js, data, '')
script +=";if ((!disable_pre_validation)&&!$('#form_"+question_id+"').checkValidWithNoError({formValidCallback:function(el){$('#save_"+question_id+"').removeAttr('disabled').removeClass('disabled');},formNotValidCallback:function(el){$('#save_"+question_id+"').attr('disabled','disabled').addClass('disabled');;}})){$('#save_"+question_id+"').attr('disabled','disabled').addClass('disabled');};"
dajax.script(script)
if question.support_attachments == 1:
script = requestProcessor.decode_jinga_template(request, "website/form_fields/file_uploading.js", data, '')
dajax.script(script)
return HttpResponse(dajax.json())
if (ajax == 'get_edit_form'):
data['mode'] = 'edit'
answer_id = requestProcessor.getParameter('answer_id')
data['unique_key'] = data['mode'] + str(answer_id)
answer_for_edit = AnswerReference.objects.get(id=answer_id)
question = answer_for_edit.question
data['jurisdiction'] = answer_for_edit.jurisdiction
data['values'] = {}
form_field_data = validation_util_obj.get_form_field_data(jurisdiction, question)
for key in form_field_data:
data[key] = form_field_data[key]
data['values'] = {}
answer = json.loads(answer_for_edit.value)
for key in answer:
data[key] = answer[key]
data['answer_id'] = answer_id
if 'question_template' in data and data['question_template'] != None and data['question_template'] != '':
if form_field_data['question_id'] == 16:
data['fee_answer'] = answer
fee_info = validation_util_obj.process_fee_structure(answer)
for key in fee_info.keys():
data[key] = fee_info.get(key)
body = requestProcessor.decode_jinga_template(request,'website/form_fields/'+data['question_template']+'.jinja', data, '')
else:
body = ''
dajax.assign('#qa_'+str(answer_id) + '_edit_fields','innerHTML', body)
for js in data['js']:
script = requestProcessor.decode_jinga_template(request, "website/form_fields/"+js, data, '')
dajax.script(script)
if question.support_attachments == 1:
script = requestProcessor.decode_jinga_template(request, "website/form_fields/file_uploading.js", data, '')
dajax.script(script)
return HttpResponse(dajax.json())
if (ajax == 'suggestion_submit'):
if not request.user.is_authenticated():
return HttpResponse(status=403)
answers = {}
data['user'] = user
data['jurisdiction'] = jurisdiction
field_prefix = 'field_'
question_id = requestProcessor.getParameter('question_id')
question = Question.objects.get(id=question_id)
answers = requestProcessor.get_form_field_values(field_prefix)
for key, answer in answers.items():
if answer == '':
del answers[key]
acrf = process_answer(answers, question, jurisdiction, request.user)
file_names = requestProcessor.getParameter('filename')
file_store_names = requestProcessor.getParameter('file_store_name')
if (file_names != '' and file_names != None) and (file_store_names != '' and file_store_names != None):
file_name_list = file_names.split(',')
file_store_name_list = file_store_names.split(',')
for i in range(0, len(file_name_list)):
aac = AnswerAttachment()
aac.answer_reference = acrf
aac.file_name = file_name_list[i]
store_file = os.path.join('answer_ref_attaches', file_store_name_list[i])
aac.file_upload = store_file
aac.creator = user
aac.save()
view_question_obj = ViewQuestions()
view_question_obj.add_question_to_view('a', question, jurisdiction)
reporting.update_reports(question, jurisdictions=jurisdiction)
dajax = get_question_answers_dajax(request, jurisdiction, question, data)
return HttpResponse(dajax.json())
if (ajax == 'suggestion_edit_submit'):
if not request.user.is_authenticated():
return HttpResponse(status=403)
answers = {}
data['user'] = user
answer_id = requestProcessor.getParameter('answer_id')
field_prefix = 'field_'
answer = AnswerReference.objects.get(id=answer_id)
questions = Question.objects.filter(id=answer.question.id) # done on purpose.
question = questions[0]
answers = requestProcessor.get_form_field_values(field_prefix)
for key, answer in answers.items():
if answer == '':
del answers[key]
acrf = process_answer(answers, question, jurisdiction, request.user, answer_id)
file_names = requestProcessor.getParameter('filename')
file_store_names = requestProcessor.getParameter('file_store_name')
if (file_names != '' and file_names != None) and (file_store_names != '' and file_store_names != None):
AnswerAttachment.objects.filter(answer_reference = acrf).delete()
file_name_list = file_names.split(',')
file_store_name_list = file_store_names.split(',')
for i in range(0, len(file_name_list)):
aac = AnswerAttachment()
aac.answer_reference = acrf
aac.file_name = file_name_list[i]
store_file = os.path.join('answer_ref_attaches', file_store_name_list[i])
aac.file_upload = store_file
aac.creator = user
aac.save()
view_question_obj = ViewQuestions()
view_question_obj.add_question_to_view('a', question, jurisdiction)
reporting.update_reports(question, jurisdictions=jurisdiction)
dajax = get_question_answers_dajax(request, jurisdiction, question, data)
return HttpResponse(dajax.json())
if (ajax == 'add_to_views'):
view_obj = None
user = request.user
if not user.is_authenticated():
return HttpResponse(status=403)
entity_name = requestProcessor.getParameter('entity_name')
question_id = requestProcessor.getParameter('question_id')
if entity_name == 'quirks':
view_objs = View.objects.filter(view_type = 'q', jurisdiction = jurisdiction)
if len(view_objs) > 0:
view_obj = view_objs[0]
else:
view_obj = View()
view_obj.name = 'quirks'
view_obj.description = 'Quirks'
view_obj.view_type = 'q'
view_obj.jurisdiction_id = jurisdiction.id
view_obj.save()
elif entity_name == 'favorites':
view_objs = View.objects.filter(view_type = 'f', user = request.user)
if len(view_objs) > 0:
view_obj = view_objs[0]
else:
view_obj = View()
view_obj.name = 'Favorite Fields'
view_obj.description = 'Favorite Fields'
view_obj.view_type = 'f'
view_obj.user_id = request.user.id
view_obj.save()
if view_obj != None:
view_questions_objs = ViewQuestions.objects.filter(view = view_obj).order_by('-display_order')
if len(view_questions_objs) > 0:
highest_display_order = view_questions_objs[0].display_order
else:
highest_display_order = 0
if int(question_id) not in [vq.question.id for vq in view_questions_objs]:
view_questions_obj = ViewQuestions()
view_questions_obj.view_id = view_obj.id
view_questions_obj.question_id = question_id
view_questions_obj.display_order = int(highest_display_order) + 5
view_questions_obj.save()
view_questions_obj = ViewQuestions()
if entity_name == 'favorites':
data['user_number_of_favorite_fields'] = 0
if request.user.is_authenticated():
user_obj = User.objects.get(id=request.user.id)
if user_obj != None:
user_favorite_fields = view_questions_obj.get_user_favorite_fields(user_obj)
if 'view_questions' in user_favorite_fields:
data['user_number_of_favorite_fields'] = len(user_favorite_fields['view_questions'])
# update the quirks or the favorite fields count
favorite_id = '#favorite_field_' + str(question_id)
dajax.assign('#favfieldscount','innerHTML', data['user_number_of_favorite_fields'])
dajax.remove_css_class(favorite_id, 'add_to_favorites')
dajax.add_css_class(favorite_id, 'remove_from_favorites')
return HttpResponse(dajax.json())
if (ajax == 'remove_from_views'):
view_obj = None
user = request.user
if not user.is_authenticated():
return HttpResponse(status=401)
entity_name = requestProcessor.getParameter('entity_name')
question_id = requestProcessor.getParameter('question_id')
if entity_name == 'quirks':
view_objs = View.objects.filter(view_type = 'q', jurisdiction = jurisdiction)
if len(view_objs) > 0:
view_obj = view_objs[0]
elif entity_name == 'favorites':
view_objs = View.objects.filter(view_type = 'f', user = request.user)
if len(view_objs) > 0:
view_obj = view_objs[0]
if view_obj != None:
question = Question.objects.get(id=question_id)
view_questions_objs = ViewQuestions.objects.filter(view = view_obj, question = question)
if len(view_questions_objs) > 0:
for view_questions_obj in view_questions_objs:
view_questions_obj.delete()
view_questions_obj = ViewQuestions()
if entity_name == 'favorites':
data['user_number_of_favorite_fields'] = 0
if request.user.is_authenticated():
user_obj = User.objects.get(id=request.user.id)
if user_obj != None:
user_favorite_fields = view_questions_obj.get_user_favorite_fields(user_obj)
if 'view_questions' in user_favorite_fields:
data['user_number_of_favorite_fields'] = len(user_favorite_fields['view_questions'])
# update the quirks or the favorite fields count
favorite_id = '#favorite_field_' + str(question_id)
dajax.assign('#favfieldscount','innerHTML', data['user_number_of_favorite_fields'])
dajax.remove_css_class(favorite_id, 'remove_from_favorites')
dajax.add_css_class(favorite_id, 'add_to_favorites')
dajax.assign(favorite_id, 'text', 'Favorite')
if category == 'quirks' or category == 'favorite_fields':
dajax.script('$("#div_question_content_' + str(question_id) + '").parent().remove();')
return HttpResponse(dajax.json())
if (ajax == 'validation_history'):
caller = requestProcessor.getParameter('caller')
entity_name = requestProcessor.getParameter('entity_name')
entity_id = requestProcessor.getParameter('entity_id')
data = validation_util_obj.get_validation_history(entity_name, entity_id)
data['destination'] = requestProcessor.getParameter('destination')
if caller == None:
params = 'zIndex: 8000'
elif caller == 'dialog':
params = 'zIndex: 12000'
if data['destination'] == None:
data['destination'] = ''
if data['destination'] == 'dialog':
body = requestProcessor.decode_jinga_template(request,'website/jurisdictions/validation_history_dialog.html', data, '')
dajax.assign('#fancyboxformDiv','innerHTML', body)
dajax.script('$("#fancybox_close").click(function(){$.fancybox.close();return false;});')
dajax.script('controller.showModalDialog("#fancyboxformDiv");')
else:
body = requestProcessor.decode_jinga_template(request,'website/jurisdictions/validation_history.html', data, '')
dajax.assign('#validation_history_div_'+entity_id,'innerHTML', body)
#dajax.assign('.info_content','innerHTML', body)
#dajax.script("controller.showInfo({target: '#validation_history_"+entity_id+"', "+params+"});")
return HttpResponse(dajax.json())
if (ajax == 'cancel_suggestion'):
user = request.user
if not user.is_authenticated():
return HttpResponse(status=403)
data['user'] = user
entity_id = requestProcessor.getParameter('entity_id')
answer = AnswerReference.objects.get(id=entity_id)
answer_prev_status = answer.approval_status
answer.approval_status = 'C'
answer.status_datetime = datetime.datetime.now()
answer.save()
jurisdiction = answer.jurisdiction
question = answer.question
dajax = get_question_answers_dajax(request, jurisdiction, question, data)
if answer_prev_status == 'A':
data['top_contributors'] = get_ahj_top_contributors(jurisdiction, category)
body = requestProcessor.decode_jinga_template(request,'website/jurisdictions/AHJ_top_contributors.html', data, '')
dajax.assign('#top-contributor','innerHTML', body)
if question.support_attachments == 1:
view_question_obj = ViewQuestions()
view_question_obj.remmove_question_from_view('a', question, jurisdiction)
reporting.update_reports(question, jurisdictions=jurisdiction)
return HttpResponse(dajax.json())
if (ajax == 'approve_suggestion'):
user = request.user
if not user.is_authenticated():
return HttpResponse(status=403)
data['user'] = user
entity_id = requestProcessor.getParameter('entity_id')
answer = AnswerReference.objects.get(id=entity_id)
answer.approval_status = 'A'
answer.status_datetime = datetime.datetime.now()
answer.save()
validation_util_obj.on_approving_a_suggestion(answer)
jurisdiction = answer.jurisdiction
question = answer.question
dajax = get_question_answers_dajax(request, jurisdiction, question, data)
data['top_contributors'] = get_ahj_top_contributors(jurisdiction, category)
body = requestProcessor.decode_jinga_template(request,'website/jurisdictions/AHJ_top_contributors.html', data, '')
reporting.update_reports(question, jurisdictions=jurisdiction)
dajax.assign('#top-contributor','innerHTML', body)
return HttpResponse(dajax.json())
if (ajax == 'vote'):
requestProcessor = HttpRequestProcessor(request)
dajax = Dajax()
ajax = requestProcessor.getParameter('ajax')
user = request.user
if not user.is_authenticated():
return HttpResponse(status=403)
entity_id = requestProcessor.getParameter('entity_id')
entity_name = requestProcessor.getParameter('entity_name')
vote = requestProcessor.getParameter('vote')
confirmed = requestProcessor.getParameter('confirmed')
if confirmed == None:
confirmed = 'not_yet'
feedback = validation_util_obj.process_vote(user, vote, entity_name, entity_id, confirmed)
if feedback == 'registered':
if entity_name == 'requirement':
answer = AnswerReference.objects.get(id=entity_id)
question = answer.question
answer_ids = [answer.id]
category_name = 'VoteRequirement'
data['answers_votes'] = get_answer_voting_info(category_name, answer.jurisdiction, user, answer_ids)
dajax.add_data(data, 'process_ahj_answers_votes')
dajax.script("show_hide_vote_confirmation('"+entity_id+"');")
elif feedback == 'registered_with_changed_status':
if entity_name == 'requirement':
answer = AnswerReference.objects.get(id=entity_id)
question = answer.question
dajax = get_question_answers_dajax(request, jurisdiction, question, data)
terminology = question.get_terminology()
if answer.approval_status == 'A':
dajax.script("controller.showMessage('"+str(terminology)+" approved. Thanks for voting.', 'success');")
elif answer.approval_status == 'R':
dajax.script("controller.showMessage('"+str(terminology)+" rejected. Thanks for voting.', 'success');")
dajax.script("show_hide_vote_confirmation('"+entity_id+"');")
if answer.approval_status == 'A':
category_obj = question.category
category = category_obj.name
data['top_contributors'] = get_ahj_top_contributors(data['jurisdiction'], category)
body = requestProcessor.decode_jinga_template(request,'website/jurisdictions/AHJ_top_contributors.html', data, '')
dajax.assign('#top-contributor','innerHTML', body)
elif feedback == 'will_approve':
# prompt confirmation
answer = AnswerReference.objects.get(id=entity_id)
answers = AnswerReference.objects.filter(question=answer.question, jurisdiction=answer.jurisdiction)
if len(answers) > 1 and answer.question.has_multivalues == 0:
dajax.script("confirm_approved('yes');")
else:
dajax.script("confirm_approved('no');")
elif feedback == 'will_reject':
# prompt confirmation
answer = AnswerReference.objects.get(id=entity_id)
question = answer.question
question_terminology = question.get_terminology()
dajax.script("confirm_rejected("+str(entity_id)+",'"+question_terminology+"');")
#dajax.script("controller.showMessage('Your feedback has been sent and will be carefully reviewed.', 'success');")
reporting.update_reports(question, jurisdictions=jurisdiction)
return HttpResponse(dajax.json())
######################################### END OF AJAX #######################################################
############## to determine the last contributor's organization
organization = None
try:
contributor = jurisdiction.last_contributed_by
except:
contributor = None
data['last_contributed_by'] = contributor
###################################################
data['jurisdiction'] = jurisdiction
data['jurisdiction_id'] = jurisdiction.id
data['nav'] = 'no'
data['current_nav'] = 'browse'
data['home'] = '/'
data['page'] = 'AHJ'
data['unauthenticated_page_message'] = "The information on this page is available to all visitors of the National Solar Permitting Database. If you would like to add information to the database and interact with the solar community, please sign in below or "
data['authenticated_page_message'] = 'See missing or incorrect information? Mouse over a field and click the blue pencil to add or edit the information.'
data[category] = ' active' # to set the active category in the left nav
data['show_google_map'] = False
data['str_address'] = ''
###############################################################################################################
#jurisdiction_templates = get_jurisdiction_templates(jurisdiction)
data['jurisdiction'] = jurisdiction
if 'accessible_views' in request.session:
data['accessible_views'] = request.session['accessible_views']
else:
data['accessible_views'] = []
show_google_map = False
(question_ids, view) = get_questions_in_category(user, jurisdiction, category)
data['view'] = view
data['cqa'] = get_categorized_ahj_data(jurisdiction, category,
empty_data_fields_hidden, user,
question_ids = question_ids)
if 1 in data['cqa'] and 4 in data['cqa'][1]['questions']:
show_google_map = True
if category == 'all_info' or show_google_map == True:
data['show_google_map'] = show_google_map
################# get the correct address for google map ####################
question = Question.objects.get(id=4)
data['str_address'] = question.get_addresses_for_map(jurisdiction)
data['google_api_key'] = django_settings.GOOGLE_API_KEY
if category != 'favorite_fields' and category != 'quirks':
request.session['empty_data_fields_hidden'] = data['empty_data_fields_hidden']
################# Show the message in the yellow box on top of the ahj page ####################
data['show_ahj_message'] = False
################################################################################################
data['user'] = user
################# save_recent_search ####################
if user.is_authenticated():
user_obj = User.objects.get(id=user.id)
save_recent_search(user_obj, jurisdiction)
message_data = get_system_message(request) #get the message List
data = dict(data.items() + message_data.items()) #merge message list to data
return requestProcessor.render_to_response(request,'website/jurisdictions/AHJ_cqa.html', data, '')
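# Templates applicable to a jurisdiction: accepted jurisdiction-specific 'CF'
# templates combined (queryset union via |) with all accepted 'RT' templates.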
def get_jurisdiction_templates(jurisdiction):
cf_template_objs = Template.objects.filter(jurisdiction = jurisdiction, template_type__iexact='CF', accepted=1)
rt_template_objs = Template.objects.filter(template_type__iexact='RT', accepted=1)
template_objs = rt_template_objs | cf_template_objs
return template_objs
def get_jurisdiction_answers(jurisdiction, jurisdiction_templates, jurisdiction_questions, answer_status=None):
if answer_status == None:
jurisdiction_answer_objs = AnswerReference.objects.filter(jurisdiction = jurisdiction, approval_status__in=('A', 'P'), question__accepted__exact=1, question__qtemplate__in=jurisdiction_templates, question__in=jurisdiction_questions).order_by('question__category__name','question__display_order','approval_status','create_datetime')
elif answer_status == 'A':
jurisdiction_answer_objs = AnswerReference.objects.filter(jurisdiction = jurisdiction, approval_status__in=('A'), question__accepted__exact=1, question__qtemplate__in=jurisdiction_templates, question__in=jurisdiction_questions).order_by('question__category__name','question__display_order','approval_status','create_datetime')
elif answer_status == 'P':
jurisdiction_answer_objs = AnswerReference.objects.filter(jurisdiction = jurisdiction, approval_status__in=('P'), question__accepted__exact=1, question__qtemplate__in=jurisdiction_templates, question__in=jurisdiction_questions).order_by('question__category__name','question__display_order','approval_status','create_datetime')
return jurisdiction_answer_objs
def get_jurisdiction_questions(jurisdiction, jurisdiction_templates, user, category='all_info'):
jurisdiction_question_objs = None
if category == 'all_info':
jurisdiction_question_objs = Question.objects.filter(accepted=1, qtemplate__in=jurisdiction_templates).order_by('display_order', '-modify_datetime')
else:
category_objs = QuestionCategory.objects.filter(name__iexact=category, accepted=1)
if len(category_objs) > 0:
jurisdiction_question_objs = Question.objects.filter(category__in=category_objs, accepted=1, qtemplate__in=jurisdiction_templates).order_by('display_order', '-modify_datetime')
else:
if category == 'favorite_fields':
category_objs = View.objects.filter(user = user, view_type__exact='f')
elif category == 'quirks':
category_objs = View.objects.filter(jurisdiction = jurisdiction, view_type__exact='q')
elif category == 'attachments':
category_objs = View.objects.filter(jurisdiction = jurisdiction, view_type__exact='a')
else:
category_objs = View.objects.filter(name__iexact=category) # we work with one view per AHJ content page, unlike the multiple categories in all_info
question_ids = ViewQuestions.objects.filter(view__in=category_objs).order_by('display_order').values('question').distinct()
jurisdiction_question_objs = Question.objects.filter(id__in=question_ids)
return jurisdiction_question_objs
def get_questions_with_answers(jurisdiction, jurisdiction_templates, jurisdiction_questions):
answer_question_ids = AnswerReference.objects.filter(jurisdiction = jurisdiction, approval_status__in=('A', 'P'), question__accepted__exact=1, question__qtemplate__in=jurisdiction_templates, question__in=jurisdiction_questions).values_list('question_id').distinct()
questions_with_answers = jurisdiction_questions.filter(id__in=answer_question_ids)
return questions_with_answers
def get_ahj_votes_html(request, jurisdiction, answers, login_user, category='all_info', questions=None):
requestProcessor = HttpRequestProcessor(request)
data = {}
data['jurisdiction_id'] = jurisdiction.id
category_name = 'VoteRequirement'
answers_votes = get_jurisdiction_voting_info(category_name, jurisdiction, login_user)
ahj_votes_html = {}
for answer in answers:
data['this_answer_id'] = answer.id
if answer.id in answers_votes:
data['total_up_votes'] = answers_votes[answer.id]['total_up_votes']
data['total_down_votes'] = answers_votes[answer.id]['total_down_votes']
data['num_consecutive_last_down_votes'] = answers_votes[answer.id]['num_consecutive_last_down_votes']
data['can_vote_up'] = answers_votes[answer.id]['can_vote_up']
data['can_vote_down'] = answers_votes[answer.id]['can_vote_down']
data['last_down_vote_date'] = answers_votes[answer.id]['last_down_vote_date']
data['up_vote_found'] = answers_votes[answer.id]['up_vote_found']
data['login_user_last_vote'] = answers_votes[answer.id]['login_user_last_vote']
else:
data['total_up_votes'] = 0
data['total_down_votes'] = 0
data['num_consecutive_last_down_votes'] = 0
data['can_vote_up'] = True
data['can_vote_down'] = True
data['last_down_vote_date'] = ''
data['up_vote_found'] = False
data['login_user_last_vote'] = ''
data['creator_id'] = answer.creator_id
data['login_user_id'] = login_user.id
ahj_votes_html[answer.id] = requestProcessor.decode_jinga_template(request,'website/jurisdictions/ahj_answer_votes.html', data, '')
return ahj_votes_html
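# Aggregate voting info per answer. Votes are ordered newest-first per entity,
# so num_consecutive_last_down_votes counts the down-votes cast since the most
# recent up-vote on that answer.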
def get_jurisdiction_voting_info(category_name, jurisdiction, login_user, category = 'all_info', questions = None):
action_category = ActionCategory.objects.filter(name__iexact=category_name)
if category == 'all_info':
category_objs = QuestionCategory.objects.filter(accepted=1)
else:
category_objs = QuestionCategory.objects.filter(name__iexact=category, accepted=1)
votes = None
if len(action_category) > 0:
if questions == None:
votes = Action.objects.filter(category__in=action_category, jurisdiction=jurisdiction, question_category__in=category_objs).order_by('question_category', 'entity_id', '-action_datetime')
else:
answer_ids = AnswerReference.objects.filter(jurisdiction__exact=jurisdiction, question__in=questions).exclude(approval_status__exact='R').exclude(approval_status__exact='F').exclude(approval_status__exact='C').values_list('id', flat=True)
if len(answer_ids) > 0:
votes = Action.objects.filter(category__in=action_category, jurisdiction=jurisdiction, entity_id__in=answer_ids, question_category__in=category_objs).order_by('question_category', 'entity_id', '-action_datetime')
else:
votes = None
vote_info = {}
if votes != None:
for vote in votes:
if vote.entity_id not in vote_info:
vote_info[vote.entity_id] = {}
vote_info[vote.entity_id]['total_up_votes'] = 0
vote_info[vote.entity_id]['total_down_votes'] = 0
vote_info[vote.entity_id]['num_consecutive_last_down_votes'] = 0
vote_info[vote.entity_id]['can_vote_up'] = False
vote_info[vote.entity_id]['can_vote_down'] = False
vote_info[vote.entity_id]['last_down_vote_date'] = ''
vote_info[vote.entity_id]['up_vote_found'] = False
vote_info[vote.entity_id]['login_user_last_vote'] = ''
if vote.data == 'Vote: Up':
vote_info[vote.entity_id]['total_up_votes'] = vote_info[vote.entity_id]['total_up_votes'] + 1
vote_info[vote.entity_id]['up_vote_found'] = True
vote_info[vote.entity_id]['can_vote_down'] = True
vote_info[vote.entity_id]['num_consecutive_last_down_votes'] = 0
if vote.user == login_user:
vote_info[vote.entity_id]['login_user_last_vote'] = 'up'
elif vote.data == 'Vote: Down':
vote_info[vote.entity_id]['total_down_votes'] = vote_info[vote.entity_id]['total_down_votes'] + 1
vote_info[vote.entity_id]['can_vote_up'] = True
if 'last_down_vote_date' not in vote_info[vote.entity_id]:
#vote_info[vote.entity_id]['last_down_vote'] = vote
datetime_util_obj = DatetimeHelper(vote.action_datetime)
last_down_vote_date = datetime_util_obj.showStateTimeFormat(jurisdiction.state)
vote_info[vote.entity_id]['last_down_vote_date'] = last_down_vote_date
if vote.user_id == login_user.id:
vote_info[vote.entity_id]['login_user_last_vote'] = 'down'
if vote_info[vote.entity_id]['up_vote_found'] == False:
vote_info[vote.entity_id]['num_consecutive_last_down_votes'] = vote_info[vote.entity_id]['num_consecutive_last_down_votes'] + 1
return vote_info
def get_ahj_action_html(request, jurisdiction, questions, login_user, category):
requestProcessor = HttpRequestProcessor(request)
data = {}
data['jurisdiction_id'] = jurisdiction.id
questions_actions = get_ahj_actions(jurisdiction, login_user)
data['quirk_questions'] = questions_actions['quirk_questions']
data['favorite_questions'] = questions_actions['favorite_questions']
data['category'] = category
ahj_action_html = {}
for question in questions:
data['this_question_id'] = question.id
ahj_action_html[question.id] = requestProcessor.decode_jinga_template(request,'website/jurisdictions/ahj_actions.html', data, '')
return ahj_action_html
def get_ahj_answers_attachments(jurisdiction):
answers_attachments = {}
attachments = AnswerAttachment.objects.filter(answer_reference__jurisdiction = jurisdiction) # to gather all the attachments for all the answers.
for attachment in attachments:
answers_attachments[attachment.answer_reference.id] ={}
answers_attachments[attachment.answer_reference.id]['file_name'] = str(attachment.file_name) # build a dict of attachments per answer for ease of retrieval
answers_attachments[attachment.answer_reference.id]['file_upload'] = str(attachment.file_upload)
return answers_attachments
def get_ahj_actions(jurisdiction, login_user):
questions_actions = {}
view_questions_obj = ViewQuestions()
quirks = view_questions_obj.get_jurisdiction_quirks(jurisdiction)
quirk_questions = []
if len(quirks) > 0:
if len(quirks['view_questions']) > 0:
for quirk in quirks['view_questions']:
quirk_questions.append(quirk.question_id)
user_favorite_fields = view_questions_obj.get_user_favorite_fields(login_user)
favorite_questions = []
if len(user_favorite_fields) > 0:
if len(user_favorite_fields['view_questions']) > 0:
for favorite_question in user_favorite_fields['view_questions']:
favorite_questions.append(favorite_question.question_id)
questions_actions['quirk_questions'] = quirk_questions
questions_actions['favorite_questions'] = favorite_questions
return questions_actions
def get_answers_comments(jurisdiction, answers, login_user):
answers_comments = {}
for answer in answers:
answer_comment = {}
comment_total = Comment.objects.filter(jurisdiction__exact=jurisdiction, entity_name = 'AnswerReference', entity_id = answer.id).count()
answer_comment['comment_total'] = comment_total
try:
userviews = UserCommentView.objects.filter(jurisdiction__exact=jurisdiction, entity_name = 'AnswerReference', entity_id = answer.id, user = login_user)
userview = userviews[0]
answer_comment['new_comment_total'] = comment_total - userview.comments_count
except:
answer_comment['new_comment_total'] = comment_total - 0
answers_comments[answer.id] = answer_comment
return answers_comments
def get_answer_comment_txt(jurisdiction, answer_id, login_user):
comment = {}
comment_total = Comment.objects.filter(jurisdiction__exact=jurisdiction, entity_name = 'AnswerReference', entity_id = answer_id).count()
comment['comment_total'] = comment_total
try:
userviews = UserCommentView.objects.filter(jurisdiction__exact=jurisdiction, entity_name = 'AnswerReference', entity_id = answer_id, user = login_user)
userview = userviews[0]
comment['new_comment_total'] = comment_total - userview.comments_count
except:
comment['new_comment_total'] = comment_total - 0
if comment['comment_total'] == 0:
comment['comment_text'] = "Comment"
else:
comment['comment_text'] = str(comment['comment_total']) + " comments"
if comment['new_comment_total'] != comment['comment_total']:
comment['comment_text'] = comment['comment_text'] + " (* " + str(comment['new_comment_total']) + " new)"
return comment
def get_ahj_questions_messages(questions_with_answers, jurisdiction_answers, login_user):
questions_messages = {}
for question in questions_with_answers:
answers = jurisdiction_answers.filter(question = question)
questions_messages[question.id] = get_question_messages(question, answers, login_user)
return questions_messages
def get_question_messages(question, question_answers, login_user):
question_terminology = question.get_terminology()
pending_answers = []
approved_answers = []
for answer in question_answers:
if answer.approval_status == 'A':
approved_answers.append(answer)
elif answer.approval_status == 'P':
pending_answers.append(answer)
message = ''
if len(question_answers) > 1:
message = message + "More than one "+question_terminology +" suggested. Please vote.<br/>"
if len(approved_answers) > 0 and len(pending_answers) > 0:
message = message + 'The previous approved '+ question_terminology + ' has been challenged.<br/>'
for answer in pending_answers:
datetime_util_obj = DatetimeHelper(answer.create_datetime)
answer_create_datetime= datetime_util_obj.showStateTimeFormat(answer.jurisdiction.state)
if answer.creator_id == login_user.id: # your own suggestion. cannot vote on your own suggestion.
message = message + 'You suggested a ' + question_terminology + ' for this field on ' + answer_create_datetime + '.<br>The community must vote on its accuracy or it must remain unchallenged for one week before it is approved.<br/>'
else:
div_id="id_"+str(answer.id)
try: # somebody else's suggestion; you can vote on it
user = User.objects.get(id=answer.creator_id)
user_display_name = user.get_profile().get_display_name()
user_id = user.id
except:
user_id = 0
user_display_name = 'NA'
onmouseover="controller.postRequest('/account/', {ajax: 'user_profile_short', user_id: '"+str(user_id)+"', unique_list_id: '"+str(answer.id)+"' });"
onmouseout = "document.getElementById('simple_popup_div_on_page').style.display='none';"
temp_str = ''
if len(approved_answers) > 0:
if len(pending_answers) == 1:
temp_str = "A new "
else:
if len(pending_answers) == 1:
temp_str = 'This '
message = message + temp_str + question_terminology + " was suggested by <a href='#' id='"+div_id+"' onmouseover=\""+onmouseover+"\" onmouseout=\""+onmouseout+"\" >"+user_display_name + "</a> on " + answer_create_datetime + ". Please vote on its accuracy.<br/>"
return message
def get_ahj_additional_display_data(questions, jurisdiction_answers, login_user):
ahj_additional_display_data = {}
for question in questions:
answers = jurisdiction_answers.filter(question = question)
ahj_additional_display_data[question.id] = get_question_additional_display_data(question, answers, login_user)
return ahj_additional_display_data
def get_questions_answers_headings(questions_with_answers, login_user):
answers_headings = {}
for question_id in questions_with_answers.keys():
answers = questions_with_answers.get(question_id)
if len(answers) > 0:
question_answers_headings = get_answers_headings(answers, login_user)
answers_headings.update(question_answers_headings)
return answers_headings
def get_answers_headings(answers, login_user):
approved_answers = []
pending_answers = []
for answer in answers:
if answer.approval_status == 'A':
approved_answers.append(answer)
if answer.approval_status == 'P':
pending_answers.append(answer)
answers_headings = {}
count = 0
suggestion_header = ''
for answer in answers:
question = answer.question
if answer.approval_status == 'P':
if question.has_multivalues != 1:
if len(approved_answers) > 0: # has approved answers
if len(pending_answers) == 1: # one approved answer and only one suggestion
suggestion_header = 'New suggestion'
else: # one approved answer and multiple suggestions (2 max)
count = count + 1
suggestion_header = 'New suggestion ' + str(count)
else: # NO approved answer
if len(pending_answers) == 1: # NO approved answer and only one suggestion
suggestion_header = ''
else: # no approved answer and multiple suggestions (no max on the number of suggestions)
count = count + 1
suggestion_header = 'Suggestion ' + str(count)
else:
suggestion_header = '' # no heading is needed for multivalues items
else:
if question.has_multivalues != 1:
if len(pending_answers) > 0: # one approved answer and there are new suggestions
suggestion_header = 'Previously approved value'
answers_headings[answer.id] = suggestion_header
return answers_headings
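# Serialize the submitted field values to JSON and hand them to
# FieldValidationCycleUtil.save_answer, which creates or updates the
# AnswerReference for this question/jurisdiction (answer_id is passed
# through for edits).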
def process_answer(data, question, jurisdiction, user, answer_id=None):
answer = json.dumps(data) # to convert to json
if question:
is_callout=0
validation_util_obj = FieldValidationCycleUtil()
arcf = validation_util_obj.save_answer(question, answer, jurisdiction, 'AddRequirement', user, is_callout, answer_id)
return arcf
else:
return None
def save_recent_search(user, jurisdiction):
try:
user_search = UserSearch(user=user)
user_search.entity_name = 'Jurisdiction'
user_search.entity_id = jurisdiction.id
user_search.label = jurisdiction.show_jurisdiction()
user_search.save()
except:
pass
def get_ahj_top_contributors(jurisdiction, category):
category_objs = []
if category == 'all_info':
category_objs = QuestionCategory.objects.filter(accepted__exact=1).order_by('display_order')
else:
category_objs = QuestionCategory.objects.filter(name__iexact=category, accepted__exact=1)
top_contributors = []
answers = AnswerReference.objects.filter(jurisdiction=jurisdiction, question__category__in=category_objs, approval_status ='A')
if len(answers)>0:
contributors = {}
for answer in answers:
if answer.organization != None:
if answer.organization.name != 'Clean Power Finance': # cpf not included.
if answer.organization.id in contributors:
contributors[answer.organization] = contributors[answer.organization] + 1
else:
contributors[answer.organization] = 1
else:
# include only organizations with status 'A' (active); 'AI' and 'MI' memberships are not approved yet.
orgmembers = OrganizationMember.objects.filter(user = answer.creator, organization__status = 'A').exclude(status__iexact='RM').exclude(status__iexact='R')
if orgmembers != None and len(orgmembers) > 0:
org = orgmembers[0].organization
answer.organization = org
answer.save()
if org.id in contributors:
contributors[answer.organization] = contributors[answer.organization] + 1
else:
contributors[answer.organization] = 1
if len(contributors) > 0:
top_contributors = sorted(contributors.iteritems(), key=operator.itemgetter(1), reverse=True)
return top_contributors
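# Handle uploads from the qq file uploader widget: only .pdf files up to
# MAX_UPLOAD_FILE_SIZE are accepted, and files are stored under
# MEDIA_ROOT/answer_ref_attaches. The uploader's JSON result is echoed back.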
def answer_uploadfile(request):
allowedExtension = ('.pdf')
sizeLimit = django_settings.MAX_UPLOAD_FILE_SIZE
uploader = qqFileUploader(allowedExtension, sizeLimit)
result = uploader.handleUpload(request, os.path.join(django_settings.MEDIA_ROOT, "answer_ref_attaches"))
return_array = result["json"]
from django.utils import simplejson as json
if result['success'] == True:
return_array = json.loads(result["json"])
return_array = json.dumps(return_array)
return HttpResponse(return_array)
def get_question_answers_dajax(request, jurisdiction, question, data):
dajax = Dajax()
requestProcessor = HttpRequestProcessor(request)
data = get_question_data(request, jurisdiction, question, data)
body = requestProcessor.decode_jinga_template(request,'website/jurisdictions/AHJ_cqa_qa.html', data, '')
dajax.assign('#div_question_content_'+str(question.id),'innerHTML', body)
#import os
#with open(os.path.join(django_settings.PROJECT_ROOT, 'website/static/jurisdictions/AHJ_cqa_qa.js')) as f:
# dajax.script(f.read())
if data['category'] == 'all_info':
question_categories = QuestionCategory.objects.filter(accepted=1)
else:
question_categories = QuestionCategory.objects.filter(name__iexact=data['category'])
# for google map
if question.id == 4:
str_addr = question.get_addresses_for_map(jurisdiction)
data['str_address'] = str_addr
dajax.script("load_google_map('"+str(str_addr)+"')")
return dajax
def get_question_data(request, jurisdiction, question, data):
validation_util_obj = FieldValidationCycleUtil()
requestProcessor = HttpRequestProcessor(request)
user = request.user
questions = Question.objects.filter(id=question.id)
# TODO: use a join
template_ids = TemplateQuestion.objects.filter(question = questions).values_list('template_id')
templates = Template.objects.filter(id__in=template_ids)
data['user'] = user
data['jurisdiction'] = jurisdiction
data['question_id'] = question.id
data['question'] = question.__dict__.copy()
data['question']['answers'] = []
data['question']['logged_in_user_suggested_a_value'] = False
data['question']['user_can_suggest'] = True
data['question']['terminology'] = Question().get_question_terminology(question.id)
data['question']['pending_answer_ids'] = []
cqa = get_categorized_ahj_data(jurisdiction, None, False, user,
question_ids = [question.id])
data['question']['answers'] = cqa[question.category.id]['questions'][question.id]['answers']
return data
def get_answer_voting_info(category_name, jurisdiction, login_user, answer_ids):
action_category = ActionCategory.objects.filter(name__iexact=category_name)
votes = Action.objects.filter(category__in=action_category, jurisdiction=jurisdiction, entity_id__in=answer_ids).order_by('entity_id', '-action_datetime')
vote_info = {}
if len(votes) > 0:
for vote in votes:
if vote.entity_id not in vote_info:
vote_info[vote.entity_id] = {}
vote_info[vote.entity_id]['total_up_votes'] = 0
vote_info[vote.entity_id]['total_down_votes'] = 0
vote_info[vote.entity_id]['num_consecutive_last_down_votes'] = 0
vote_info[vote.entity_id]['can_vote_up'] = False
vote_info[vote.entity_id]['can_vote_down'] = False
vote_info[vote.entity_id]['last_down_vote_date'] = ''
vote_info[vote.entity_id]['up_vote_found'] = False
vote_info[vote.entity_id]['login_user_last_vote'] = ''
if vote.data == 'Vote: Up':
vote_info[vote.entity_id]['total_up_votes'] = vote_info[vote.entity_id]['total_up_votes'] + 1
vote_info[vote.entity_id]['up_vote_found'] = True
vote_info[vote.entity_id]['num_consecutive_last_down_votes'] = 0
if vote.user == login_user:
vote_info[vote.entity_id]['login_user_last_vote'] = 'up'
elif vote.data == 'Vote: Down':
vote_info[vote.entity_id]['total_down_votes'] = vote_info[vote.entity_id]['total_down_votes'] + 1
if 'last_down_vote_date' not in vote_info[vote.entity_id]:
#vote_info[vote.entity_id]['last_down_vote'] = vote
datetime_util_obj = DatetimeHelper(vote.action_datetime)
last_down_vote_date = datetime_util_obj.showStateTimeFormat(jurisdiction.state)
vote_info[vote.entity_id]['last_down_vote_date'] = last_down_vote_date
if vote.user == login_user:
vote_info[vote.entity_id]['login_user_last_vote'] = 'down'
if vote_info[vote.entity_id]['up_vote_found'] == False:
vote_info[vote.entity_id]['num_consecutive_last_down_votes'] = vote_info[vote.entity_id]['num_consecutive_last_down_votes'] + 1
#if vote.user_id not in vote_info[vote.entity_id]['user_last_vote_on_this_item']:
# vote_info[vote.entity_id]['user_last_vote_on_this_item'][vote.user_id] = vote
# temp test data
#vote_info[vote.entity_id]['can_vote_up'] = False
#vote_info[vote.entity_id]['can_vote_down'] = False
return vote_info
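# Helper for raw SQL: pair each row with the column names from
# cursor.description. A minimal usage sketch (assuming Django's default
# `connection` object):
#   from django.db import connection
#   cursor = connection.cursor()
#   cursor.execute("SELECT id, name FROM website_questioncategory")
#   rows = dictfetchall(cursor)   # -> [{'id': 1, 'name': '...'}, ...]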
def dictfetchall(cursor):
"Return all rows from a cursor as a list of dicts"
desc = cursor.description
return [
dict(zip([col[0] for col in desc], row))
for row in cursor.fetchall()
]
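# Returns (question_ids, is_view): question_ids is None for 'all_info'
# (meaning all questions), otherwise the ids belonging to the question
# category or to the matching View (favorites, quirks, attachments, or a
# named view), with is_view flagging the latter case.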
def get_questions_in_category(user, jurisdiction, category):
if category == 'all_info':
return (None, False) # we really mean all questions
else:
category_objs = QuestionCategory.objects.filter(name__iexact=category, accepted__exact=1)
if len(category_objs) == 0: # view
if category == 'favorite_fields':
category_objs = View.objects.filter(user = user, view_type__exact='f')
elif category == 'quirks':
category_objs = View.objects.filter(jurisdiction = jurisdiction, view_type__exact='q')
elif category == 'attachments':
category_objs = View.objects.filter(jurisdiction = jurisdiction, view_type__exact='a')
else:
category_objs = View.objects.filter(name__iexact=category) # we work with one view per AHJ content page, unlike the multiple categories in all_info
view_questions = ViewQuestions.objects.filter(view__in=category_objs).order_by('display_order').values('question_id').distinct()
return ([q.get('question_id') for q in view_questions], True)
else:
category_questions = Question.objects.filter(accepted=1, category__in=category_objs).values('id').distinct()
return ([q.get('id') for q in category_questions], False)
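# Group the flat rows from get_ahj_data() by category and question. Rows with
# a NULL id are question placeholders; rows with an id are answers and get
# their JSON content, voting info and comment text attached. Also computes
# whether the logged-in user may add another suggestion for each question.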
def get_categorized_ahj_data(jurisdiction, category, empty_data_fields_hidden, user, all_votes=None, question_ids = None):
records = get_ahj_data(jurisdiction, category, empty_data_fields_hidden, user, question_ids = question_ids)
if not all_votes:
all_votes = get_jurisdiction_voting_info('VoteRequirement', jurisdiction, user, questions = question_ids)
records_by_category = {}
for rec in records:
cid = rec['category_id']
if not cid in records_by_category:
records_by_category[cid] = { 'cat_description': rec['cat_description'],
'sorted_question_ids': [],
'questions': {} }
qid = rec['question_id']
if qid != 283 or (qid == 283 and jurisdiction.state == 'CA'):
if not rec['id']: # this is a question
assert(rec['question_id'] not in records_by_category[cid]['questions']) # shouldn't get duplicate questions
records_by_category[cid]['sorted_question_ids'].append(qid)
rec['answers'] = []
rec['logged_in_user_suggested_a_value'] = False
rec['user_can_suggest'] = True
rec['terminology'] = Question().get_question_terminology(qid)
rec['pending_answer_ids'] = []
records_by_category[cid]['questions'][qid] = rec
if rec['question_id'] == 16 and rec['value']:
rec['fee_info'] = FieldValidationCycleUtil().process_fee_structure(json.loads(rec['value']))
else: # it's an answer
assert(rec['question_id'] in records_by_category[cid]['questions'])
question = records_by_category[cid]['questions'][qid]
question['answers'].append(rec)
if rec['creator_id'] == user.id and rec['approval_status'] == 'P':
question['pending_answer_ids'].append(rec['id'])
rec['content'] = json.loads(rec['value'])
question['logged_in_user_suggested_a_value'] = rec['creator_id'] == user.id
votes = all_votes.get(rec['id'], None)
rec['votes'] = votes
suggestion_has_votes = votes and \
(votes['total_up_votes'] > 0 or \
votes['total_down_votes'] > 0)
users_existing_suggestions = [a for a in question['answers'] if a['creator_id'] == user.id]
if rec['creator_id'] == user.id:
question['user_can_suggest'] = question['has_multivalues'] or \
len(users_existing_suggestions) == 0 or \
suggestion_has_votes
rec['comment_text'] = get_answer_comment_txt(jurisdiction, rec['id'], user)['comment_text']
return records_by_category
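# Raw UNION query: the first SELECT returns the current approved answer plus
# any newer pending suggestions per question (all rows for multivalue
# questions), the second SELECT returns every applicable question with NULL
# answer columns so empty fields can still be rendered. question_ids, when
# given, limits both halves via the generated IN (...) placeholder.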
def get_ahj_data(jurisdiction, category, empty_data_fields_hidden, user, question_ids = []):
# if we're in a special category then the meaning of an empty
# question_ids is reversed; we want to return nothing instead of
# all questions
if not question_ids and (category == "quirks" or category == "favorite_fields" or category == "attachments"):
return []
# in either case, if we have a list of question_ids, then we want
# to limit ourselves to just that list
placeholder = ",".join(["%s" for id in question_ids]) if question_ids else None
query_str = '''(SELECT website_answerreference.id,
website_answerreference.question_id as question_id,
website_answerreference.value,
website_answerreference.file_upload,
website_answerreference.create_datetime,
website_answerreference.modify_datetime,
website_answerreference.jurisdiction_id,
website_answerreference.is_current,
website_answerreference.is_callout,
website_answerreference.approval_status,
website_answerreference.creator_id,
website_answerreference.status_datetime,
website_answerreference.organization_id,
website_question.form_type,
website_question.answer_choice_group_id,
website_question.display_order,
website_question.default_value,
website_question.reviewed,
website_question.accepted,
website_question.instruction,
website_question.category_id,
website_question.applicability_id,
website_question.question,
website_question.label,
website_question.template,
website_question.validation_class,
website_question.js,
website_question.field_attributes,
website_question.terminology,
website_question.has_multivalues,
website_question.qtemplate_id,
website_question.display_template,
website_question.field_suffix,
website_question.migration_type,
website_question.state_exclusive,
website_question.description,
website_question.support_attachments,
website_questioncategory.name,
website_questioncategory.description AS cat_description,
website_questioncategory.accepted AS cat_accepted,
website_questioncategory.display_order AS cat_display_order,
auth_user.username,
auth_user.first_name,
auth_user.last_name,
auth_user.is_staff,
auth_user.is_active,
auth_user.is_superuser,
website_userdetail.display_preference,
0 as count_of_answers
FROM website_answerreference
LEFT OUTER JOIN website_question
ON website_question.id = website_answerreference.question_id
LEFT OUTER JOIN website_questioncategory
ON website_questioncategory.id = website_question.category_id
LEFT OUTER JOIN auth_user
ON auth_user.id = website_answerreference.creator_id
LEFT OUTER JOIN website_userdetail
ON website_userdetail.user_id = website_answerreference.creator_id
LEFT OUTER JOIN website_template
ON website_question.qtemplate_id = website_template.id
WHERE website_answerreference.jurisdiction_id = %(jurisdiction_id)s AND
(website_question.form_type != 'CF' OR
(website_question.form_type = 'CF' AND
website_template.jurisdiction_id = %(jurisdiction_id)s)) AND'''
if placeholder:
query_str += ''' website_question.id IN (%(placeholder)s) AND'''
query_str += ''' website_question.accepted = '1' AND
((website_answerreference.approval_status = 'A' AND
website_question.has_multivalues = '0' AND
website_answerreference.create_datetime = (
SELECT MAX(create_datetime)
FROM website_answerreference AS temp_table
WHERE temp_table.question_id = website_answerreference.question_id AND
temp_table.jurisdiction_id = website_answerreference.jurisdiction_id AND
temp_table.approval_status = 'A')) OR
(website_answerreference.approval_status = 'P' AND
website_question.has_multivalues = '0' AND
website_answerreference.create_datetime >= (
SELECT MAX(create_datetime)
FROM website_answerreference AS temp_table
WHERE temp_table.question_id = website_answerreference.question_id AND
temp_table.jurisdiction_id = website_answerreference.jurisdiction_id AND
temp_table.approval_status = 'A')) OR
(website_question.has_multivalues = '1' AND
(website_answerreference.approval_status = 'A' OR
website_answerreference.approval_status = 'P')) OR
(website_answerreference.approval_status = 'P' AND
(SELECT MAX(create_datetime)
FROM website_answerreference AS temp_table
WHERE temp_table.question_id = website_answerreference.question_id AND
temp_table.jurisdiction_id = website_answerreference.jurisdiction_id AND
temp_table.approval_status = 'A') IS NULL)))
UNION ALL
(SELECT NULL AS id,
website_question.id AS the_question_id,
NULL AS value,
NULL AS file_upload,
NULL AS create_datetime,
NULL AS modify_datetime,
NULL AS jurisdiction_id,
NULL AS is_current,
NULL AS is_callout,
NULL AS approval_status,
NULL AS creator_id,
NULL AS status_datetime,
NULL AS organization_id,
website_question.form_type,
website_question.answer_choice_group_id,
website_question.display_order,
website_question.default_value,
website_question.reviewed,
website_question.accepted,
website_question.instruction,
website_question.category_id,
website_question.applicability_id,
website_question.question,
website_question.label,
website_question.template,
website_question.validation_class,
website_question.js,
website_question.field_attributes,
website_question.terminology,
website_question.has_multivalues,
website_question.qtemplate_id,
website_question.display_template,
website_question.field_suffix,
website_question.migration_type,
website_question.state_exclusive,
website_question.description,
website_question.support_attachments,
website_questioncategory.name,
website_questioncategory.description AS cat_description,
website_questioncategory.accepted AS cat_accepted,
website_questioncategory.display_order AS cat_display_order,
NULL AS username,
NULL AS first_name,
NULL AS last_name,
NULL AS is_staff,
NULL AS is_active,
NULL AS is_superuser,
NULL AS display_preference,'''
if empty_data_fields_hidden:
query_str += ''' (SELECT count(*)
FROM website_answerreference as temp_answers LEFT OUTER JOIN
website_question
ON website_question.id = temp_answers.question_id
WHERE temp_answers.jurisdiction_id = %(jurisdiction_id)s AND
temp_answers.question_id = the_question_id AND
((temp_answers.approval_status = 'A' AND
website_question.has_multivalues = '0' AND
temp_answers.create_datetime = (
SELECT MAX(create_datetime)
FROM website_answerreference AS temp_table
WHERE temp_table.question_id = temp_answers.question_id AND
temp_table.jurisdiction_id = temp_answers.jurisdiction_id AND
temp_table.approval_status = 'A')) OR
(temp_answers.approval_status = 'P' AND
website_question.has_multivalues = '0' AND
temp_answers.create_datetime >= (
SELECT MAX(create_datetime)
FROM website_answerreference AS temp_table
WHERE temp_table.question_id = temp_answers.question_id AND
temp_table.jurisdiction_id = temp_answers.jurisdiction_id AND
temp_table.approval_status = 'A')) OR
(website_question.has_multivalues = '1' AND
(temp_answers.approval_status = 'A' OR
temp_answers.approval_status = 'P')) OR
(temp_answers.approval_status = 'P' AND
(SELECT MAX(create_datetime)
FROM website_answerreference AS temp_table
WHERE temp_table.question_id = temp_answers.question_id AND
temp_table.jurisdiction_id = temp_answers.jurisdiction_id AND
temp_table.approval_status = 'A') IS NULL))) AS count_of_answers'''
else:
query_str += ''' 0 as count_of_answers'''
query_str += ''' FROM website_question
LEFT OUTER JOIN website_questioncategory
ON website_questioncategory.id = website_question.category_id
LEFT OUTER JOIN website_template
ON website_template.id = website_question.qtemplate_id
WHERE website_questioncategory.accepted = '1' AND
website_question.accepted = '1' AND
(website_question.form_type != 'CF' OR
(website_question.form_type = 'CF' AND
website_template.jurisdiction_id = %(jurisdiction_id)s)) AND'''
if placeholder:
query_str += ''' website_question.id IN (%(placeholder)s)'''
else:
query_str += ''' 1'''
if empty_data_fields_hidden:
query_str += ''' HAVING count_of_answers > 0)'''
else:
query_str += ''')'''
query_str += '''ORDER BY cat_display_order ASC,
category_id ASC,
display_order ASC,
question_id ASC,
approval_status ASC,
create_datetime ASC,
id DESC;'''
query_str = query_str % { 'jurisdiction_id': jurisdiction.id,
'placeholder': placeholder }
query_params = []
if question_ids:
for question_id in question_ids:
query_params.append(question_id)
for question_id in question_ids:
query_params.append(question_id)
cursor = connections['default'].cursor()
cursor.execute(unicode(query_str), query_params)
return dictfetchall(cursor)
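# A minimal, hypothetical usage sketch (not part of the original view code):
# `jurisdiction` and `user` stand in for real Django model instances, and the
# category string and question ids are placeholders. The rows come back flat,
# one per answer (or one per question when the question has no answers),
# ready to be regrouped per category as in the grouping code above.
#
# rows = get_ahj_data(jurisdiction, 'all_info', False, user,
#                     question_ids=[4, 7])
# for row in rows:
#     print row['question_id'], row['approval_status'], row['value']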
| {
"content_hash": "d8a9ffae5af9448c61887e357e1d6e5e",
"timestamp": "",
"source": "github",
"line_count": 2019,
"max_line_length": 411,
"avg_line_length": 54.65676077265973,
"alnum_prop": 0.560760113092649,
"repo_name": "solarpermit/solarpermit",
"id": "2d5266950863b4662f6ccb168abc7f7bc9286fdf",
"size": "110352",
"binary": false,
"copies": "1",
"ref": "refs/heads/devel",
"path": "website/views/AHJ.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "126992"
},
{
"name": "JavaScript",
"bytes": "808802"
},
{
"name": "Python",
"bytes": "6625868"
}
],
"symlink_target": ""
} |
from flask import Markup
from itertools import product
import random
import genetic
geneSize = 10
numPolygons = 125
numVertices = 3
canvasSize = 200
# returns a dict mapping parameter names to their allowed value ranges
def domains():
    # use the same domain for everything to keep things simple
params = {}
for x in xrange(0, numPolygons):
for y in xrange(0, numVertices):
params[str(x) + '#' + str(y) + 'vertex' + 'DiffX'] = range(canvasSize)
params[str(x) + '#' + str(y) + 'vertex' + 'DiffY'] = range(canvasSize)
params[str(x) + '#r'] = range(canvasSize)
params[str(x) + '#g'] = range(canvasSize)
params[str(x) + '#b'] = range(canvasSize)
params[str(x) + '#a'] = range(canvasSize)
params[str(x) + '#baseX'] = range(canvasSize)
params[str(x) + '#baseY'] = range(canvasSize)
return params
def mutate(parameters):
new = dict(**parameters)
for param in parameters:
if random.random() < 0.02:
new[param] += random.random() * canvasSize * 0.2 - 0.1 * canvasSize
return new
def combine(parent_a, parent_b):
new = genetic.combine_random(parent_a, parent_b)
return new
# returns Markup object
def generate(parameters):
polygons = ""
objects = {}
for param in parameters:
paramparts = param.split('#')
        print "PARAM PARTS " + str(paramparts)
objectkey = paramparts[0]
if objectkey not in objects:
objects[objectkey] = {}
objects[objectkey][paramparts[1]] = parameters[param]
for anobjectkey in objects:
points = ""
anobject = objects[anobjectkey]
for x in xrange(0, numVertices):
xval = anobject[str(x) + 'vertex' + 'DiffX']
yval = anobject[str(x) + 'vertex' + 'DiffY']
xval -= canvasSize / 2
yval -= canvasSize / 2
xval = anobject["baseX"] + xval
yval = anobject["baseY"] + yval
xval = min(max(0, xval), canvasSize - 1)
yval = min(max(0, yval), canvasSize - 1)
xstr = str(xval)
ystr = str(yval)
points += xstr + ',' + ystr + ' '
r = int(anobject["r"] * 256. / canvasSize)
g = int(anobject["g"] * 256. / canvasSize)
b = int(anobject["b"] * 256. / canvasSize)
a = float(anobject["a"] * 1. / canvasSize)
newFiller = '''
<polygon points="{points}" style="fill:rgba({r},{g},{b},{a});fill-rule:nonzero;" />
'''.format(points=points, r=r, g=g, b=b, a=a)
polygons += newFiller
logo = '''
<svg xmlns="http://www.w3.org/2000/svg" height="{height}" width="{width}" xmlns:xlink="http://www.w3.org/1999/xlink" version="1.1">
    <rect height="{height}" width="{width}" fill="#ecf0f1"/>
{polygons}
</svg>
'''.format(width=canvasSize, height=canvasSize, polygons=polygons)
return Markup(logo)
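# A minimal, hypothetical driver sketch (not part of the original module): it
# seeds two random candidates from domains(), breeds and mutates them, and
# renders the child. The random seeding is an assumption -- the surrounding
# genetic framework may provide its own initialiser -- and it assumes
# genetic.combine_random returns a dict with the same keys as its parents.
def _example_run():
    domain = domains()
    parent_a = dict((key, random.choice(values)) for key, values in domain.items())
    parent_b = dict((key, random.choice(values)) for key, values in domain.items())
    child = mutate(combine(parent_a, parent_b))
    return generate(child)  # Markup object wrapping the generated SVG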
| {
"content_hash": "a4ffdb274ce740cc5d551b8fbd7d0e0d",
"timestamp": "",
"source": "github",
"line_count": 82,
"max_line_length": 135,
"avg_line_length": 35.13414634146341,
"alnum_prop": 0.5671641791044776,
"repo_name": "techx/hackmit-evolution-chamber",
"id": "bce385430b3fc6329990b32f96cc048904eefac7",
"size": "2881",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "speciesnew.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1781116"
},
{
"name": "HTML",
"bytes": "231154"
},
{
"name": "JavaScript",
"bytes": "1881240"
},
{
"name": "Python",
"bytes": "27693"
},
{
"name": "Shell",
"bytes": "69"
}
],
"symlink_target": ""
} |
import base64
import json
import os
import re
import dill
from airflow.contrib.hooks.gcs_hook import GoogleCloudStorageHook
from airflow.contrib.operators.mlengine_operator import MLEngineBatchPredictionOperator
from airflow.contrib.operators.dataflow_operator import DataFlowPythonOperator
from airflow.exceptions import AirflowException
from airflow.operators.python_operator import PythonOperator
from urllib.parse import urlsplit
def create_evaluate_ops(task_prefix,
data_format,
input_paths,
prediction_path,
metric_fn_and_keys,
validate_fn,
batch_prediction_job_id=None,
project_id=None,
region=None,
dataflow_options=None,
model_uri=None,
model_name=None,
version_name=None,
dag=None):
"""
Creates Operators needed for model evaluation and returns.
It gets prediction over inputs via Cloud ML Engine BatchPrediction API by
calling MLEngineBatchPredictionOperator, then summarize and validate
the result via Cloud Dataflow using DataFlowPythonOperator.
For details and pricing about Batch prediction, please refer to the website
https://cloud.google.com/ml-engine/docs/how-tos/batch-predict
and for Cloud Dataflow, https://cloud.google.com/dataflow/docs/
It returns three chained operators for prediction, summary, and validation,
named as <prefix>-prediction, <prefix>-summary, and <prefix>-validation,
respectively.
(<prefix> should contain only alphanumeric characters or hyphen.)
The upstream and downstream can be set accordingly like:
pred, _, val = create_evaluate_ops(...)
pred.set_upstream(upstream_op)
...
downstream_op.set_upstream(val)
Callers will provide two python callables, metric_fn and validate_fn, in
order to customize the evaluation behavior as they wish.
- metric_fn receives a dictionary per instance derived from json in the
batch prediction result. The keys might vary depending on the model.
It should return a tuple of metrics.
- validation_fn receives a dictionary of the averaged metrics that metric_fn
generated over all instances.
The key/value of the dictionary matches to what's given by
metric_fn_and_keys arg.
The dictionary contains an additional metric, 'count' to represent the
total number of instances received for evaluation.
The function would raise an exception to mark the task as failed, in a
case the validation result is not okay to proceed (i.e. to set the trained
version as default).
Typical examples are like this:
def get_metric_fn_and_keys():
import math # imports should be outside of the metric_fn below.
def error_and_squared_error(inst):
label = float(inst['input_label'])
classes = float(inst['classes']) # 0 or 1
err = abs(classes-label)
squared_err = math.pow(classes-label, 2)
return (err, squared_err) # returns a tuple.
return error_and_squared_error, ['err', 'mse'] # key order must match.
def validate_err_and_count(summary):
if summary['err'] > 0.2:
raise ValueError('Too high err>0.2; summary=%s' % summary)
if summary['mse'] > 0.05:
raise ValueError('Too high mse>0.05; summary=%s' % summary)
if summary['count'] < 1000:
raise ValueError('Too few instances<1000; summary=%s' % summary)
return summary
For the details on the other BatchPrediction-related arguments (project_id,
job_id, region, data_format, input_paths, prediction_path, model_uri),
please refer to MLEngineBatchPredictionOperator too.
:param task_prefix: a prefix for the tasks. Only alphanumeric characters and
hyphen are allowed (no underscores), since this will be used as dataflow
job name, which doesn't allow other characters.
:type task_prefix: str
:param data_format: either of 'TEXT', 'TF_RECORD', 'TF_RECORD_GZIP'
:type data_format: str
:param input_paths: a list of input paths to be sent to BatchPrediction.
:type input_paths: list[str]
:param prediction_path: GCS path to put the prediction results in.
:type prediction_path: str
:param metric_fn_and_keys: a tuple of metric_fn and metric_keys:
- metric_fn is a function that accepts a dictionary (for an instance),
and returns a tuple of metric(s) that it calculates.
- metric_keys is a list of strings to denote the key of each metric.
:type metric_fn_and_keys: tuple of a function and a list[str]
:param validate_fn: a function to validate whether the averaged metric(s) is
good enough to push the model.
:type validate_fn: function
:param batch_prediction_job_id: the id to use for the Cloud ML Batch
prediction job. Passed directly to the MLEngineBatchPredictionOperator as
the job_id argument.
:type batch_prediction_job_id: str
:param project_id: the Google Cloud Platform project id in which to execute
Cloud ML Batch Prediction and Dataflow jobs. If None, then the `dag`'s
`default_args['project_id']` will be used.
:type project_id: str
:param region: the Google Cloud Platform region in which to execute Cloud ML
Batch Prediction and Dataflow jobs. If None, then the `dag`'s
`default_args['region']` will be used.
:type region: str
:param dataflow_options: options to run Dataflow jobs. If None, then the
`dag`'s `default_args['dataflow_default_options']` will be used.
:type dataflow_options: dictionary
:param model_uri: GCS path of the model exported by Tensorflow using
tensorflow.estimator.export_savedmodel(). It cannot be used with
model_name or version_name below. See MLEngineBatchPredictionOperator for
more detail.
:type model_uri: str
:param model_name: Used to indicate a model to use for prediction. Can be
used in combination with version_name, but cannot be used together with
model_uri. See MLEngineBatchPredictionOperator for more detail. If None,
then the `dag`'s `default_args['model_name']` will be used.
:type model_name: str
:param version_name: Used to indicate a model version to use for prediction,
in combination with model_name. Cannot be used together with model_uri.
See MLEngineBatchPredictionOperator for more detail. If None, then the
`dag`'s `default_args['version_name']` will be used.
:type version_name: str
:param dag: The `DAG` to use for all Operators.
:type dag: airflow.models.DAG
:returns: a tuple of three operators, (prediction, summary, validation)
:rtype: tuple(DataFlowPythonOperator, DataFlowPythonOperator,
PythonOperator)
"""
# Verify that task_prefix doesn't have any special characters except hyphen
# '-', which is the only allowed non-alphanumeric character by Dataflow.
if not re.match(r"^[a-zA-Z][-A-Za-z0-9]*$", task_prefix):
raise AirflowException(
"Malformed task_id for DataFlowPythonOperator (only alphanumeric "
"and hyphens are allowed but got: " + task_prefix)
metric_fn, metric_keys = metric_fn_and_keys
if not callable(metric_fn):
raise AirflowException("`metric_fn` param must be callable.")
if not callable(validate_fn):
raise AirflowException("`validate_fn` param must be callable.")
if dag is not None and dag.default_args is not None:
default_args = dag.default_args
project_id = project_id or default_args.get('project_id')
region = region or default_args.get('region')
model_name = model_name or default_args.get('model_name')
version_name = version_name or default_args.get('version_name')
dataflow_options = dataflow_options or \
default_args.get('dataflow_default_options')
evaluate_prediction = MLEngineBatchPredictionOperator(
task_id=(task_prefix + "-prediction"),
project_id=project_id,
job_id=batch_prediction_job_id,
region=region,
data_format=data_format,
input_paths=input_paths,
output_path=prediction_path,
uri=model_uri,
model_name=model_name,
version_name=version_name,
dag=dag)
metric_fn_encoded = base64.b64encode(dill.dumps(metric_fn, recurse=True))
evaluate_summary = DataFlowPythonOperator(
task_id=(task_prefix + "-summary"),
py_options=["-m"],
py_file="airflow.contrib.utils.mlengine_prediction_summary",
dataflow_default_options=dataflow_options,
options={
"prediction_path": prediction_path,
"metric_fn_encoded": metric_fn_encoded,
"metric_keys": ','.join(metric_keys)
},
dag=dag)
evaluate_summary.set_upstream(evaluate_prediction)
def apply_validate_fn(*args, **kwargs):
prediction_path = kwargs["templates_dict"]["prediction_path"]
scheme, bucket, obj, _, _ = urlsplit(prediction_path)
if scheme != "gs" or not bucket or not obj:
            raise ValueError("Wrong format prediction_path: %s" %
                             prediction_path)
summary = os.path.join(obj.strip("/"),
"prediction.summary.json")
gcs_hook = GoogleCloudStorageHook()
summary = json.loads(gcs_hook.download(bucket, summary))
return validate_fn(summary)
evaluate_validation = PythonOperator(
task_id=(task_prefix + "-validation"),
python_callable=apply_validate_fn,
provide_context=True,
templates_dict={"prediction_path": prediction_path},
dag=dag)
evaluate_validation.set_upstream(evaluate_summary)
return evaluate_prediction, evaluate_summary, evaluate_validation
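# A minimal, hypothetical wiring sketch (assumes an existing `dag` whose
# default_args already carry project_id, region and dataflow_default_options,
# and reuses the example metric_fn / validate_fn from the docstring above;
# the GCS paths and task names are placeholders):
#
# pred, summary, validate = create_evaluate_ops(
#     task_prefix='eval-flights',
#     data_format='TEXT',
#     input_paths=['gs://my-bucket/eval-input/*'],
#     prediction_path='gs://my-bucket/eval-output/',
#     metric_fn_and_keys=get_metric_fn_and_keys(),
#     validate_fn=validate_err_and_count,
#     dag=dag)
# pred.set_upstream(training_op)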
| {
"content_hash": "3f95f99dd7dbd786b777564b4588780f",
"timestamp": "",
"source": "github",
"line_count": 230,
"max_line_length": 87,
"avg_line_length": 44.04782608695652,
"alnum_prop": 0.6639028723719278,
"repo_name": "r39132/airflow",
"id": "e1682ef45ade0b554234a9af1bc35ce0ec746b1f",
"size": "10914",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "airflow/contrib/utils/mlengine_operator_utils.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "12126"
},
{
"name": "Dockerfile",
"bytes": "4111"
},
{
"name": "HTML",
"bytes": "128531"
},
{
"name": "JavaScript",
"bytes": "22118"
},
{
"name": "Mako",
"bytes": "1284"
},
{
"name": "Python",
"bytes": "5928206"
},
{
"name": "Shell",
"bytes": "41869"
}
],
"symlink_target": ""
} |
import mock
from nova import test
from nova.tests import fixtures as nova_fixtures
from nova.tests.functional import fixtures as func_fixtures
from nova.tests.functional import integrated_helpers
from nova.tests.unit.image import fake as fake_image
from nova.tests.unit import policy_fixture
class HypervisorError(Exception):
"""This is just used to make sure the exception type is in the fault."""
pass
class ServerFaultTestCase(test.TestCase,
integrated_helpers.InstanceHelperMixin):
"""Tests for the server faults reporting from the API."""
def setUp(self):
super(ServerFaultTestCase, self).setUp()
# Setup the standard fixtures.
fake_image.stub_out_image_service(self)
self.addCleanup(fake_image.FakeImageService_reset)
self.useFixture(nova_fixtures.NeutronFixture(self))
self.useFixture(func_fixtures.PlacementFixture())
self.useFixture(policy_fixture.RealPolicyFixture())
# Start the compute services.
self.start_service('conductor')
self.start_service('scheduler')
self.compute = self.start_service('compute')
api_fixture = self.useFixture(nova_fixtures.OSAPIFixture(
api_version='v2.1'))
self.api = api_fixture.api
self.admin_api = api_fixture.admin_api
def test_server_fault_non_nova_exception(self):
"""Creates a server using the non-admin user, then reboots it which
will generate a non-NovaException fault and put the instance into
ERROR status. Then checks that fault details are only visible to the
admin user.
"""
# Create the server with the non-admin user.
server = self._build_server(
networks=[{'port': nova_fixtures.NeutronFixture.port_1['id']}])
server = self.api.post_server({'server': server})
server = self._wait_for_state_change(server, 'ACTIVE')
# Stop the server before rebooting it so that after the driver.reboot
# method raises an exception, the fake driver does not report the
# instance power state as running - that will make the compute manager
# set the instance vm_state to error.
self.api.post_server_action(server['id'], {'os-stop': None})
server = self._wait_for_state_change(server, 'SHUTOFF')
# Stub out the compute driver reboot method to raise a non-nova
# exception to simulate some error from the underlying hypervisor
# which in this case we are going to say has sensitive content.
error_msg = 'sensitive info'
with mock.patch.object(
self.compute.manager.driver, 'reboot',
side_effect=HypervisorError(error_msg)) as mock_reboot:
reboot_request = {'reboot': {'type': 'HARD'}}
self.api.post_server_action(server['id'], reboot_request)
# In this case we wait for the status to change to ERROR using
# the non-admin user so we can assert the fault details. We also
# wait for the task_state to be None since the wrap_instance_fault
# decorator runs before the reverts_task_state decorator so we will
# be sure the fault is set on the server.
server = self._wait_for_server_parameter(
server, {'status': 'ERROR', 'OS-EXT-STS:task_state': None},
api=self.api)
mock_reboot.assert_called_once()
# The server fault from the non-admin user API response should not
# have details in it.
self.assertIn('fault', server)
fault = server['fault']
self.assertNotIn('details', fault)
# And the sensitive details from the non-nova exception should not be
# in the message.
self.assertIn('message', fault)
self.assertNotIn(error_msg, fault['message'])
# The exception type class name should be in the message.
self.assertIn('HypervisorError', fault['message'])
# Get the server fault details for the admin user.
server = self.admin_api.get_server(server['id'])
fault = server['fault']
# The admin can see the fault details which includes the traceback.
self.assertIn('details', fault)
# The details also contain the exception message (which is not in the
# fault message).
self.assertIn(error_msg, fault['details'])
# Make sure the traceback is there by looking for part of it.
self.assertIn('in reboot_instance', fault['details'])
# The exception type class name should be in the message for the admin
# user as well since the fault handling code cannot distinguish who
# is going to see the message so it only sets class name.
self.assertIn('HypervisorError', fault['message'])
| {
"content_hash": "e3cc5ee2fc2047ea9677963aeb9ff350",
"timestamp": "",
"source": "github",
"line_count": 100,
"max_line_length": 79,
"avg_line_length": 48.32,
"alnum_prop": 0.6556291390728477,
"repo_name": "rahulunair/nova",
"id": "80e998d466af7cf05bec1d28f436e546507af9a0",
"size": "5378",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "nova/tests/functional/test_server_faults.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "PHP",
"bytes": "3325"
},
{
"name": "Python",
"bytes": "22804450"
},
{
"name": "Shell",
"bytes": "41649"
},
{
"name": "Smarty",
"bytes": "472764"
}
],
"symlink_target": ""
} |
"""Module to find the latest green build from Pulse.
This module locates the latest green build from Pulse for download.
"""
import xmlrpclib
def GetLatest(server_url, project_name, username, password,
command, name, filename, stage):
"""Get the latest version of an artifact from Pulse.
Args:
server_url: The pulse server url, for example http://pulse:8080/
project_name: The name of the Pulse project to access.
username: Username for login.
password: Password for login.
command: The command the artifact comes from.
name: The name of the artifact.
filename: The relative path to the artifact file.
stage: The stage to grab the artifact from.
Returns:
Returns None if nothing is found, otherwise it returns a permalink to the
artifacts download.
Raises:
IOError: In the event of access failure or if no green builds exist.
"""
server = xmlrpclib.ServerProxy(server_url + 'xmlrpc')
token = server.RemoteApi.login(username, password)
# Get the latest 100 builds of the tools.
builds = server.RemoteApi.getLatestBuildsForProject(token, project_name,
'', True, 100)
# Extract the latest green build.
green_builds = [b for b in builds if b['status'] == 'success']
if not green_builds:
raise IOError('No green builds of project %s found' % project_name)
build = green_builds[0]
artifacts = server.RemoteApi.getArtifactsInBuild(token, project_name,
build['id'])
# Pick out the desired artifact file.
link = None
for a in artifacts:
# Skip everything other than what we're looking for.
if a['command'] != command or a['name'] != name or a['stage'] != stage:
continue
# Construct full permalink to artifact.
link = (server_url + a['permalink'] + filename)
break
server.RemoteApi.logout(token)
return link
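# A hypothetical invocation sketch (the server URL, credentials and artifact
# coordinates below are placeholders, not values from the original project):
#
# link = GetLatest('http://pulse:8080/', 'toolchain', 'builder', 'secret',
#                  command='package', name='sdk', filename='sdk.tar.gz',
#                  stage='linux')
# if link:
#     print 'Latest green artifact:', link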
| {
"content_hash": "bb51cbea1c023cc8bfafa27094025109",
"timestamp": "",
"source": "github",
"line_count": 54,
"max_line_length": 77,
"avg_line_length": 35.925925925925924,
"alnum_prop": 0.6649484536082474,
"repo_name": "asacamano/keyczar",
"id": "7ff06c3772279f5d528acaf3e293637f54896bc9",
"size": "3490",
"binary": false,
"copies": "17",
"ref": "refs/heads/master",
"path": "cpp/src/tools/swtoolkit/site_scons/pulse_latest.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "4094"
},
{
"name": "C",
"bytes": "40648"
},
{
"name": "C++",
"bytes": "840887"
},
{
"name": "CSS",
"bytes": "17986"
},
{
"name": "HTML",
"bytes": "2750877"
},
{
"name": "Java",
"bytes": "463654"
},
{
"name": "JavaScript",
"bytes": "11024"
},
{
"name": "Makefile",
"bytes": "1172"
},
{
"name": "Objective-C",
"bytes": "2028"
},
{
"name": "Objective-C++",
"bytes": "2107"
},
{
"name": "Python",
"bytes": "2500374"
},
{
"name": "Shell",
"bytes": "8582"
},
{
"name": "TeX",
"bytes": "52886"
}
],
"symlink_target": ""
} |
from supriya.tools.systemtools.SupriyaValueObject import SupriyaValueObject
class MidiFile(SupriyaValueObject):
r'''MIDI file.
'''
### CLASS VARIABLES ###
__slots__ = (
'_midi_tracks',
)
### INITIALIZER ###
def __init__(self, midi_tracks=None):
from supriya.tools import miditools
prototype = miditools.MidiTrack
if midi_tracks is not None:
assert all(isinstance(_, prototype) for _ in midi_tracks)
midi_tracks = tuple(midi_tracks)
self._midi_tracks = midi_tracks
### PUBLIC PROPERTIES ###
@property
def midi_tracks(self):
r'''Gets MIDI tracks in MIDI file.
'''
        return self._midi_tracks
| {
"content_hash": "28cc2bcfdcfd48fc59c496bc6dcea993",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 75,
"avg_line_length": 24.2,
"alnum_prop": 0.59366391184573,
"repo_name": "andrewyoung1991/supriya",
"id": "549ac986e44449406b7f30950f744f3e7c4cc5b9",
"size": "752",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "supriya/tools/miditools/MidiFile.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "6712"
},
{
"name": "CSS",
"bytes": "446"
},
{
"name": "HTML",
"bytes": "1083"
},
{
"name": "JavaScript",
"bytes": "6163"
},
{
"name": "Makefile",
"bytes": "6775"
},
{
"name": "Python",
"bytes": "2693776"
}
],
"symlink_target": ""
} |
"""
gRPC client that connects to a gRPC server endpoint and queries the
endpoint's schema by calling SchemaService.Schema(Empty); all of its
semantics are derived from the recovered schema.
"""
from __future__ import absolute_import
import os
import sys
import time
from random import randint
from zlib import decompress
import functools
import grpc
from consul import Consul
from grpc._channel import _Rendezvous
from structlog import get_logger
from twisted.internet import reactor
from twisted.internet.defer import inlineCallbacks, returnValue
from twisted.internet.error import ConnectError
from .protos.schema_pb2_grpc import SchemaServiceStub
from google.protobuf.empty_pb2 import Empty
from .asleep import asleep
log = get_logger()
class GrpcClient(object):
"""
Connect to a gRPC server, fetch its schema, and process the downloaded
schema files to drive the customization of the north-bound interface(s)
of Chameleon.
"""
RETRY_BACKOFF = [0.05, 0.1, 0.2, 0.5, 1, 2, 5]
def __init__(self, consul_endpoint, work_dir, endpoint='localhost:50055',
reconnect_callback=None, credentials=None, restart_on_disconnect=False):
self.consul_endpoint = consul_endpoint
self.endpoint = endpoint
self.work_dir = work_dir
self.reconnect_callback = reconnect_callback
self.credentials = credentials
self.restart_on_disconnect = restart_on_disconnect
self.google_api_dir = os.path.abspath(os.path.join(
os.path.dirname(__file__), 'protos'))
self.plugin_dir = os.path.abspath(os.path.join(
os.path.dirname(__file__), 'protoc_plugins'))
self.channel = None
self.schema = None
self.retries = 0
self.shutting_down = False
self.connected = False
self.was_connected = False
def start(self):
log.debug('starting')
if not self.connected:
reactor.callLater(0, self.connect)
log.info('started')
return self
def stop(self):
log.debug('stopping')
if self.shutting_down:
return
self.shutting_down = True
log.info('stopped')
def set_reconnect_callback(self, reconnect_callback):
self.reconnect_callback = reconnect_callback
return self
def connectivity_callback(self, client, connectivity):
if (self.was_connected) and (connectivity in [connectivity.TRANSIENT_FAILURE, connectivity.SHUTDOWN]):
log.info("connectivity lost -- restarting")
os.execv(sys.executable, ['python'] + sys.argv)
if (connectivity == connectivity.READY):
self.was_connected = True
# Sometimes gRPC transitions from READY to IDLE, skipping TRANSIENT_FAILURE even though a socket is
# disconnected. So on idle, force a connectivity check.
if (connectivity == connectivity.IDLE) and (self.was_connected):
connectivity = client.channel._channel.check_connectivity_state(True)
# The result will probably show IDLE, but passing in True has the side effect of reconnecting if the
# connection has been lost, which will trigger the TRANSIENT_FALURE we were looking for.
@inlineCallbacks
def connect(self):
"""
(Re-)Connect to end-point
"""
if self.shutting_down or self.connected:
return
try:
if self.endpoint.startswith('@'):
_endpoint = yield self._get_endpoint_from_consul(
self.endpoint[1:])
else:
_endpoint = self.endpoint
if self.credentials:
log.info('securely connecting', endpoint=_endpoint)
self.channel = grpc.secure_channel(_endpoint, self.credentials)
else:
log.info('insecurely connecting', endpoint=_endpoint)
self.channel = grpc.insecure_channel(_endpoint)
if self.restart_on_disconnect:
connectivity_callback = functools.partial(self.connectivity_callback, self)
self.channel.subscribe(connectivity_callback)
# Delay between initiating connection and executing first gRPC. See CORD-3012.
time.sleep(0.5)
swagger_from = self._retrieve_schema()
self._compile_proto_files(swagger_from)
self._clear_backoff()
self.connected = True
if self.reconnect_callback is not None:
reactor.callLater(0, self.reconnect_callback)
return
except _Rendezvous as e:
if e.code() == grpc.StatusCode.UNAVAILABLE:
log.info('grpc-endpoint-not-available')
else:
log.exception('rendezvous error', e=e)
yield self._backoff('not-available')
except Exception:
if not self.shutting_down:
log.exception('cannot-connect', endpoint=_endpoint)
yield self._backoff('unknown-error')
reactor.callLater(0, self.connect)
def _backoff(self, msg):
wait_time = self.RETRY_BACKOFF[min(self.retries,
len(self.RETRY_BACKOFF) - 1)]
self.retries += 1
log.error(msg, retry_in=wait_time)
return asleep(wait_time)
def _clear_backoff(self):
if self.retries:
log.info('reconnected', after_retries=self.retries)
self.retries = 0
@inlineCallbacks
def _get_endpoint_from_consul(self, service_name):
"""
Look up an appropriate grpc endpoint (host, port) from
consul, under the service name specified by service-name
"""
host = self.consul_endpoint.split(':')[0].strip()
port = int(self.consul_endpoint.split(':')[1].strip())
while True:
log.debug('consul-lookup', host=host, port=port)
consul = Consul(host=host, port=port)
_, services = consul.catalog.service(service_name)
log.debug('consul-response', services=services)
if services:
break
log.warning('no-service', consul_host=host, consul_port=port,
service_name=service_name)
yield asleep(1.0)
# pick local addresses when resolving a service via consul
# see CORD-815 (https://jira.opencord.org/browse/CORD-815)
service = services[randint(0, len(services) - 1)]
endpoint = '{}:{}'.format(service['ServiceAddress'],
service['ServicePort'])
returnValue(endpoint)
def _retrieve_schema(self):
"""
Retrieve schema from gRPC end-point, and save all *.proto files in
the work directory.
"""
assert isinstance(self.channel, grpc.Channel)
stub = SchemaServiceStub(self.channel)
# try:
schemas = stub.GetSchema(Empty(), timeout=120)
# except _Rendezvous, e:
# if e.code == grpc.StatusCode.UNAVAILABLE:
#
# else:
# raise e
os.system('mkdir -p %s' % self.work_dir)
os.system('rm -fr /tmp/%s/*' %
self.work_dir.replace('/tmp/', '')) # safer
for proto_file in schemas.protos:
proto_fname = proto_file.file_name
proto_content = proto_file.proto
log.debug('saving-proto', fname=proto_fname, dir=self.work_dir,
length=len(proto_content))
with open(os.path.join(self.work_dir, proto_fname), 'w') as f:
f.write(proto_content)
desc_content = decompress(proto_file.descriptor)
desc_fname = proto_fname.replace('.proto', '.desc')
log.debug('saving-descriptor', fname=desc_fname, dir=self.work_dir,
length=len(desc_content))
with open(os.path.join(self.work_dir, desc_fname), 'wb') as f:
f.write(desc_content)
return schemas.swagger_from
def _compile_proto_files(self, swagger_from):
"""
For each *.proto file in the work directory, compile the proto
file into the respective *_pb2.py file as well as generate the
web server gateway python file *_gw.py.
:return: None
"""
chameleon_base_dir = os.path.abspath(os.path.join(
os.path.dirname(__file__), '.'
))
for fname in [f for f in os.listdir(self.work_dir)
if f.endswith('.proto')]:
need_swagger = fname == swagger_from
log.debug('compiling', file=fname, need_swagger=need_swagger)
cmd = (
'cd %s && '
'env PATH=%s PYTHONPATH=%s '
'python -m grpc.tools.protoc '
'-I. -I %s '
'--python_out=. '
'--grpc_python_out=. '
'--plugin=protoc-gen-gw=%s/gw_gen.py '
'--gw_out=. '
'%s' % (
self.work_dir,
':'.join([os.environ['PATH'], self.plugin_dir]),
chameleon_base_dir,
self.google_api_dir,
self.plugin_dir,
fname)
)
log.debug('executing', cmd=cmd, file=fname)
os.system(cmd)
log.info('compiled', file=fname)
# test-load each _pb2 file to see all is right
if self.work_dir not in sys.path:
sys.path.insert(0, self.work_dir)
for fname in [f for f in os.listdir(self.work_dir)
if f.endswith('_pb2.py')]:
modname = fname[:-len('.py')]
log.debug('test-import', modname=modname)
_ = __import__(modname) # noqa: F841
@inlineCallbacks
def invoke(self, stub, method_name, request, metadata, retry=1):
"""
Invoke a gRPC call to the remote server and return the response.
:param stub: Reference to the *_pb2 service stub
:param method_name: The method name inside the service stub
:param request: The request protobuf message
:param metadata: [(str, str), (str, str), ...]
:return: The response protobuf message and returned trailing metadata
"""
if not self.connected:
raise ConnectError()
try:
method = getattr(stub(self.channel), method_name)
response, rendezvous = method.with_call(request, metadata=metadata)
returnValue((response, rendezvous.trailing_metadata()))
except grpc._channel._Rendezvous as e:
code = e.code()
if code == grpc.StatusCode.UNAVAILABLE:
e = ConnectError()
if self.connected:
self.connected = False
yield self.connect()
if retry > 0:
response = yield self.invoke(stub, method_name,
request, metadata,
retry=retry - 1)
returnValue(response)
elif code in (
grpc.StatusCode.NOT_FOUND,
grpc.StatusCode.INVALID_ARGUMENT,
grpc.StatusCode.ALREADY_EXISTS,
grpc.StatusCode.UNAUTHENTICATED,
grpc.StatusCode.PERMISSION_DENIED):
pass # don't log error, these occur naturally
else:
log.exception(e)
raise e
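# A minimal, hypothetical caller sketch. The stub and request classes below
# are placeholders -- the real names come from the *_pb2 modules compiled
# into work_dir at runtime -- and the call itself must run inside the
# Twisted reactor:
#
# @inlineCallbacks
# def _example_call(client):
#     from example_pb2_grpc import ExampleServiceStub
#     from example_pb2 import ExampleRequest
#     response, trailing_metadata = yield client.invoke(
#         ExampleServiceStub, 'GetExample', ExampleRequest(), metadata=[])
#     returnValue(response)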
| {
"content_hash": "248a21cbdbda68b53ac457f82fb1a7e8",
"timestamp": "",
"source": "github",
"line_count": 314,
"max_line_length": 112,
"avg_line_length": 37.031847133757964,
"alnum_prop": 0.5701754385964912,
"repo_name": "open-cloud/xos",
"id": "2b2d1e0d8290f30e3641dc22e73bdbe1533c31d1",
"size": "12229",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "lib/xos-api/xosapi/chameleon_client/grpc_client.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "JavaScript",
"bytes": "5024"
},
{
"name": "Makefile",
"bytes": "13624"
},
{
"name": "Python",
"bytes": "1329912"
},
{
"name": "Shell",
"bytes": "57651"
},
{
"name": "Smarty",
"bytes": "3161"
}
],
"symlink_target": ""
} |
"""Reads logs from an Azure subscription."""
import json
import tempfile
from typing import Optional
from azure.mgmt import monitor as az_monitor
from azure.core import exceptions as az_exceptions
from libcloudforensics import errors as lcf_errors
from libcloudforensics.providers.azure.internal import common as lcf_common
from dftimewolf.lib import module
from dftimewolf.lib.containers import containers
from dftimewolf.lib.modules import manager as modules_manager
from dftimewolf.lib.state import DFTimewolfState
class AzureLogsCollector(module.BaseModule):
"""Collector for Azure Activity logs."""
def __init__(self,
state: DFTimewolfState,
name: Optional[str]=None,
critical: bool=False) -> None:
"""Initializes an Azure logs collector."""
super(AzureLogsCollector, self).__init__(
state, name=name, critical=critical)
self._filter_expression = ''
self._subscription_id = ''
self._profile_name: Optional[str] = ''
# pylint: disable=arguments-differ
def SetUp(self,
subscription_id: str,
filter_expression: str,
profile_name: Optional[str]=None) -> None:
"""Sets up an Azure logs collector.
Args:
subscription_id (str): name of the subscription_id to fetch logs from.
filter_expression (str): Azure logs filter expression.
profile_name (str): a profile name to use for finding credentials.
"""
self._subscription_id = subscription_id
self._filter_expression = filter_expression
self._profile_name = profile_name
def Process(self) -> None:
"""Copies logs from an Azure subscription."""
output_file = tempfile.NamedTemporaryFile(
mode='w', delete=False, encoding='utf-8', suffix='.jsonl')
output_path = output_file.name
self.logger.info(f'Downloading logs to {output_path:s}')
try:
_, credentials = lcf_common.GetCredentials(
profile_name=self._profile_name)
except (lcf_errors.CredentialsConfigurationError,
FileNotFoundError) as exception:
self.ModuleError('Ensure credentials are properly configured as expected '
'by libcloudforensics: either a credentials.json file associated '
'with the provided profile_name, environment variables as per '
'https://docs.microsoft.com/en-us/azure/developer/python/azure-sdk-authenticate ' # pylint: disable=line-too-long
', or Azure CLI credentials.')
self.ModuleError(str(exception), critical=True)
monitoring_client = az_monitor.MonitorManagementClient(
credentials, self._subscription_id)
activity_logs_client = monitoring_client.activity_logs
try:
results = activity_logs_client.list(filter=self._filter_expression)
while True:
try:
result_entry = next(results)
except StopIteration:
break
log_dict = result_entry.as_dict()
output_file.write(json.dumps(log_dict))
output_file.write('\n')
except az_exceptions.ClientAuthenticationError as exception:
self.ModuleError('Ensure credentials are properly configured.')
self.ModuleError(str(exception), critical=True)
except az_exceptions.HttpResponseError as exception:
if exception.status_code == 400:
self.ModuleError(
'Badly formed request, ensure that the filter expression is '
'formatted correctly e.g. "eventTimestamp ge \'2022-02-01\'"')
if exception.status_code == 403:
self.ModuleError(
'Make sure you have the appropriate permissions in the '
'subscription')
if exception.status_code == 404:
self.ModuleError(
'Resource not found, ensure that subscription_id is correct.')
self.ModuleError(str(exception), critical=True)
    self.PublishMessage(f'Downloaded logs to {output_path}')
output_file.close()
logs_report = containers.File('AzureLogsCollector result', output_path)
self.state.StoreContainer(logs_report)
modules_manager.ModulesManager.RegisterModule(AzureLogsCollector)
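# A small, hypothetical sketch of wiring up this collector outside a recipe.
# `state` stands in for an already-initialised DFTimewolfState, the
# subscription id is a placeholder, and the "and ... le" clause of the filter
# expression is an assumption extrapolated from the single example used in
# the error message above.
#
# collector = AzureLogsCollector(state)
# collector.SetUp(
#     subscription_id='00000000-0000-0000-0000-000000000000',
#     filter_expression="eventTimestamp ge '2022-02-01' and "
#                       "eventTimestamp le '2022-02-28'")
# collector.Process()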
| {
"content_hash": "4538e4ab6f92eca121b44a9a4b99e987",
"timestamp": "",
"source": "github",
"line_count": 110,
"max_line_length": 123,
"avg_line_length": 37.345454545454544,
"alnum_prop": 0.6879259980525804,
"repo_name": "log2timeline/dftimewolf",
"id": "91f5a045a6ced5286451f92e48da5d3f7bddcdca",
"size": "4132",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "dftimewolf/lib/collectors/azure_logging.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "633"
},
{
"name": "Python",
"bytes": "980195"
},
{
"name": "Shell",
"bytes": "11863"
}
],
"symlink_target": ""
} |
"""Remove Nth Node From End of List
Given a linked list, remove the n-th node from the end of list and return its head.
Example:
Given linked list: 1 -> 2 -> 3 -> 4 -> 5, and n = 2.
    After removing the second node from the end, the linked list becomes 1 -> 2 -> 3 -> 5
Note:
Given n will always be valid.
Refer https://leetcode.com/problems/remove-nth-node-from-end-of-list
"""
from utils.list import ListNode, create_linked_list
class Solution:
    """The length of a singly linked list is only known after traversing it once."""
def removeNthFromEnd(self, head, n):
"""
:type head: ListNode
:type n: int
:rtype: ListNode
"""
length = 1
node = head
while node.next is not None:
length += 1
node = node.next
if length < n:
return head
location = length - n
if location == 0:
return head.next
i, node = 1, head
while i < location:
node = node.next
i += 1
node.next = node.next.next
return head
class SolutionOnce:
    """Traverse the list only once by using two pointers."""
def removeNthFromEnd(self, head: ListNode, n: int):
dummy_node = ListNode(-1)
dummy_node.next = head
node = self.find_from_end(dummy_node, n+1)
node.next = node.next.next
return dummy_node.next
def find_from_end(self, head: ListNode, n: int):
p1 = head
for i in range(n):
p1 = p1.next
p2 = head
while p1 is not None:
p2 = p2.next
p1 = p1.next
return p2
if __name__ == '__main__':
cases = [
(create_linked_list(list(range(5))), 4, 1),
(create_linked_list(list(range(5))), 1, 4),
(create_linked_list(list(range(5))), 5, 0),
]
solutions = (Solution(), SolutionOnce())
for case in cases:
for ss in solutions:
result = [node.val for node in ss.removeNthFromEnd(case[0], case[1])]
assert case[2] not in result
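# A short, hypothetical dry run of the two-pointer gap used by SolutionOnce:
# for head = 1 -> 2 -> 3 -> 4 -> 5 and n = 2, p1 starts n + 1 = 3 hops ahead
# of the dummy node, so when p1 runs off the end p2 is parked on the 3 --
# the node just before the one to delete -- and a single pointer rewire
# (node.next = node.next.next) drops the 4.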
| {
"content_hash": "8c90b88e9f16079923596807034edeeb",
"timestamp": "",
"source": "github",
"line_count": 82,
"max_line_length": 84,
"avg_line_length": 24.28048780487805,
"alnum_prop": 0.5479658463083877,
"repo_name": "aiden0z/snippets",
"id": "f49021cbc65e682d1f6cf0e455b59a1e76e9a6f2",
"size": "2045",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "leetcode/019_remove_nth_node_from_end_olf_list.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "1794"
},
{
"name": "CSS",
"bytes": "325"
},
{
"name": "Go",
"bytes": "2256"
},
{
"name": "HTML",
"bytes": "28217"
},
{
"name": "Java",
"bytes": "78744"
},
{
"name": "Python",
"bytes": "280837"
},
{
"name": "Rust",
"bytes": "102204"
}
],
"symlink_target": ""
} |
from puzzle.utils import (get_most_severe_consequence, get_cytoband_coord,
get_omim_number, get_gene_info)
def test_get_gene_info():
# test with HGNC symbol
genes = list(get_gene_info(hgnc_symbols=['CHAT']))
assert len(genes) == 1
assert genes[0].symbol == 'CHAT'
assert genes[0].ensembl_id == 'ENSG00000070748'
# test with Ensembl id
genes = list(get_gene_info(ensembl_ids=['ENSG00000156110']))
assert len(genes) == 1
assert genes[0].symbol == 'ADK'
assert genes[0].ensembl_id == 'ENSG00000156110'
def test_get_most_severe_consequence():
    """Test get_most_severe_consequence(transcripts) method"""
    print("Test get_most_severe_consequence with a 'regular' transcripts list")
transcripts = [
{'consequence': 'transcript_ablation'}
]
assert get_most_severe_consequence(transcripts) == 'transcript_ablation'
    print("Test get_most_severe_consequence with empty transcripts list")
transcripts = []
assert get_most_severe_consequence(transcripts) is None
    print("Test get_most_severe_consequence with 'unknown' consequence")
transcripts = [
{'consequence': 'unknown'}
]
assert get_most_severe_consequence(transcripts) is None
print("Test most_severe_consequence with multiple transcripts")
transcripts = [
{'consequence': 'inframe_deletion'},
{'consequence': 'start_lost'},
{'consequence': 'synonymous_variant'}
]
assert get_most_severe_consequence(transcripts) == 'start_lost'
print("Test most_severe_consequence with multiple transcripts (annotations)")
transcripts = [
{'consequence': 'start_lost&synonymous_variant'},
]
assert get_most_severe_consequence(transcripts) == 'start_lost'
def test_get_cytoband_coord():
"""test get_cytoband_coord(chrom, pos) method"""
print("Test get_cytoband_coord with different input formats")
assert get_cytoband_coord('1', 3) == '1p36.33'
assert get_cytoband_coord('chr1', 3) == '1p36.33'
assert get_cytoband_coord('chr1', '3') == '1p36.33'
print("Test get_cytoband_coord with non existing chromosome")
assert get_cytoband_coord('chrMT', '3') is None
print("Test get_cytoband_coord with non existing position")
assert get_cytoband_coord('chrX', '155270600') is None
def test_get_omim_number():
"""Test get_omim_number(hgnc_symbol) method"""
print("Test get_omim_number with valid hgnc_symbol")
assert get_omim_number('IFT172') == 607386
print("Test get_omim_number with invalid hgnc_symbol")
assert get_omim_number('HEJ') is None
print("Test getting phenotype")
assert get_omim_number('MCCRP2') != get_omim_number('PLK4')
assert get_omim_number('MCCRP2') is None
assert get_omim_number('PLK4') == 605031
| {
"content_hash": "aa8d091f9f84e84038f251e5c94cbbc7",
"timestamp": "",
"source": "github",
"line_count": 77,
"max_line_length": 81,
"avg_line_length": 36.51948051948052,
"alnum_prop": 0.6717638691322901,
"repo_name": "robinandeer/puzzle",
"id": "2c870ad7ad7211dd18b56951b2b89a3b98ac62aa",
"size": "2812",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/utils/test_get_info.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "251"
},
{
"name": "HTML",
"bytes": "55258"
},
{
"name": "JavaScript",
"bytes": "1100"
},
{
"name": "Python",
"bytes": "233511"
}
],
"symlink_target": ""
} |
'''stuff we test:
* "hello" should work with all enabled schedulers and link against all single-scheduler libraries.
* "sleep" should sleep "quickly" with all enabled schedulers (partitioning test)
* random checker should find bugs.
* valgrind checker should find bugs.
* various stuff - like find, sort and accumulate.
'''
import os
import sys
import build
import commands
tests = 'bug.cpp sleep.cpp nested.cpp grain.cpp acc.cpp cancel.cpp sort.cpp'.split()
with_cpp = 'C++11' in build.enabled
with_pthreads = 'pthreads' in build.enabled
with_openmp = 'OpenMP' in build.enabled
with_tbb = 'TBB' in build.enabled
print '\nbuilding tests'
verbose = build.verbose
built = []
def buildtest(*args):
built.append(build.buildtest(*args))
buildtest('hello_ct.c')
if with_pthreads: buildtest('hello_ct.c','_pthreads')
if with_openmp: buildtest('hello_ct.c','_openmp')
if with_cpp:
buildtest('hello_ctx.cpp')
if with_pthreads: buildtest('hello_ctx.cpp','_pthreads')
if with_openmp: buildtest('hello_ctx.cpp','_openmp')
if with_tbb: buildtest('hello_ctx.cpp','_tbb')
for test in tests:
if test.endswith('.cpp') and not with_cpp:
continue
buildtest(test)
scheds = 'serial shuffle valgrind openmp tbb pthreads'.split()
# remove schedulers which we aren't configured to support
def lower(ls): return [s.lower() for s in ls]
scheds = [sched for sched in scheds if not (sched in lower(build.features) \
and sched not in lower(build.enabled))]
failed = []
def fail(command):
print ' ',command,'FAILED'
failed.append(command)
def runtest(name,args='',expected_status=0,expected_output=None,**env):
envstr=' '.join(['%s=%s'%(n,v) for n,v in env.items()])
command = 'env %s ./bin/%s %s'%(envstr,name,args)
return runcommand(command,expected_status,expected_output)
def runcommand(command,expected_status=0,expected_output=None):
if verbose:
print ' ','running',command
status,output = commands.getstatusoutput(command)
if verbose>1:
print ' ','\n '.join(output.split('\n'))
bad_status = status != expected_status and expected_status != None
bad_output = output != expected_output and expected_output != None
if bad_status or bad_output:
fail(command)
return status, output, command
print '\nrunning tests'
testscripts = 'hello.py bug.py nested.py sleep.py'.split()
for testscript in testscripts:
execfile('test/'+testscript)
for test in built:
if test in 'bug nested sleep'.split() or test.startswith('hello'):
continue
if test == 'sort':
runtest(test,args=str(1024*1024))
else:
runtest(test)
if failed:
print 'FAILED:'
print '\n'.join(failed)
sys.exit(1)
| {
"content_hash": "d6f1d4bfb5bf45fc29199b9e0eaa50db",
"timestamp": "",
"source": "github",
"line_count": 88,
"max_line_length": 98,
"avg_line_length": 31.34090909090909,
"alnum_prop": 0.6776649746192893,
"repo_name": "jinxuan/checkedthreads",
"id": "d99423241fad147641820062d2822145e0964169",
"size": "2776",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "test.py",
"mode": "33261",
"license": "bsd-2-clause",
"language": [
{
"name": "C",
"bytes": "53821"
},
{
"name": "C++",
"bytes": "20522"
},
{
"name": "Makefile",
"bytes": "890"
},
{
"name": "Python",
"bytes": "12653"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.apps import AppConfig
class CustomersConfig(AppConfig):
name = 'customers'
| {
"content_hash": "9788e908e652dfb52d75700afaebe2af",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 39,
"avg_line_length": 19.142857142857142,
"alnum_prop": 0.7611940298507462,
"repo_name": "rtorres90/learning-python-package-system",
"id": "62e59285b0846620e91844c9131ad38592336b39",
"size": "134",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "packages/customers/apps.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "7408"
}
],
"symlink_target": ""
} |
pytest_plugins = "pytester"
| {
"content_hash": "9cedcd7de2105f000ceed005528d8062",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 27,
"avg_line_length": 28,
"alnum_prop": 0.75,
"repo_name": "pytest-dev/pytest-subtests",
"id": "694d7d58d440c236ea8ddcfece9eb605427f7be9",
"size": "28",
"binary": false,
"copies": "10",
"ref": "refs/heads/main",
"path": "tests/conftest.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "23671"
}
],
"symlink_target": ""
} |
from msrest.serialization import Model
class TroubleshootingResult(Model):
"""Troubleshooting information gained from specified resource.
:param start_time: The start time of the troubleshooting.
:type start_time: datetime
:param end_time: The end time of the troubleshooting.
:type end_time: datetime
:param code: The result code of the troubleshooting.
:type code: str
:param results: Information from troubleshooting.
:type results:
list[~azure.mgmt.network.v2016_09_01.models.TroubleshootingDetails]
"""
_attribute_map = {
'start_time': {'key': 'startTime', 'type': 'iso-8601'},
'end_time': {'key': 'endTime', 'type': 'iso-8601'},
'code': {'key': 'code', 'type': 'str'},
'results': {'key': 'results', 'type': '[TroubleshootingDetails]'},
}
def __init__(self, start_time=None, end_time=None, code=None, results=None):
super(TroubleshootingResult, self).__init__()
self.start_time = start_time
self.end_time = end_time
self.code = code
self.results = results
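# A minimal, hypothetical construction sketch (the field values below are
# placeholders; in practice instances are deserialized from the service
# response rather than built by hand):
#
# import datetime
# result = TroubleshootingResult(
#     start_time=datetime.datetime(2017, 1, 1, 12, 0),
#     end_time=datetime.datetime(2017, 1, 1, 12, 5),
#     code='UnHealthy',
#     results=[])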
| {
"content_hash": "f591896c883e568356e4364d00b051d1",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 80,
"avg_line_length": 36.5,
"alnum_prop": 0.639269406392694,
"repo_name": "AutorestCI/azure-sdk-for-python",
"id": "62625db5e1845a2cb0a5aa0fab4d9265462c6464",
"size": "1569",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "azure-mgmt-network/azure/mgmt/network/v2016_09_01/models/troubleshooting_result.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "34619070"
}
],
"symlink_target": ""
} |
from django.dispatch import Signal
# The signal which is fired on order status changes.
# You should handle this signal to create the payment Source records in Oscar.
order_status_changed = Signal(providing_args=["order", "old_status", "new_status"])
payment_updated = Signal(providing_args=['order', 'payment'])
payment_added = Signal(providing_args=['order', 'payment'])
return_view_called = Signal(providing_args=['request', 'order', 'callback'])
status_changed_view_called = Signal(providing_args=['request', 'order'])
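# A minimal, hypothetical receiver sketch showing how a project could hook the
# order_status_changed signal declared above; the 'paid' status value and the
# Source-creation body are placeholders, not part of this package:
#
# from django.dispatch import receiver
#
# @receiver(order_status_changed)
# def create_payment_source(sender, order, old_status, new_status, **kwargs):
#     if new_status == 'paid':
#         pass  # create the Oscar payment Source records for this order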
| {
"content_hash": "51a7090d58d62ba239e0634847e960a1",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 83,
"avg_line_length": 37.785714285714285,
"alnum_prop": 0.7391304347826086,
"repo_name": "edoburu/django-oscar-docdata",
"id": "dcfb22f9cc3f45e40d54fca6fdf06bf69c79fa4d",
"size": "529",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "oscar_docdata/signals.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "24711"
},
{
"name": "Python",
"bytes": "189773"
}
],
"symlink_target": ""
} |
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
from twisted.python import components
from zope.interface import implements, Interface
def foo():
return 2
class X:
def __init__(self, x):
self.x = x
def do(self):
#print 'X',self.x,'doing!'
pass
class XComponent(components.Componentized):
pass
class IX(Interface):
pass
class XA(components.Adapter):
implements(IX)
def method(self):
# Kick start :(
pass
components.registerAdapter(XA, X, IX)
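# A tiny, hypothetical illustration of what the registration above enables:
# adapting an X instance to IX yields the registered XA adapter.
#
# x = X(1)
# adapter = IX(x)   # -> an XA instance wrapping x
# adapter.method()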
| {
"content_hash": "0af692f4f712ea7ac121b9943f9d1322",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 48,
"avg_line_length": 17.029411764705884,
"alnum_prop": 0.6044905008635578,
"repo_name": "timkrentz/SunTracker",
"id": "3eebc8e6156229c53a3f36bed15534b35a5fd040",
"size": "579",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "IMU/VTK-6.2.0/ThirdParty/Twisted/twisted/test/crash_test_dummy.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Arduino",
"bytes": "185699"
},
{
"name": "Assembly",
"bytes": "38582"
},
{
"name": "Batchfile",
"bytes": "110"
},
{
"name": "C",
"bytes": "48362836"
},
{
"name": "C++",
"bytes": "70478135"
},
{
"name": "CMake",
"bytes": "1755036"
},
{
"name": "CSS",
"bytes": "147795"
},
{
"name": "Cuda",
"bytes": "30026"
},
{
"name": "D",
"bytes": "2152"
},
{
"name": "GAP",
"bytes": "14495"
},
{
"name": "GLSL",
"bytes": "190912"
},
{
"name": "Groff",
"bytes": "66799"
},
{
"name": "HTML",
"bytes": "295090"
},
{
"name": "Java",
"bytes": "203238"
},
{
"name": "JavaScript",
"bytes": "1146098"
},
{
"name": "Lex",
"bytes": "47145"
},
{
"name": "Makefile",
"bytes": "5461"
},
{
"name": "Objective-C",
"bytes": "74727"
},
{
"name": "Objective-C++",
"bytes": "265817"
},
{
"name": "Pascal",
"bytes": "3407"
},
{
"name": "Perl",
"bytes": "178176"
},
{
"name": "Prolog",
"bytes": "4556"
},
{
"name": "Python",
"bytes": "16497901"
},
{
"name": "Shell",
"bytes": "48835"
},
{
"name": "Smarty",
"bytes": "1368"
},
{
"name": "Tcl",
"bytes": "1955829"
},
{
"name": "Yacc",
"bytes": "180651"
}
],
"symlink_target": ""
} |
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# SEE:
# - http://cairographics.org/samples/
# - http://cairographics.org/documentation/pycairo/3/reference/index.html
# - http://cairographics.org/pycairo/tutorial/
# - http://www.tortall.net/mu/wiki/CairoTutorial
import cairo
import math
WIDTH, HEIGHT = 256, 256
def main():
"""Main function"""
# Image surfaces provide the ability to render to memory buffers either
# allocated by cairo or by the calling code.
# List of supported surfaces: http://www.cairographics.org/manual/cairo-surfaces.html
surface = cairo.SVGSurface("circle.svg", WIDTH, HEIGHT)
# cairo.Context is the object that you send your drawing commands to.
context = cairo.Context(surface)
# Normalizing the canvas ([0,1],[0,1]) -> ([0,WIDTH],[0,HEIGHT])
context.scale(WIDTH, HEIGHT)
### DRAW ###
# context.set_source_rgb(0., 0., 0.)
# context.set_source_rgba(0., 0., 0., 1.)
# Sets the source pattern within context to an opaque color. This opaque color
# will then be used for any subsequent drawing operation until a new source
# pattern is set.
# The color components are floating point numbers in the range 0 to 1. If
# the values passed in are outside that range, they will be clamped.
# The default source pattern is opaque black, (that is, it is equivalent to
# cairo_set_source_rgb(context, 0.0, 0.0, 0.0)).
# Using set_source_rgb(r, g, b) is equivalent to using
# set_source_rgba(r, g, b, 1.0), and it sets your source color to use
# full opacity.
#
# context.stroke()
# The stroke() operation takes a virtual pen along the current path
# according to the current line width, line join, line cap, and dash
# settings. After cairo_stroke(), the current path will be cleared from
# the cairo context.
# See http://www.cairographics.org/manual/cairo-cairo-t.html#cairo-stroke
#
# context.fill()
# A drawing operator that fills the current path according to the current
# fill rule, (each sub-path is implicitly closed before being filled).
# After cairo_fill(), the current path will be cleared from the cairo
# context.
# See http://www.cairographics.org/manual/cairo-cairo-t.html#cairo-fill
context.set_line_width(0.02)
context.set_source_rgb(1, 1, 1)
context.rectangle(0, 0, 1, 1)
context.fill()
# STROKE
x_center = 0.5
y_center = 0.5
radius = 0.4
angle1 = math.radians(0.) # angles in radians
angle2 = math.radians(360.) # angles in radians
context.set_source_rgb(0, 0, 0)
context.arc(x_center, y_center, radius, angle1, angle2)
context.stroke()
# DRAW HELPING LINES
context.set_source_rgba(1, 0.2, 0.2, 0.6)
context.arc(x_center, y_center, 0.03, angle1, 2. * math.pi)
context.fill()
### WRITE THE SVG FILE ###
surface.finish()
if __name__ == '__main__':
main()
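# Hedged variation (not part of circle.py): the same normalized-coordinate
# drawing rendered to a PNG through an ImageSurface instead of an SVGSurface.
def main_png(filename="circle.png"):
    """Draw the same circle into a PNG file (illustrative sketch)."""
    surface = cairo.ImageSurface(cairo.FORMAT_ARGB32, WIDTH, HEIGHT)
    context = cairo.Context(surface)
    context.scale(WIDTH, HEIGHT)
    context.set_source_rgb(1, 1, 1)
    context.paint()                             # white background
    context.set_line_width(0.02)
    context.set_source_rgb(0, 0, 0)
    context.arc(0.5, 0.5, 0.4, 0., 2. * math.pi)
    context.stroke()
    surface.write_to_png(filename)              # render the buffer to disk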
| {
"content_hash": "5046e1bba3c799f6594869b367c94946",
"timestamp": "",
"source": "github",
"line_count": 97,
"max_line_length": 89,
"avg_line_length": 35.1958762886598,
"alnum_prop": 0.6707674282366726,
"repo_name": "jeremiedecock/snippets",
"id": "9a430d952915bdb8b6a02c0a8a2c25f36e2afabd",
"size": "4100",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/cairo/circle.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "AMPL",
"bytes": "4294"
},
{
"name": "Batchfile",
"bytes": "6779"
},
{
"name": "C",
"bytes": "102107"
},
{
"name": "C++",
"bytes": "320943"
},
{
"name": "CMake",
"bytes": "11424"
},
{
"name": "CSS",
"bytes": "21121"
},
{
"name": "Cython",
"bytes": "21"
},
{
"name": "Dockerfile",
"bytes": "1818"
},
{
"name": "Fortran",
"bytes": "633"
},
{
"name": "Gnuplot",
"bytes": "39999"
},
{
"name": "Go",
"bytes": "3166"
},
{
"name": "Groovy",
"bytes": "3009"
},
{
"name": "HTML",
"bytes": "138995"
},
{
"name": "IDL",
"bytes": "43"
},
{
"name": "Java",
"bytes": "120221"
},
{
"name": "JavaScript",
"bytes": "32342"
},
{
"name": "Jinja",
"bytes": "206"
},
{
"name": "Jupyter Notebook",
"bytes": "95991"
},
{
"name": "Lua",
"bytes": "200"
},
{
"name": "M4",
"bytes": "111"
},
{
"name": "MATLAB",
"bytes": "31972"
},
{
"name": "Makefile",
"bytes": "81307"
},
{
"name": "OpenSCAD",
"bytes": "14995"
},
{
"name": "PHP",
"bytes": "94"
},
{
"name": "Perl",
"bytes": "46"
},
{
"name": "Processing",
"bytes": "208"
},
{
"name": "Prolog",
"bytes": "454"
},
{
"name": "Python",
"bytes": "1685966"
},
{
"name": "R",
"bytes": "76"
},
{
"name": "Raku",
"bytes": "43"
},
{
"name": "Ruby",
"bytes": "42"
},
{
"name": "Scheme",
"bytes": "649"
},
{
"name": "Shell",
"bytes": "52865"
},
{
"name": "Smalltalk",
"bytes": "55"
},
{
"name": "TeX",
"bytes": "1189"
},
{
"name": "Vue",
"bytes": "49445"
},
{
"name": "XSLT",
"bytes": "1816"
}
],
"symlink_target": ""
} |
from tools.load import LoadMatrix
lm = LoadMatrix()
fm_train_real = lm.load_numbers('../data/fm_train_real.dat')
fm_test_real = lm.load_numbers('../data/fm_test_real.dat')
label_train_multiclass = lm.load_labels('../data/label_train_multiclass.dat')
parameter_list=[
[ fm_train_real, fm_test_real, label_train_multiclass, 1.2, 1.2, 1e-5, 1, 0.001, 1.5],
[ fm_train_real, fm_test_real, label_train_multiclass, 5, 1.2, 1e-2, 1, 0.001, 2]]
def mkl_multiclass (fm_train_real, fm_test_real, label_train_multiclass,
width, C, epsilon, num_threads, mkl_epsilon, mkl_norm):
from shogun import CombinedFeatures, MulticlassLabels
from shogun import MKLMulticlass
import shogun as sg
kernel = sg.kernel("CombinedKernel")
feats_train = CombinedFeatures()
feats_test = CombinedFeatures()
subkfeats_train = sg.features(fm_train_real)
subkfeats_test = sg.features(fm_test_real)
subkernel = sg.kernel("GaussianKernel", log_width=width)
feats_train.append_feature_obj(subkfeats_train)
feats_test.append_feature_obj(subkfeats_test)
kernel.add("kernel_array", subkernel)
subkfeats_train = sg.features(fm_train_real)
subkfeats_test = sg.features(fm_test_real)
subkernel = sg.kernel("LinearKernel")
feats_train.append_feature_obj(subkfeats_train)
feats_test.append_feature_obj(subkfeats_test)
kernel.add("kernel_array", subkernel)
subkfeats_train = sg.features(fm_train_real)
subkfeats_test = sg.features(fm_test_real)
subkernel = sg.kernel("PolyKernel", cache_size=10, degree=2)
feats_train.append_feature_obj(subkfeats_train)
feats_test.append_feature_obj(subkfeats_test)
kernel.add("kernel_array", subkernel)
kernel.init(feats_train, feats_train)
labels = MulticlassLabels(label_train_multiclass)
mkl = MKLMulticlass(C, kernel, labels)
mkl.set_epsilon(epsilon);
mkl.parallel.set_num_threads(num_threads)
mkl.set_mkl_epsilon(mkl_epsilon)
mkl.set_mkl_norm(mkl_norm)
mkl.train()
kernel.init(feats_train, feats_test)
out = mkl.apply().get_labels()
return out
if __name__ == '__main__':
print('mkl_multiclass')
mkl_multiclass(*parameter_list[0])
| {
"content_hash": "e3af60bf163e29333fa48ad70f90ce32",
"timestamp": "",
"source": "github",
"line_count": 63,
"max_line_length": 88,
"avg_line_length": 33.03174603174603,
"alnum_prop": 0.7385872176838059,
"repo_name": "karlnapf/shogun",
"id": "b1e85821dd1f0458390421519ad8c9bc4f0fac2a",
"size": "2103",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "examples/undocumented/python/mkl_multiclass.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "568"
},
{
"name": "C",
"bytes": "12000"
},
{
"name": "C++",
"bytes": "10554889"
},
{
"name": "CMake",
"bytes": "195345"
},
{
"name": "Dockerfile",
"bytes": "2029"
},
{
"name": "GDB",
"bytes": "89"
},
{
"name": "HTML",
"bytes": "2066"
},
{
"name": "MATLAB",
"bytes": "8755"
},
{
"name": "Makefile",
"bytes": "244"
},
{
"name": "Python",
"bytes": "285072"
},
{
"name": "Shell",
"bytes": "11995"
}
],
"symlink_target": ""
} |
from rest_framework.parsers import DataAndFiles, MultiPartParser
class MultiPartParser(MultiPartParser):
"""
Parser for multipart form data, which may include file data.
Lifted from https://github.com/tomchristie/django-rest-framework/pull/4026/
to work around request.data being empty when multipart/form-data is posted.
See https://github.com/tomchristie/django-rest-framework/issues/3951
"""
def parse(self, stream, media_type=None, parser_context=None):
"""
Parses the incoming bytestream as a multipart encoded form,
and returns a DataAndFiles object.
`.data` will be a `QueryDict` containing all the form parameters.
`.files` will be a `QueryDict` containing all the form files.
For POSTs, accept Django request parsing. See issue #3951.
"""
parser_context = parser_context or {}
request = parser_context['request']
_request = request._request
if _request.method == 'POST':
return DataAndFiles(_request.POST, _request.FILES)
return super(MultiPartParser, self).parse(
stream, media_type=media_type, parser_context=parser_context)
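# Hedged usage sketch (not from the repo): wiring the parser into a DRF view.
# The view class name and the response payload below are illustrative only.
from rest_framework.response import Response
from rest_framework.views import APIView
class ExampleUploadView(APIView):
    parser_classes = [MultiPartParser]  # the subclass defined above
    def post(self, request, format=None):
        # With the workaround applied, request.data holds the form fields of a
        # multipart POST and request.FILES the uploaded files.
        return Response({'fields': sorted(request.data.keys())})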
| {
"content_hash": "663a69d607aa506b782d713879074c19",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 79,
"avg_line_length": 41.06896551724138,
"alnum_prop": 0.6817800167926112,
"repo_name": "harikishen/addons-server",
"id": "b0797a6254f4cbe9bb804b9fe1f48f068de06305",
"size": "1191",
"binary": false,
"copies": "15",
"ref": "refs/heads/master",
"path": "src/olympia/api/parsers.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "822508"
},
{
"name": "HTML",
"bytes": "698554"
},
{
"name": "JavaScript",
"bytes": "1087360"
},
{
"name": "Makefile",
"bytes": "811"
},
{
"name": "PLSQL",
"bytes": "990"
},
{
"name": "PLpgSQL",
"bytes": "2381"
},
{
"name": "Python",
"bytes": "4560536"
},
{
"name": "SQLPL",
"bytes": "559"
},
{
"name": "Shell",
"bytes": "7564"
},
{
"name": "Smarty",
"bytes": "1859"
}
],
"symlink_target": ""
} |
from Adafruit_Nokia_LCD.PCD8544 import *
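# Hedged usage note (not part of __init__.py): the wildcard re-export lets
# callers write `import Adafruit_Nokia_LCD as LCD` and construct the driver as
# `LCD.PCD8544(...)` without importing the PCD8544 submodule directly; the
# constructor arguments (display pins, SPI/GPIO objects) are hardware specific
# and omitted here.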
| {
"content_hash": "476c0970e349da6593e4e58ee5a7e681",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 40,
"avg_line_length": 41,
"alnum_prop": 0.8048780487804879,
"repo_name": "adafruit/Adafruit_Nokia_LCD",
"id": "b70432833f941faec451433f52cfa31b827f6075",
"size": "41",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Adafruit_Nokia_LCD/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "21988"
}
],
"symlink_target": ""
} |
import urllib2
import json
import tarfile
import re
import os
from datetime import datetime
from flask import Flask
from flask import render_template
from flask import request
app = Flask(__name__)
app.debug = True
registry_url = "localhost"
if "REGURL" in os.environ:
registry_url = os.environ['REGURL']
print "Registry reside on http://" + str(registry_url) + "/v1"
FILE_TYPES = {
'f':'file',
'l':'hardlink',
's':'symlink',
'c':'char',
'b':'block',
'd':'directory',
'i':'fifo',
't':'cont',
'L':'longname',
'K':'longlink',
'S':'sparse',
}
def _query(path):
response = urllib2.urlopen("http://" + str(registry_url) + "/v1" + str(path))
result = json.loads(response.read())
return result
def _build_file_dict(files):
res = []
for file in files:
res.append({
'name': file[0],
'type': FILE_TYPES.get(file[1], 'unknown'),
'deleted': file[2],
'size': file[3],
'mtime': file[4],
'mode': file[5],
'owner': file[6],
'group': file[7]
})
return res
def _build_image_tree(images):
all_images = []
for image in images:
d = _query("/images/%s/json" % image['id'])
all_images.append(d)
exists = set(map(lambda x : x['id'], all_images))
top = [x for x in all_images if 'parent' not in x.keys()][0]
children = {}
for image in all_images:
if 'parent' not in image.keys():
continue
parent = image['parent']
if not parent:
continue
if parent not in exists:
continue
if parent in children:
children[parent].append(image)
else:
children[parent] = [ image ]
return _sort_image_list(children, top)
def _sort_image_list(children, top):
res = [ top ]
if top['id'] in children:
for child in children[top['id']]:
res += _sort_image_list(children, child)
return res
@app.route("/")
@app.route("/home", methods=['GET','POST'])
def index():
query = ''
images = []
if request.method == 'POST':
query = request.form['query']
result = _query('/search?q=' + query)
for repo in result['results']:
repo_images = _query("/repositories/%s/images" % repo['name'])
images.append({'container': repo['name'], 'images': repo_images})
return render_template('index.html', results=result['results'], images=images)
@app.route("/images/<image_id>")
@app.route("/images/<image_id>/<repo_name>")
def images(image_id, repo_name=None):
result = _query("/images/%s/json" % image_id)
files_raw = _query("/images/%s/files" % image_id)
files = _build_file_dict(files_raw)
return render_template('image.html', results=result, files=files, repo=repo_name)
@app.route("/repo/<repo_name>/<image_id>")
def repo(repo_name, image_id):
result = _query("/repositories/%s/%s/json" % (repo_name,image_id))
images = _query("/repositories/%s/%s/images" % (repo_name,image_id))
tags = _query("/repositories/%s/%s/tags" % (repo_name,image_id))
properties = _query("/repositories/%s/%s/properties" % (repo_name,image_id))
sorted_images = _build_image_tree(images)
return render_template('repo.html', name=repo_name+"/"+image_id,
results=result, images=sorted_images, tags=tags, properties=properties)
@app.template_filter()
def datetimefilter(value, format='%Y/%m/%d %H:%M'):
value = re.sub(r'[0-9]{2}Z$','', value)
    d = datetime(*map(int, re.split(r'[^\d]', value)[:-1]))
return d.strftime(format)
@app.template_filter()
def joinifarray(value):
if type(value) == list:
res = ' '.join(value)
else:
res = value
return res
app.jinja_env.filters['datetimefilter'] = datetimefilter
app.jinja_env.filters['joinifarray'] = joinifarray
if __name__ == "__main__":
app.run(host='0.0.0.0',port=8080)
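# Hedged sanity check (not part of run.py): _sort_image_list orders image
# layers parent-first; a toy children mapping illustrates the traversal.
def _example_sort_image_list():
    top = {'id': 'a'}
    children = {'a': [{'id': 'b', 'parent': 'a'}], 'b': [{'id': 'c', 'parent': 'b'}]}
    assert [img['id'] for img in _sort_image_list(children, top)] == ['a', 'b', 'c']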
| {
"content_hash": "beec686ac9b1cad21d080cffdb7f98b2",
"timestamp": "",
"source": "github",
"line_count": 133,
"max_line_length": 85,
"avg_line_length": 29.69924812030075,
"alnum_prop": 0.5855696202531645,
"repo_name": "nextrevision/docker-registry-ui",
"id": "4fdac9b83ec0ad4d98f7d7f0cc4ceb16d3720f81",
"size": "3950",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "run.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "34"
},
{
"name": "Python",
"bytes": "3950"
},
{
"name": "Shell",
"bytes": "228"
}
],
"symlink_target": ""
} |
"""The tests for the USGS Earthquake Hazards Program Feed platform."""
import datetime
from unittest.mock import MagicMock, call, patch
from homeassistant.components import geo_location
from homeassistant.components.geo_location import ATTR_SOURCE
from homeassistant.components.usgs_earthquakes_feed.geo_location import (
ATTR_ALERT,
ATTR_EXTERNAL_ID,
ATTR_MAGNITUDE,
ATTR_PLACE,
ATTR_STATUS,
ATTR_TIME,
ATTR_TYPE,
ATTR_UPDATED,
CONF_FEED_TYPE,
SCAN_INTERVAL,
)
from homeassistant.const import (
ATTR_ATTRIBUTION,
ATTR_FRIENDLY_NAME,
ATTR_ICON,
ATTR_LATITUDE,
ATTR_LONGITUDE,
ATTR_UNIT_OF_MEASUREMENT,
CONF_LATITUDE,
CONF_LONGITUDE,
CONF_RADIUS,
EVENT_HOMEASSISTANT_START,
)
from homeassistant.setup import async_setup_component
import homeassistant.util.dt as dt_util
from tests.common import assert_setup_component, async_fire_time_changed
CONFIG = {
geo_location.DOMAIN: [
{
"platform": "usgs_earthquakes_feed",
CONF_FEED_TYPE: "past_hour_m25_earthquakes",
CONF_RADIUS: 200,
}
]
}
CONFIG_WITH_CUSTOM_LOCATION = {
geo_location.DOMAIN: [
{
"platform": "usgs_earthquakes_feed",
CONF_FEED_TYPE: "past_hour_m25_earthquakes",
CONF_RADIUS: 200,
CONF_LATITUDE: 15.1,
CONF_LONGITUDE: 25.2,
}
]
}
def _generate_mock_feed_entry(
external_id,
title,
distance_to_home,
coordinates,
place=None,
attribution=None,
time=None,
updated=None,
magnitude=None,
status=None,
entry_type=None,
alert=None,
):
"""Construct a mock feed entry for testing purposes."""
feed_entry = MagicMock()
feed_entry.external_id = external_id
feed_entry.title = title
feed_entry.distance_to_home = distance_to_home
feed_entry.coordinates = coordinates
feed_entry.place = place
feed_entry.attribution = attribution
feed_entry.time = time
feed_entry.updated = updated
feed_entry.magnitude = magnitude
feed_entry.status = status
feed_entry.type = entry_type
feed_entry.alert = alert
return feed_entry
async def test_setup(hass):
"""Test the general setup of the platform."""
# Set up some mock feed entries for this test.
mock_entry_1 = _generate_mock_feed_entry(
"1234",
"Title 1",
15.5,
(-31.0, 150.0),
place="Location 1",
attribution="Attribution 1",
time=datetime.datetime(2018, 9, 22, 8, 0, tzinfo=datetime.timezone.utc),
updated=datetime.datetime(2018, 9, 22, 9, 0, tzinfo=datetime.timezone.utc),
magnitude=5.7,
status="Status 1",
entry_type="Type 1",
alert="Alert 1",
)
mock_entry_2 = _generate_mock_feed_entry("2345", "Title 2", 20.5, (-31.1, 150.1))
mock_entry_3 = _generate_mock_feed_entry("3456", "Title 3", 25.5, (-31.2, 150.2))
mock_entry_4 = _generate_mock_feed_entry("4567", "Title 4", 12.5, (-31.3, 150.3))
# Patching 'utcnow' to gain more control over the timed update.
utcnow = dt_util.utcnow()
with patch("homeassistant.util.dt.utcnow", return_value=utcnow), patch(
"geojson_client.usgs_earthquake_hazards_program_feed."
"UsgsEarthquakeHazardsProgramFeed"
) as mock_feed:
mock_feed.return_value.update.return_value = (
"OK",
[mock_entry_1, mock_entry_2, mock_entry_3],
)
with assert_setup_component(1, geo_location.DOMAIN):
assert await async_setup_component(hass, geo_location.DOMAIN, CONFIG)
# Artificially trigger update.
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
# Collect events.
await hass.async_block_till_done()
all_states = hass.states.async_all()
assert len(all_states) == 3
state = hass.states.get("geo_location.title_1")
assert state is not None
assert state.name == "Title 1"
assert state.attributes == {
ATTR_EXTERNAL_ID: "1234",
ATTR_LATITUDE: -31.0,
ATTR_LONGITUDE: 150.0,
ATTR_FRIENDLY_NAME: "Title 1",
ATTR_PLACE: "Location 1",
ATTR_ATTRIBUTION: "Attribution 1",
ATTR_TIME: datetime.datetime(
2018, 9, 22, 8, 0, tzinfo=datetime.timezone.utc
),
ATTR_UPDATED: datetime.datetime(
2018, 9, 22, 9, 0, tzinfo=datetime.timezone.utc
),
ATTR_STATUS: "Status 1",
ATTR_TYPE: "Type 1",
ATTR_ALERT: "Alert 1",
ATTR_MAGNITUDE: 5.7,
ATTR_UNIT_OF_MEASUREMENT: "km",
ATTR_SOURCE: "usgs_earthquakes_feed",
ATTR_ICON: "mdi:pulse",
}
assert round(abs(float(state.state) - 15.5), 7) == 0
state = hass.states.get("geo_location.title_2")
assert state is not None
assert state.name == "Title 2"
assert state.attributes == {
ATTR_EXTERNAL_ID: "2345",
ATTR_LATITUDE: -31.1,
ATTR_LONGITUDE: 150.1,
ATTR_FRIENDLY_NAME: "Title 2",
ATTR_UNIT_OF_MEASUREMENT: "km",
ATTR_SOURCE: "usgs_earthquakes_feed",
ATTR_ICON: "mdi:pulse",
}
assert round(abs(float(state.state) - 20.5), 7) == 0
state = hass.states.get("geo_location.title_3")
assert state is not None
assert state.name == "Title 3"
assert state.attributes == {
ATTR_EXTERNAL_ID: "3456",
ATTR_LATITUDE: -31.2,
ATTR_LONGITUDE: 150.2,
ATTR_FRIENDLY_NAME: "Title 3",
ATTR_UNIT_OF_MEASUREMENT: "km",
ATTR_SOURCE: "usgs_earthquakes_feed",
ATTR_ICON: "mdi:pulse",
}
assert round(abs(float(state.state) - 25.5), 7) == 0
# Simulate an update - one existing, one new entry,
# one outdated entry
mock_feed.return_value.update.return_value = (
"OK",
[mock_entry_1, mock_entry_4, mock_entry_3],
)
async_fire_time_changed(hass, utcnow + SCAN_INTERVAL)
await hass.async_block_till_done()
all_states = hass.states.async_all()
assert len(all_states) == 3
# Simulate an update - empty data, but successful update,
# so no changes to entities.
mock_feed.return_value.update.return_value = "OK_NO_DATA", None
async_fire_time_changed(hass, utcnow + 2 * SCAN_INTERVAL)
await hass.async_block_till_done()
all_states = hass.states.async_all()
assert len(all_states) == 3
        # Simulate an update - error status, removes all entities
mock_feed.return_value.update.return_value = "ERROR", None
async_fire_time_changed(hass, utcnow + 3 * SCAN_INTERVAL)
await hass.async_block_till_done()
all_states = hass.states.async_all()
assert len(all_states) == 0
async def test_setup_with_custom_location(hass):
"""Test the setup with a custom location."""
# Set up some mock feed entries for this test.
mock_entry_1 = _generate_mock_feed_entry("1234", "Title 1", 20.5, (-31.1, 150.1))
with patch(
"geojson_client.usgs_earthquake_hazards_program_feed."
"UsgsEarthquakeHazardsProgramFeed"
) as mock_feed:
mock_feed.return_value.update.return_value = "OK", [mock_entry_1]
with assert_setup_component(1, geo_location.DOMAIN):
assert await async_setup_component(
hass, geo_location.DOMAIN, CONFIG_WITH_CUSTOM_LOCATION
)
# Artificially trigger update.
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
# Collect events.
await hass.async_block_till_done()
all_states = hass.states.async_all()
assert len(all_states) == 1
assert mock_feed.call_args == call(
(15.1, 25.2),
"past_hour_m25_earthquakes",
filter_minimum_magnitude=0.0,
filter_radius=200.0,
)
| {
"content_hash": "fa5dad3a02e191444d485b6be6d1f723",
"timestamp": "",
"source": "github",
"line_count": 244,
"max_line_length": 85,
"avg_line_length": 34.67213114754098,
"alnum_prop": 0.5715130023640662,
"repo_name": "Teagan42/home-assistant",
"id": "646878c97bdee125aeea16092142f87639eedea0",
"size": "8460",
"binary": false,
"copies": "3",
"ref": "refs/heads/dev",
"path": "tests/components/usgs_earthquakes_feed/test_geo_location.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "19774313"
},
{
"name": "Shell",
"bytes": "6846"
}
],
"symlink_target": ""
} |
"""Models."""
# Suppress pylint invalid import order
# pylint: disable-msg=C6203
import logging
import settings
from django.utils import translation
from google.appengine.api import users
from google.appengine.ext import db
import pytz
from ragendja import dbutils
from ragendja.auth import google_models
# Invalid warning re. unused import on rules
# pylint: disable-msg=W0611
from core import errors
from core import processors
from core import request_cache
from core import rules
from core import service_factory
from core import timezone_helper
from core import utils
# Suppress pylint const name warnings.
# pylint: disable-msg=C6409
_Active = utils.RegistrationActive
_Attend = utils.RegistrationAttend
_Confirm = utils.RegistrationConfirm
_Status = utils.RegistrationStatus
_AccessPoint = utils.AccessPointType
_ = translation.ugettext
class GlearnUser(google_models.User):
"""A user for the application.
Attributes:
timezone: The user preferred timezone.
course_creator: The user has course creator privileges.
location: The country-city code of the user, useful to find nearby courses.
"""
timezone = dbutils.FakeModelProperty(utils.Timezone,
default=utils.Timezone('US/Pacific'))
course_creator = db.IntegerProperty(default=1)
location = db.StringProperty(default='US-MTV')
@classmethod
def GetGlearnUserFromCache(cls, email):
"""Retrieves a user by email.
    This method uses a cache for the current request.
Args:
email: string email of user.
Returns:
models.GlearnUser.
"""
glearn_user = request_cache.GetObjectFromCache(email)
if not glearn_user:
glearn_user = cls.FromAppengineUser(users.User(email))
if glearn_user:
request_cache.CacheObject(glearn_user.user.email(), glearn_user)
return glearn_user
@classmethod
def get_djangouser_for_user(cls, user):
"""Overrides method from ragendja.auth.google_models.GoogleUserTraits.
Notable changes:
- cache user object in request
Args:
user: Appengine user
Returns:
A models.GlearnUser.
"""
django_user = cls.GetGlearnUserFromCache(user.email())
if not django_user:
django_user = cls.create_djangouser_for_user(user)
django_user.is_active = True
user_put = False
if django_user.user != user:
django_user.user = user
user_put = True
user_id = user.user_id()
if django_user.user_id != user_id:
django_user.user_id = user_id
user_put = True
if getattr(settings, 'AUTH_ADMIN_USER_AS_SUPERUSER', True):
is_admin = users.is_current_user_admin()
if (django_user.is_staff != is_admin or
django_user.is_superuser != is_admin):
django_user.is_superuser = django_user.is_staff = is_admin
user_put = True
if not django_user.is_saved() or user_put:
django_user.put()
return django_user
@classmethod
def create_djangouser_for_user(cls, user):
"""Overriding method used to instantiate user who logs in for first time.
Args:
user: users.User for whom a GlearnUser is being created for.
Returns:
A GlearnUser or derived class instance.
"""
logging.info('create_djangouser_for_user for first time user %s', user)
return cls.CreateUsers([user])[0]
@classmethod
def CreateUsers(cls, appengine_users):
"""Creates GlearnUsers from appengine users.
Args:
appengine_users: list of users.User.
Returns:
A list of GlearnUsers.
"""
logging.info('Creating users %s', appengine_users)
glearn_users = [cls(user=user, user_id=user.user_id())
for user in appengine_users]
GlearnUser.UpdateGlearnUserProperties(glearn_users)
return glearn_users
@classmethod
def UpdateGlearnUserProperties(cls, glearn_users):
"""Get the user properties relevant to the given GlearnUsers list.
Update datastore glearnUser entities with data from user info service.
Args:
glearn_users: List of models.GlearnUser objects whose properties will be
updated.
"""
email_list = [glearn_user.user.email() for glearn_user in glearn_users]
try:
user_service = service_factory.GetUserInfoService()
person_map = user_service.GetUserInfoMulti(email_list)
# Suppress pylint catch Exception
# pylint: disable-msg=W0703
except errors.ServiceCriticalError, exception:
logging.error('[%s] %s', type(exception), exception)
person_map = {}
for glearn_user in glearn_users:
user_email = glearn_user.user.email()
person_info = person_map.get(user_email)
if person_info:
glearn_user.location = person_info.country_city
timezone = timezone_helper.GetTimezoneForLocation(glearn_user.location)
glearn_user.timezone = utils.Timezone(timezone)
logging.info('Timezone, location is %s and %s for %s',
timezone, glearn_user.location, user_email)
else:
logging.warning('Could not retrieve timezone for %s',
glearn_user.user.email())
def CanCreateProgram(self):
"""Returns True if a user can create a program."""
return self.course_creator or self.is_staff # Superuser or creator.
def CanEditProgram(self, program):
"""Returns True if a user can edit a given program."""
if self.is_staff: return True # Superuser can edit everything.
user = self.appengine_user
return (user == program.owner or user in program.contact_list or
user in program.facilitator_list)
def CanCreateActivity(self, program):
"""Returns True if a user can create new activity under a program."""
return program.public_activity_creation or self.CanEditProgram(program)
def CanEditActivity(self, activity):
"""Returns True if a user can edit a given activity."""
if self.is_staff: return True # Superuser can edit everything.
user = self.appengine_user
if self.CanEditProgram(activity.parent()):
return True
elif user == activity.owner:
return True
else:
# Instructors can edit activity
for schedule in activity.ActivitySchedulesQuery():
if user in schedule.primary_instructors:
return True
return False
def CanEditManagerApproval(self, approval):
"""Returns True if a user can edit a given ManagerApproval."""
if self.is_staff: return True # Superuser can edit everything.
return self.appengine_user == approval.manager
def GetLocalTime(self, dt):
"""Converts a naive datetime to the local user time.
This method should be called on any time coming from datastore before
displaying it to the end user.
Args:
dt: A datetime.datetime instance with no timezone information.
Returns:
The converted datetime using the appropriate timezone.
"""
return utils.GetLocalTime(dt, self.GetTimeZone())
def GetUtcTime(self, dt):
"""Converts from a user local time to UTC using timezone translation.
This method should be called on any user-input time to translate it
to UTC before using it internally.
For example:
User has EST timezone and selects 3pm when creating a schedule.
Calling this method will return 8 pm (since 3 pm EST = 8 pm UTC).
Args:
dt: A datetime.datetime instance with no timezone information.
Returns:
The converted datetime in UTC.
"""
assert dt.tzinfo is None
return utils.GetUtcTime(dt, self.GetTimeZone())
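  # Hedged worked example (illustrative, not part of the original class): for a
  # user whose timezone is 'US/Eastern', GetUtcTime(datetime(2010, 1, 15, 15, 0))
  # should yield 2010-01-15 20:00 (3 pm EST == 8 pm UTC), and GetLocalTime
  # applied to that result should give back the original 3 pm.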
def GetTimeZone(self):
"""Returns the pytz.timezone of a user."""
return pytz.timezone(self.timezone.name)
def GetCityCode(self):
try:
return self.location.split('-')[1]
except IndexError:
return 'MTV'
@property
def appengine_user(self):
"""Property to access the user property in GlearnUser."""
return self.user
@classmethod
def GetOrCreateUser(cls, email, create=False):
"""Retrieves and optionally creates a user from/in the datastore.
Args:
email: The user email address.
create: If True, tries to create and store a new GlearnUser entity with
the given email when not able to retrieve from datastore.
Returns:
A GlearnUser entity or None if not found/invalid.
"""
return cls.GetOrCreateUsers([email], create)[email]
@classmethod
def GetOrCreateUsers(cls, emails, create=False):
"""Retrieves and optionally creates users from/in the datastore.
Args:
emails: Str list of user email addresses.
create: If True, tries to create and store a new GlearnUser entity with
the given emails when not able to retrieve from datastore.
Returns:
A dictionary of {email: user} where user is a GlearnUser entity or None
if not found/invalid.
"""
logging.info('Entering GetOrCreateUsers for %s', emails)
glearn_users = {}
to_be_created = []
emails_to_lookup = []
# Build list of appengine users to lookup in datastore
for email in set(emails):
if utils.IsEmailValid(email):
emails_to_lookup.append(email)
else:
glearn_users[email] = None
# Break down queries in batches of 30 (limit 30 subqueries)
users_bucket = utils.ArraySplit(emails_to_lookup, 30)
for bucket in users_bucket:
appengine_users = [users.User(email) for email in bucket]
glearn_users_query = cls.FromAppengineUsers(appengine_users)
# Find missing users and create them
emails_found = []
for glearn_user in glearn_users_query.fetch(30):
emails_found.append(glearn_user.email)
glearn_users[glearn_user.email] = glearn_user
# For users not found, we need to create them
missing_emails = set(bucket) - set(emails_found)
to_be_created.extend([users.User(email) for email in missing_emails])
# We create the users which need to be created
if create and to_be_created:
created_users = cls.CreateUsers(to_be_created)
db.put(created_users)
for user, glearn_user in zip(to_be_created, created_users):
glearn_users[user.email()] = glearn_user
logging.info('Created %s new users', created_users)
return glearn_users
@classmethod
def FromAppengineUser(cls, appengine_user):
"""Query the appropriate GlearnUser given a user.User."""
query = db.Query(cls)
utils.AddFilter(query, 'user =', appengine_user)
return query.get()
@classmethod
def FromAppengineUsers(cls, appengine_users):
"""Query the appropriate GlearnUser given a user.User."""
query = db.Query(cls)
utils.AddFilter(query, 'user in', appengine_users)
return query
def get_and_delete_messages(self):
"""Overrides django method. We do not use the messages framework."""
return []
class _BaseModel(db.Model):
"""Base class which adds utilities."""
# Suppress pylint invalid inheritance from object
# pylint: disable-msg=C6601
class Meta:
abstract = True
def GetKey(self, prop_name):
"""Return the reference property key without a datastore fetch."""
return getattr(self.__class__, prop_name).get_value_for_datastore(self)
class _AuditedModel(_BaseModel):
"""Base class which adds audit properties to a model.
Attributes:
owner: The user who owns the entity.
creation_time: Date when entity was created.
last_modified: The date and time of last modification for the entity.
last_modified_by: The user who last edited/modified the program.
"""
# Suppress pylint invalid inheritance from object
# pylint: disable-msg=C6601
class Meta:
abstract = True
owner = db.UserProperty(required=True)
creation_time = db.DateTimeProperty(auto_now_add=True)
last_modified = db.DateTimeProperty(auto_now=True)
last_modified_by = db.UserProperty(required=True)
class _DeletedHierarchyModel(_AuditedModel):
"""Base class for objects that need to support delayed deletion.
  Entities cannot be deleted right away when deleting them would leave the
  system inconsistent. This base class adds another state for an entity that
differentiates a deleted state from a 'to be deleted' state.
Attributes:
deleted: An integer to indicate if the entity is deleted.
to_be_deleted: An integer to indicate if the entity is going to be deleted.
"""
# Suppress pylint invalid inheritance from object
# pylint: disable-msg=C6601
class Meta:
abstract = True
deleted = db.IntegerProperty(default=0)
to_be_deleted = db.IntegerProperty(default=0)
def _GetChildrenQuery(self):
"""Provides an iterator of child _DeletedHierarchyModel instances.
Should provide the list of entities that the current entity considers as
direct children in the hierarchy. These children entities should belong
to the same entity group since they are operated on in transactions.
For Example: Programs contain activities which contain activity schedules.
The Program class can thus return the list of activities that it considers
active as its children. And the activities can in turn provide the schedules
as their children. This provides a way to traverse the full hierarchy and
change attributes.
Returns:
An iterator of _DeletedHiearchyModel child entities.
Raises:
AbstractMethod: for default implementation
"""
raise errors.AbstractMethod
def DeleteUnsafeAndWrite(self, request_user):
"""Mark the hierarchy as deleted and update in datastore.
Args:
request_user: users.User requesting the modification.
Returns:
The list of entities that are marked as deleted.
"""
write_list = self.SetHierarchyAttribute('deleted', 1, request_user)
db.put(write_list)
return write_list
def MarkToBeDeletedUnsafeAndWrite(self, request_user):
"""Mark the hierarchy as to be deleted and update in datastore.
Args:
request_user: users.User requesting the modification.
Returns:
The list of entities that are marked as to be deleted.
"""
write_list = self.SetHierarchyAttribute('to_be_deleted', 1, request_user)
db.put(write_list)
return write_list
def SetHierarchyAttribute(self, attribute_name, attribute_value,
request_user):
"""Set the attribute value in the hierarchy.
Args:
attribute_name: Name of the model attribute to change.
attribute_value: Value to set the model attribute to.
request_user: users.User requesting the modification.
Returns:
The list of _DeletedHierarchyModel entities that were updated.
"""
setattr(self, attribute_name, attribute_value)
self.last_modified_by = request_user
entity_write_list = [self]
query = self._GetChildrenQuery()
entity_list = [entity for entity in query]
for entity in entity_list:
assert isinstance(self, _DeletedHierarchyModel)
entity_write_list.extend(entity.SetHierarchyAttribute(
attribute_name, attribute_value, request_user))
return entity_write_list
def StoreDeleteTaskConfig(self, request_user):
"""Stores a config entry indicating that the program should be deleted.
Entities like Program, Activity that have user registrations associated with
them are not deleted right away and are deleted in the background. Storing
this config entry is an indication to the background process on what entity
needs to be deleted.
Args:
request_user: users.User requesting the modification.
Returns:
The created Configuration entity.
"""
config_key = 'configuration_delete_entity_task'
config_value = '%s,%s' % (self.key(), request_user.email())
config = Configuration(parent=self, config_key=config_key,
config_value=config_value)
config.put()
return config
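# Hedged sketch (not part of the original module): the expected delayed-deletion
# flow, assuming a Program whose Activity and ActivitySchedule children live in
# the same entity group.
#   marked = program.MarkToBeDeletedUnsafeAndWrite(request_user)
#   # every entity in the hierarchy now carries to_be_deleted == 1
#   program.StoreDeleteTaskConfig(request_user)
#   # ... later, a background task reads the Configuration entry and calls:
#   program.DeleteUnsafeAndWrite(request_user)  # sets deleted = 1 hierarchy-wide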
class _ModelRule(object):
"""Base class with helper methods for models which can have rules."""
def GetRule(self, rule_name):
"""Gets the given rule from activity rules.
Args:
rule_name: Name of rule.
Returns:
The rules.RuleConfig or None if not found.
"""
for rule in self.rules:
if rule_name == rule.rule_name:
return rule
return None
class Program(_DeletedHierarchyModel, _ModelRule):
"""A grouping of learning entities.
At a high level a program is a collection of activities and the rules that
determine the completion a particular knowledge it represents.
A program can be composed of either child programs or activities, not both.
Example: Program 'Java 101' may include child programs 'OO Programming', and
'Java Language Specification', and a rule that both of them should be
completed in that order. The program 'OO Programming' can be composed of
activities 'March OO Programming' and 'January OO Prog Video' and a common
completion rule that just one of them is required to complete the program
'OO Programming'.
Attributes:
name: A string that identifies the program uniquely.
description: Optional text to describe the program.
contact_list: List of users who have program edit permissions and are shown
in the contact information of a program detail page.
facilitator_list: List of users who help set up the program.
rules: A list of rules that are validated when registering, unregistering
for an activity under the program or certifying for the program based on
activities that are completed. Registration rules for example can limit
the number of program activities that can be taken, certification rules
can specify the number of programs/activities and their order required
for completion of the program.
program_tags: String list of tags associated with this program.
public_activity_creation: Boolean indicating if any user of the system can
create a new activity under a program. Used for programs that want the
flexibility to allow anyone to schedule and teach a session.
    visible: Integer indicating the program's visibility setting. If this flag
      is set to 0 then the program and the activities underneath it are
      invisible.
"""
name = db.StringProperty(required=True)
description = db.TextProperty()
contact_list = db.ListProperty(users.User)
facilitator_list = db.ListProperty(users.User)
rules = dbutils.FakeModelListProperty(rules.RuleConfig, default=[])
program_tags = db.StringListProperty(default=[])
public_activity_creation = db.BooleanProperty(default=False)
visible = db.IntegerProperty(default=1)
def ActivitiesQuery(self, keys_only=False):
"""Build query to get activities under the program."""
return Program.ActivitiesQueryFromKey(self.key(), keys_only=keys_only)
@staticmethod
def ActivitiesQueryFromKey(program_key, keys_only=False):
"""Build query to get activities under the program.
Args:
program_key: Program db.Key to query activities under.
keys_only: Boolean if only keys should be returned by the query.
Returns:
Query object that provides the requested Activities or db.Keys.
"""
query = db.Query(Activity, keys_only=keys_only)
query.ancestor(program_key)
utils.AddFilter(query, 'deleted =', 0)
return query
def _GetChildrenQuery(self):
"""Overrides parent method."""
return self.ActivitiesQuery()
def __unicode__(self):
return unicode(self.name)
def ActivitySchedulesQuery(self):
"""Build query to get activity schedules under the program."""
query = db.Query(ActivitySchedule)
query.ancestor(self)
utils.AddFilter(query, 'deleted =', 0)
return query
def RegistrationsQuery(self):
"""Build query to get registrations for activities of a program."""
query = db.Query(UserRegistration)
utils.AddFilter(query, 'program =', self)
return query
@staticmethod
def GetSearchableProgramsQuery():
"""Query programs that can be searched."""
program_query = Program.all()
utils.AddFilter(program_query, 'visible =', 1)
utils.AddFilter(program_query, 'deleted =', 0)
return program_query
class Configuration(db.Model):
"""Configuration data store key and text/binary data.
Can be used for configuration of any kind. For example rules that are
configured at a global scope applicable for any program.
Attributes:
config_key: A string identifier for identifying the configuration.
config_value: Optional text configuration value.
config_binary_value: Optional binary configuration value.
last_modified: The date and time of last modification for the entity.
"""
config_key = db.StringProperty()
config_value = db.TextProperty()
config_binary_value = db.BlobProperty()
last_modified = db.DateTimeProperty(auto_now=True)
class Activity(_DeletedHierarchyModel, _ModelRule):
"""A program learning experience event that can be registered to as a unit.
A learning experience that one can register to and which imparts the
knowledge or information represented by a program. For example an instructor
led class that teaches Python is an activity. The different classes that
all teach the same thing fall under the same parent program.
Attributes:
name: A string that identifies the activity under a program uniquely.
start_time: The lower start time of all ActivitySchedule associated with
this activity.
end_time: The greater end time of all ActivitySchedule associated with this
activity.
rules: A list of rules that are validated when registering/unregistering
for the activity. Example - 'No more than 20 people in an activity'.
access_point_tags: Intersection of all schedule access point tags cached.
reserve_rooms: A boolean indicating if we should attempt to reserve
conference rooms for the activity schedules under this activity.
    visible: Integer indicating the activity's visibility preference. The
      activity is visible iff both activity.visible and program.visible are
      True.
"""
# Suppress pylint invalid inheritance from object
# pylint: disable-msg=C6601
class Meta:
verbose_name = _('Activity')
verbose_name_plural = _('Activities')
name = db.StringProperty(required=True)
start_time = db.DateTimeProperty()
end_time = db.DateTimeProperty()
rules = dbutils.FakeModelListProperty(rules.RuleConfig, default=[])
access_point_tags = db.StringListProperty(default=[])
reserve_rooms = db.BooleanProperty(default=True)
visible = db.IntegerProperty(default=1)
def GetAccessPoints(self):
aps = []
for activity in self.ActivitySchedulesQuery():
aps.extend(activity.access_points)
return aps
def ActivitySchedulesQuery(self):
"""Build query to get schedules under an activity."""
return Activity.SchedulesQueryFromActivityKey(self.key())
def _GetChildrenQuery(self):
"""Overrides parent method."""
return self.ActivitySchedulesQuery()
@staticmethod
def GetLock(activity_key):
"""Gets a lock for this activity.
Args:
activity_key: models.Activity db.Key or string key representing the
activity.
Returns:
A lock utils.Lock.
"""
return utils.Lock(str(activity_key))
@classmethod
def SchedulesQueryFromActivityKey(cls, activity_key):
"""Build query to get the schedules under an activity given activity_key."""
query = db.Query(ActivitySchedule)
if isinstance(activity_key, basestring):
activity_key = db.Key(activity_key)
query.ancestor(activity_key)
utils.AddFilter(query, 'deleted =', 0)
return query
def RegistrationsQuery(self, keys_only=False):
"""Build query to get registrations under an activity."""
query = db.Query(UserRegistration, keys_only=keys_only)
return query.filter('activity =', self)
@staticmethod
def OrphanedActivities():
program_set = set(db.Query(Program, keys_only=True))
activities = db.Query(Activity)
orphan_activities = []
for activity in activities:
if activity.parent_key() not in program_set:
orphan_activities.append(activity)
return orphan_activities
def __unicode__(self):
return unicode(self.name)
def MaxCapacity(self):
"""Maximum number of allowed people based on rule config properties.
Returns:
The maximum number of people that will be allowed by an instantiated rule.
Returns None when not able to determine such a capacity.
"""
max_by_activity_rule = self.GetRule(rules.RuleNames.MAX_PEOPLE_ACTIVITY)
if max_by_activity_rule:
return max_by_activity_rule.parameters.get('max_people', None)
return None
class AccessPoint(_BaseModel):
"""Represents learning access entities like Rooms, VC, SCORM URLs etc,.
Attributes:
type: A category string indicating the type the access entity like 'room',
'web url', 'vc', 'telephone', 'scorm url'. Could be physical or virtual.
uri: A string containing the access point resource identifier.
location: A string representing the geographical location of the room.
This is usually a city (e.g. Mountain View).
tags: List of strings that help categorize access points. The
first tag is a special display tag. The display tag can represent the
access point when full uri detail is not needed. For example a display
tag of 'NYC' may be sufficient when we want to know where the activity
is being held.
calendar_email: String email of resource in google calendar. Used for
inviting the access point to events and blocking the time slot.
rules: List of rules to be validated against this access point for
registration. Example-'max 50 people', 'only VPs' etc.
deleted: Integer 1 if deleted, 0 if an active access point.
timezone: Timezone in which this room is located.
Example:
type = room; uri = nyc/9th avenue/4th floor/Lincoln Center
type = vc ; uri = 3-565-2639
"""
type = db.CategoryProperty(required=True, choices=_AccessPoint.Choices())
uri = db.StringProperty(required=True)
location = db.StringProperty()
tags = db.StringListProperty()
calendar_email = db.StringProperty(indexed=False)
rules = dbutils.FakeModelListProperty(rules.RuleConfig)
last_modified = db.DateTimeProperty(auto_now=True)
deleted = db.IntegerProperty(default=0)
timezone = dbutils.FakeModelProperty(utils.Timezone,
default=utils.Timezone('UTC'))
def GetTimeZone(self):
"""Returns the pytz.timezone for that access point."""
return pytz.timezone(self.timezone.name)
@classmethod
def GetAccessPointFromKeys(cls, keys):
"""Returns a list of access points given a list of keys.
Args:
keys: A list of access point keys.
Returns:
A list of models.AccessPoint or None elements.
"""
return db.get(keys)
@classmethod
def GetAccessPointFromUri(cls, uri):
"""Returns the access point which matches given URI.
Args:
uri: URI of the access point to retrieve.
Returns:
Relevant access point or None.
"""
query = db.Query(cls).filter('uri = ', uri)
    # TODO(user): we return the first match only; this needs a better strategy.
    # How should duplicates be handled, e.g. similar numbers stored as
    # 321-1234 and 3211243?
return query.get()
def Delete(self):
"""Deletes the access point."""
self.deleted = 1
self.put()
def __unicode__(self):
return unicode(self.uri)
class ActivitySchedule(_DeletedHierarchyModel):
"""Time slot, instructors and access points of an activity.
An activity can have multiple activity schedules. Activity schedules are
implicitly ordered by start_time and form the continuation of the
activity, the user is expected to attend ALL schedules of an activity.
Each schedule must have an Activity as a parent.
Attributes:
start_time: Date and time when the schedule starts.
end_time: Date and time when the schedule ends.
access_points: List of access points used to attend/access the activity.
    access_points_secondary: List of secondary access points; instructors do
      not attend at secondary access points. They can be used to access the
      instruction digitally.
access_point_tags: List of string tags that are a union of the access
point tags under the schedule. Copied for query performance. Access
point type is also included as a tag. The first tag should be used for
display as the primary tag for this schedule.
primary_instructors: List of users who are the primary instructors.
primary_instructors_accesspoint: List of access points for each of the
primary_instructors as a 1-1 mapping.
secondary_instructors: List of users who have the same access permissions as
the primary instructors and aren't displayed for student searches.
calendar_edit_href: Edit URL of the calendar event for the schedule.
    notes: Arbitrary text input for this schedule.
"""
start_time = db.DateTimeProperty(required=True)
end_time = db.DateTimeProperty(required=True)
access_point_tags = db.StringListProperty()
access_points = dbutils.KeyListProperty(AccessPoint)
access_points_secondary = dbutils.KeyListProperty(AccessPoint)
primary_instructors = db.ListProperty(users.User)
primary_instructors_accesspoint = dbutils.KeyListProperty(AccessPoint)
  # Not indexing secondary instructors because we don't want to search them.
secondary_instructors = db.ListProperty(users.User, indexed=False)
calendar_edit_href = db.URLProperty()
notes = db.TextProperty()
def __unicode__(self):
return unicode(self.start_time)
def _GetChildrenQuery(self):
"""Overrides parent method."""
return []
@property
def activity(self):
return self.parent()
@property
def activity_key(self):
return self.parent_key()
@staticmethod
def ActiveSchedulesQuery():
"""Build query to get all schedules that aren't deleted."""
query = db.Query(ActivitySchedule)
utils.AddFilter(query, 'deleted =', 0)
return query
def GetAllAccessPoints(self):
"""Returns a set of primary and secondary access points."""
return set(self.access_points).union(self.access_points_secondary)
def ValidateInstance(self):
"""Validate the current schedule instance and return errors if any.
This method should be called just before writing the instance to the
datastore.
Returns:
A dictionary with items (property_name, string_errors_list). It is empty
when no validation errors occurred. The property_name is the name of the
entity property on which validation error occurred.
"""
errors_dict = {}
# Check access_points are valid.
ap_list = db.get(self.access_points)
if None in ap_list:
errors_dict['access_points'] = [_('Access Points not found')]
return errors_dict
@staticmethod
def OrphanedActivitySchedules():
"""Get all activity schedules that have activity missing."""
activity_set = set(db.Query(Activity, keys_only=True))
orphan_schedules = []
schedules = db.Query(ActivitySchedule)
for schedule in schedules:
activity_key = schedule.activity_key
if activity_key not in activity_set:
orphan_schedules.append(schedule)
return orphan_schedules
class ManagerApproval(_BaseModel):
"""Information about manager approval used by ManagerApproval rule.
Attributes:
candidate: users.User who needs the approval to attend an activity.
manager: users.User who needs to approve.
activity: Activity the candidate is trying to attend.
program: Program of the activity.
nominator: users.User who is trying to register candidate for the activity.
queue_time: The models.Registration.queue_time for the registration that has
initiated the workflow for manager approval.
last_update_time: The last time when the approval was updated.
approved: Boolean to indicate if the manager approved this request.
manager_decision: Boolean that indicates if manager took an action.
"""
candidate = db.UserProperty(required=True)
manager = db.UserProperty(required=True)
activity = db.ReferenceProperty(Activity, required=True)
program = db.ReferenceProperty(Program, required=True)
nominator = db.UserProperty(required=True)
queue_time = db.DateTimeProperty(auto_now_add=True)
last_update_time = db.DateTimeProperty(auto_now=True)
approved = db.BooleanProperty(required=True)
manager_decision = db.BooleanProperty(required=True, default=False)
@staticmethod
def GetPendingApprovalsQuery(manager_user):
"""Returns query for pending manager approval requests for a manager.
Args:
manager_user: users.User object of the manager for whom the pending
approval list should be queried.
Returns:
db.Query that can be queried to retrieve all the pending approvals.
"""
pending_approvals = ManagerApproval.all()
utils.AddFilter(pending_approvals, 'manager =', manager_user)
utils.AddFilter(pending_approvals, 'manager_decision =', False)
pending_approvals.order('queue_time')
return pending_approvals
class UserRegistration(_BaseModel):
"""User registration status for an activity.
UserRegistration records a user's registration attempt and tracks the status
of the registration attempt.
Attributes:
user: User who is trying to register.
activity: The activity to which the user is trying to register.
program: The program to which the activity belongs to.
queue_time: The time the user starts the registration attempt. Helps in
processing priority between similar requests between users. Once the
user starts a registration request a queue_time is created which is kept
active until the user initiates an unregister request.
creator: The user who initiated the registration.
schedule_list: A list of schedules the user is attending.
access_point_list: An ordered list relating 1-1 with schedule_list recording
which available access_point for the schedule the user wants to attend.
status: String category from utils.RegistrationStatus.
confirmed: Only entities marked with string status 'confirmed' are consumed
by the off-line rule context construction. Entities marked with 'ready'
are consumed to be processed. 'not ready' status is ignored.
active: String status that records if the entity is holding the latest
registration status. A user registration creates multiple entities over
the life cycle and only the latest will be marked with a value 'active'
    online_unregistered: Flag that records whether the online state was
      notified after unregistration by the offline process.
affecting_rule_tags: List string tags that affecting rules (rules that
agreed with the final status of rule engine evaluation) wanted to
identify the registration with. See rules.RuleRegister.Evaluate
for more info on how rule_tags can be used.
rule_tags: List of string tags that all the rules wanted to identify this
registration with. See rules.RuleRegister.Evaluate for more info on how
rule_tags can be used.
affecting_rule_configs: A list of rule configs that affected the current
status of the registration.
attendance: Category status depicting if the user attended the activity.
last_notified: Indicates the registration status of the last email
notification sent to the user. See utils.RegistrationStatus.
notify_email: Boolean to indicate whether to send email notification or not.
post_process_tasks: List of processors.TaskConfig that configure the
tasks to run after an unregistration is processed offline.
    force_status: Boolean that indicates if the status has been forced to the
current value by ignoring the rule engine decision.
last_modified: Date the entity was last modified.
"""
user = db.UserProperty(required=True)
activity = db.ReferenceProperty(Activity, required=True)
program = db.ReferenceProperty(Program, required=True)
queue_time = db.DateTimeProperty(auto_now_add=True)
creator = db.UserProperty(required=True)
schedule_list = dbutils.KeyListProperty(ActivitySchedule, indexed=False)
access_point_list = dbutils.KeyListProperty(AccessPoint, indexed=False)
status = db.CategoryProperty(required=True, choices=_Status.Choices())
confirmed = db.CategoryProperty(required=True, choices=_Confirm.Choices())
active = db.CategoryProperty(required=True, choices=_Active.Choices())
online_unregistered = db.BooleanProperty(required=True, default=False,
indexed=False)
affecting_rule_tags = db.StringListProperty(indexed=True)
rule_tags = db.StringListProperty(indexed=False)
affecting_rule_configs = dbutils.FakeModelListProperty(
rules.RuleConfig, default=[])
attendance = db.CategoryProperty(required=True, choices=_Attend.Choices(),
default=_Attend.UNKNOWN)
last_notified = db.CategoryProperty(choices=_Status.Choices(), indexed=False)
notify_email = db.BooleanProperty(default=True)
post_process_tasks = dbutils.FakeModelListProperty(processors.TaskConfig,
default=[])
force_status = db.BooleanProperty(indexed=False, default=False)
last_modified = db.DateTimeProperty(auto_now=True)
def __init__(self, *args, **kwargs):
# Use of super on old style class. Invalid warning.
# pylint: disable-msg=E1002
"""Registration constructor that considers eval_context data.
When called with an eval_context named argument, the construction uses the
eval_context to build the properties. Properties in kwargs override the ones
in eval_context.
Args:
args: All the un-named parameters besides self.
kwargs: All named parameters like property names. If eval_context is one
of the named parameters then it is used to initialize some properties.
"""
if 'eval_context' in kwargs:
eval_context = kwargs['eval_context']
new_kwargs = {}
new_kwargs['program'] = eval_context.program
new_kwargs['activity'] = eval_context.activity
new_kwargs['queue_time'] = eval_context.queue_time
new_kwargs['force_status'] = eval_context.force_status
new_kwargs['user'] = eval_context.user.appengine_user
new_kwargs['creator'] = eval_context.creator.appengine_user
new_kwargs['schedule_list'] = eval_context.schedule_list
new_kwargs['access_point_list'] = eval_context.access_point_list
# Remove eval_context element from kwargs.
kwargs.pop('eval_context')
new_kwargs.update(kwargs)
super(UserRegistration, self).__init__(*args, **new_kwargs)
else:
super(UserRegistration, self).__init__(*args, **kwargs)
assert (self.confirmed != _Confirm.NOT_READY or
self.active == _Active.INACTIVE)
def OnlyWaitingForMaxPeopleActivity(self):
"""Determine if registration is waiting only on max people activity rule."""
a_configs = self.affecting_rule_configs
return (self.status == utils.RegistrationStatus.WAITLISTED and
len(a_configs) == 1 and
a_configs[0].rule_name == rules.RuleNames.MAX_PEOPLE_ACTIVITY)
def WaitingForMaxPeopleActivity(self):
"""Determine if registration is waiting on max people activity rule."""
max_people_rule_name = rules.RuleNames.MAX_PEOPLE_ACTIVITY
a_configs = self.affecting_rule_configs
return (self.status == utils.RegistrationStatus.WAITLISTED and
max_people_rule_name in [cfg.rule_name for cfg in a_configs])
def __unicode__(self):
return '%s/%s' % (unicode(self.user), unicode(self.activity))
@staticmethod
def AddRegisterOrder(query):
"""Adds to query the ranking order of user registration entities."""
query.order('queue_time')
query.order('user')
@staticmethod
def ActiveQuery(program=None, activity=None, user=None, query=None,
keys_only=False):
"""Constructs query for active UserRegistrations with additional filters.
Args:
program: If not None the query will filter for registrations related to
the given program. Can be a db.Key or Program instance.
activity: If not None the query will filter for registrations related to
the given activity. Can be a db.Key or Activity instance.
user: If not None the query will filter for registrations related to the
given user.User.
query: A valid query on the UserRegistration class that is modified to
return active registrations. If None, a new query is created.
keys_only: Boolean if only keys should be returned by the query.
Returns:
A query that can be used to access active registrations.
"""
if query is None:
query = UserRegistration.all(keys_only=keys_only)
utils.AddFilter(query, 'active =', _Active.ACTIVE)
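# An activity filter takes precedence over a program filter: at most one of
# the two is applied to the query below.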
if activity is not None:
utils.AddFilter(query, 'activity =', activity)
elif program is not None:
utils.AddFilter(query, 'program =', program)
if user is not None:
utils.AddFilter(query, 'user =', user)
return query
def isValid(self, schedules):
"""Checks that user registration is valid against expected schedules.
Args:
schedules: An iterator over models.ActivitySchedules
Returns:
True iff the user registration schedules/access points are part of the
given schedules.
"""
schedule_ap_map = {}
for schedule in schedules:
schedule_ap_map[schedule.key()] = (schedule.access_points +
schedule.access_points_secondary)
# Check that every access point of each schedule is still valid
for schedule_key, ap_key in zip(self.schedule_list,
self.access_point_list):
ap_keys = schedule_ap_map.get(schedule_key, None)
# Check that the schedule is still expected and that the user access point
# selection is still valid
if ap_keys is None or ap_key not in ap_keys:
return False
return True
@staticmethod
def WaitlistRankForUser(activity, user):
"""Get the user's waitlist rank for a max capacity constrained course.
Args:
activity: Activity or db.Key of an Activity for which a user's waitlist
rank is required.
user: users.User for whom we need to find the waitlist rank.
Returns:
An integer waitlist rank starting from 1. If the waitlist cannot be found
or the user is not in it, 0 is returned.
"""
query = UserRegistration.ActiveQuery(activity=activity)
UserRegistration.AddRegisterOrder(query)
utils.AddFilter(query, 'status =', utils.RegistrationStatus.WAITLISTED)
queue_rank = 1
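# Walk the waitlist in registration order: only entries ahead of the user
# that are blocked solely by the max-people-per-activity rule count toward
# the rank. The for/else sets the rank to 0 when the user is never found.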
for registration in query:
if registration.user == user:
break
if registration.OnlyWaitingForMaxPeopleActivity():
queue_rank += 1
else:
queue_rank = 0
return queue_rank
@staticmethod
def NumberRegisteredForActivity(activity_key):
"""Counts the number of active registered users for an activity."""
registrations = UserRegistration.ActiveQuery(activity=activity_key)
return registrations.count()
| {
"content_hash": "1d8a52af5c8198d744a18e11419cc0f6",
"timestamp": "",
"source": "github",
"line_count": 1193,
"max_line_length": 80,
"avg_line_length": 36.37300922045264,
"alnum_prop": 0.7085705067637638,
"repo_name": "Chase235/cloudcourse",
"id": "533e8c105c74243d22a426c35e9aba0948bf730e",
"size": "44013",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "core/models.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "21215"
},
{
"name": "HTML",
"bytes": "79144"
},
{
"name": "JavaScript",
"bytes": "50374"
},
{
"name": "Python",
"bytes": "420437"
},
{
"name": "Shell",
"bytes": "1540"
}
],
"symlink_target": ""
} |
import os
from tempfile import NamedTemporaryFile
from mock import patch
from test_inischema import SAMPLE_CONFIGURATION
from test_inischema import SAMPLE_SCHEMA
SAMPLE_CONFIGURATION += """
[env]
somekey = somevalue
"""
class TestCommand(object):
with NamedTemporaryFile(delete=False) as config_file:
config_file.write(SAMPLE_CONFIGURATION)
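# The class-level ``with`` block above runs once, at class-definition time:
# it writes the sample configuration and closes the file, and because
# delete=False the file stays on disk for the tests to reference via
# self.config_file.name.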
def some_filename(self):
with NamedTemporaryFile(delete=False) as some_file:
return some_file.name
def test_simple(self):
from ..console import Command
called = []
def second(value1, value2=None):
called.append((value1, value2))
class MyCommand(Command):
__doc__ = """
Usage:
script second <config_file>
"""
schema = SAMPLE_SCHEMA
funcs = [second]
argv = ['script', 'second', self.config_file.name]
MyCommand()(argv)
assert len(called) == 1
assert called[0][0].startswith('a few line breaks')
assert called[0][1] is None
assert os.environ['SOMEKEY'] == 'somevalue'
def test_profiler(self):
from ..console import Command
called = []
def second(value1, value2=None):
called.append((value1, value2))
class MyCommand(Command):
__doc__ = """
Usage:
script second <config_file> [--profile=<file>]
"""
schema = SAMPLE_SCHEMA
funcs = [second]
profile_filename = self.some_filename()
argv = ['script', 'second', self.config_file.name,
'--profile', profile_filename]
MyCommand()(argv)
assert len(called) == 1
with open(profile_filename) as f:
assert(len(f.read()) > 1)
@patch('nolearn.console.pdb.post_mortem')
@patch('nolearn.console.traceback.print_exc')
def test_pdb(self, print_exc, post_mortem):
from ..console import Command
called = []
def second(value1, value2=None):
called.append((value1, value2))
raise ValueError()
class MyCommand(Command):
__doc__ = """
Usage:
script second <config_file> [--pdb]
"""
schema = SAMPLE_SCHEMA
funcs = [second]
argv = ['script', 'second', self.config_file.name, '--pdb']
MyCommand()(argv)
assert len(called) == 1
| {
"content_hash": "9d22d194c65163689547bc6631090d21",
"timestamp": "",
"source": "github",
"line_count": 94,
"max_line_length": 67,
"avg_line_length": 26.30851063829787,
"alnum_prop": 0.5564092195713708,
"repo_name": "rajegannathan/grasp-lift-eeg-cat-dog-solution-updated",
"id": "29fd7e5853044ed052b654f31098fd5cbdb752cc",
"size": "2473",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "python-packages/nolearn-0.5/nolearn/tests/test_console.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Assembly",
"bytes": "113"
},
{
"name": "C",
"bytes": "9257"
},
{
"name": "C++",
"bytes": "410482"
},
{
"name": "CSS",
"bytes": "3812"
},
{
"name": "Makefile",
"bytes": "23871"
},
{
"name": "PHP",
"bytes": "2068"
},
{
"name": "PowerShell",
"bytes": "2988"
},
{
"name": "Python",
"bytes": "5993790"
},
{
"name": "R",
"bytes": "145799"
},
{
"name": "Shell",
"bytes": "8953"
},
{
"name": "TeX",
"bytes": "912"
}
],
"symlink_target": ""
} |
from swgpy.object import *
def create(kernel):
result = Tangible()
result.template = "object/tangible/lair/bageraset/shared_lair_bageraset.iff"
result.attribute_template_id = -1
result.stfName("lair_n","bageraset")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result | {
"content_hash": "57f4b19d51394a76340550edd7721e44",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 77,
"avg_line_length": 23.384615384615383,
"alnum_prop": 0.6973684210526315,
"repo_name": "obi-two/Rebelion",
"id": "74df982b9d9bc3945b9feb7639e4e8a026dfd6bb",
"size": "449",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "data/scripts/templates/object/tangible/lair/bageraset/shared_lair_bageraset.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "11818"
},
{
"name": "C",
"bytes": "7699"
},
{
"name": "C++",
"bytes": "2293610"
},
{
"name": "CMake",
"bytes": "39727"
},
{
"name": "PLSQL",
"bytes": "42065"
},
{
"name": "Python",
"bytes": "7499185"
},
{
"name": "SQLPL",
"bytes": "41864"
}
],
"symlink_target": ""
} |
from sts.entities.sts_entities import *
from sts.entities.controllers import *
from sts.entities.hosts import *
| {
"content_hash": "e1a47d15021809340b9717952f1f3bc0",
"timestamp": "",
"source": "github",
"line_count": 3,
"max_line_length": 39,
"avg_line_length": 37.333333333333336,
"alnum_prop": 0.8035714285714286,
"repo_name": "jmiserez/sts",
"id": "d6d5fcebc39a1809de947c39aad0a44fed4a737f",
"size": "694",
"binary": false,
"copies": "2",
"ref": "refs/heads/hb",
"path": "sts/entities/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "1167857"
},
{
"name": "Shell",
"bytes": "16594"
}
],
"symlink_target": ""
} |
import requests
url = "https://maps.googleapis.com/maps/api/directions/json?origin=place_id%3AChIJ685WIFYViEgRHlHvBbiD5nE&destination=place_id%3AChIJA01I-8YVhkgRGJb0fW4UX7Y&key=YOUR_API_KEY"
payload={}
headers = {}
response = requests.request("GET", url, headers=headers, data=payload)
print(response.text)
# [END maps_http_directions_place_id] | {
"content_hash": "c0ffd70dba152a216da07cd803b23503",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 174,
"avg_line_length": 29.083333333333332,
"alnum_prop": 0.7851002865329513,
"repo_name": "googlemaps/openapi-specification",
"id": "7f5591ebb7ebb09c9a875f9a47040a686b4e6ffc",
"size": "389",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "dist/snippets/maps_http_directions_place_id/maps_http_directions_place_id.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Starlark",
"bytes": "11394"
},
{
"name": "TypeScript",
"bytes": "71469"
}
],
"symlink_target": ""
} |
"""Test the BlueMaestro sensors."""
from homeassistant.components.bluemaestro.const import DOMAIN
from homeassistant.components.sensor import ATTR_STATE_CLASS
from homeassistant.const import ATTR_FRIENDLY_NAME, ATTR_UNIT_OF_MEASUREMENT
from . import BLUEMAESTRO_SERVICE_INFO
from tests.common import MockConfigEntry
from tests.components.bluetooth import inject_bluetooth_service_info
async def test_sensors(hass):
"""Test setting up creates the sensors."""
entry = MockConfigEntry(
domain=DOMAIN,
unique_id="aa:bb:cc:dd:ee:ff",
)
entry.add_to_hass(hass)
assert await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
assert len(hass.states.async_all("sensor")) == 0
inject_bluetooth_service_info(hass, BLUEMAESTRO_SERVICE_INFO)
await hass.async_block_till_done()
assert len(hass.states.async_all("sensor")) == 4
temp_sensor = hass.states.get("sensor.tempo_disc_thd_eeff_temperature")
temp_sensor_attrs = temp_sensor.attributes
assert temp_sensor.state == "24.2"
assert temp_sensor_attrs[ATTR_FRIENDLY_NAME] == "Tempo Disc THD EEFF Temperature"
assert temp_sensor_attrs[ATTR_UNIT_OF_MEASUREMENT] == "°C"
assert temp_sensor_attrs[ATTR_STATE_CLASS] == "measurement"
assert await hass.config_entries.async_unload(entry.entry_id)
await hass.async_block_till_done()
| {
"content_hash": "452070232f708a22e4f4da7a3ef251d9",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 86,
"avg_line_length": 36.81578947368421,
"alnum_prop": 0.7333809864188706,
"repo_name": "nkgilley/home-assistant",
"id": "e1c7b27673c3560eb890fcb19b11371b41019c90",
"size": "1400",
"binary": false,
"copies": "3",
"ref": "refs/heads/dev",
"path": "tests/components/bluemaestro/test_sensor.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2963"
},
{
"name": "PLSQL",
"bytes": "840"
},
{
"name": "Python",
"bytes": "51597279"
},
{
"name": "Shell",
"bytes": "6252"
}
],
"symlink_target": ""
} |
"""
Animated Snap 3D
================
An extension to Nuke's "snap" options for animated 3D objects. Based on a
selection of vertices, this allows for objects to match the position, rotation
and scale of that selection over a specified frame range.
## Usage
As the name suggests, this adds "animated" options to the snap_menu of 3D
nodes (available since Nuke 6.1). The three new options work exactly the same
way as their original counterparts, but extend their use to animated geometry.
## Installation
To install, simply ensure the 'animatedSnap3D' directory is in your .nuke
directory or anywhere else within the Nuke python path.
Then, add the following lines to your 'menu.py' file:
::
import animatedSnap3D
animatedSnap3D.run()
## Public Functions
run()
Adds the animatedSnap3D functions to the Axis Snap Menu
## License
The MIT License (MIT)
animatedSnap3D
Copyright (c) 2011 Ivan Busquets
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
# =============================================================================
# IMPORTS
# =============================================================================
# Nuke Imports
try:
import nuke
except ImportError:
pass
# animatedSnap3D Imports
from .animatedSnap3D import animated_snap
# =============================================================================
# GLOBALS
# =============================================================================
__author__ = "Ivan Busquets"
__author_email__ = "[email protected]"
__copyright__ = "Copyright 2011, Ivan Busquets"
__credits__ = ["Ivan Busquets", "Sean Wallitsch", ]
__license__ = "MIT"
__version__ = "1.2b2"
__maintainer__ = "Sean Wallitsch"
__maintainer_email__ = "[email protected]"
__module_name__ = "animatedSnap3D"
__short_desc__ = "An extension to Nuke's 'snap' options for animated 3D objects"
__status__ = "Development"
__url__ = "http://github.com/ThoriumGroup/animatedSnap3D"
# =============================================================================
# EXPORTS
# =============================================================================
__all__ = [
'run',
'animated_snap'
]
# =============================================================================
# PUBLIC FUNCTIONS
# =============================================================================
def run():
"""Add animatedSnap3D menu items under the Axis Menu"""
try:
axis_menu = nuke.menu('Axis').findItem('Snap')
except AttributeError: # Could not find Axis menu
nuke.tprint(
"Could not find 'Axis' menu to append animatedSnap3D commands. "
"animatedSnap3D will not be available through menus."
)
return
else:
if not axis_menu: # Found Axis, could not find Snap
nuke.tprint(
"Could not find 'Snap' item of sub-menu 'Axis' to append "
"animatedSnap3D commands. animatedSnap3D will not be available "
"through menus."
)
return
axis_menu.addSeparator()
axis_menu.addCommand(
'Match animated selection position',
'animatedSnap3D.animated_snap()'
)
axis_menu.addCommand(
'Match animated selection position, orientation',
'animatedSnap3D.animated_snap(["translate", "rotate"])'
)
axis_menu.addCommand(
'Match animated selection position, orientation, size',
'animatedSnap3D.animated_snap(["translate", "rotate", "scaling"])'
)
| {
"content_hash": "e102417220d7ce1ba385e003c40fdb72",
"timestamp": "",
"source": "github",
"line_count": 133,
"max_line_length": 80,
"avg_line_length": 33.721804511278194,
"alnum_prop": 0.6015607580824972,
"repo_name": "ThoriumGroup/thorium",
"id": "1844c6ae2a8e9b6f4c43b5c75d21f7cc69047292",
"size": "4507",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "thorium/animatedSnap3D/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "173140"
}
],
"symlink_target": ""
} |
from msrest.serialization import Model
class VerificationIPFlowResult(Model):
"""Results of IP flow verification on the target resource.
:param access: Indicates whether the traffic is allowed or denied.
Possible values include: 'Allow', 'Deny'
:type access: str or ~azure.mgmt.network.v2016_12_01.models.Access
:param rule_name: Name of the rule. If input is not matched against any
security rule, it is not displayed.
:type rule_name: str
"""
_attribute_map = {
'access': {'key': 'access', 'type': 'str'},
'rule_name': {'key': 'ruleName', 'type': 'str'},
}
def __init__(self, **kwargs):
super(VerificationIPFlowResult, self).__init__(**kwargs)
self.access = kwargs.get('access', None)
self.rule_name = kwargs.get('rule_name', None)
| {
"content_hash": "3fb211f54daa548512b1f8d5f67eab6c",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 75,
"avg_line_length": 35.869565217391305,
"alnum_prop": 0.6424242424242425,
"repo_name": "lmazuel/azure-sdk-for-python",
"id": "19f43d9e528f2d01376349876bda3e7470011baf",
"size": "1299",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "azure-mgmt-network/azure/mgmt/network/v2016_12_01/models/verification_ip_flow_result.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "42572767"
}
],
"symlink_target": ""
} |
import numpy
import chainer
from chainer import backend
from chainer.backends import cuda
from chainer.functions.activation import lstm
from chainer.functions.array import reshape
from chainer.functions.array import stack
from chainer.functions.connection import linear
from chainer.functions.connection import n_step_rnn
from chainer.utils import argument
if cuda.cudnn_enabled:
cudnn = cuda.cudnn
libcudnn = cuda.cuda.cudnn
def _stack_weight(ws):
# TODO(unno): Input of the current LSTM implementation is shuffled
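# Stacking along axis 1 and flattening interleaves the per-gate matrices so
# that the four gate rows of each hidden unit end up contiguous, matching the
# interleaved gate layout that chainer.functions.lstm expects.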
w = stack.stack(ws, axis=1)
shape = w.shape
return reshape.reshape(w, (shape[0] * shape[1],) + shape[2:])
class NStepLSTM(n_step_rnn.BaseNStepRNN):
def __init__(self, n_layers, states, lengths):
n_step_rnn.BaseNStepRNN.__init__(
self, n_layers, states, lengths,
rnn_dir='uni', rnn_mode='lstm')
class NStepBiLSTM(n_step_rnn.BaseNStepRNN):
def __init__(self, n_layers, states, lengths):
n_step_rnn.BaseNStepRNN.__init__(
self, n_layers, states, lengths,
rnn_dir='bi', rnn_mode='lstm')
def n_step_lstm(
n_layers, dropout_ratio, hx, cx, ws, bs, xs, **kwargs):
"""n_step_lstm(n_layers, dropout_ratio, hx, cx, ws, bs, xs)
Stacked Uni-directional Long Short-Term Memory function.
This function calculates stacked Uni-directional LSTM with sequences.
This function gets an initial hidden state :math:`h_0`, an initial cell
state :math:`c_0`, an input sequence :math:`x`, weight matrices :math:`W`,
and bias vectors :math:`b`.
This function calculates hidden states :math:`h_t` and :math:`c_t` for each
time :math:`t` from input :math:`x_t`.
.. math::
i_t &= \\sigma(W_0 x_t + W_4 h_{t-1} + b_0 + b_4) \\\\
f_t &= \\sigma(W_1 x_t + W_5 h_{t-1} + b_1 + b_5) \\\\
o_t &= \\sigma(W_2 x_t + W_6 h_{t-1} + b_2 + b_6) \\\\
a_t &= \\tanh(W_3 x_t + W_7 h_{t-1} + b_3 + b_7) \\\\
c_t &= f_t \\cdot c_{t-1} + i_t \\cdot a_t \\\\
h_t &= o_t \\cdot \\tanh(c_t)
As the function accepts a sequence, it calculates :math:`h_t` for all
:math:`t` with one call. Eight weight matrices and eight bias vectors are
required for each layer. So, when :math:`S` layers exist, you need to
prepare :math:`8S` weight matrices and :math:`8S` bias vectors.
If the number of layers ``n_layers`` is greater than :math:`1`, the input
of the ``k``-th layer is the hidden state ``h_t`` of the ``k-1``-th layer.
Note that all input variables except those of the first layer may have a
different shape from the first layer's.
Args:
n_layers(int): The number of layers.
dropout_ratio(float): Dropout ratio.
hx (~chainer.Variable): Variable holding stacked hidden states.
Its shape is ``(S, B, N)`` where ``S`` is the number of layers and
is equal to ``n_layers``, ``B`` is the mini-batch size, and ``N``
is the dimension of the hidden units.
cx (~chainer.Variable): Variable holding stacked cell states.
It has the same shape as ``hx``.
ws (list of list of :class:`~chainer.Variable`): Weight matrices.
``ws[i]`` represents the weights for the i-th layer.
Each ``ws[i]`` is a list containing eight matrices.
``ws[i][j]`` corresponds to :math:`W_j` in the equation.
Only ``ws[0][j]`` where ``0 <= j < 4`` are ``(I, N)``-shaped as
they are multiplied with input variables, where ``I`` is the size
of the input and ``N`` is the dimension of the hidden units. All
other matrices are ``(N, N)``-shaped.
bs (list of list of :class:`~chainer.Variable`): Bias vectors.
``bs[i]`` represents the biases for the i-th layer.
Each ``bs[i]`` is a list containing eight vectors.
``bs[i][j]`` corresponds to :math:`b_j` in the equation.
The shape of each matrix is ``(N,)`` where ``N`` is the dimension
of the hidden units.
xs (list of :class:`~chainer.Variable`):
A list of :class:`~chainer.Variable`
holding input values. Each element ``xs[t]`` holds input value
for time ``t``. Its shape is ``(B_t, I)``, where ``B_t`` is the
mini-batch size for time ``t``. The sequences must be transposed.
:func:`~chainer.functions.transpose_sequence` can be used to
transpose a list of :class:`~chainer.Variable`\\ s each
representing a sequence.
When sequences have different lengths, they must be
sorted in descending order of their lengths before transposing.
So ``xs`` needs to satisfy
``xs[t].shape[0] >= xs[t + 1].shape[0]``.
Returns:
tuple: This function returns a tuple containing three elements,
``hy``, ``cy`` and ``ys``.
- ``hy`` holds the updated hidden states and has the same shape as
``hx``.
- ``cy`` holds the updated cell states and has the same shape as
``cx``.
- ``ys`` is a list of :class:`~chainer.Variable` . Each element
``ys[t]`` holds hidden states of the last layer corresponding
to an input ``xs[t]``. Its shape is ``(B_t, N)`` where ``B_t`` is
the mini-batch size for time ``t``, and ``N`` is size of hidden
units. Note that ``B_t`` is the same value as ``xs[t]``.
.. note::
The dimension of hidden units is limited to only one size ``N``. If you
want to use variable dimension of hidden units, please use
:class:`chainer.functions.lstm`.
.. seealso::
:func:`chainer.functions.lstm`
.. admonition:: Example
>>> batchs = [3, 2, 1] # support variable length sequences
>>> in_size, out_size, n_layers = 3, 2, 2
>>> dropout_ratio = 0.0
>>> xs = [np.ones((b, in_size)).astype(np.float32) for b in batchs]
>>> [x.shape for x in xs]
[(3, 3), (2, 3), (1, 3)]
>>> h_shape = (n_layers, batchs[0], out_size)
>>> hx = np.ones(h_shape).astype(np.float32)
>>> cx = np.ones(h_shape).astype(np.float32)
>>> w_in = lambda i, j: in_size if i == 0 and j < 4 else out_size
>>> ws = []
>>> bs = []
>>> for n in range(n_layers):
... ws.append([np.ones((out_size, w_in(n, i))).astype(np.float32) \
for i in range(8)])
... bs.append([np.ones((out_size,)).astype(np.float32) \
for _ in range(8)])
...
>>> ws[0][0].shape # ws[0][:4].shape are (out_size, in_size)
(2, 3)
>>> ws[1][0].shape # others are (out_size, out_size)
(2, 2)
>>> bs[0][0].shape
(2,)
>>> hy, cy, ys = F.n_step_lstm(
... n_layers, dropout_ratio, hx, cx, ws, bs, xs)
>>> hy.shape
(2, 3, 2)
>>> cy.shape
(2, 3, 2)
>>> [y.shape for y in ys]
[(3, 2), (2, 2), (1, 2)]
"""
return n_step_lstm_base(n_layers, dropout_ratio, hx, cx, ws, bs, xs,
use_bi_direction=False, **kwargs)
def n_step_bilstm(
n_layers, dropout_ratio, hx, cx, ws, bs, xs, **kwargs):
"""n_step_bilstm(n_layers, dropout_ratio, hx, cx, ws, bs, xs)
Stacked Bi-directional Long Short-Term Memory function.
This function calculates stacked Bi-directional LSTM with sequences.
This function gets an initial hidden state :math:`h_0`, an initial cell
state :math:`c_0`, an input sequence :math:`x`, weight matrices :math:`W`,
and bias vectors :math:`b`.
This function calculates hidden states :math:`h_t` and :math:`c_t` for each
time :math:`t` from input :math:`x_t`.
.. math::
i^{f}_t &=& \\sigma(W^{f}_0 x_t + W^{f}_4 h_{t-1} + b^{f}_0 + b^{f}_4),
\\\\
f^{f}_t &=& \\sigma(W^{f}_1 x_t + W^{f}_5 h_{t-1} + b^{f}_1 + b^{f}_5),
\\\\
o^{f}_t &=& \\sigma(W^{f}_2 x_t + W^{f}_6 h_{t-1} + b^{f}_2 + b^{f}_6),
\\\\
a^{f}_t &=& \\tanh(W^{f}_3 x_t + W^{f}_7 h_{t-1} + b^{f}_3 + b^{f}_7),
\\\\
c^{f}_t &=& f^{f}_t \\cdot c^{f}_{t-1} + i^{f}_t \\cdot a^{f}_t,
\\\\
h^{f}_t &=& o^{f}_t \\cdot \\tanh(c^{f}_t),
\\\\
i^{b}_t &=& \\sigma(W^{b}_0 x_t + W^{b}_4 h_{t-1} + b^{b}_0 + b^{b}_4),
\\\\
f^{b}_t &=& \\sigma(W^{b}_1 x_t + W^{b}_5 h_{t-1} + b^{b}_1 + b^{b}_5),
\\\\
o^{b}_t &=& \\sigma(W^{b}_2 x_t + W^{b}_6 h_{t-1} + b^{b}_2 + b^{b}_6),
\\\\
a^{b}_t &=& \\tanh(W^{b}_3 x_t + W^{b}_7 h_{t-1} + b^{b}_3 + b^{b}_7),
\\\\
c^{b}_t &=& f^{b}_t \\cdot c^{b}_{t-1} + i^{b}_t \\cdot a^{b}_t, \\\\
h^{b}_t &=& o^{b}_t \\cdot \\tanh(c^{b}_t), \\\\
h_t &=& [h^{f}_t; h^{b}_t]
where :math:`W^{f}` is the weight matrices for forward-LSTM, :math:`W^{b}`
is weight matrices for backward-LSTM.
As the function accepts a sequence, it calculates :math:`h_t` for all
:math:`t` with one call. Eight weight matrices and eight bias vectors are
required for each layer of each direction. So, when :math:`S` layers
exist, you need to prepare :math:`16S` weight matrices and :math:`16S`
bias vectors.
If the number of layers ``n_layers`` is greater than :math:`1`, the input
of the ``k``-th layer is the hidden state ``h_t`` of the ``k-1``-th layer.
Note that all input variables except those of the first layer may have a
different shape from the first layer's.
Args:
n_layers(int): The number of layers.
dropout_ratio(float): Dropout ratio.
hx (~chainer.Variable): Variable holding stacked hidden states.
Its shape is ``(2S, B, N)`` where ``S`` is the number of layers and
is equal to ``n_layers``, ``B`` is the mini-batch size, and ``N``
is the dimension of the hidden units. Because of bi-direction, the
first dimension length is ``2S``.
cx (~chainer.Variable): Variable holding stacked cell states.
It has the same shape as ``hx``.
ws (list of list of :class:`~chainer.Variable`): Weight matrices.
``ws[2 * l + m]`` represents the weights for the l-th layer of
the m-th direction. (``m == 0`` means the forward direction and
``m == 1`` means the backward direction.) Each ``ws[i]`` is a
list containing eight matrices. ``ws[i][j]`` corresponds to
:math:`W_j` in the equation. ``ws[0][j]`` and ``ws[1][j]`` where
``0 <= j < 4`` are ``(I, N)``-shaped because they are multiplied
with input variables, where ``I`` is the size of the input.
``ws[i][j]`` where ``2 <= i`` and ``0 <= j < 4`` are
``(N, 2N)``-shaped because they are multiplied with two hidden
layers :math:`h_t = [h^{f}_t; h^{b}_t]`. All other matrices are
``(N, N)``-shaped.
bs (list of list of :class:`~chainer.Variable`): Bias vectors.
``bs[2 * l + m]`` represents the weights for the l-th layer of
m-th direction. (``m == 0`` means the forward direction and
``m == 1`` means the backward direction.)
Each ``bs[i]`` is a list containing eight vectors.
``bs[i][j]`` corresponds to :math:`b_j` in the equation.
The shape of each matrix is ``(N,)``.
xs (list of :class:`~chainer.Variable`):
A list of :class:`~chainer.Variable`
holding input values. Each element ``xs[t]`` holds input value
for time ``t``. Its shape is ``(B_t, I)``, where ``B_t`` is the
mini-batch size for time ``t``. The sequences must be transposed.
:func:`~chainer.functions.transpose_sequence` can be used to
transpose a list of :class:`~chainer.Variable`\\ s each
representing a sequence.
When sequences have different lengths, they must be
sorted in descending order of their lengths before transposing.
So ``xs`` needs to satisfy
``xs[t].shape[0] >= xs[t + 1].shape[0]``.
Returns:
tuple: This function returns a tuple containing three elements,
``hy``, ``cy`` and ``ys``.
- ``hy`` holds the updated hidden states and has the same shape as
``hx``.
- ``cy`` holds the updated cell states and has the same shape as
``cx``.
- ``ys`` is a list of :class:`~chainer.Variable` . Each element
``ys[t]`` holds hidden states of the last layer corresponding
to an input ``xs[t]``. Its shape is ``(B_t, 2N)`` where ``B_t``
is the mini-batch size for time ``t``, and ``N`` is size of
hidden units. Note that ``B_t`` is the same value as ``xs[t]``.
.. admonition:: Example
>>> batchs = [3, 2, 1] # support variable length sequences
>>> in_size, out_size, n_layers = 3, 2, 2
>>> dropout_ratio = 0.0
>>> xs = [np.ones((b, in_size)).astype(np.float32) for b in batchs]
>>> [x.shape for x in xs]
[(3, 3), (2, 3), (1, 3)]
>>> h_shape = (n_layers * 2, batchs[0], out_size)
>>> hx = np.ones(h_shape).astype(np.float32)
>>> cx = np.ones(h_shape).astype(np.float32)
>>> def w_in(i, j):
... if i == 0 and j < 4:
... return in_size
... elif i > 0 and j < 4:
... return out_size * 2
... else:
... return out_size
...
>>> ws = []
>>> bs = []
>>> for n in range(n_layers):
... for direction in (0, 1):
... ws.append([np.ones((out_size, w_in(n, i))).\
astype(np.float32) for i in range(8)])
... bs.append([np.ones((out_size,)).astype(np.float32) \
for _ in range(8)])
...
>>> ws[0][0].shape # ws[0:2][:4].shape are (out_size, in_size)
(2, 3)
>>> ws[2][0].shape # ws[2:][:4].shape are (out_size, 2 * out_size)
(2, 4)
>>> ws[0][4].shape # others are (out_size, out_size)
(2, 2)
>>> bs[0][0].shape
(2,)
>>> hy, cy, ys = F.n_step_bilstm(
... n_layers, dropout_ratio, hx, cx, ws, bs, xs)
>>> hy.shape
(4, 3, 2)
>>> cy.shape
(4, 3, 2)
>>> [y.shape for y in ys]
[(3, 4), (2, 4), (1, 4)]
"""
return n_step_lstm_base(n_layers, dropout_ratio, hx, cx, ws, bs, xs,
use_bi_direction=True, **kwargs)
def n_step_lstm_base(
n_layers, dropout_ratio, hx, cx, ws, bs, xs, use_bi_direction,
**kwargs):
"""Base function for Stack LSTM/BiLSTM functions.
This function is used at :func:`chainer.functions.n_step_lstm` and
:func:`chainer.functions.n_step_bilstm`.
This function's behavior depends on following arguments,
``activation`` and ``use_bi_direction``.
Args:
n_layers(int): The number of layers.
dropout_ratio(float): Dropout ratio.
hx (~chainer.Variable): Variable holding stacked hidden states.
Its shape is ``(S, B, N)`` where ``S`` is the number of layers and
is equal to ``n_layers``, ``B`` is the mini-batch size, and ``N``
is the dimension of the hidden units.
cx (~chainer.Variable): Variable holding stacked cell states.
It has the same shape as ``hx``.
ws (list of list of :class:`~chainer.Variable`): Weight matrices.
``ws[i]`` represents the weights for the i-th layer.
Each ``ws[i]`` is a list containing eight matrices.
``ws[i][j]`` corresponds to :math:`W_j` in the equation.
Only ``ws[0][j]`` where ``0 <= j < 4`` are ``(I, N)``-shaped as they
are multiplied with input variables, where ``I`` is the size of
the input and ``N`` is the dimension of the hidden units. All
other matrices are ``(N, N)``-shaped.
bs (list of list of :class:`~chainer.Variable`): Bias vectors.
``bs[i]`` represents the biases for the i-th layer.
Each ``bs[i]`` is a list containing eight vectors.
``bs[i][j]`` corresponds to :math:`b_j` in the equation.
The shape of each matrix is ``(N,)``.
xs (list of :class:`~chainer.Variable`):
A list of :class:`~chainer.Variable`
holding input values. Each element ``xs[t]`` holds input value
for time ``t``. Its shape is ``(B_t, I)``, where ``B_t`` is the
mini-batch size for time ``t``. The sequences must be transposed.
:func:`~chainer.functions.transpose_sequence` can be used to
transpose a list of :class:`~chainer.Variable`\\ s each
representing a sequence.
When sequences have different lengths, they must be
sorted in descending order of their lengths before transposing.
So ``xs`` needs to satisfy
``xs[t].shape[0] >= xs[t + 1].shape[0]``.
use_bi_direction (bool): If ``True``, this function uses Bi-directional
LSTM.
Returns:
tuple: This function returns a tuple containing three elements,
``hy``, ``cy`` and ``ys``.
- ``hy`` holds the updated hidden states and has the same shape as
``hx``.
- ``cy`` holds the updated cell states and has the same shape as
``cx``.
- ``ys`` is a list of :class:`~chainer.Variable` . Each element
``ys[t]`` holds hidden states of the last layer corresponding
to an input ``xs[t]``. Its shape is ``(B_t, N)`` where ``B_t`` is
the mini-batch size for time ``t``. Note that ``B_t`` is the same
value as ``xs[t]``.
.. seealso::
:func:`chainer.functions.n_step_lstm`
:func:`chainer.functions.n_step_bilstm`
"""
if kwargs:
argument.check_unexpected_kwargs(
kwargs, train='train argument is not supported anymore. '
'Use chainer.using_config',
use_cudnn='use_cudnn argument is not supported anymore. '
'Use chainer.using_config')
argument.assert_kwargs_empty(kwargs)
xp = backend.get_array_module(hx, hx.data)
if xp is not numpy and chainer.should_use_cudnn('>=auto', 5000):
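# Fast path: on GPU with cuDNN (v5 or later) enabled, concatenate the
# sequences and run the fused cuDNN RNN kernel; otherwise fall back to the
# step-by-step implementation in n_step_rnn.n_step_rnn_impl below.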
states = cuda.get_cudnn_dropout_states()
states.set_dropout_ratio(dropout_ratio)
lengths = [len(x) for x in xs]
xs = chainer.functions.concat(xs, axis=0)
w = n_step_rnn.cudnn_rnn_weight_concat(
n_layers, states, use_bi_direction, 'lstm', ws, bs)
if use_bi_direction:
rnn = NStepBiLSTM
else:
rnn = NStepLSTM
hy, cy, ys = rnn(n_layers, states, lengths)(hx, cx, w, xs)
sections = numpy.cumsum(lengths[:-1])
ys = chainer.functions.split_axis(ys, sections, 0)
return hy, cy, ys
else:
return n_step_rnn.n_step_rnn_impl(
_lstm, n_layers, dropout_ratio, hx, cx, ws, bs, xs,
use_bi_direction)
def _lstm(x, h, c, w, b):
xw = _stack_weight([w[2], w[0], w[1], w[3]])
hw = _stack_weight([w[6], w[4], w[5], w[7]])
xb = _stack_weight([b[2], b[0], b[1], b[3]])
hb = _stack_weight([b[6], b[4], b[5], b[7]])
lstm_in = linear.linear(x, xw, xb) + linear.linear(h, hw, hb)
c_bar, h_bar = lstm.lstm(c, lstm_in)
return h_bar, c_bar
| {
"content_hash": "0e1e202d4a04053c6aabc9db79a72fc1",
"timestamp": "",
"source": "github",
"line_count": 441,
"max_line_length": 79,
"avg_line_length": 44.07936507936508,
"alnum_prop": 0.544009465507485,
"repo_name": "jnishi/chainer",
"id": "85a6b9c1182600203c5145cf80588c7c3e09d16b",
"size": "19439",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "chainer/functions/connection/n_step_lstm.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "3368"
},
{
"name": "C",
"bytes": "70"
},
{
"name": "C++",
"bytes": "1460543"
},
{
"name": "CMake",
"bytes": "42279"
},
{
"name": "Cuda",
"bytes": "53858"
},
{
"name": "Dockerfile",
"bytes": "1457"
},
{
"name": "PowerShell",
"bytes": "7197"
},
{
"name": "Python",
"bytes": "5121452"
},
{
"name": "Shell",
"bytes": "22130"
}
],
"symlink_target": ""
} |
from graphscale.utils import execute_sql, param_check
def create_kvetch_objects_table_sql():
return """CREATE TABLE IF NOT EXISTS kvetch_objects (
row_id INT NOT NULL AUTO_INCREMENT PRIMARY KEY,
obj_id BINARY(16) NOT NULL,
type_id INT NOT NULL,
created DATETIME NOT NULL,
updated DATETIME NOT NULL,
body MEDIUMBLOB,
UNIQUE KEY (obj_id),
UNIQUE KEY (type_id, obj_id),
KEY (updated)
) ENGINE=InnoDB;
"""
def create_kvetch_index_table_sql(index_column, index_sql_type, target_column, index_name):
param_check(index_column, str, 'index_column')
param_check(target_column, str, 'target_column')
param_check(index_name, str, 'index_name')
# something is up here. the two indexing keys (not updated) should be unique
return """CREATE TABLE IF NOT EXISTS %s (
row_id INT NOT NULL AUTO_INCREMENT PRIMARY KEY,
%s %s NOT NULL,
%s BINARY(16) NOT NULL,
created DATETIME NOT NULL,
KEY (%s, %s),
KEY (%s, %s),
KEY (created)
) ENGINE=InnoDB;
""" % (index_name, index_column, index_sql_type, target_column,
index_column, target_column, target_column, index_column)
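# Illustration (not part of the original module): for a hypothetical index
# such as create_kvetch_index_table_sql('name', 'VARCHAR(255)', 'target_id',
# 'kvetch_name_index'), the template above yields a kvetch_name_index table
# keyed on (name, target_id), (target_id, name) and created.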
def create_kvetch_edge_table_sql():
return """CREATE TABLE IF NOT EXISTS kvetch_edges (
row_id INT NOT NULL AUTO_INCREMENT PRIMARY KEY,
edge_id INT NOT NULL,
from_id BINARY(16) NOT NULL,
to_id BINARY(16) NOT NULL,
created DATETIME NOT NULL,
updated DATETIME NOT NULL,
body MEDIUMBLOB,
UNIQUE KEY(edge_id, from_id, to_id),
UNIQUE KEY(edge_id, from_id, row_id),
KEY(updated)
) ENGINE=InnoDB;
"""
def create_kvetch_objects_table(shard):
execute_sql(shard.conn(), create_kvetch_objects_table_sql())
def create_kvetch_edges_table(shard):
execute_sql(shard.conn(), create_kvetch_edge_table_sql())
def create_kvetch_index_table(shard, shard_index):
sql = create_kvetch_index_table_sql(
shard_index.indexed_attr(),
shard_index.sql_type_of_index(),
'target_id',
shard_index.index_name())
execute_sql(shard.conn(), sql)
def init_shard_db_tables(shard, indexes):
param_check(indexes, list, 'indexes')
create_kvetch_objects_table(shard)
create_kvetch_edges_table(shard)
for shard_index in indexes:
create_kvetch_index_table(shard, shard_index)
def drop_shard_db_tables(shard, indexes):
param_check(indexes, list, 'indexes')
execute_sql(shard.conn(), 'DROP TABLE IF EXISTS kvetch_objects')
execute_sql(shard.conn(), 'DROP TABLE IF EXISTS kvetch_edges')
for shard_index in indexes:
execute_sql(shard.conn(), 'DROP TABLE IF EXISTS %s' % shard_index.index_name())
| {
"content_hash": "9f71bcb1e39c644085b3e3da2064aff9",
"timestamp": "",
"source": "github",
"line_count": 76,
"max_line_length": 91,
"avg_line_length": 34.73684210526316,
"alnum_prop": 0.6768939393939394,
"repo_name": "schrockntemp/graphscaletemp",
"id": "a6796bebd2eeab971caf25f5dce2a831da9b94e7",
"size": "2640",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "graphscale/kvetch/kvetch_dbschema.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "184175"
}
],
"symlink_target": ""
} |
from application import app
app.run(debug=True, host="0.0.0.0", port=5003)
| {
"content_hash": "df20a4f143a91b5bb3227e952a70a4d7",
"timestamp": "",
"source": "github",
"line_count": 4,
"max_line_length": 46,
"avg_line_length": 19.25,
"alnum_prop": 0.7142857142857143,
"repo_name": "LandRegistry/register-metadata",
"id": "c818a92ba97d3da54996d566de341154236bc0d3",
"size": "77",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "run.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Mako",
"bytes": "412"
},
{
"name": "Python",
"bytes": "13325"
},
{
"name": "Shell",
"bytes": "338"
}
],
"symlink_target": ""
} |
from runner.koan import *
class AboutLists(Koan):
def test_creating_lists(self):
empty_list = list()
self.assertEqual(list, type(empty_list))
self.assertEqual(0, len(empty_list))
def test_list_literals(self):
nums = list()
self.assertEqual([], nums)
nums[0:] = [1]
self.assertEqual([1], nums)
nums[1:] = [2]
self.assertEqual([1, 2], nums)
nums.append(333)
self.assertEqual([1, 2, 333], nums)
def test_accessing_list_elements(self):
noms = ['peanut', 'butter', 'and', 'jelly']
self.assertEqual('peanut', noms[0])
self.assertEqual('jelly', noms[3])
self.assertEqual('jelly', noms[-1])
self.assertEqual('butter', noms[-3])
def test_slicing_lists(self):
noms = ['peanut', 'butter', 'and', 'jelly']
self.assertEqual(['peanut'], noms[0:1])
self.assertEqual(['peanut', 'butter'], noms[0:2])
self.assertEqual([], noms[2:2])
self.assertEqual(['and', 'jelly'], noms[2:20])
self.assertEqual([], noms[4:0])
self.assertEqual([], noms[4:100])
self.assertEqual([], noms[5:0])
def test_slicing_to_the_edge(self):
noms = ['peanut', 'butter', 'and', 'jelly']
self.assertEqual(['and', 'jelly'], noms[2:])
self.assertEqual(['peanut', 'butter'], noms[:2])
def test_lists_and_ranges(self):
self.assertEqual(list, type(range(5)))
self.assertEqual([0, 1, 2, 3, 4], range(5))
self.assertEqual([5, 6, 7, 8], range(5, 9))
def test_ranges_with_steps(self):
self.assertEqual([0, 2, 4, 6], range(0, 8, 2))
self.assertEqual([1, 4, 7], range(1, 8, 3))
self.assertEqual([5, 1, -3], range(5, -7, -4))
self.assertEqual([5, 1, -3, -7], range(5, -8, -4))
def test_insertions(self):
knight = ['you', 'shall', 'pass']
knight.insert(2, 'not')
self.assertEqual(['you', 'shall', 'not', 'pass'], knight)
knight.insert(0, 'Arthur')
self.assertEqual(['Arthur', 'you', 'shall', 'not', 'pass'], knight)
def test_popping_lists(self):
stack = [10, 20, 30, 40]
stack.append('last')
self.assertEqual([10, 20, 30, 40, 'last'], stack)
popped_value = stack.pop()
self.assertEqual('last', popped_value)
self.assertEqual([10, 20, 30, 40], stack)
popped_value = stack.pop(1)
self.assertEqual(20, popped_value)
self.assertEqual([10, 30, 40], stack)
# Notice that there is a "pop" but no "push" in python?
# Part of the Python philosophy is that there ideally should be one and
# only one way of doing anything. A 'push' is the same as an 'append'.
# To learn more about this try typing "import this" from the python
# console... ;)
def test_use_deques_for_making_queues(self):
from collections import deque
queue = deque([1, 2])
queue.append('last')
self.assertEqual([1, 2, 'last'], list(queue))
popped_value = queue.popleft()
self.assertEqual(1, popped_value)
self.assertEqual([2, 'last'], list(queue))
| {
"content_hash": "c6c3ec6c75d1adc9410fcd3f9fa652a0",
"timestamp": "",
"source": "github",
"line_count": 99,
"max_line_length": 79,
"avg_line_length": 32.19191919191919,
"alnum_prop": 0.5622842798870411,
"repo_name": "codetestcode/pyintdemo",
"id": "139dddab533074c5b82235a08a8c44a588d0195b",
"size": "3280",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python2/koans/about_lists.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "166604"
},
{
"name": "Shell",
"bytes": "837"
}
],
"symlink_target": ""
} |
from circuits.web import Controller
from .helpers import urlencode, urlopen
class Root(Controller):
def index(self, *args, **kwargs):
args = tuple((
x.encode("utf-8") if type(x) != str else x
for x in args
))
return "{0}\n{1}".format(repr(args), repr(kwargs))
def test(webapp):
args = ("1", "2", "3")
kwargs = {"data": "\x00" * 4096}
url = "%s/%s" % (webapp.server.http.base, "/".join(args))
data = urlencode(kwargs).encode('utf-8')
f = urlopen(url, data)
data = f.read().split(b"\n")
assert eval(data[0]) == args
assert eval(data[1]) == kwargs
| {
"content_hash": "0af5b8681c2fc9a18f070ca76ba40322",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 61,
"avg_line_length": 26.458333333333332,
"alnum_prop": 0.5559055118110237,
"repo_name": "nizox/circuits",
"id": "1d7ac2a8baefe9d0a011485b2c76cac668efb0f7",
"size": "658",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "tests/web/test_large_post.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "9"
},
{
"name": "Python",
"bytes": "646627"
}
],
"symlink_target": ""
} |
"""Network units used in the Dozat and Manning (2017) biaffine parser."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
from dragnn.python import digraph_ops
from dragnn.python import network_units
from syntaxnet.util import check
class BiaffineDigraphNetwork(network_units.NetworkUnitInterface):
"""Network unit that computes biaffine digraph scores.
The D&M parser uses two MLPs to create two activation vectors for each token,
which represent the token when it is used as the source or target of an arc.
Arcs are scored using a "biaffine" function that includes a bilinear and
linear term:
sources[s] * arc_weights * targets[t] + sources[s] * source_weights
The digraph is "unlabeled" in that there is at most one arc between any pair
of tokens. If labels are required, the BiaffineLabelNetwork can be used to
label a set of selected arcs.
Note that in the typical use case where the source and target activations are
the same dimension and are produced by single-layer MLPs, it is arithmetically
equivalent to produce the source and target activations using a single MLP of
twice the size, and then split those activations in half. The |SplitNetwork|
can be used for this purpose.
Parameters:
None.
Features:
sources: [B * N, S] matrix of batched activations for source tokens.
targets: [B * N, T] matrix of batched activations for target tokens.
Layers:
adjacency: [B * N, N] matrix where entry b*N+s,t is the score of the arc
from s to t in batch b, if s != t, or the score for selecting t
as a root, if s == t.
"""
def __init__(self, component):
"""Initializes weights and layers.
Args:
component: Parent ComponentBuilderBase object.
"""
super(BiaffineDigraphNetwork, self).__init__(component)
check.Eq(len(self._fixed_feature_dims.items()), 0,
'Expected no fixed features')
check.Eq(len(self._linked_feature_dims.items()), 2,
'Expected two linked features')
check.In('sources', self._linked_feature_dims,
'Missing required linked feature')
check.In('targets', self._linked_feature_dims,
'Missing required linked feature')
self._source_dim = self._linked_feature_dims['sources']
self._target_dim = self._linked_feature_dims['targets']
# TODO(googleuser): Make parameter initialization configurable.
self._weights = []
self._weights.append(tf.get_variable(
'weights_arc', [self._source_dim, self._target_dim], tf.float32,
tf.random_normal_initializer(stddev=1e-4)))
self._weights.append(tf.get_variable(
'weights_source', [self._source_dim], tf.float32,
tf.random_normal_initializer(stddev=1e-4)))
self._weights.append(tf.get_variable(
'root', [self._source_dim], tf.float32,
tf.random_normal_initializer(stddev=1e-4)))
self._params.extend(self._weights)
self._regularized_weights.extend(self._weights)
# Negative Layer.dim indicates that the dimension is dynamic.
self._layers.append(network_units.Layer(self, 'adjacency', -1))
def create(self,
fixed_embeddings,
linked_embeddings,
context_tensor_arrays,
attention_tensor,
during_training,
stride=None):
"""Requires |stride|; otherwise see base class."""
check.NotNone(stride,
'BiaffineDigraphNetwork requires "stride" and must be called '
'in the bulk feature extractor component.')
# TODO(googleuser): Add dropout during training.
del during_training
# Retrieve (possibly averaged) weights.
weights_arc = self._component.get_variable('weights_arc')
weights_source = self._component.get_variable('weights_source')
root = self._component.get_variable('root')
# Extract the source and target token activations. Use |stride| to collapse
# batch and beam into a single dimension.
sources = network_units.lookup_named_tensor('sources', linked_embeddings)
targets = network_units.lookup_named_tensor('targets', linked_embeddings)
source_tokens_bxnxs = tf.reshape(sources.tensor,
[stride, -1, self._source_dim])
target_tokens_bxnxt = tf.reshape(targets.tensor,
[stride, -1, self._target_dim])
num_tokens = tf.shape(source_tokens_bxnxs)[1]
# Compute the arc, source, and root potentials.
arcs_bxnxn = digraph_ops.ArcPotentialsFromTokens(
source_tokens_bxnxs, target_tokens_bxnxt, weights_arc)
sources_bxnxn = digraph_ops.ArcSourcePotentialsFromTokens(
source_tokens_bxnxs, weights_source)
roots_bxn = digraph_ops.RootPotentialsFromTokens(
root, target_tokens_bxnxt, weights_arc)
# Combine them into a single matrix with the roots on the diagonal.
adjacency_bxnxn = digraph_ops.CombineArcAndRootPotentials(
arcs_bxnxn + sources_bxnxn, roots_bxn)
return [tf.reshape(adjacency_bxnxn, [-1, num_tokens])]
class BiaffineLabelNetwork(network_units.NetworkUnitInterface):
"""Network unit that computes biaffine label scores.
The D&M parser uses a slightly modified version of the arc scoring function to
score labels. The differences are:
1. Each label has its own source and target MLPs and biaffine weights.
2. A linear term for the target token is added.
3. A bias term is added.
Parameters:
num_labels: The number of dependency labels, L.
Features:
sources: [B * N, S] matrix of batched activations for source tokens.
targets: [B * N, T] matrix of batched activations for target tokens.
Layers:
labels: [B * N, L] matrix where entry b*N+t,l is the score of the label of
the inbound arc for token t in batch b.
"""
def __init__(self, component):
"""Initializes weights and layers.
Args:
component: Parent ComponentBuilderBase object.
"""
super(BiaffineLabelNetwork, self).__init__(component)
parameters = component.spec.network_unit.parameters
self._num_labels = int(parameters['num_labels'])
check.Gt(self._num_labels, 0, 'Expected some labels')
check.Eq(len(self._fixed_feature_dims.items()), 0,
'Expected no fixed features')
check.Eq(len(self._linked_feature_dims.items()), 2,
'Expected two linked features')
check.In('sources', self._linked_feature_dims,
'Missing required linked feature')
check.In('targets', self._linked_feature_dims,
'Missing required linked feature')
self._source_dim = self._linked_feature_dims['sources']
self._target_dim = self._linked_feature_dims['targets']
# TODO(googleuser): Make parameter initialization configurable.
self._weights = []
self._weights.append(tf.get_variable(
'weights_pair', [self._num_labels, self._source_dim, self._target_dim],
tf.float32, tf.random_normal_initializer(stddev=1e-4)))
self._weights.append(tf.get_variable(
'weights_source', [self._num_labels, self._source_dim], tf.float32,
tf.random_normal_initializer(stddev=1e-4)))
self._weights.append(tf.get_variable(
'weights_target', [self._num_labels, self._target_dim], tf.float32,
tf.random_normal_initializer(stddev=1e-4)))
self._biases = []
self._biases.append(tf.get_variable(
'biases', [self._num_labels], tf.float32,
tf.random_normal_initializer(stddev=1e-4)))
self._params.extend(self._weights + self._biases)
self._regularized_weights.extend(self._weights)
self._layers.append(network_units.Layer(self, 'labels', self._num_labels))
def create(self,
fixed_embeddings,
linked_embeddings,
context_tensor_arrays,
attention_tensor,
during_training,
stride=None):
"""Requires |stride|; otherwise see base class."""
check.NotNone(stride,
'BiaffineLabelNetwork requires "stride" and must be called '
'in the bulk feature extractor component.')
# TODO(googleuser): Add dropout during training.
del during_training
# Retrieve (possibly averaged) weights.
weights_pair = self._component.get_variable('weights_pair')
weights_source = self._component.get_variable('weights_source')
weights_target = self._component.get_variable('weights_target')
biases = self._component.get_variable('biases')
# Extract and shape the source and target token activations. Use |stride|
# to collapse batch and beam into a single dimension.
sources = network_units.lookup_named_tensor('sources', linked_embeddings)
targets = network_units.lookup_named_tensor('targets', linked_embeddings)
sources_bxnxs = tf.reshape(sources.tensor, [stride, -1, self._source_dim])
targets_bxnxt = tf.reshape(targets.tensor, [stride, -1, self._target_dim])
# Compute the pair, source, and target potentials.
pairs_bxnxl = digraph_ops.LabelPotentialsFromTokenPairs(sources_bxnxs,
targets_bxnxt,
weights_pair)
sources_bxnxl = digraph_ops.LabelPotentialsFromTokens(sources_bxnxs,
weights_source)
targets_bxnxl = digraph_ops.LabelPotentialsFromTokens(targets_bxnxt,
weights_target)
# Combine them with the biases.
labels_bxnxl = pairs_bxnxl + sources_bxnxl + targets_bxnxl + biases
# Flatten out the batch dimension.
return [tf.reshape(labels_bxnxl, [-1, self._num_labels])]
| {
"content_hash": "ba0c3f953f8dd8dfc63867348e7dc199",
"timestamp": "",
"source": "github",
"line_count": 240,
"max_line_length": 80,
"avg_line_length": 40.895833333333336,
"alnum_prop": 0.6629648497198166,
"repo_name": "fx2003/tensorflow-study",
"id": "c34a2ed6a3c6dfb7117d8f8299dffdb757c0b469",
"size": "10493",
"binary": false,
"copies": "12",
"ref": "refs/heads/master",
"path": "TensorFlow实战/models/syntaxnet/dragnn/python/biaffine_units.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "5440"
},
{
"name": "C++",
"bytes": "1291114"
},
{
"name": "GLSL",
"bytes": "976"
},
{
"name": "HTML",
"bytes": "147010"
},
{
"name": "JavaScript",
"bytes": "33208"
},
{
"name": "Jupyter Notebook",
"bytes": "70949"
},
{
"name": "Makefile",
"bytes": "5174"
},
{
"name": "Protocol Buffer",
"bytes": "72897"
},
{
"name": "Python",
"bytes": "5306866"
},
{
"name": "Shell",
"bytes": "96467"
}
],
"symlink_target": ""
} |
""" Run a package found with qibuild find. """
from __future__ import absolute_import
from __future__ import unicode_literals
from __future__ import print_function
import qisys.parsers
import qisys.command
import qisys.envsetter
import qibuild.run
import qibuild.find
import qibuild.parsers
def configure_parser(parser):
""" Configure parser for this action. """
qibuild.parsers.cmake_build_parser(parser)
parser.add_argument("--no-exec", dest="exec_", action="store_false",
help="Do not use os.execve (Mostly useful for tests")
parser.add_argument("binary")
parser.add_argument("bin_args", metavar="-- Binary arguments", nargs="*",
help="Binary arguments -- to escape the leading '-'")
def do(args):
""" Main entry point. """
build_worktree = qibuild.parsers.get_build_worktree(args)
envsetter = qisys.envsetter.EnvSetter()
envsetter.read_config(build_worktree.build_config.qibuild_cfg)
qibuild.run.run(build_worktree.build_projects, args.binary, args.bin_args,
env=envsetter.get_build_env(), exec_=args.exec_)
| {
"content_hash": "308fefc8c6ce9433db764d56e35389c1",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 78,
"avg_line_length": 37.5,
"alnum_prop": 0.6808888888888889,
"repo_name": "aldebaran/qibuild",
"id": "7e44063aa25b52bf963672b78a51834cbb898b07",
"size": "1322",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/qibuild/actions/run.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "6892"
},
{
"name": "C++",
"bytes": "23130"
},
{
"name": "CMake",
"bytes": "292637"
},
{
"name": "Makefile",
"bytes": "755"
},
{
"name": "Nix",
"bytes": "563"
},
{
"name": "Python",
"bytes": "1581825"
},
{
"name": "SWIG",
"bytes": "306"
},
{
"name": "Shell",
"bytes": "888"
}
],
"symlink_target": ""
} |