code
stringlengths 2
1.05M
| repo_name
stringlengths 5
104
| path
stringlengths 4
251
| language
stringclasses 1
value | license
stringclasses 15
values | size
int32 2
1.05M
|
---|---|---|---|---|---|
# -*- coding: utf-8 -*-
#------------------------------------------------------------
# streamondemand - XBMC Plugin
# Conector para tutele
# http://blog.tvalacarta.info/plugin-xbmc/pelisalacarta/
#------------------------------------------------------------
import urlparse,urllib2,urllib,re
import os
from core import scrapertools
from core import logger
from core import config
# Module-wide flag: when the plugin's "debug" setting is enabled, extra
# page contents are written to the log (see find_url_play below).
DEBUG = config.get_setting("debug")
def find_url_play(data, headers):
    """Extract an rtmpdump-style playback url for a tutelehd embed.

    data    -- html of the page embedding the tutelehd player
    headers -- http headers forwarded when fetching the embed page
    Returns the assembled rtmp url string, or '' when no channel id is
    present in the page.
    """
    logger.info("[tutele.py] find_url_play")

    # The embedding page announces the channel inside an inline script,
    # immediately followed by the embedPlayer.js loader, e.g.:
    #   <script ...> width=700, height=420, channel='footmax1', ... </script>
    #   <script ... src='http://tutelehd.com/embedPlayer.js'></script>
    channel = scrapertools.find_single_match(data, "channel=['\"]([^'\"]+)[^<]+</script><script type=['\"]text/javascript['\"] src=['\"]http://tutelehd.com/embedPlayer.js['\"]")
    if not channel:
        return ''

    # Fetch the player setup page for that channel.
    pageurl = 'http://tutelehd.com/embed/embed.php?channel=%s&w=700&h=420' % channel
    embed_html = scrapertools.cachePage(pageurl, headers=headers)
    if (DEBUG): logger.info("data2=" + embed_html)

    # The embed page configures a SWFObject player; collect every value
    # rtmpdump needs: swf path, playpath ('file'), streamer url and token.
    swfurl = 'http://tutelehd.com' + scrapertools.find_single_match(embed_html, 'new SWFObject\(["\']([^"\']+)')
    playpath = scrapertools.find_single_match(embed_html, '["\']file["\'][:,]\s*["\']([^"\']+)')
    rtmpurl = scrapertools.find_single_match(embed_html, '["\']streamer["\'][:,]\s*["\']([^"\']+)')
    token = scrapertools.find_single_match(embed_html, '["\']token["\'][:,]\s*["\']([^"\']+)')

    return '%s playpath=%s swfUrl=%s swfVfy=1 live=true token=%s pageUrl=%s' % (rtmpurl, playpath, swfurl, token, pageurl)
| Zanzibar82/streamondemand.test | servers_sports/tutele.py | Python | gpl-3.0 | 3,586 |
import logging
import ssl
import sys
import threading
from typing import List
import websocket
from homematicip.base.enums import *
from homematicip.base.helpers import bytes2str
from homematicip.class_maps import *
from homematicip.connection import Connection
from homematicip.device import *
from homematicip.EventHook import *
from homematicip.group import *
from homematicip.rule import *
from homematicip.securityEvent import *
# Module-level logger shared by every class in this module.
LOGGER = logging.getLogger(__name__)
class Weather(HomeMaticIPObject):
    """Weather report for the home's location."""

    def __init__(self, connection):
        super().__init__(connection)
        #:float: current outside temperature
        self.temperature = 0.0
        #:WeatherCondition: current weather condition
        self.weatherCondition = WeatherCondition.UNKNOWN
        #:WeatherDayTime: whether it is currently day or night
        self.weatherDayTime = WeatherDayTime.DAY
        #:float: today's minimum temperature
        self.minTemperature = 0.0
        #:float: today's maximum temperature
        self.maxTemperature = 0.0
        #:float: current relative humidity
        self.humidity = 0
        #:float: current wind speed
        self.windSpeed = 0.0
        #:int: wind direction in 360° where 0° is north
        self.windDirection = 0
        #:float: current vapor amount
        self.vaporAmount = 0.0

    def from_json(self, js):
        super().from_json(js)
        # Numeric fields carry over verbatim from the json document.
        for field in ("temperature", "minTemperature", "maxTemperature",
                      "humidity", "windSpeed", "windDirection", "vaporAmount"):
            setattr(self, field, js[field])
        # Enum-valued fields are encoded as strings and need parsing.
        self.weatherCondition = WeatherCondition.from_str(js["weatherCondition"])
        self.weatherDayTime = WeatherDayTime.from_str(js["weatherDayTime"])

    def __str__(self):
        return (
            f"temperature({self.temperature}) "
            f"weatherCondition({self.weatherCondition}) "
            f"weatherDayTime({self.weatherDayTime}) "
            f"minTemperature({self.minTemperature}) "
            f"maxTemperature({self.maxTemperature}) "
            f"humidity({self.humidity}) "
            f"vaporAmount({self.vaporAmount}) "
            f"windSpeed({self.windSpeed}) "
            f"windDirection({self.windDirection})"
        )
class Location(HomeMaticIPObject):
    """Geographic location of the home (city name plus coordinates)."""

    def __init__(self, connection):
        super().__init__(connection)
        #:str: name of the city (defaults to London)
        self.city = "London"
        #:float: latitude of the location
        self.latitude = 51.509865
        #:float: longitude of the location
        self.longitude = -0.118092

    def from_json(self, js):
        super().from_json(js)
        # All three fields map 1:1 from the json document.
        for field in ("city", "latitude", "longitude"):
            setattr(self, field, js[field])

    def __str__(self):
        return f"city({self.city}) latitude({self.latitude}) longitude({self.longitude})"
class Client(HomeMaticIPObject):
    """A client is an app which has access to the access point.
    e.g. smartphone, 3th party apps, google home, conrad connect
    """

    def __init__(self, connection):
        super().__init__(connection)
        #:str: the unique id of the client
        self.id = ""
        #:str: a human understandable name of the client
        self.label = ""
        #:str: id of the home this client belongs to
        self.homeId = ""
        #:str: the c2c service name (empty unless provided by the cloud)
        self.c2cServiceIdentifier = ""
        #:ClientType: the type of this client
        self.clientType = ClientType.APP

    def from_json(self, js):
        super().from_json(js)
        self.id = js["id"]
        self.label = js["label"]
        self.homeId = js["homeId"]
        self.clientType = ClientType.from_str(js["clientType"])
        # Optional field: keep the current value when the key is absent.
        self.c2cServiceIdentifier = js.get(
            "c2cServiceIdentifier", self.c2cServiceIdentifier
        )

    def __str__(self):
        return f"label({self.label})"
class OAuthOTK(HomeMaticIPObject):
    """One-time key (OTK) used for the OAuth authorization flow."""

    def __init__(self, connection):
        super().__init__(connection)
        # Token string; stays None until from_json() was called.
        self.authToken = None
        # Expiration timestamp; stays None until from_json() was called.
        self.expirationTimestamp = None

    def from_json(self, js):
        super().from_json(js)
        self.authToken = js["authToken"]
        raw_ts = js["expirationTimestamp"]
        self.expirationTimestamp = self.fromtimestamp(raw_ts)
class AccessPointUpdateState(HomeMaticIPObject):
    """Firmware/update state of a single access point."""

    def __init__(self, connection):
        super().__init__(connection)
        #:DeviceUpdateState: current update state of the access point
        self.accessPointUpdateState = DeviceUpdateState.UP_TO_DATE
        #:datetime: when an update last succeeded
        self.successfulUpdateTimestamp = None
        #:datetime: when the update state last changed
        self.updateStateChangedTimestamp = None

    def from_json(self, js):
        # NOTE(review): unlike the sibling classes in this module this
        # method does not call super().from_json(js) -- confirm whether
        # that is intentional before changing it.
        self.accessPointUpdateState = js["accessPointUpdateState"]
        ok_ts = js["successfulUpdateTimestamp"]
        changed_ts = js["updateStateChangedTimestamp"]
        self.successfulUpdateTimestamp = self.fromtimestamp(ok_ts)
        self.updateStateChangedTimestamp = self.fromtimestamp(changed_ts)
class Home(HomeMaticIPObject):
    """this class represents the 'Home' of the homematic ip"""

    # Lookup tables mapping cloud type strings to local classes.
    _typeClassMap = TYPE_CLASS_MAP
    _typeGroupMap = TYPE_GROUP_MAP
    _typeSecurityEventMap = TYPE_SECURITY_EVENT_MAP
    _typeRuleMap = TYPE_RULE_MAP
    _typeFunctionalHomeMap = TYPE_FUNCTIONALHOME_MAP

    def __init__(self, connection=None):
        if connection is None:
            connection = Connection()
        super().__init__(connection)
        # List with create handlers.
        self._on_create = []
        self.apExchangeClientId = None
        self.apExchangeState = ApExchangeState.NONE
        self.availableAPVersion = None
        self.carrierSense = None
        #:bool:displays if the access point is connected to the hmip cloud or
        # not
        self.connected = None
        #:str:the current version of the access point
        self.currentAPVersion = None
        self.deviceUpdateStrategy = DeviceUpdateStrategy.MANUALLY
        self.dutyCycle = None
        #:str:the SGTIN of the access point
        self.id = None
        self.lastReadyForUpdateTimestamp = None
        #:Location:the location of the AP
        self.location = None
        #:bool:determines if a pin is set on this access point
        self.pinAssigned = None
        self.powerMeterCurrency = None
        self.powerMeterUnitPrice = None
        self.timeZoneId = None
        self.updateState = HomeUpdateState.UP_TO_DATE
        #:Weather:the current weather
        self.weather = None
        self.__webSocket = None
        self.__webSocketThread = None
        self.onEvent = EventHook()
        self.onWsError = EventHook()
        #:bool:switch to enable/disable automatic reconnection of the websocket (default=True)
        self.websocket_reconnect_on_error = True
        #:List[Device]: a collection of all devices in home
        self.devices = []
        #:List[Client]: a collection of all clients in home
        self.clients = []
        #:List[Group]: a collection of all groups in the home
        self.groups = []
        #:List[Rule]: a collection of all rules in the home
        self.rules = []
        #: a collection of all functionalHomes in the home
        self.functionalHomes = []
        #:Map: a map of all access points and their updateStates
        self.accessPointUpdateStates = {}

    def init(self, access_point_id, lookup=True):
        """initializes the connection for the given access point id"""
        self._connection.init(access_point_id, lookup)

    def set_auth_token(self, auth_token):
        """sets the auth token used for all subsequent REST calls"""
        self._connection.set_auth_token(auth_token)

    def from_json(self, js_home):
        """parses the 'home' part of a cloud state document into self"""
        super().from_json(js_home)
        self.weather = Weather(self._connection)
        self.weather.from_json(js_home["weather"])
        # fixed: identity comparison instead of `!= None`
        if js_home["location"] is not None:
            self.location = Location(self._connection)
            self.location.from_json(js_home["location"])
        self.connected = js_home["connected"]
        self.currentAPVersion = js_home["currentAPVersion"]
        self.availableAPVersion = js_home["availableAPVersion"]
        self.timeZoneId = js_home["timeZoneId"]
        self.pinAssigned = js_home["pinAssigned"]
        self.dutyCycle = js_home["dutyCycle"]
        self.updateState = HomeUpdateState.from_str(js_home["updateState"])
        self.powerMeterUnitPrice = js_home["powerMeterUnitPrice"]
        self.powerMeterCurrency = js_home["powerMeterCurrency"]
        self.deviceUpdateStrategy = DeviceUpdateStrategy.from_str(
            js_home["deviceUpdateStrategy"]
        )
        self.lastReadyForUpdateTimestamp = js_home["lastReadyForUpdateTimestamp"]
        self.apExchangeClientId = js_home["apExchangeClientId"]
        self.apExchangeState = ApExchangeState.from_str(js_home["apExchangeState"])
        self.id = js_home["id"]
        self.carrierSense = js_home["carrierSense"]
        for ap, state in js_home["accessPointUpdateStates"].items():
            ap_state = AccessPointUpdateState(self._connection)
            ap_state.from_json(state)
            self.accessPointUpdateStates[ap] = ap_state
        self._get_rules(js_home)

    def on_create(self, handler):
        """Adds an event handler to the create method. Fires when a device
        is created."""
        self._on_create.append(handler)

    def fire_create_event(self, *args, **kwargs):
        """Trigger the method tied to _on_create"""
        for _handler in self._on_create:
            _handler(*args, **kwargs)

    def remove_callback(self, handler):
        """Remove event handler."""
        super().remove_callback(handler)
        if handler in self._on_create:
            self._on_create.remove(handler)

    def download_configuration(self) -> str:
        """downloads the current configuration from the cloud
        Returns
            the downloaded configuration or an errorCode
        """
        return self._restCall(
            "home/getCurrentState", json.dumps(self._connection.clientCharacteristics)
        )

    def get_current_state(self, clearConfig: bool = False):
        """downloads the current configuration and parses it into self
        Args:
            clearConfig(bool): if set to true, this function will remove all old objects
            from self.devices, self.client, ... to have a fresh config instead of reparsing them
        """
        json_state = self.download_configuration()
        return self.update_home(json_state, clearConfig)

    def update_home(self, json_state, clearConfig: bool = False):
        """parse a given json configuration into self.
        This will update the whole home including devices, clients and groups.
        Args:
            clearConfig(bool): if set to true, this function will remove all old objects
            from self.devices, self.client, ... to have a fresh config instead of reparsing them
        """
        if "errorCode" in json_state:
            LOGGER.error(
                "Could not get the current configuration. Error: %s",
                json_state["errorCode"],
            )
            return False
        if clearConfig:
            self.devices = []
            self.clients = []
            self.groups = []
        self._get_devices(json_state)
        self._get_clients(json_state)
        self._get_groups(json_state)
        self._load_functionalChannels()
        js_home = json_state["home"]
        return self.update_home_only(js_home, clearConfig)

    def update_home_only(self, js_home, clearConfig: bool = False):
        """parse a given home json configuration into self.
        This will update only the home without updating devices, clients and groups.
        Args:
            clearConfig(bool): if set to true, this function will remove all old objects
            from self.devices, self.client, ... to have a fresh config instead of reparsing them
        """
        if "errorCode" in js_home:
            LOGGER.error(
                "Could not get the current configuration. Error: %s",
                js_home["errorCode"],
            )
            return False
        if clearConfig:
            self.rules = []
            self.functionalHomes = []
        self.from_json(js_home)
        self._get_functionalHomes(js_home)
        return True

    def _get_devices(self, json_state):
        """sync self.devices with the 'devices' section of the state:
        drop vanished devices, update known ones, add new ones"""
        self.devices = [x for x in self.devices if x.id in json_state["devices"].keys()]
        for id_, raw in json_state["devices"].items():
            _device = self.search_device_by_id(id_)
            if _device:
                _device.from_json(raw)
            else:
                self.devices.append(self._parse_device(raw))

    def _parse_device(self, json_state):
        """instantiate the device class matching the json 'type', falling
        back to the generic Device class for unknown types"""
        try:
            deviceType = DeviceType.from_str(json_state["type"])
            d = self._typeClassMap[deviceType](self._connection)
            d.from_json(json_state)
            return d
        except Exception:
            d = self._typeClassMap[DeviceType.DEVICE](self._connection)
            d.from_json(json_state)
            LOGGER.warning("There is no class for device '%s' yet", json_state["type"])
            return d

    def _get_rules(self, json_state):
        """sync self.rules with the 'ruleMetaDatas' section of the state"""
        self.rules = [
            x for x in self.rules if x.id in json_state["ruleMetaDatas"].keys()
        ]
        for id_, raw in json_state["ruleMetaDatas"].items():
            _rule = self.search_rule_by_id(id_)
            if _rule:
                _rule.from_json(raw)
            else:
                self.rules.append(self._parse_rule(raw))

    def _parse_rule(self, json_state):
        """instantiate the rule class matching the json 'type', falling
        back to the generic Rule class for unknown types"""
        try:
            ruleType = AutomationRuleType.from_str(json_state["type"])
            r = self._typeRuleMap[ruleType](self._connection)
            r.from_json(json_state)
            return r
        except Exception:
            r = Rule(self._connection)
            r.from_json(json_state)
            LOGGER.warning("There is no class for rule '%s' yet", json_state["type"])
            return r

    def _get_clients(self, json_state):
        """sync self.clients with the 'clients' section of the state"""
        self.clients = [x for x in self.clients if x.id in json_state["clients"].keys()]
        for id_, raw in json_state["clients"].items():
            _client = self.search_client_by_id(id_)
            if _client:
                _client.from_json(raw)
            else:
                c = Client(self._connection)
                c.from_json(raw)
                self.clients.append(c)

    def _parse_group(self, json_state):
        """instantiate the group class matching the json 'type'; META
        groups need the group list as well, others only the devices"""
        g = None
        if json_state["type"] == "META":
            g = MetaGroup(self._connection)
            g.from_json(json_state, self.devices, self.groups)
        else:
            try:
                groupType = GroupType.from_str(json_state["type"])
                g = self._typeGroupMap[groupType](self._connection)
                g.from_json(json_state, self.devices)
            except Exception:
                g = self._typeGroupMap[GroupType.GROUP](self._connection)
                g.from_json(json_state, self.devices)
                LOGGER.warning(
                    "There is no class for group '%s' yet", json_state["type"]
                )
        return g

    def _get_groups(self, json_state):
        """sync self.groups with the 'groups' section of the state.
        META groups are parsed last because they reference other groups."""
        self.groups = [x for x in self.groups if x.id in json_state["groups"].keys()]
        metaGroups = []
        for id_, raw in json_state["groups"].items():
            _group = self.search_group_by_id(id_)
            if _group:
                if isinstance(_group, MetaGroup):
                    _group.from_json(raw, self.devices, self.groups)
                else:
                    _group.from_json(raw, self.devices)
            else:
                group_type = raw["type"]
                if group_type == "META":
                    metaGroups.append(raw)
                else:
                    self.groups.append(self._parse_group(raw))
        for mg in metaGroups:
            self.groups.append(self._parse_group(mg))

    def _get_functionalHomes(self, json_state):
        """sync self.functionalHomes with the 'functionalHomes' section,
        reusing existing instances where the solution name matches"""
        for solution, functionalHome in json_state["functionalHomes"].items():
            try:
                solutionType = FunctionalHomeType.from_str(solution)
                h = None
                for fh in self.functionalHomes:
                    if fh.solution == solution:
                        h = fh
                        break
                if h is None:
                    h = self._typeFunctionalHomeMap[solutionType](self._connection)
                    self.functionalHomes.append(h)
                h.from_json(functionalHome, self.groups)
            except Exception:
                h = FunctionalHome(self._connection)
                h.from_json(functionalHome, self.groups)
                LOGGER.warning(
                    "There is no class for functionalHome '%s' yet", solution
                )
                self.functionalHomes.append(h)

    def _load_functionalChannels(self):
        """(re)attach functional channels of every device to the groups"""
        for d in self.devices:
            d.load_functionalChannels(self.groups)

    def get_functionalHome(self, functionalHomeType: type) -> FunctionalHome:
        """ gets the specified functionalHome
        Args:
            functionalHome(type): the type of the functionalHome which should be returned
        Returns:
            the FunctionalHome or None if it couldn't be found
        """
        for x in self.functionalHomes:
            if isinstance(x, functionalHomeType):
                return x
        return None

    def search_device_by_id(self, deviceID) -> Device:
        """ searches a device by given id
        Args:
            deviceID(str): the device to search for
        Returns
            the Device object or None if it couldn't find a device
        """
        for d in self.devices:
            if d.id == deviceID:
                return d
        return None

    def search_group_by_id(self, groupID) -> Group:
        """ searches a group by given id
        Args:
            groupID(str): groupID the group to search for
        Returns
            the group object or None if it couldn't find a group
        """
        for g in self.groups:
            if g.id == groupID:
                return g
        return None

    def search_client_by_id(self, clientID) -> Client:
        """ searches a client by given id
        Args:
            clientID(str): the client to search for
        Returns
            the client object or None if it couldn't find a client
        """
        for c in self.clients:
            if c.id == clientID:
                return c
        return None

    def search_rule_by_id(self, ruleID) -> Rule:
        """ searches a rule by given id
        Args:
            ruleID(str): the rule to search for
        Returns
            the rule object or None if it couldn't find a rule
        """
        for r in self.rules:
            if r.id == ruleID:
                return r
        return None

    def get_security_zones_activation(self) -> (bool, bool):
        """ returns the value of the security zones if they are armed or not
        Returns
            internal
                True if the internal zone is armed
            external
                True if the external zone is armed
        """
        internal_active = False
        external_active = False
        for g in self.groups:
            if isinstance(g, SecurityZoneGroup):
                if g.label == "EXTERNAL":
                    external_active = g.active
                elif g.label == "INTERNAL":
                    internal_active = g.active
        return internal_active, external_active

    def set_security_zones_activation(self, internal=True, external=True):
        """ this function will set the alarm system to armed or disable it
        Args:
            internal(bool): activates/deactivates the internal zone
            external(bool): activates/deactivates the external zone
        Examples:
            arming while being at home
            >>> home.set_security_zones_activation(False,True)
            arming without being at home
            >>> home.set_security_zones_activation(True,True)
            disarming the alarm system
            >>> home.set_security_zones_activation(False,False)
        """
        data = {"zonesActivation": {"EXTERNAL": external, "INTERNAL": internal}}
        return self._restCall("home/security/setZonesActivation", json.dumps(data))

    def set_location(self, city, latitude, longitude):
        """sets the home's location used e.g. for weather data"""
        data = {"city": city, "latitude": latitude, "longitude": longitude}
        return self._restCall("home/setLocation", json.dumps(data))

    def set_intrusion_alert_through_smoke_detectors(self, activate: bool = True):
        """ activate or deactivate if smoke detectors should "ring" during an alarm
        Args:
            activate(bool): True will let the smoke detectors "ring" during an alarm
        """
        data = {"intrusionAlertThroughSmokeDetectors": activate}
        return self._restCall(
            "home/security/setIntrusionAlertThroughSmokeDetectors", json.dumps(data)
        )

    def activate_absence_with_period(self, endtime: datetime):
        """ activates the absence mode until the given time
        Args:
            endtime(datetime): the time when the absence should automatically be disabled
        """
        data = {"endTime": endtime.strftime("%Y_%m_%d %H:%M")}
        return self._restCall(
            "home/heating/activateAbsenceWithPeriod", json.dumps(data)
        )

    def activate_absence_permanent(self):
        """ activates the absence forever
        """
        return self._restCall("home/heating/activateAbsencePermanent")

    def activate_absence_with_duration(self, duration: int):
        """ activates the absence mode for a given time
        Args:
            duration(int): the absence duration in minutes
        """
        data = {"duration": duration}
        return self._restCall(
            "home/heating/activateAbsenceWithDuration", json.dumps(data)
        )

    def deactivate_absence(self):
        """ deactivates the absence mode immediately"""
        return self._restCall("home/heating/deactivateAbsence")

    def activate_vacation(self, endtime: datetime, temperature: float):
        """ activates the vatation mode until the given time
        Args:
            endtime(datetime): the time when the vatation mode should automatically be disabled
            temperature(float): the settemperature during the vacation mode
        """
        data = {
            "endTime": endtime.strftime("%Y_%m_%d %H:%M"),
            "temperature": temperature,
        }
        return self._restCall("home/heating/activateVacation", json.dumps(data))

    def deactivate_vacation(self):
        """ deactivates the vacation mode immediately"""
        return self._restCall("home/heating/deactivateVacation")

    def set_pin(self, newPin: str, oldPin: str = None) -> dict:
        """ sets a new pin for the home
        Args:
            newPin(str): the new pin
            oldPin(str): optional, if there is currently a pin active it must be given here.
                Otherwise it will not be possible to set the new pin
        Returns:
            the result of the call
        """
        if newPin is None:
            newPin = ""
        data = {"pin": newPin}
        # The old pin travels in a request header, not in the body; remove
        # it again afterwards so later calls don't keep sending it.
        if oldPin:
            self._connection.headers["PIN"] = str(oldPin)
        result = self._restCall("home/setPin", body=json.dumps(data))
        if oldPin:
            del self._connection.headers["PIN"]
        return result

    def set_zone_activation_delay(self, delay):
        """sets the delay (in seconds) before a security zone becomes active"""
        data = {"zoneActivationDelay": delay}
        return self._restCall(
            "home/security/setZoneActivationDelay", body=json.dumps(data)
        )

    def get_security_journal(self):
        """downloads and parses the security journal
        Returns:
            list of SecurityEvent objects, or None on an errorCode
        """
        journal = self._restCall("home/security/getSecurityJournal")
        if "errorCode" in journal:
            LOGGER.error(
                "Could not get the security journal. Error: %s", journal["errorCode"]
            )
            return None
        ret = []
        for entry in journal["entries"]:
            try:
                eventType = SecurityEventType(entry["eventType"])
                if eventType in self._typeSecurityEventMap:
                    j = self._typeSecurityEventMap[eventType](self._connection)
                else:
                    # fixed: previously `j` stayed unbound (or stale from the
                    # previous iteration) for known-but-unmapped event types
                    j = SecurityEvent(self._connection)
                    LOGGER.warning("There is no class for %s yet", entry["eventType"])
            except Exception:
                j = SecurityEvent(self._connection)
                LOGGER.warning("There is no class for %s yet", entry["eventType"])
            j.from_json(entry)
            ret.append(j)
        return ret

    def delete_group(self, group: Group):
        """deletes the given group from the cloud
        Args:
            group(Group):the group to delete
        """
        return group.delete()

    def get_OAuth_OTK(self):
        """requests a fresh OAuth one-time key from the cloud"""
        token = OAuthOTK(self._connection)
        token.from_json(self._restCall("home/getOAuthOTK"))
        return token

    def set_timezone(self, timezone: str):
        """ sets the timezone for the AP. e.g. "Europe/Berlin"
        Args:
            timezone(str): the new timezone
        """
        data = {"timezoneId": timezone}
        return self._restCall("home/setTimezone", body=json.dumps(data))

    def set_powermeter_unit_price(self, price):
        """sets the energy price per unit used for cost calculations"""
        data = {"powerMeterUnitPrice": price}
        return self._restCall("home/setPowerMeterUnitPrice", body=json.dumps(data))

    def set_zones_device_assignment(self, internal_devices, external_devices) -> dict:
        """ sets the devices for the security zones
        Args:
            internal_devices(List[Device]): the devices which should be used for the internal zone
            external_devices(List[Device]): the devices which should be used for the external(hull) zone
        Returns:
            the result of _restCall
        """
        internal = [x.id for x in internal_devices]
        external = [x.id for x in external_devices]
        data = {"zonesDeviceAssignment": {"INTERNAL": internal, "EXTERNAL": external}}
        return self._restCall(
            "home/security/setZonesDeviceAssignment", body=json.dumps(data)
        )

    def start_inclusion(self, deviceId):
        """ start inclusion mode for specific device
        Args:
            deviceId: sgtin of device
        """
        data = {"deviceId": deviceId}
        return self._restCall("home/startInclusionModeForDevice", body=json.dumps(data))

    def enable_events(self, enable_trace=False, ping_interval=20):
        """opens the push-event websocket in a daemon thread
        Args:
            enable_trace(bool): enable websocket frame tracing
            ping_interval(int): seconds between keep-alive pings
        """
        websocket.enableTrace(enable_trace)
        self.__webSocket = websocket.WebSocketApp(
            self._connection.urlWebSocket,
            header=[
                "AUTHTOKEN: {}".format(self._connection.auth_token),
                "CLIENTAUTH: {}".format(self._connection.clientauth_token),
            ],
            on_message=self._ws_on_message,
            on_error=self._ws_on_error,
            on_close=self._ws_on_close,
        )
        websocket_kwargs = {"ping_interval": ping_interval}
        if hasattr(sys, "_called_from_test"):  # disable ssl during a test run
            sslopt = {"cert_reqs": ssl.CERT_NONE}
            websocket_kwargs = {"sslopt": sslopt, "ping_interval": 2, "ping_timeout": 1}
        self.__webSocketThread = threading.Thread(
            name="hmip-websocket",
            target=self.__webSocket.run_forever,
            kwargs=websocket_kwargs,
        )
        self.__webSocketThread.daemon = True
        self.__webSocketThread.start()

    def disable_events(self):
        """closes the push-event websocket (if open)"""
        if self.__webSocket:
            self.__webSocket.close()
            self.__webSocket = None

    def _ws_on_close(self, *_):
        self.__webSocket = None

    def _ws_on_error(self, _, err):
        LOGGER.exception(err)
        self.onWsError.fire(err)
        if self.websocket_reconnect_on_error:
            # fixed: was `logger.debug(...)` -- the module only defines
            # LOGGER, so this raised NameError on every websocket error
            LOGGER.debug("Trying to reconnect websocket")
            self.disable_events()
            self.enable_events()

    def _ws_on_message(self, _, message):
        """dispatches a push message from the cloud into local state and
        fires the matching create/update/remove events"""
        # json.loads doesn't support bytes as parameter before python 3.6
        js = json.loads(bytes2str(message))
        # LOGGER.debug(js)
        eventList = []
        for event in js["events"].values():
            try:
                pushEventType = EventType(event["pushEventType"])
                LOGGER.debug(pushEventType)
                obj = None
                if pushEventType == EventType.GROUP_CHANGED:
                    data = event["group"]
                    obj = self.search_group_by_id(data["id"])
                    if obj is None:  # no GROUP_ADDED event arrived before
                        obj = self._parse_group(data)
                        self.groups.append(obj)
                        pushEventType = EventType.GROUP_ADDED
                        self.fire_create_event(obj, event_type=pushEventType, obj=obj)
                    if type(obj) is MetaGroup:
                        obj.from_json(data, self.devices, self.groups)
                    else:
                        obj.from_json(data, self.devices)
                    obj.fire_update_event(data, event_type=pushEventType, obj=obj)
                elif pushEventType == EventType.HOME_CHANGED:
                    data = event["home"]
                    obj = self
                    obj.update_home_only(data)
                    obj.fire_update_event(data, event_type=pushEventType, obj=obj)
                elif pushEventType == EventType.CLIENT_ADDED:
                    data = event["client"]
                    obj = Client(self._connection)
                    obj.from_json(data)
                    self.clients.append(obj)
                elif pushEventType == EventType.CLIENT_CHANGED:
                    data = event["client"]
                    obj = self.search_client_by_id(data["id"])
                    obj.from_json(data)
                elif pushEventType == EventType.CLIENT_REMOVED:
                    obj = self.search_client_by_id(event["id"])
                    self.clients.remove(obj)
                elif pushEventType == EventType.DEVICE_ADDED:
                    data = event["device"]
                    obj = self._parse_device(data)
                    obj.load_functionalChannels(self.groups)
                    self.devices.append(obj)
                    self.fire_create_event(data, event_type=pushEventType, obj=obj)
                elif pushEventType == EventType.DEVICE_CHANGED:
                    data = event["device"]
                    obj = self.search_device_by_id(data["id"])
                    if obj is None:  # no DEVICE_ADDED Event?
                        obj = self._parse_device(data)
                        self.devices.append(obj)
                        pushEventType = EventType.DEVICE_ADDED
                        self.fire_create_event(data, event_type=pushEventType, obj=obj)
                    else:
                        obj.from_json(data)
                        obj.load_functionalChannels(self.groups)
                        obj.fire_update_event(data, event_type=pushEventType, obj=obj)
                elif pushEventType == EventType.DEVICE_REMOVED:
                    obj = self.search_device_by_id(event["id"])
                    obj.fire_remove_event(obj, event_type=pushEventType, obj=obj)
                    self.devices.remove(obj)
                elif pushEventType == EventType.GROUP_REMOVED:
                    obj = self.search_group_by_id(event["id"])
                    obj.fire_remove_event(obj, event_type=pushEventType, obj=obj)
                    self.groups.remove(obj)
                elif pushEventType == EventType.GROUP_ADDED:
                    group = event["group"]
                    obj = self._parse_group(group)
                    self.groups.append(obj)
                    self.fire_create_event(obj, event_type=pushEventType, obj=obj)
                elif pushEventType == EventType.SECURITY_JOURNAL_CHANGED:
                    pass  # data is just none so nothing to do here
                # TODO: implement INCLUSION_REQUESTED, NONE
                eventList.append({"eventType": pushEventType, "data": obj})
            except ValueError:  # pragma: no cover
                LOGGER.warning(
                    "Uknown EventType '%s' Data: %s", event["pushEventType"], event
                )
            except Exception as err:  # pragma: no cover
                LOGGER.exception(err)
        self.onEvent.fire(eventList)
| coreGreenberet/homematicip-rest-api | homematicip/home.py | Python | gpl-3.0 | 33,503 |
from django.conf.urls import url
from django.contrib.auth.decorators import login_required
from consultas.views import *
# URL routes for the consultas app. Order matters: Django resolves the
# first regex that matches, so keep the more specific patterns first.
urlpatterns = [
    # --- confirmation workflow ------------------------------------------
    url(r'confirmar/crear/$',login_required(ConfirmacionesCrearView.as_view()), name="confirmaciones_crear"),
    # NOTE(review): the three list views below are only parenthesised, not
    # wrapped in login_required -- confirm whether public access is intended.
    url(r'confirmar/lista/$',(ConfirmacionListView.as_view()), name="confirmacion_lista"),
    url(r'confirmar/lista/pendientes/excel/$',(ConfirmacionPendientesExcelView.as_view()), name="confirmacion_pendiente_excel"),
    url(r'confirmar/lista/pendientes/$',(ConfirmacionPendientesListView.as_view()), name="confirmacion_pendiente_lista"),
    url(r'confirmar/gracias/$',ConfirmacionGraciasView.as_view(), name="confirmacion_gracias"),
    url(r'consulta/editar/(?P<pk>\d+)/$',login_required(ConfirmacionUpdateView.as_view()), name="confirmacion_editar"),
    # Unauthenticated answer endpoint; the <password> URL segment suggests
    # token-based access -- NOTE(review): verify against the view.
    url(r'confirmar/(?P<pk>\d+)/(?P<password>\w+)/$',ConfirmacionContestarView.as_view(), name="confirmacion_contestar"),
    #url(r'confirmar/enviar/$',login_required(ConfirmacionSendView.as_view()), name="confirmacion_envio"),
    # --- consulta CRUD ---------------------------------------------------
    url(r'nueva/$',login_required(ConsultaCreateView.as_view()), name="consulta_nueva"),
    url(r'enviar/(?P<pk>\d+)/$',login_required(ConsultaEnviarView.as_view()), name="consulta_enviar"),
    url(r'consultas/$', login_required(ConsultaListView.as_view()),name="consulta_lista"),
    # NOTE(review): RespuestaCreateView is also reachable without login.
    url(r'respuesta/(?P<consulta_id>\d+)/(?P<alumno_id>\d+)$', RespuestaCreateView.as_view(),name="consulta_responder"),
    url(r'consulta/(?P<pk>\d+)/$',login_required(ConsultaDetailView.as_view()), name="consulta_detalle"),
    url(r'consulta/(?P<pk>\d+)/borrar/$',login_required(ConsultaDeleteView.as_view()), name="consulta_borrar"),
    url(r'consulta/(?P<pk>\d+)/editar/$',login_required(ConsultaUpdateView.as_view()), name="consulta_editar"),
]
| Etxea/gestioneide | consultas/urls.py | Python | gpl-3.0 | 1,772 |
#!/usr/bin/python
# Script version marker; not referenced anywhere else in this file.
version=1.0
# orangen.py: simplified default pin generator for livebox 2.1 and livebox next
# usage: python orangen.py <4 last digits from wan mac> <4 last digits from serial>
# It will just return the PIN
# Algorithm and vulnerability by wifi-libre. For more details check https://www.wifi-libre.com/topic-869-todo-sobre-al-algoritmo-wps-livebox-arcadyan-orange-xxxx.html
# This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 3 of the License, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
# You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
# Contact script author: [email protected]
# Copyleft (C) 2017 kcdtv @ www.wifi-libre.com
import sys
def wps_checksum(x):
    """Return the WPS checksum digit for the 7-digit pin *x*.

    Digits are weighted 3,1,3,1,... starting from the least significant
    digit; the checksum digit makes the weighted digit sum a multiple
    of 10 (standard Wi-Fi Protected Setup pin checksum).
    """
    accum = 0
    t = x
    while t:
        accum += 3 * (t % 10)
        # fixed: use floor division -- the original `t /= 10` yields a
        # float on Python 3, producing wrong checksums; `//=` behaves
        # identically for ints on Python 2 as well
        t //= 10
        accum += t % 10
        t //= 10
    return (10 - accum % 10) % 10
# --- inputs: 4 last hex digits of the WAN MAC and of the serial number ---
mac = sys.argv[1]
seri = sys.argv[2]
# Parse each hex character into its 4-bit nibble value.
s1 = int(seri[0], 16)
s2 = int(seri[1], 16)
s3 = int(seri[2], 16)
s4 = int(seri[3], 16)
m1 = int(mac[0], 16)
m2 = int(mac[1], 16)
m3 = int(mac[2], 16)
m4 = int(mac[3], 16)
# Two 4-bit mixing keys combining serial and MAC nibbles.
k1 = ( s1 + s2 + m3 + m4 ) & 0xf
k2 = ( s3 + s4 + m1 + m2 ) & 0xf
# Seven pin nibbles derived by xor-ing the keys with individual nibbles.
d1 = k1 ^ s4
d2 = k1 ^ s3
d3 = k2 ^ m2
d4 = k2 ^ m3
d5 = s4 ^ m3
d6 = s3 ^ m4
d7 = k1 ^ s2
# Concatenate the 7 nibbles as one hex number, reduce it to 7 decimal
# digits, then append the WPS checksum digit to form the 8-digit pin.
pin = int("0x%1x%1x%1x%1x%1x%1x%1x"%(d1, d2, d3, d4, d5, d6, d7), 16) % 10000000
pin = "%.7d%d" %(pin, wps_checksum(pin))
print (pin)
| kcdtv/nmk | orangen.py | Python | gpl-3.0 | 1,843 |
# The 12 pitch classes of the chromatic scale, in ascending order from C.
notes = ["C", "C#", "D", "D#", "E", "F", "F#", "G", "G#", "A", "A#", "B"]


def chords(root):
    """Return the [major, minor] triads built on *root*.

    The major triad is root + 4 + 7 semitones, the minor triad is
    root + 3 + 7 semitones; indexing wraps around the octave.
    """
    start = notes.index(root)
    major = [notes[(start + step) % 12] for step in (0, 4, 7)]
    minor = [notes[(start + step) % 12] for step in (0, 3, 7)]
    return [major, minor]
| VladKha/CodeWars | 7 kyu/Chords/solve.py | Python | gpl-3.0 | 206 |
from opc.colormap import Colormap
from opc.colors import BLACK, BLUE, YELLOW, RED, GREEN
from .baseclasses.diamondsquare import DiamondSquare
class Art(DiamondSquare):

    description = "Thin plasma using DiamondSquare and colormap rotation"

    def __init__(self, matrix, config):
        super(Art, self).__init__(matrix, self.generate, maxticks=20,
                                  interpolate=False)

        # black background over the whole range, with four narrow color
        # bands, softened afterwards to blend the transitions
        self.colormap = Colormap(130)
        bands = (
            (0, 130, BLACK),
            (20, 30, BLUE),
            (50, 60, YELLOW),
            (80, 90, RED),
            (110, 120, GREEN),
        )
        for low, high, color in bands:
            self.colormap.flat(low, high, color)
        self.colormap.soften()

        self.diamond.generate()

    def generate(self, matrix, diamond):
        # shift the palette each tick to animate the plasma
        self.colormap.rotate()
        self.diamond.translate(matrix, colormap=self.colormap)
| ak15199/rop | art/plasma2.py | Python | gpl-3.0 | 861 |
from django.conf.urls.defaults import *
# URL routes for the imaging app. Uses the legacy Django `patterns` helper
# with a view prefix: the string view names resolve to callables in
# `imaging.views`.
urlpatterns = patterns('imaging.views',
    (r'^iframe_form/$', 'iframe_form'),
    (r'^ajax_delete/$', 'ajax_image_removal'),
)
| pielgrzym/django-minishop | imaging/urls.py | Python | gpl-3.0 | 174 |
from django.apps import AppConfig
class ProblemsAppConfig(AppConfig):
    """Django application configuration for the ``oioioi.problems`` app."""

    name = "oioioi.problems"
| sio2project/oioioi | oioioi/problems/apps.py | Python | gpl-3.0 | 101 |
#-*- coding:utf-8 -*-
#
# nngt_graph.py
#
# This file is part of the NNGT project to generate and analyze
# neuronal networks and their activity.
# Copyright (C) 2015-2019 Tanguy Fardet
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
""" Default (limited) graph if none of the graph libraries are available """
from collections import OrderedDict
from copy import deepcopy
import logging
import numpy as np
from scipy.sparse import coo_matrix, lil_matrix
import nngt
from nngt.lib import InvalidArgument, nonstring_container, is_integer
from nngt.lib.connect_tools import (_cleanup_edges, _set_dist_new_edges,
_set_default_edge_attributes)
from nngt.lib.graph_helpers import _get_edge_attr, _get_dtype, _post_del_update
from nngt.lib.converters import _np_dtype, _to_np_array
from nngt.lib.logger import _log_message
from .graph_interface import GraphInterface, BaseProperty
logger = logging.getLogger(__name__)
# ---------- #
# Properties #
# ---------- #
class _NProperty(BaseProperty):

    ''' Class for generic interactions with nodes properties (graph-tool) '''

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # name -> list of per-node values (one entry per node)
        self.prop = OrderedDict()

    def __getitem__(self, name):
        ''' Return all values of attribute `name` as a numpy array. '''
        dtype = _np_dtype(super().__getitem__(name))
        return _to_np_array(self.prop[name], dtype=dtype)

    def __setitem__(self, name, value):
        ''' Overwrite the values of an existing attribute (one per node). '''
        if name in self:
            size = self.parent().node_nb()
            if len(value) == size:
                self.prop[name] = list(value)
            else:
                raise ValueError("A list or a np.array with one entry per "
                                 "node in the graph is required")
        else:
            raise InvalidArgument("Attribute does not exist yet, use "
                                  "set_attribute to create it.")

    def new_attribute(self, name, value_type, values=None, val=None):
        '''
        Create a new node attribute.

        Parameters
        ----------
        name : str
            Name of the new attribute.
        value_type : str
            Type of the attribute ("int", "double", "string", or anything
            else, which is stored as "object").
        values : iterable, optional (default: one `val` per node)
            Initial values, one entry per node.
        val : optional (default: type-dependent)
            Default value used to fill `values` when the latter is None.
        '''
        # type-dependent default value
        # (dead local `dtype` assignments from the original were removed:
        # the variable was set but never read)
        if val is None:
            if value_type == "int":
                val = int(0)
            elif value_type == "double":
                val = np.NaN
            elif value_type == "string":
                val = ""
            else:
                val = None
                value_type = "object"
        if values is None:
            values = _to_np_array(
                [deepcopy(val) for _ in range(self.parent().node_nb())],
                value_type)
        if len(values) != self.parent().node_nb():
            raise ValueError("A list or a np.array with one entry per "
                             "node in the graph is required")
        # store name and value type in the dict
        super().__setitem__(name, value_type)
        # store the real values in the attribute
        self.prop[name] = list(values)
        self._num_values_set[name] = len(values)

    def set_attribute(self, name, values, nodes=None):
        '''
        Set the node attribute.

        Parameters
        ----------
        name : str
            Name of the node attribute.
        values : array, size N
            Values that should be set.
        nodes : array-like, optional (default: all nodes)
            Nodes for which the value of the property should be set. If `nodes`
            is not None, it must be an array of size N.
        '''
        num_nodes = self.parent().node_nb()
        num_n = len(nodes) if nodes is not None else num_nodes
        if num_n == num_nodes:
            self[name] = list(values)
            self._num_values_set[name] = num_nodes
        else:
            if num_n != len(values):
                raise ValueError("`nodes` and `values` must have the same "
                                 "size; got respectively " + str(num_n) + \
                                 " and " + str(len(values)) + " entries.")
            if self._num_values_set[name] == num_nodes - num_n:
                # all targeted nodes sit at the end of the list: fast extend
                self.prop[name].extend(values)
            else:
                # otherwise assign values one by one
                for n, val in zip(nodes, values):
                    self.prop[name][n] = val
            self._num_values_set[name] = num_nodes

    def remove(self, nodes):
        ''' Remove entries for a set of nodes '''
        for key in self:
            # pop from the back so earlier indices stay valid
            for n in reversed(sorted(nodes)):
                self.prop[key].pop(n)
            self._num_values_set[key] -= len(nodes)
class _EProperty(BaseProperty):
    ''' Class for generic interactions with nodes properties (graph-tool) '''
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # name -> list of per-edge values, indexed by edge id
        self.prop = OrderedDict()
    def __getitem__(self, name):
        '''
        Return the attributes of an edge or a list of edges.

        `name` may be an attribute name (returns all values), a slice over
        edge ids, a single edge (2-tuple), or a list of edges; for edges a
        dict of {attribute: value(s)} is returned.
        '''
        eprop = {}
        graph = self.parent()
        if isinstance(name, slice):
            # slice over edge ids: return every attribute, sliced
            for k in self.keys():
                dtype = _np_dtype(super().__getitem__(k))
                eprop[k] = _to_np_array(self.prop[k], dtype)[name]
            return eprop
        elif nonstring_container(name):
            if nonstring_container(name[0]):
                # list of edges -> map to edge ids, then fancy-index
                eids = [graph.edge_id(e) for e in name]
                for k in self.keys():
                    dtype = _np_dtype(super().__getitem__(k))
                    eprop[k] = _to_np_array(self.prop[k], dtype=dtype)[eids]
            else:
                # single edge given as (source, target)
                eid = graph.edge_id(name)
                for k in self.keys():
                    eprop[k] = self.prop[k][eid]
            return eprop
        # plain attribute name
        dtype = _np_dtype(super().__getitem__(name))
        return _to_np_array(self.prop[name], dtype=dtype)
    def __setitem__(self, name, value):
        ''' Overwrite the values of an existing attribute (one per edge). '''
        if name in self:
            size = self.parent().edge_nb()
            if len(value) == size:
                self.prop[name] = list(value)
            else:
                raise ValueError("A list or a np.array with one entry per "
                                 "edge in the graph is required")
        else:
            raise InvalidArgument("Attribute does not exist yet, use "
                                  "set_attribute to create it.")
        self._num_values_set[name] = len(value)
    def set_attribute(self, name, values, edges=None, last_edges=False):
        '''
        Set the edge property.

        Parameters
        ----------
        name : str
            Name of the edge property.
        values : array
            Values that should be set.
        edges : array-like, optional (default: None)
            Edges for which the value of the property should be set. If `edges`
            is not None, it must be an array of shape `(len(values), 2)`.
        last_edges : bool, optional (default: False)
            If True, `edges` are assumed to be the most recently added ones,
            so the values can simply be appended.
        '''
        num_edges = self.parent().edge_nb()
        num_e = len(edges) if edges is not None else num_edges
        if num_e != len(values):
            raise ValueError("`edges` and `values` must have the same "
                             "size; got respectively " + str(num_e) + \
                             " and " + str(len(values)) + " entries.")
        if edges is None:
            self[name] = list(values)
        else:
            if last_edges:
                # newest edges have the highest ids: append in order
                self.prop[name].extend(values)
            else:
                eid = self.parent().edge_id
                prop = self.prop[name]
                # using list comprehension for fast loop
                [_set_prop(prop, eid(e), val) for e, val in zip(edges, values)]
        if num_e:
            self._num_values_set[name] = num_edges
    def new_attribute(self, name, value_type, values=None, val=None):
        '''
        Create a new edge attribute; semantics mirror
        :meth:`_NProperty.new_attribute`.
        '''
        num_edges = self.parent().edge_nb()
        if values is None and val is None:
            self._num_values_set[name] = num_edges
        if val is None:
            # type-dependent default value
            if value_type == "int":
                val = int(0)
            elif value_type == "double":
                val = np.NaN
            elif value_type == "string":
                val = ""
            else:
                val = None
        if values is None:
            values = _to_np_array(
                [deepcopy(val) for _ in range(num_edges)],
                value_type)
        if len(values) != num_edges:
            self._num_values_set[name] = 0
            raise ValueError("A list or a np.array with one entry per "
                             "edge in the graph is required")
        # store name and value type in the dict
        super().__setitem__(name, value_type)
        # store the real values in the attribute
        self.prop[name] = list(values)
        self._num_values_set[name] = len(values)
    def edges_deleted(self, eids):
        ''' Remove the attributes of a set of edge ids '''
        for key, val in self.prop.items():
            # keep values whose (positional) edge id survived the deletion
            self.prop[key] = [v for i, v in enumerate(val) if i not in eids]
            self._num_values_set[key] -= len(eids)
# ----------------- #
# NNGT backup graph #
# ----------------- #
class _NNGTGraphObject:
    '''
    Minimal implementation of the GraphObject, which does not rely on any
    graph-library.

    Invariant: for directed graphs `_edges` and `_unique` are the SAME
    OrderedDict (edge -> edge id). For undirected graphs `_unique` holds one
    canonical orientation per edge while `_edges` additionally contains the
    reciprocal orientation mapping to the same id.
    '''
    def __init__(self, nodes=0, weighted=True, directed=True):
        ''' Initialized independent graph '''
        # node ids are contiguous integers starting at 0
        self._nodes = set(i for i in range(nodes))
        # per-node out- and in-degrees, indexed by node id
        self._out_deg = [0]*nodes
        self._in_deg = [0]*nodes
        if directed:
            # for directed networks, edges and unique are the same
            self._edges = self._unique = OrderedDict()
            assert self._edges is self._unique
        else:
            # for undirected networks
            self._edges = OrderedDict()
            self._unique = OrderedDict()
        self._directed = directed
        self._weighted = weighted
    def copy(self):
        ''' Returns a deep copy of the graph object '''
        copy = _NNGTGraphObject(len(self._nodes), weighted=self._weighted,
                                directed=self._directed)
        copy._nodes = self._nodes.copy()
        if self._directed:
            # preserve the aliasing invariant: both names share one dict
            copy._unique = copy._edges = self._edges.copy()
            assert copy._unique is copy._edges
        else:
            copy._edges = self._edges.copy()
            copy._unique = self._unique.copy()
        copy._out_deg = self._out_deg.copy()
        copy._in_deg = self._in_deg.copy()
        return copy
    def is_directed(self):
        # whether edges are one-way (True) or symmetric (False)
        return self._directed
    @property
    def nodes(self):
        # list of node ids (order not guaranteed: backed by a set)
        return list(self._nodes)
class _NNGTGraph(GraphInterface):
''' NNGT wrapper class for _NNGTGraphObject '''
#------------------------------------------------------------------#
# Constructor and instance properties
    def __init__(self, nodes=0, weighted=True, directed=True,
                 copy_graph=None, **kwargs):
        ''' Initialized independent graph '''
        # node and edge attribute containers
        self._nattr = _NProperty(self)
        self._eattr = _EProperty(self)
        # next edge id to assign (monotonically increasing, never reused
        # except after deletions renumber the ids)
        self._max_eid = 0
        # test if copying graph
        if copy_graph is not None:
            self._from_library_graph(copy_graph, copy=True)
            self._max_eid = copy_graph._max_eid
        else:
            self._graph = _NNGTGraphObject(
                nodes=nodes, weighted=weighted, directed=directed)
#------------------------------------------------------------------#
# Graph manipulation
    def edge_id(self, edge):
        '''
        Return the ID a given edge or a list of edges in the graph.
        Raises an error if the edge is not in the graph or if one of the
        vertices in the edge is nonexistent.

        Parameters
        ----------
        edge : 2-tuple or array of edges
            Edge descriptor (source, target).

        Returns
        -------
        index : int or array of ints
            Index of the given `edge`.
        '''
        g = self._graph
        # a single edge is detected by its first element being an int;
        # lookups go through `_edges` so both orientations work when the
        # graph is undirected
        if is_integer(edge[0]):
            return g._edges[tuple(edge)]
        elif nonstring_container(edge[0]):
            idx = [g._edges[tuple(e)] for e in edge]
            return idx
        else:
            raise AttributeError("`edge` must be either a 2-tuple of ints or "
                                 "an array of 2-tuples of ints.")
def has_edge(self, edge):
'''
Whether `edge` is present in the graph.
.. versionadded:: 2.0
'''
e = tuple(edge)
return e in self._graph._edges
    @property
    def edges_array(self):
        '''
        Edges of the graph, sorted by order of creation, as an array of
        2-tuple.
        '''
        # `_unique` is an OrderedDict mapping (source, target) -> edge id,
        # so iterating it yields the edges in insertion (creation) order
        return np.asarray(list(self._graph._unique), dtype=int)
def _get_edges(self, source_node=None, target_node=None):
g = self._graph
edges = None
if source_node is not None:
source_node = \
[source_node] if is_integer(source_node) else source_node
if g.is_directed():
edges = [e for e in g._unique if e[0] in source_node]
else:
edges = set()
for e in g._unique:
if e[0] in source_node or e[1] in source_node:
if e[::-1] not in edges:
edges.add(e)
edges = list(edges)
return edges
target_node = \
[target_node] if is_integer(target_node) else target_node
if g.is_directed():
edges = [e for e in g._unique if e[1] in target_node]
else:
edges = set()
for e in g._unique:
if e[0] in target_node or e[1] in target_node:
if e[::-1] not in edges:
edges.add(e)
edges = list(edges)
return edges
    def is_connected(self):
        ''' Connectedness check: not implemented for the default backend. '''
        raise NotImplementedError("Not available with 'nngt' backend, please "
                                  "install a graph library (networkx, igraph, "
                                  "or graph-tool).")
def new_node(self, n=1, neuron_type=1, attributes=None, value_types=None,
positions=None, groups=None):
'''
Adding a node to the graph, with optional properties.
Parameters
----------
n : int, optional (default: 1)
Number of nodes to add.
neuron_type : int, optional (default: 1)
Type of neuron (1 for excitatory, -1 for inhibitory)
attributes : dict, optional (default: None)
Dictionary containing the attributes of the nodes.
value_types : dict, optional (default: None)
Dict of the `attributes` types, necessary only if the `attributes`
do not exist yet.
positions : array of shape (n, 2), optional (default: None)
Positions of the neurons. Valid only for
:class:`~nngt.SpatialGraph` or :class:`~nngt.SpatialNetwork`.
groups : str, int, or list, optional (default: None)
:class:`~nngt.core.NeuralGroup` to which the neurons belong. Valid
only for :class:`~nngt.Network` or :class:`~nngt.SpatialNetwork`.
Returns
-------
The node or a tuple of the nodes created.
'''
nodes = []
g = self._graph
if n == 1:
nodes.append(len(g._nodes))
g._in_deg.append(0)
g._out_deg.append(0)
else:
num_nodes = len(g._nodes)
nodes.extend(
[i for i in range(num_nodes, num_nodes + n)])
g._in_deg.extend([0 for _ in range(n)])
g._out_deg.extend([0 for _ in range(n)])
g._nodes.update(nodes)
attributes = {} if attributes is None else deepcopy(attributes)
if attributes:
for k, v in attributes.items():
if k not in self._nattr:
self._nattr.new_attribute(k, value_types[k], val=v)
else:
v = v if nonstring_container(v) else [v]
self._nattr.set_attribute(k, v, nodes=nodes)
# set default values for all attributes that were not set
for k in self.node_attributes:
if k not in attributes:
dtype = self.get_attribute_type(k)
if dtype == "double":
values = [np.NaN for _ in nodes]
self._nattr.set_attribute(k, values, nodes=nodes)
elif dtype == "int":
values = [0 for _ in nodes]
self._nattr.set_attribute(k, values, nodes=nodes)
elif dtype == "string":
values = ["" for _ in nodes]
self._nattr.set_attribute(k, values, nodes=nodes)
else:
values = [None for _ in nodes]
self._nattr.set_attribute(k, values, nodes=nodes)
if self.is_spatial():
old_pos = self._pos
self._pos = np.full((self.node_nb(), 2), np.NaN, dtype=float)
num_existing = len(old_pos) if old_pos is not None else 0
if num_existing != 0:
self._pos[:num_existing, :] = old_pos
if positions is not None:
assert self.is_spatial(), \
"`positions` argument requires a SpatialGraph/SpatialNetwork."
self._pos[nodes] = positions
if groups is not None:
assert self.is_network(), \
"`positions` argument requires a Network/SpatialNetwork."
if nonstring_container(groups):
assert len(groups) == n, "One group per neuron required."
for g, node in zip(groups, nodes):
self.population.add_to_group(g, node)
else:
self.population.add_to_group(groups, nodes)
if n == 1:
return nodes[0]
return nodes
def delete_nodes(self, nodes):
'''
Remove nodes (and associated edges) from the graph.
'''
g = self._graph
old_nodes = range(self.node_nb())
# update node set and degrees
if nonstring_container(nodes):
nodes = set(nodes)
g._nodes = g._nodes.difference(nodes)
else:
g._nodes.remove(nodes)
g._out_deg.pop(nodes)
g._in_deg.pop(nodes)
nodes = {nodes}
# remove node attributes
self._nattr.remove(nodes)
# map from old nodes to new node ids
idx = 0
remapping = {}
for n in old_nodes:
if n not in nodes:
remapping[n] = idx
idx += 1
# remove edges and remap edges
remove_eids = set()
new_edges = OrderedDict()
new_eid = 0
for e, eid in g._unique.items():
if e[0] in nodes or e[1] in nodes:
remove_eids.add(eid)
else:
new_edges[(remapping[e[0]], remapping[e[1]])] = new_eid
new_eid += 1
g._unique = new_edges
if not g._directed:
g._edges = new_edges.copy()
g._edges.update({e[::-1]: i for e, i in new_edges.items()})
else:
g._edges = g._unique
# tell edge attributes
self._eattr.edges_deleted(remove_eids)
g._nodes = set(range(len(g._nodes)))
# check spatial and structure properties
_post_del_update(self, nodes, remapping=remapping)
    def new_edge(self, source, target, attributes=None, ignore=False,
                 self_loop=False):
        '''
        Adding a connection to the graph, with optional properties.

        .. versionchanged :: 2.0
            Added `self_loop` argument to enable adding self-loops.

        Parameters
        ----------
        source : :class:`int/node`
            Source node.
        target : :class:`int/node`
            Target node.
        attributes : :class:`dict`, optional (default: ``{}``)
            Dictionary containing optional edge properties. If the graph is
            weighted, defaults to ``{"weight": 1.}``, the unit weight for the
            connection (synaptic strength in NEST).
        ignore : bool, optional (default: False)
            If set to True, ignore attempts to add an existing edge and accept
            self-loops; otherwise an error is raised.
        self_loop : bool, optional (default: False)
            Whether to allow self-loops or not.

        Returns
        -------
        The new connection or None if nothing was added.

        .. NOTE(review): when an existing edge is ignored, the function
           falls through to ``return edge`` rather than returning None as
           the docstring implies — confirm intended behavior.
        '''
        g = self._graph
        attributes = {} if attributes is None else deepcopy(attributes)
        # set default values for attributes that were not passed
        _set_default_edge_attributes(self, attributes, num_edges=1)
        # check that the edge does not already exist
        edge = (source, target)
        if source not in g._nodes:
            raise InvalidArgument("There is no node {}.".format(source))
        if target not in g._nodes:
            raise InvalidArgument("There is no node {}.".format(target))
        # self-loops are rejected unless explicitly allowed or ignored
        if source == target:
            if not ignore and not self_loop:
                raise InvalidArgument("Trying to add a self-loop.")
            elif ignore:
                _log_message(logger, "WARNING",
                             "Self-loop on {} ignored.".format(source))
                return None
        # for directed graphs `_edges` is `_unique`, so the second test
        # covers both orientations of an undirected graph
        if (g._directed and edge not in g._unique) or edge not in g._edges:
            edge_id = self._max_eid
            # ~ edge_id = len(g._unique)
            g._unique[edge] = edge_id
            g._out_deg[source] += 1
            g._in_deg[target] += 1
            self._max_eid += 1
            # check distance
            _set_dist_new_edges(attributes, self, [edge])
            # attributes
            self._attr_new_edges([(source, target)], attributes=attributes)
            if not g._directed:
                # edges and unique are different objects, so update _edges
                g._edges[edge] = edge_id
                # add reciprocal
                e_recip = (target, source)
                g._edges[e_recip] = edge_id
                g._out_deg[target] += 1
                g._in_deg[source] += 1
        else:
            if not ignore:
                raise InvalidArgument("Trying to add existing edge.")
            _log_message(logger, "WARNING",
                         "Existing edge {} ignored.".format((source, target)))
        return edge
def new_edges(self, edge_list, attributes=None, check_duplicates=False,
check_self_loops=True, check_existing=True,
ignore_invalid=False):
'''
Add a list of edges to the graph.
.. versionchanged:: 2.0
Can perform all possible checks before adding new edges via the
``check_duplicates`` ``check_self_loops``, and ``check_existing``
arguments.
Parameters
----------
edge_list : list of 2-tuples or np.array of shape (edge_nb, 2)
List of the edges that should be added as tuples (source, target)
attributes : :class:`dict`, optional (default: ``{}``)
Dictionary containing optional edge properties. If the graph is
weighted, defaults to ``{"weight": ones}``, where ``ones`` is an
array the same length as the `edge_list` containing a unit weight
for each connection (synaptic strength in NEST).
check_duplicates : bool, optional (default: False)
Check for duplicate edges within `edge_list`.
check_self_loops : bool, optional (default: True)
Check for self-loops.
check_existing : bool, optional (default: True)
Check whether some of the edges in `edge_list` already exist in the
graph or exist multiple times in `edge_list` (also performs
`check_duplicates`).
ignore_invalid : bool, optional (default: False)
Ignore invalid edges: they are not added to the graph and are
silently dropped. Unless this is set to true, an error is raised
whenever one of the three checks fails.
.. warning::
Setting `check_existing` to False will lead to undefined behavior
if existing edges are provided! Only use it (for speedup) if you
are sure that you are indeed only adding new edges.
Returns
-------
Returns new edges only.
'''
attributes = {} if attributes is None else deepcopy(attributes)
num_edges = len(edge_list)
g = self._graph
# set default values for attributes that were not passed
_set_default_edge_attributes(self, attributes, num_edges)
# check that all nodes exist
if np.max(edge_list) >= self.node_nb():
raise InvalidArgument("Some nodes do no exist.")
# check edges
new_attr = None
if check_duplicates or check_self_loops or check_existing:
edge_list, new_attr = _cleanup_edges(
self, edge_list, attributes, check_duplicates,
check_self_loops, check_existing, ignore_invalid)
else:
new_attr = attributes
# create the edges
initial_eid = self._max_eid
ws = None
num_added = len(edge_list)
if "weight" in new_attr:
if nonstring_container(new_attr["weight"]):
ws = new_attr["weight"]
else:
ws = (new_attr["weight"] for _ in range(num_added))
else:
ws = _get_edge_attr(self, edge_list, "weight", last_edges=True)
for i, (e, w) in enumerate(zip(edge_list, ws)):
eid = self._max_eid
g._unique[tuple(e)] = eid
self._max_eid += 1
g._out_deg[e[0]] += 1
g._in_deg[e[1]] += 1
if not g._directed:
# edges and unique are different objects, so update _edges
g._edges[tuple(e)] = eid
# reciprocal edge
g._edges[tuple(e[::-1])] = eid
g._out_deg[e[1]] += 1
g._in_deg[e[0]] += 1
# check distance
_set_dist_new_edges(new_attr, self, edge_list)
# call parent function to set the attributes
self._attr_new_edges(edge_list, attributes=new_attr)
return edge_list
def delete_edges(self, edges):
''' Remove a list of edges '''
g = self._graph
old_enum = len(g._unique)
if not nonstring_container(edges[0]):
edges = [edges]
if not isinstance(edges[0], tuple):
edges = [tuple(e) for e in edges]
# get edge ids
e_to_eid = g._unique
eids = {e_to_eid[e] for e in edges}
# remove
directed = g._directed
for e in edges:
if e in g._unique:
del g._unique[e]
if not directed:
if e in g._edges:
del g._edges[e]
del g._edges[e[::-1]]
# reset eids
for i, e in enumerate(g._unique):
g._unique[e] = i
if not directed:
e._edges[e] = i
e._edges[e[::-1]] = i
self._eattr.edges_deleted(eids)
def clear_all_edges(self):
g = self._graph
if g._directed:
g._edges = g._unique = OrderedDict()
assert g._edges is g._unique
else:
g._edges = OrderedDict()
g._unique = OrderedDict()
g._out_deg = [0 for _ in range(self.node_nb())]
g._out_deg = [0 for _ in range(self.node_nb())]
self._eattr.clear()
#------------------------------------------------------------------#
# Getters
    def node_nb(self):
        '''
        Returns the number of nodes.

        .. warning:: When using MPI, returns only the local number of nodes.
        '''
        return len(self._graph._nodes)
    def edge_nb(self):
        '''
        Returns the number of edges.

        .. warning:: When using MPI, returns only the local number of edges.
        '''
        # `_unique` holds one entry per edge regardless of directedness
        return len(self._graph._unique)
def is_directed(self):
return g._directed
    def get_degrees(self, mode="total", nodes=None, weights=None):
        '''
        Returns the degree of the nodes.

        `mode` is "in", "out", or "total"; `weights` may be True (use the
        "weight" attribute), an attribute name, a container of weights, or
        None/False for unweighted degrees.

        .. warning ::
            When using MPI, returns only the degree related to local edges.
        '''
        g = self._graph
        num_nodes = None
        weights = 'weight' if weights is True else weights
        # normalize `nodes` to either a slice (all nodes) or a list
        if nodes is None:
            num_nodes = self.node_nb()
            nodes = slice(num_nodes)
        elif nonstring_container(nodes):
            nodes = list(nodes)
            num_nodes = len(nodes)
        else:
            nodes = [nodes]
            num_nodes = 1
        # weighted
        if nonstring_container(weights) or weights in self._eattr:
            degrees = np.zeros(num_nodes)
            adj_mat = self.adjacency_matrix(types=False, weights=weights)
            # column sums give in-strength, row sums give out-strength
            if mode in ("in", "total") or not self.is_directed():
                degrees += adj_mat.sum(axis=0).A1[nodes]
            if mode in ("out", "total") and self.is_directed():
                degrees += adj_mat.sum(axis=1).A1[nodes]
            return degrees
        elif weights not in {None, False}:
            raise ValueError("Invalid `weights` {}".format(weights))
        # unweighted
        degrees = np.zeros(num_nodes, dtype=int)
        if not g._directed or mode in ("in", "total"):
            if isinstance(nodes, slice):
                degrees += g._in_deg[nodes]
            else:
                degrees += [g._in_deg[i] for i in nodes]
        if g._directed and mode in ("out", "total"):
            if isinstance(nodes, slice):
                degrees += g._out_deg[nodes]
            else:
                degrees += [g._out_deg[i] for i in nodes]
        # scalar for a single node, array otherwise
        if num_nodes == 1:
            return degrees[0]
        return degrees
    def neighbours(self, node, mode="all"):
        '''
        Return the neighbours of `node`.

        Parameters
        ----------
        node : int
            Index of the node of interest.
        mode : string, optional (default: "all")
            Type of neighbours that will be returned: "all" returns all the
            neighbours regardless of directionality, "in" returns the
            in-neighbours (also called predecessors) and "out" retruns the
            out-neighbours (or successors).

        Returns
        -------
        neighbours : set
            The neighbours of `node`.
        '''
        # linear scan over the full edge array (O(E) per call)
        edges = self.edges_array
        if mode == "all" or not self._graph._directed:
            neighbours = set(edges[edges[:, 1] == node, 0])
            return neighbours.union(edges[edges[:, 0] == node, 1])
        if mode == "in":
            return set(edges[edges[:, 1] == node, 0])
        if mode == "out":
            return set(edges[edges[:, 0] == node, 1])
        raise ValueError(('Invalid `mode` argument {}; possible values'
                          'are "all", "out" or "in".').format(mode))
    def _from_library_graph(self, graph, copy=True):
        ''' Initialize `self._graph` from existing library object.

        With `copy=True` the underlying graph object is deep-copied;
        otherwise it is shared with `graph`. Node and edge attributes
        (names, dtypes, and values) are replicated in both cases.
        '''
        self._graph = graph._graph.copy() if copy else graph._graph
        for key, val in graph._nattr.items():
            dtype = graph._nattr.value_type(key)
            self._nattr.new_attribute(key, dtype, values=val)
        for key, val in graph._eattr.items():
            dtype = graph._eattr.value_type(key)
            self._eattr.new_attribute(key, dtype, values=val)
# tool function to set edge properties
def _set_prop(array, eid, val):
    ''' Assign `val` at index `eid`; expression form usable inside the
    list comprehensions of :meth:`_EProperty.set_attribute`. '''
    array[eid] = val
| Silmathoron/NNGT | nngt/core/nngt_graph.py | Python | gpl-3.0 | 32,443 |
from setuptools import setup

version = '2.2.dev0'

# Long description is assembled from the project's documentation files.
long_description = '\n\n'.join([
    open('README.rst').read(),
    open('TODO.rst').read(),
    open('CREDITS.rst').read(),
    open('CHANGES.rst').read(),
    ])

# Runtime dependencies.
# bugfix: a trailing comma after the closing bracket previously turned this
# assignment into a 1-tuple *containing* the list, so setuptools received a
# tuple instead of the list of requirement strings.
install_requires = [
    'Django >= 1.6',
    'django-extensions',
    'lizard-ui >= 5.0',
    'lizard-map >= 5.0',
    'django-nose',
    'pkginfo',
    'django-treebeard',
    'factory_boy'
    ]

tests_require = [
    ]

setup(name='lizard-maptree',
      version=version,
      description="Provides tree view functionality to lizard-map applications. ",
      long_description=long_description,
      # Get strings from http://www.python.org/pypi?%3Aaction=list_classifiers
      classifiers=['Programming Language :: Python',
                   'Framework :: Django',
                   ],
      keywords=[],
      author='Jack Ha',
      author_email='[email protected]',
      url='',
      license='GPL',
      packages=['lizard_maptree'],
      include_package_data=True,
      zip_safe=False,
      install_requires=install_requires,
      tests_require=tests_require,
      extras_require={'test': tests_require},
      entry_points={
          'console_scripts': [
          ]},
      )
| lizardsystem/lizard-maptree | setup.py | Python | gpl-3.0 | 1,230 |
# Copyright (C) 2012 David Morton
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from collections import defaultdict
import random
from scipy.cluster.vq import kmeans, vq
import numpy
def sample_data(data, num_samples):
    '''
    Return a 'representative' sample of the data.

    Inputs:
        data: (samples, features) numpy array of floats
        num_samples: integer > 0
    Returns:
        result: (min(<num_samples>, len(<data>)), features) numpy array

    The data are clustered with one pass of k-means (k <= 25), one point is
    drawn from every cluster, and the remainder is filled with random draws
    from random clusters.
    '''
    if num_samples >= len(data):
        # fewer points than requested: return everything unchanged
        return data

    # determine k -- capped at 25 clusters for speed
    k = min(25, num_samples)
    # cluster data (single k-means iteration, then assign each point)
    clusters = vq(data, kmeans(data, k, iter=1)[0])[0]
    clustered_index_list = defaultdict(list)
    for i, c in enumerate(clusters):
        clustered_index_list[c].append(i)

    # pull data from clusters randomly.
    result = numpy.empty((num_samples, data.shape[1]), dtype=data.dtype)
    # -- guarantee at least one element from each cluster --
    sample_index_set = set()
    for index_list in clustered_index_list.values():
        index = random.choice(index_list)
        result[len(sample_index_set)] = data[index]
        sample_index_set.add(index)

    # bugfix: random.choice cannot index a dict-keys view on Python 3,
    # so materialize the cluster ids once
    cluster_ids = list(clustered_index_list)
    while len(sample_index_set) < num_samples:
        cluster = random.choice(cluster_ids)
        index = random.choice(clustered_index_list[cluster])
        if index not in sample_index_set:
            result[len(sample_index_set)] = data[index]
            sample_index_set.add(index)
    return result
| davidlmorton/spikepy | spikepy/plotting_utils/sampler.py | Python | gpl-3.0 | 2,180 |
from django.conf import settings
from django.db import models
from django.forms.models import model_to_dict
import json
import requests
class Character(models.Model):
    """A World of Warcraft character whose equipped items are tracked.

    Each equipment-slot field stores a fingerprint string built from the
    Battle.net item id concatenated with the item level (see
    ``fetch_from_battlenet``).
    """
    name = models.CharField(max_length=250)
    server_name = models.CharField(max_length=250)
    # one text field per equipment slot; populated by fetch_from_battlenet
    head = models.TextField(blank=True, null=True)
    neck = models.TextField(blank=True, null=True)
    back = models.TextField(blank=True, null=True)
    chest = models.TextField(blank=True, null=True)
    wrist = models.TextField(blank=True, null=True)
    hands = models.TextField(blank=True, null=True)
    waist = models.TextField(blank=True, null=True)
    legs = models.TextField(blank=True, null=True)
    feet = models.TextField(blank=True, null=True)
    finger1 = models.TextField(blank=True, null=True)
    finger2 = models.TextField(blank=True, null=True)
    trinket1 = models.TextField(blank=True, null=True)
    trinket2 = models.TextField(blank=True, null=True)
    mainHand = models.TextField(blank=True, null=True)
    artifactTraits = models.TextField(blank=True, null=True)
    def __unicode__(self):
        # "<server> - <name>" (Python-2 style unicode representation)
        return '{server_name} - {name}'.format(
            server_name=self.server_name,
            name=self.name
        )
    def fetch_from_battlenet(self):
        """Refresh the equipment fields from the Battle.net API and save.

        Queries the (US-region) character endpoint with items+talents and
        stores, for every slot, ``str(item id) + str(item level)`` as an
        opaque gear fingerprint.
        NOTE(review): no handling for HTTP errors or missing slots — a
        missing item raises KeyError; confirm callers expect that.
        """
        api_key = settings.BN_APIKEY
        url = 'https://us.api.battle.net/wow/character/{server_name}/{char_name}?'\
            'fields=items+talents&locale=en_US&apikey={api_key}'.format(
                server_name=self.server_name,
                char_name=self.name,
                api_key=api_key
            )
        response = requests.get(url)
        data = json.loads(response.content)
        positions = [
            'head', 'neck', 'back', 'chest', 'wrist', 'hands', 'waist', 'legs',
            'feet', 'finger1', 'finger2', 'trinket1', 'trinket2', 'mainHand'
        ]
        for position in positions:
            # id and itemLevel are concatenated as strings on purpose:
            # the result is only compared for equality, never parsed back
            setattr(
                self,
                position,
                str(data['items'][position]['id']) + str(data['items'][position]['itemLevel'])
            )
        self.save()
        return self
    def to_dict(self):
        """Return the model's fields as a plain dict."""
        return model_to_dict(self)
class SimcRank(models.Model):
    """A timestamped DPS rating for a :class:`Character` (one row per run)."""
    character = models.ForeignKey(Character)
    # simulated DPS value for this run
    dps_rank = models.IntegerField()
    # set automatically when the rating is recorded
    rating_time = models.DateTimeField(auto_now_add=True)
    def __unicode__(self):
        # "<character name> - <dps>"
        return '{name} - {dps_rank}'.format(
            name=self.character.name,
            dps_rank=self.dps_rank
        )
| tinyx/crabfactory | wow_monitor/models.py | Python | gpl-3.0 | 2,497 |
# Flexlay - A Generic 2D Game Editor
# Copyright (C) 2014 Ingo Ruhnke <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from flexlay import Config
class SuperTuxMenuBar:
    """Builds the SuperTux editor's menu bar and wires each item to an editor action."""

    def __init__(self, gui_manager, editor):
        """Create the File/Edit/Zoom/Layer/Sector/Run menus on *gui_manager*."""
        self.gui_manager = gui_manager

        # Create Menu
        self.menubar = self.gui_manager.create_menubar()

        file_menu = self.menubar.add_menu("&File")
        submenu_new = file_menu.add_menu("New")
        submenu_new.add_item("Level...", editor.gui_level_new)
        submenu_new.add_item("Add-on...", editor.gui_addon_new)
        file_menu.add_item("Open...", editor.gui_level_load)
        self.recent_files_menu = file_menu.add_menu("Open Recent")
        for filename in Config.current.recent_files:
            # Bind the loop variable as a default argument so each menu entry
            # loads its own file (avoids the late-binding closure pitfall).
            self.recent_files_menu.add_item(filename, lambda filename=filename: editor.load_level(filename))
        file_menu.add_item("Save...", editor.gui_level_save)
        # file_menu.add_item("Save Commands...", menu_file_save_commands)
        file_menu.add_item("Save As...", editor.gui_level_save_as)
        file_menu.add_item("Properties...", editor.gui_edit_level)
        file_menu.add_item("Quit", editor.gui.quit)

        edit_menu = self.menubar.add_menu("&Edit")
        edit_menu.add_item("Smooth Selection", editor.gui_smooth_level_struct)
        edit_menu.add_item("Resize", editor.gui_resize_sector)
        edit_menu.add_item("Resize to selection", editor.gui_resize_sector_to_selection)
        edit_menu.add_item("Change Tileset", editor.gui_change_tileset)

        zoom_menu = self.menubar.add_menu("&Zoom")
        zoom_menu.add_item("1:4 (25%) ", lambda: editor.gui_set_zoom(0.25))
        zoom_menu.add_item("1:2 (50%) ", lambda: editor.gui_set_zoom(0.5))
        zoom_menu.add_item("1:1 (100%) ", lambda: editor.gui_set_zoom(1.0))
        zoom_menu.add_item("2:1 (200%) ", lambda: editor.gui_set_zoom(2.0))
        zoom_menu.add_item("4:1 (400%) ", lambda: editor.gui_set_zoom(4.0))

        layer_menu = self.menubar.add_menu("&Layer")
        layer_menu.add_item("Show All", editor.layer_selector.show_all)
        # BUG FIX: "Hide All" was wired to show_all (copy-paste error); it must
        # call hide_all so the item actually hides the layers.
        layer_menu.add_item("Hide All", editor.layer_selector.hide_all)
        # layer_menu.add_item("Show Only Selected", (lambda: print("\"Show Only Selected\" is not implemented")))

        sector_menu = self.menubar.add_menu("&Sector")
        # sector = editor.workspace.get_map().metadata
        # for i in sector.parent.get_sectors():
        #     if sector.name == i:
        #         current = " [current]"
        #     else:
        #         current = ""
        #
        #     def on_sector_callback():
        #         print("Switching to %s" % i)
        #         editor.workspace.get_map().metadata.parent.activate_sector(i, editor.workspace)
        #
        #     mymenu.add_item(mysprite, ("Sector (%s)%s" % [i, current]), on_sector_callback)
        sector_menu.add_item("Create New Sector", editor.gui_add_sector)
        sector_menu.add_item("Remove Current Sector", editor.gui_remove_sector)
        sector_menu.add_item("Edit Sector Properties", editor.gui_edit_sector)

        run_menu = self.menubar.add_menu("&Run")
        run_menu.add_item("Run Level", editor.gui_run_level)
        run_menu.add_item("Record Level Playthrough", editor.gui_record_level)
        run_menu.add_item("Play A Demo", editor.gui_play_demo)
        run_menu.add_item("Play Example Demo", editor.gui_watch_example)

        self.editor = editor

    def update_recent_files(self):
        """Rebuild the "Open Recent" submenu from the current Config state."""
        self.recent_files_menu.menu.clear()
        for filename in Config.current.recent_files:
            self.recent_files_menu.add_item(filename, lambda filename=filename: self.editor.load_level(filename))
# EOF #
| SuperTux/flexlay | supertux/menubar.py | Python | gpl-3.0 | 4,310 |
# coding=utf-8
import flask_pymongo
from app import mongo
from .tags import Tag
class PostsTags(object):
    """Post-to-tag relation model (documents in the `posts_tags` collection).

    Document fields:
        _id:     automatic id
        post_id: post id
        tag_id:  tag id
    """

    def __init__(self):
        pass

    @staticmethod
    def get_post_tags(post_id):
        """Return the list of Tag objects attached to *post_id*, or None.

        None is returned when the post has no tags or when the lookup fails
        (deliberately best-effort: DB errors are swallowed).
        """
        tags = []
        try:
            results = mongo.db.posts_tags.find({
                'post_id': post_id
            })
            for result in results:
                tag_id = result.get('tag_id')
                tag = Tag.get_tag_by_id(tag_id)
                tags.append(tag)
            if tags:
                return tags
            return None
        except Exception:
            # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
            # are no longer swallowed; failures still yield None by design.
            return None

    @staticmethod
    def add_post_tags_relation(post_id, tag_ids):
        """Replace the tag set of *post_id* with *tag_ids*.

        Any existing relations for the post are removed first, then one
        relation per tag id is upserted.
        Returns the number of newly inserted relations, or False when
        nothing was inserted.
        """
        count = 0
        try:
            mongo.db.posts_tags.delete_many({'post_id': post_id})
        except Exception:
            # Best-effort cleanup; a failed delete must not abort the upserts.
            pass
        for tag_id in tag_ids:
            tag_id = int(tag_id)
            try:
                result = mongo.db.posts_tags.update_one({
                    'post_id': post_id, 'tag_id': tag_id
                }, {
                    '$setOnInsert': {
                        'post_id': post_id,
                        'tag_id': tag_id
                    }
                }, upsert=True)
                #count += result.modified_count or result.upserted_id
                # upserted_id is only set when a brand-new relation was created.
                if result.upserted_id:
                    count += 1
            except Exception:
                # Skip individual failures; remaining tags are still processed.
                pass
        return count if count else False
| thundernet8/Plog | app/core/models/posts_tags.py | Python | gpl-3.0 | 1,897 |
import socket as _socket
import os
import types
import _lightbluecommon
from _obexcommon import OBEXError
# public attributes
__all__ = ("sendfile", "recvfile")
def sendfile(address, channel, source):
    """Send *source* over OBEX to *address* on the given RFCOMM *channel*.

    *source* may be a file path (string) or an open built-in file object.
    File objects are spooled to a temporary file first, because the PyS60
    API can only send a named file. Raises OBEXError on send failure.
    """
    if not isinstance(source, (types.StringTypes, types.FileType)):
        raise TypeError("source must be string or built-in file object")

    if isinstance(source, types.StringTypes):
        try:
            _socket.bt_obex_send_file(address, channel, unicode(source))
        except Exception, e:
            raise OBEXError(str(e))
    else:
        # given file object
        if hasattr(source, "name"):
            # derive the temporary name from the original file's name
            localpath = _tempfilename(source.name)
        else:
            localpath = _tempfilename()
        try:
            # write the source file object's data into a file, then send it
            f = file(localpath, "wb")
            f.write(source.read())
            f.close()
            try:
                _socket.bt_obex_send_file(address, channel, unicode(localpath))
            except Exception, e:
                raise OBEXError(str(e))
        finally:
            # remove temporary file
            if os.path.isfile(localpath):
                try:
                    os.remove(localpath)
                except Exception, e:
                    print "[lightblue.obex] unable to remove temporary file %s: %s" %\
                        (localpath, str(e))
def recvfile(sock, dest):
if not isinstance(dest, (types.StringTypes, types.FileType)):
raise TypeError("dest must be string or built-in file object")
if isinstance(dest, types.StringTypes):
_recvfile(sock, dest)
else:
# given file object
localpath = _tempfilename()
try:
# receive a file and then read it into the file object
_recvfile(sock, localpath)
recvdfile = file(localpath, "rb")
dest.write(recvdfile.read())
recvdfile.close()
finally:
# remove temporary file
if os.path.isfile(localpath):
try:
os.remove(localpath)
except Exception, e:
print "[lightblue.obex] unable to remove temporary file %s: %s" %\
(localpath, str(e))
# receives file and saves to local path
def _recvfile(sock, localpath):
    """Receive a file via OBEX into *localpath*, preserving any existing file.

    Raises OBEXError if the receive fails; in that case any pre-existing
    file at *localpath* is restored.
    """
    # PyS60's bt_obex_receive() won't receive the file if given a file path
    # that already exists (it tells the client there's a conflict error). So
    # we need to handle this somehow, and preferably backup the original file
    # so that we can put it back if the recv fails.
    if os.path.isfile(localpath):
        # if given an existing path, rename existing file
        temppath = _tempfilename(localpath)
        os.rename(localpath, temppath)
    else:
        temppath = None
    try:
        # receive a file (get internal _sock cos sock is our own SocketWrapper
        # object)
        _socket.bt_obex_receive(sock._sock, unicode(localpath))
    except _socket.error, e:
        try:
            if temppath is not None:
                # recv failed, put original file back
                os.rename(temppath, localpath)
        finally:
            # if the renaming of the original file fails, this will still
            # get raised
            raise OBEXError(str(e))
    else:
        # recv successful, remove the original file
        if temppath is not None:
            os.remove(temppath)
# Must point to C:\ because can't write in start-up dir (on Z:?)
def _tempfilename(basename="C:\\lightblue_obex_received_file"):
version = 1
while os.path.isfile(basename):
version += 1
basename = basename[:-1] + str(version)
return basename | hfeeki/python-lightblue | src/series60/_obex.py | Python | gpl-3.0 | 3,871 |
#! /usr/bin/env python
from state_ai import StateAi
import rospy
from math import pi
from std_msgs.msg import Bool
class Practice(StateAi):
    """State-machine AI that drives the robot around a pre-generated circle."""

    def __init__(self):
        # Register this AI under the state name "practice".
        super(Practice, self).__init__("practice")

    def on_start(self):
        # Circle of radius 1.8, swept from pi/4 to 2*pi - pi/4 in steps of
        # pi/270 rad (2/3 of a degree), direction -1.
        self.generate_circle(1.8, pi/4, 2 * pi - pi/4, pi/270.0, -1)

    def on_goal_changed(self, goal_msg):
        # No reaction needed when the active goal changes.
        pass

    def on_last_goal_reached(self, msg):
        # Nothing to do once the final goal is reached.
        pass
if __name__ == "__main__":
    try:
        a = Practice()
    except rospy.ROSInterruptException:
        # Normal shutdown path when ROS interrupts the node.
        pass
# -*- coding: utf-8 -*-
##
##
## This file is part of Indico.
## Copyright (C) 2002 - 2014 European Organization for Nuclear Research (CERN).
##
## Indico is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 3 of the
## License, or (at your option) any later version.
##
## Indico is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Indico;if not, see <http://www.gnu.org/licenses/>.
# ZODB imports
import ZODB
from ZODB import ConflictResolution, MappingStorage
from indico.tests.python.functional.seleniumTestCase import SeleniumTestCase
import transaction
from ZODB.POSException import ConflictError
# legacy imports
from indico.core.db import DBMgr
# indico imports
from indico.tests.python.unit.util import IndicoTestFeature
from indico.tests import default_actions
class TestMemStorage(MappingStorage.MappingStorage,
                     ConflictResolution.ConflictResolvingStorage):
    """
    Test memory storage - useful for conflicts
    """

    def __init__(self, name='foo'):
        MappingStorage.MappingStorage.__init__(self, name)
        ConflictResolution.ConflictResolvingStorage.__init__(self)

    @ZODB.utils.locked(MappingStorage.MappingStorage.opened)
    def store(self, oid, serial, data, version, transaction):
        """Store *data* for *oid*, attempting conflict resolution.

        Mirrors MappingStorage.store(), but when the writer started from a
        stale revision (*serial* != latest tid) it funnels the write through
        tryToResolveConflict() instead of failing outright.
        """
        assert not version, "Versions are not supported"
        if transaction is not self._transaction:
            raise ZODB.POSException.StorageTransactionError(self, transaction)

        old_tid = None
        tid_data = self._data.get(oid)
        if tid_data:
            # Latest committed revision of this object.
            old_tid = tid_data.maxKey()
            if serial != old_tid:
                # Stale base revision: try to merge rather than raise.
                data = self.tryToResolveConflict(oid, old_tid, serial, data)

        self._tdata[oid] = data
        return self._tid
class Database_Feature(IndicoTestFeature):
    """
    Connects/disconnects the database
    """

    _requires = []

    def start(self, obj):
        """Initialize a fresh test database on *obj*, retrying on ZODB conflicts."""
        super(Database_Feature, self).start(obj)
        obj._dbmgr = DBMgr.getInstance()
        retries = 10
        # quite prone to DB conflicts
        while retries:
            try:
                with obj._context('database', sync=True) as conn:
                    obj._home = default_actions.initialize_new_db(conn.root())
                break
            except ConflictError:
                retries -= 1

    def _action_startDBReq(obj):
        # NOTE: takes the test object rather than self — the `_context_database`
        # generator below calls `self._startDBReq()`, so the feature framework
        # evidently binds `_action_*` functions onto the test object.
        obj._dbmgr.startRequest()
        obj._conn = obj._dbmgr.getDBConnection()
        return obj._conn

    def _action_stopDBReq(obj):
        # Commit and drop the connection opened by _action_startDBReq.
        transaction.commit()
        obj._conn.close()
        obj._conn = None

    def _context_database(self, sync=False):
        # Generator used as a context manager: yields an open DB connection
        # and guarantees the request is closed afterwards.
        conn = self._startDBReq()
        if sync:
            conn.sync()
        try:
            yield conn
        finally:
            self._stopDBReq()

    def destroy(self, obj):
        obj._conn = None
class DummyUser_Feature(IndicoTestFeature):
    """
    Creates a dummy user - needs database
    """

    _requires = ['db.Database']

    def start(self, obj):
        """Create dummy avatars and expose them as obj._dummy / obj._avatar1..4."""
        super(DummyUser_Feature, self).start(obj)
        use_password = isinstance(obj, SeleniumTestCase)
        with obj._context('database', sync=True):
            obj._avatars = default_actions.create_dummy_users(use_password)
            for index in xrange(1, 5):
                setattr(obj, '_avatar%d' % index, obj._avatars[index])
            obj._dummy = obj._avatars[0]
class DummyGroup_Feature(IndicoTestFeature):
    """
    Creates a dummy group - needs database
    """

    _requires = ['db.Database']

    def start(self, obj):
        """Create one group containing all dummy avatars and one empty group."""
        super(DummyGroup_Feature, self).start(obj)
        with obj._context('database', sync=True):
            populated = default_actions.create_dummy_group()
            for member in obj._avatars:
                populated.addMember(member)
            obj._group_with_users = populated
            obj._empty_group = default_actions.create_dummy_group()
        obj._dummy_group_with_users = obj._group_with_users
        obj._dummy_empty_group = obj._empty_group
| pferreir/indico-backup | indico/tests/python/unit/db.py | Python | gpl-3.0 | 4,383 |
# Copyright 2008-2010, Red Hat, Inc
# Dan Radez <[email protected]>
#
# This software may be freely redistributed under the terms of the GNU
# general public license.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
# Package metadata consumed by packaging tools and `loki.__version__` users.
__author__ = 'Dan Radez'
__version__ = '0.7.1'
__license__ = 'GPLv3'
| oppianmatt/django-loki | src/loki/__init__.py | Python | gpl-3.0 | 429 |
#!/usr/bin/env bash
# -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
from restorepoint import RestorePoint
import argparse
import logging
import os
import shutil
import sys
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument(
'-u',
'--username',
required=True,
help='Username to connect to RestorePoint'
)
parser.add_argument(
'-p',
'--password',
required=True,
help='Password to connect to RestorePoint'
)
parser.add_argument(
'-H',
'--hostname',
help='RestorePoint Hostname',
required=True
)
parser.add_argument(
'-k',
'--insecure',
action='store_true',
help='Skip SSL cert verification',
default=False
)
parser.add_argument(
'-s',
'--sleep',
type=int,
help='Sleep interval between device backups',
default=2
)
parser.add_argument(
'-e',
'--errors-only',
action='store_true',
help='Print errors only',
default=False
)
subparsers = parser.add_subparsers(
dest='action',
help='Available commands'
)
subparsers.add_parser(
'list',
help='List devices'
)
backup_parser = subparsers.add_parser(
'backup',
help='Backup one or more devices'
)
backup_parser.add_argument(
'--exclude',
action='append',
help='Exclude one or more devices from backup'
)
backup_parser.add_argument(
'DEVICE',
default='all',
nargs='*',
help='Optinal device name to backup (Default: all)'
)
export_parser = subparsers.add_parser(
'export',
help='Export the latest backup of one or more devices'
)
export_parser.add_argument(
'-d',
'--destination',
help='Destination directory (Default: PWD)',
default=None,
required=False
)
export_parser.add_argument(
'-i',
'--ignore-disabled',
help='Ignore disabled devices',
action='store_true',
default=True
)
export_parser.add_argument(
'-f',
'--force-backup',
help='Force a backup before exporting it',
action='store_true',
default=False
)
export_parser.add_argument(
'-c',
'--clean',
help='Empty destination dir if set',
action='store_true',
default=False
)
export_parser.add_argument(
'--prune',
help='Prune backups (keep 10 most recent only)',
action='store_true',
default=False
)
export_parser.add_argument(
'--exclude',
action='append',
help='Exclude one or more devices from export'
)
export_parser.add_argument(
'DEVICE',
default='all',
nargs='*',
help='Optinal device name to export (Default: all)'
)
prune_parser = subparsers.add_parser(
'prune',
help='Prune the latest backup of one or more devices'
)
prune_parser.add_argument(
'--exclude',
action='append',
help='Exclude one or more devices from prune'
)
prune_parser.add_argument(
'--keep',
type=int,
default=10,
help='Number of configurations to keep'
)
prune_parser.add_argument(
'DEVICE',
default='all',
nargs='*',
help='Optinal device name to prune (Default: all)'
)
return parser.parse_args()
def empty_dir(directory):
    """Delete every file, symlink and subdirectory directly under *directory*.

    Individual deletion failures are logged and skipped.
    """
    for entry in os.listdir(directory):
        entry_path = os.path.join(directory, entry)
        try:
            if os.path.isdir(entry_path):
                shutil.rmtree(entry_path)
            elif os.path.isfile(entry_path):
                os.unlink(entry_path)
        except Exception as e:
            logger.error(e)
def determine_device_ids(rp, device_names):
    """Map device names to RestorePoint device IDs.

    Names that cannot be resolved are logged and skipped.
    """
    resolved = []
    for name in device_names:
        device_id = rp.get_device_id_from_name(name)
        if device_id:
            resolved.append(device_id)
        else:
            logger.error('Could not determine device ID of device {}'.format(name))
    return resolved
def get_device_ids(rp, device_names, excluded=None, ignore_disabled=False):
    """Resolve a device selection ('all' or explicit names) to device IDs.

    Devices whose names appear in *excluded* are filtered out.
    """
    if device_names in (['all'], 'all'):
        ids = rp.get_all_device_ids(ignore_disabled=ignore_disabled)
    else:
        ids = determine_device_ids(rp, device_names)
    if not excluded:
        return ids
    return [dev_id for dev_id in ids
            if rp.get_device_name_from_id(dev_id) not in excluded]
def display_backup_results(rp, result, errors_only=False):
    """Print one line per device describing its backup outcome.

    With errors_only=True only failed backups are reported.
    """
    for dev_id, succeeded in result.items():
        dev_name = rp.get_device(dev_id)['Name']
        if errors_only:
            if not succeeded:
                print('{}: Backup failed!'.format(dev_name))
            continue
        status = 'Backup succeeded' if succeeded else 'Backup failed!'
        print('{}: {}'.format(dev_name, status))
def display_export_results(rp, res, errors_only=False):
    """Print one line per exported backup describing its outcome.

    *res* is an iterable of (backup_id, backup_result) pairs; a result of
    None counts as a failed export.
    """
    device_ids = rp.get_all_device_ids()
    latest_backups = rp.latest_backups(device_ids)
    for backup_id, backup_result in res:
        # Look up the owning device's name via the latest-backups listing.
        # NOTE(review): if no entry matches, dev_name stays None and the
        # output reads "None: ..."; there is also no `break` after a match,
        # so the last matching entry wins — confirm both are acceptable.
        dev_name = None
        for b in latest_backups:
            if b['ID'] == backup_id:
                dev_name = rp.get_device(b['DeviceID'])['Name']
        if errors_only:
            if backup_result is None:
                print('{}: Export failed!'.format(dev_name))
        else:
            print(
                '{}: {}'.format(
                    dev_name,
                    'Export succeeded' if backup_result else 'Export failed!'
                )
            )
def main():
    """CLI entry point: connect, dispatch the chosen subcommand, exit with status.

    Exit codes: 0 on success, 1 when at least one backup failed,
    3 for --clean without a destination, 4 when no device matched.
    """
    args = parse_args()
    rp = RestorePoint(
        hostname=args.hostname,
        username=args.username,
        password=args.password,
        verify=not args.insecure
    )
    exit_code = 0
    if args.action == 'list':
        # Case-insensitive alphabetical listing of device names.
        device_names = sorted(
            [x['Name'] for x in rp.list_devices()],
            key=lambda s: s.lower()
        )
        for dev in device_names:
            print(dev)
    elif args.action == 'backup':
        device_ids = get_device_ids(rp, args.DEVICE, args.exclude)
        if not device_ids:
            print('No devices selected for backup', file=sys.stderr)
            sys.exit(4)
        # Backup the devices whose IDs could be determined
        res = rp.backup_devices_block(device_ids, sleep_interval=args.sleep)
        # Print results
        display_backup_results(rp, res, args.errors_only)
        # Set the exit code to 1 if at least one backup failed
        exit_code = 0 if all(res.values()) else 1
    elif args.action == 'export':
        # Clean/empty the destination dir if requested
        if args.clean and args.destination is None:
            print(
                'You need to set the destination dir when --clean is set',
                file=sys.stderr
            )
            sys.exit(3)
        elif args.clean:
            empty_dir(args.destination)
        device_ids = get_device_ids(rp, args.DEVICE, args.exclude)
        if not device_ids:
            print('No devices selected for export', file=sys.stderr)
            sys.exit(4)
        # Optionally force a new backup
        if args.force_backup:
            backup_res = rp.backup_devices_block(device_ids,
                                                 sleep_interval=args.sleep)
        # Export the devices whose IDs could be determined
        res = rp.export_latest_backups(device_ids, args.destination)
        # Print results (backup_res only exists when --force-backup was given,
        # and is only referenced under the same condition)
        if args.force_backup:
            display_backup_results(rp, backup_res, args.errors_only)
            exit_code = 0 if all(backup_res.values()) else 1
        display_export_results(rp, res, args.errors_only)
        if args.prune:
            for dev_id in device_ids:
                try:
                    rp.prune_backups(dev_id)
                except Exception as exc:
                    print('Something went wrong while pruning backups of'
                          ' {}: {}'.format(dev_id, exc))
    elif args.action == 'prune':
        device_ids = get_device_ids(rp, args.DEVICE, args.exclude)
        for dev_id in device_ids:
            rp.prune_backups(dev_id, keep=args.keep)
    sys.exit(exit_code)
# Allow running this module directly as a CLI script.
if __name__ == '__main__':
    main()
| pschmitt/python-restorepoint | restorepoint/rp.py | Python | gpl-3.0 | 8,649 |
class Solution:
    def simplifyPath(self, path: str) -> str:
        """Collapse '.', '..' and repeated slashes in an absolute Unix path."""
        stack = []
        for part in path.split('/'):
            if part in ('', '.'):
                # Empty segments come from '//' or leading/trailing slashes.
                continue
            if part == '..':
                if stack:
                    stack.pop()
            else:
                stack.append(part)
        return '/' + '/'.join(stack)
| 1337/yesterday-i-learned | leetcode/71m.py | Python | gpl-3.0 | 518 |
"""
A binary gap within a positive integer N is any maximal sequence of
consecutive zeros that is surrounded by ones at both ends in the binary
representation of N.
For example, number 9 has binary representation 1001 and contains a binary gap
of length 2. The number 529 has binary representation 1000010001 and contains
two binary gaps: one of length 4 and one of length 3. The number 20 has binary
representation 10100 and contains one binary gap of length 1.
The number 15 has binary representation 1111 and has no binary gaps.
Write a function:
def solution(N)
that, given a positive integer N, returns the length of its longest binary gap.
The function should return 0 if N doesn't contain a binary gap.
For example, given N = 1041 the function should return 5, because N has binary
representation 10000010001 and so its longest binary gap is of length 5.
Assume that:
N is an integer within the range [1..2,147,483,647].
Complexity:
expected worst-case time complexity is O(log(N));
expected worst-case space complexity is O(1).
"""
def solution(N):
    """Return the length of N's longest binary gap (0 when there is none).

    A binary gap is a maximal run of zeros bracketed by ones in N's binary
    representation, so trailing zeros never count.
    """
    # Drop the '0b' prefix and the trailing zeros (they close no gap);
    # every '1'-separated chunk that remains is a gap.
    bits = bin(N)[2:].rstrip('0')
    return max(len(gap) for gap in bits.split('1'))
| Dineshkarthik/codility_training | Lesson 01 - Iterations/binary_gap.py | Python | gpl-3.0 | 1,391 |
#!/usr/bin/env python
from __future__ import absolute_import, unicode_literals
import os
import sys
if __name__ == "__main__":
    # Default to the local settings module unless the environment overrides it.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "_3caassurance.settings.local")
    # Imported here so the settings module is configured first.
    from django.core.management import execute_from_command_line
    execute_from_command_line(sys.argv)
| madicorp/3caassurance | manage.py | Python | gpl-3.0 | 320 |
import json
import random
import string
import logging
import requests
from collections import namedtuple
from os import path
logger = logging.getLogger()
config_file = path.join(path.dirname(path.dirname(__file__)), "config.json")
requests_timeout = 10
titles = [
"dr",
"mr",
"cheeky",
"duchess",
"duke",
"lord",
"fluffy",
"reverend",
"the right reverend",
"the right honorable",
"count",
"blind",
"daddy",
"mamma",
"howlin'",
"",
"professor",
"herr",
"frau",
"scratchy",
"admiral",
"your lord and saviour",
"madam",
"sir",
]
first_names = [
"fluffington",
"meowmeow",
"mittington",
"patrick",
"clawdia",
"paws",
"strange",
"tom",
"old tom",
"beverly",
"socks",
"sybil",
"claws",
"dusty",
"poo-foot",
"litterbox",
"socky",
"teeth",
"fangs",
"yumyums",
"super",
"keith",
"pussington",
"fido",
"alan",
"catty",
"fluffulus",
"hamcat",
]
last_names = [
"of tunatown",
"the fourth",
"the seventh",
"bumfluff",
"the minger",
"the crackhead",
"kibblesocks",
"biscuits",
"cuteington",
"bumtrousers",
"of dustbath",
"esquire",
"the shrew beheader",
"the maimer",
"the nocturnal",
"shitwiskers",
"the bastard",
"the disembowler",
"the mouse botherer",
"the shrew killer",
"the salmon shredder",
"the vomiter",
]
MailgunConfig = namedtuple("MailgunConfig", ["url", "api_key", "from_address"])
CatPicture = namedtuple("CatPicture", ["url", "reddit_url"])
def parse_config(filename):
    """Load the recipient list and Mailgun settings from a JSON config file."""
    config = json.load(open(filename))
    recipients = config["recipients"]
    assert type(recipients) == list
    mailgun_section = config["mailgun"]
    mailgun_config = MailgunConfig(
        url=mailgun_section["url"],
        api_key=mailgun_section["api-key"],
        from_address=mailgun_section["from_address"],
    )
    return recipients, mailgun_config
def generate_random_string(length=6):
    """Return *length* random alphanumeric characters."""
    alphabet = string.ascii_letters + string.digits
    return "".join(random.choice(alphabet) for _ in range(length))
def generate_random_user_agent():
    """Return a unique-ish user agent string (reddit throttles reused agents)."""
    return "TheDailyWhiskers" + generate_random_string()
def get_cat_name():
    """Compose a random cat name: "<title> <first name> <last name>"."""
    name_parts = [
        random.choice(titles),
        random.choice(first_names),
        random.choice(last_names),
    ]
    return " ".join(name_parts)
def send(mailgun_config, to, html, image_name, image_content, image_content_type):
    """Send one HTML email with an inline image through the Mailgun HTTP API.

    Raises requests.HTTPError when Mailgun responds with an error status.
    """
    response = requests.post(
        mailgun_config.url,
        auth=("api", mailgun_config.api_key),
        # "inline" attachments can be referenced from the HTML as cid:<name>.
        files=[("inline", (image_name, image_content, image_content_type))],
        data={
            "from": mailgun_config.from_address,
            "to": to,
            "subject": "The Daily Whiskers",
            "html": html,
        },
        timeout=requests_timeout,
    )
    response.raise_for_status()
def get_cat_picture(json_child):
    """Build a CatPicture from one reddit listing child, or return None.

    None is returned when the post is not flaired "Cat Picture" or when the
    expected JSON structure is missing (logged, not raised).
    """
    try:
        data = json_child["data"]
        link_flair_text = data["link_flair_text"]
        if link_flair_text != "Cat Picture":
            logger.debug("Wrong link_flair_text: %s", link_flair_text)
            return None
        if "preview" in data:
            logger.debug("Found single preview image")
            # 1 because 0 is very low res.
            # NOTE(review): assumes at least two resolutions exist; an
            # IndexError here falls through to the handler below.
            url = data["preview"]["images"][0]["resolutions"][1]["url"]
        elif "gallery_data" in data:
            logger.debug("Found preview image gallery")
            first_media_id = data["gallery_data"]["items"][0]["media_id"]
            # Number 3 looks like a reasonable resolution? I'm not sure how these resolutions are chosen!
            url = data["media_metadata"][first_media_id]["p"][3]["u"]
        else:
            # NOTE(review): ValueError is NOT caught by the except clause
            # below, so a post with neither key aborts the run — confirm
            # this is intended.
            raise ValueError("Not sure how to extract image from this JSON")
        # For a reason known only to the API designer, this is necessary
        url = url.replace("&amp;", "&")
        reddit_url = "https://www.reddit.com" + data["permalink"]
        return CatPicture(url=url, reddit_url=reddit_url)
    except (KeyError, IndexError):
        # Unexpected schema: log the offending JSON and skip this post.
        logger.exception("Failed to get cat pic from JSON, which was: \n%s", json_child)
        return None
def get_cat_pictures(top_cats):
    """Yield CatPictures from an /r/cats listing, skipping the first post."""
    posts = top_cats["data"]["children"]
    # + 1 because sometimes weird posts are stickied at the top
    for post in posts[1:]:
        picture = get_cat_picture(post)
        if picture is not None:
            yield picture
def build_html(cat_name, image_file, reddit_url):
    """Render the email body: heading, inline image (cid reference), credit link."""
    fields = {
        "cat_name": cat_name,
        "image_file": image_file,
        "reddit_url": reddit_url,
    }
    template = """
    <h1 style="text-align: center;">{cat_name}</h1>
    <img style="display: block; margin: auto; width: 100%;" src="cid:{image_file}">
    <p><small>Credit: <a href="{reddit_url}">{reddit_url}</a></small></p>
    """
    return template.format(**fields).strip()
def main():
    """Fetch today's top /r/cats pictures and email one to each recipient."""
    logging.basicConfig()
    logger.setLevel(logging.DEBUG)
    logger.info("Dailywhiskers started")
    (recipients, mailgun_config) = parse_config(config_file)
    logger.debug("Loaded config")
    session = requests.Session()
    # Without this reddit gets very throttle-y
    session.headers = {"user-agent": generate_random_user_agent()}
    top_cats_resp = session.get(
        "https://www.reddit.com/r/cats/top.json?t=day", timeout=requests_timeout
    )
    top_cats_resp.raise_for_status()
    # zip() pairs each recipient with a distinct picture; leftovers on either
    # side are silently dropped.
    for recipient, cat_picture in zip(
        recipients, get_cat_pictures(top_cats_resp.json())
    ):
        response = session.get(cat_picture.url, timeout=requests_timeout)
        response.raise_for_status()
        logger.debug("Processing recipient: %s", recipient)
        cat_name = get_cat_name()
        # This random string solves Jess's iPhone issue where new pictures clobber old ones.
        cat_pic_name = "cat_pic" + generate_random_string()
        logger.info(
            "Sending cat pic %s with name %s to %s",
            cat_picture.reddit_url,
            cat_pic_name,
            recipient,
        )
        send(
            mailgun_config=mailgun_config,
            to=recipient,
            html=build_html(cat_name, cat_pic_name, cat_picture.reddit_url),
            image_name=cat_pic_name,
            image_content=response.content,
            image_content_type=response.headers["Content-Type"],
        )
# Local/manual execution path; AWS Lambda calls handler() instead.
if __name__ == "__main__":
    main()
def handler(event, context):
    """AWS Lambda entry point: run main() and log (but swallow) any failure.

    *event* and *context* are the standard Lambda arguments; both unused.
    """
    try:
        main()
    except Exception:
        # Was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; only real errors should be caught and logged.
        logger.exception("DailyWhiskers failed :(")
| SimonStJG/TheDailyWhiskers | thedailywhiskers/dailywhiskers.py | Python | gpl-3.0 | 6,491 |
from unittest import TestCase, main
from requests import HTTPError, Response
from mock import patch
from ntfy.backends.notifico import notify
class TestNotifico(TestCase):
    """Unit tests for the Notifico backend's notify()."""

    def setUp(self):
        # Webhook URL reused by every test case.
        self.webhook = 'https://n.tkte.ch/h/1234/testing_webhook'

    @patch('requests.get')
    def test_basic(self, mock_get):
        """A 200 response sends exactly one GET with the title/message payload."""
        resp = Response()
        resp.status_code = 200
        mock_get.return_value = resp
        notify('title', 'message', webhook=self.webhook)
        mock_get.assert_called_once_with(
            self.webhook, params={'payload': 'title\nmessage'})

    @patch('requests.get')
    def test_none_webhook(self, mock_get):
        """No webhook configured means no HTTP request at all."""
        notify('title', 'message', webhook=None)
        mock_get.assert_not_called()

    @patch('requests.get')
    def test_exception(self, mock_get):
        """A 400 response surfaces as requests.HTTPError after one GET."""
        resp = Response()
        resp.status_code = 400
        mock_get.return_value = resp
        with self.assertRaises(HTTPError):
            notify('title', 'message', webhook=self.webhook)
        mock_get.assert_called_once_with(
            self.webhook, params={'payload': 'title\nmessage'})
# Allow running this test module directly (python test_notifico.py).
if __name__ == '__main__':
    main()
| dschep/ntfy | tests/test_notifico.py | Python | gpl-3.0 | 1,153 |
from __future__ import unicode_literals
import socket
from django.conf import settings
try:
    HOSTNAME = socket.gethostname()
except Exception:
    # Was a bare `except:`; never let a hostname lookup failure break the
    # settings import, but don't swallow SystemExit/KeyboardInterrupt.
    HOSTNAME = 'localhost'
def common_settings(request):
    """Template context processor exposing selected settings constants.

    Returns a dict merged into every RequestContext; optional extra entries
    come from settings.EXTRA_CONTEXT when it is defined.
    """
    # Imported lazily so settings are fully configured before sites are used.
    from django.contrib.sites.models import get_current_site
    COMMON_CONTEXT = {
        "DEBUG": settings.DEBUG,
        "MAILCHIMP_UUID": settings.MAILCHIMP_UUID,
        "MAILCHIMP_ACTION_URL": settings.MAILCHIMP_ACTION_URL,
        "HOSTNAME": HOSTNAME,
        "CURRENT_DOMAIN": get_current_site(request).domain,
    }
    try:
        # set EXTRA_CONTEXT in local settings
        COMMON_CONTEXT.update(settings.EXTRA_CONTEXT)
    except Exception:
        # Was a bare `except:`: EXTRA_CONTEXT is optional, so a missing or
        # malformed setting is deliberately ignored.
        pass
    return COMMON_CONTEXT
| thetoine/eruditorg | erudit/base/context_processors.py | Python | gpl-3.0 | 753 |
from . import models
from . import materialized_views | sumihai-tekindo/account_sicepat | customer_classification/__init__.py | Python | gpl-3.0 | 54 |
# -*- coding: utf-8 -*-
#
# cosmic documentation build configuration file, created by
# sphinx-quickstart on Thu Apr 21 14:05:08 2016.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import re
from cosmic import __version__ as cosmic_version
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.intersphinx',
'sphinx.ext.todo',
'sphinx.ext.coverage',
'sphinx.ext.imgmath',
'sphinx.ext.autosummary',
'sphinx.ext.inheritance_diagram',
'sphinx.ext.linkcode',
'sphinx.ext.ifconfig',
'sphinx_automodapi.automodapi',
'sphinxcontrib.programoutput',
'matplotlib.sphinxext.plot_directive',
'IPython.sphinxext.ipython_console_highlighting',
'IPython.sphinxext.ipython_directive',
'numpydoc',
]
# -- Extensions ---------------------------------------------------------------
# -- autodoc ------------------------------------
autoclass_content = 'class'
autodoc_default_flags = ['show-inheritance', 'members', 'inherited-members']
# -- autosummary --------------------------------
autosummary_generate = True
# -- numpydoc -----------------------------------
# fix numpydoc autosummary
numpydoc_show_class_members = False
# use blockquotes (numpydoc>=0.8 only)
numpydoc_use_blockquotes = True
# auto-insert plot directive in examples
numpydoc_use_plots = True
# try and update the plot detection to include .show() calls
try:  # requires numpydoc >= 0.8 (IMPORT_MATPLOTLIB_RE added in that release)
    from numpydoc import docscrape_sphinx
    # IMPORT_MATPLOTLIB_RE is an alternation like r'\b(import matplotlib|...)\b';
    # splitting on '(', ')' and '|' and dropping the surrounding '\b' pieces
    # recovers the individual alternatives so new ones can be appended.
    parts = re.split('[\(\)|]', docscrape_sphinx.IMPORT_MATPLOTLIB_RE)[1:-1]
except AttributeError:
    # Older numpydoc: attribute missing, keep the default plot detection.
    pass
else:
    # Also treat explicit .show() calls as "this example produces a plot".
    parts.extend(('fig.show()', 'plot.show()'))
    docscrape_sphinx.IMPORT_MATPLOTLIB_RE = r'\b({})\b'.format('|'.join(parts))
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
fortran_ext = ['f']
fortran_src = '../cosmic/src/'
# The encoding of source files.
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'cosmic'
copyright = u'2017, Katie Breivik'
author = u'Katie Breivik'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = cosmic_version
# The full version, including alpha/beta/rc tags.
release = cosmic_version
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The reST default role (used for this markup: `text`) to use for all
# documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents.
# "<project> v<release> documentation" by default.
# html_title = u'cosmic v0.1'
# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None
# The name of an image file (relative to this directory) to use as a favicon of
# the docs. This file should be a Windows icon file (.ico) being 16x16
# or 32x32 pixels large.
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
# html_extra_path = []
# If not None, a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
# The empty string is equivalent to '%b %d, %Y'.
# html_last_updated_fmt = None
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
# If false, no module index is generated.
# html_domain_indices = True
# If false, no index is generated.
# html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr', 'zh'
# html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# 'ja' uses this config value.
# 'zh' user can custom change `jieba` dictionary path.
# html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
# html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'cosmicdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
# 'preamble': '',
# Latex figure (float) alignment
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'cosmic.tex', u'cosmic Documentation',
u'Katie Breivik', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
# If true, show page references after internal links.
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
# latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'cosmic', u'cosmic Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
# man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'cosmic', u'cosmic Documentation',
author, 'cosmic', 'White dwarf Accretion w/ COSMIC.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
# texinfo_appendices = []
# If false, no module index is generated.
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
# texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
# texinfo_no_detailmenu = False
# -- Extensions -----------------------------------------------------------
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {
'python': ('https://docs.python.org/', None),
'numpy': ('https://docs.scipy.org/doc/numpy/', None),
'scipy': ('https://docs.scipy.org/doc/scipy/reference/', None),
'astropy': ('http://docs.astropy.org/en/stable/', None),
}
# -- linkcode -----------------------------------------------------------------
def linkcode_resolve(domain, info):
    """Determine the URL corresponding to a Python object.

    Used by ``sphinx.ext.linkcode`` to turn each documented API object into
    a link to its source on GitHub.
    This code is stolen with thanks from the scipy team.

    Parameters
    ----------
    domain : str
        Language domain of the documented object; only ``'py'`` is handled.
    info : dict
        Mapping with ``'module'`` and ``'fullname'`` keys identifying the
        object (e.g. ``'cosmic.sample'`` / ``'Sampler.sample'``).

    Returns
    -------
    str or None
        GitHub URL for the object's source, or ``None`` if it cannot be
        resolved to a file inside the cosmic package.
    """
    if domain != 'py':
        return None

    modname = info['module']
    fullname = info['fullname']

    submod = sys.modules.get(modname)
    if submod is None:
        return None

    # Walk the attribute path, e.g. 'ClassName.method' -> the method object.
    obj = submod
    for part in fullname.split('.'):
        try:
            obj = getattr(obj, part)
        except AttributeError:
            return None

    # try and sneak past a decorator (Python-2 style closure unwrapping)
    try:
        obj = obj.im_func.func_closure[0].cell_contents
    except (AttributeError, TypeError):
        pass

    # getsourcefile raises TypeError for builtins / C extensions.
    try:
        fn = inspect.getsourcefile(obj)
    except TypeError:
        fn = None
    if not fn:
        # Fall back to the file of the defining module (e.g. for wrapped
        # or C-accelerated objects).
        try:
            fn = inspect.getsourcefile(sys.modules[obj.__module__])
        except (AttributeError, KeyError, TypeError):
            fn = None
    if not fn:
        return None

    # findsource raises OSError when the source text is unavailable.
    try:
        _, lineno = inspect.findsource(obj)
    except (OSError, IOError, TypeError):
        lineno = None

    # findsource is 0-based; GitHub anchors are 1-based.
    linespec = "#L%d" % (lineno + 1) if lineno else ""

    # Only link to files that actually live inside the cosmic package.
    fn = os.path.relpath(fn, start=os.path.dirname(cosmic.__file__))
    if fn.startswith(os.path.pardir):
        return None

    return ("http://github.com/COSMIC-PopSynth/COSMIC/tree/%s/COSMIC/%s%s"
            % (GWPY_VERSION['full-revisionid'], fn, linespec))
| aCOSMIC/aCOSMIC | docs/conf.py | Python | gpl-3.0 | 12,863 |
from token import *
from network import *
from logical_node import *
from role_criteria import *
##################################################################################
# Example code that roughly shows how the framework is to be used. Note: the
# relationship between the network and the logical nodes will likely change.
##################################################################################
class MyAwesomeRoleCriteria(RoleCriteria):
    """Example role criteria: a node matches when its mood flags agree.

    A node satisfies this role iff its "happy" and "excited" parameters
    both equal the values this criteria was constructed with.
    """

    def __init__(self, name, happy, excited):
        self.name = name
        self.happy = happy
        self.excited = excited

    def evaluate_against(self, node_parameters):
        """Return 1 when the node's mood matches this criteria, else 0."""
        matches_happy = node_parameters["happy"] == self.happy
        matches_excited = node_parameters["excited"] == self.excited
        return int(matches_happy and matches_excited)
# Clients will define their own RoleCriteria, which will expect
# a certain set of parameters to evaluate on
role_criterias = [
MyAwesomeRoleCriteria("very sad", happy=False, excited=False),
MyAwesomeRoleCriteria("just content", happy=True, excited=False),
MyAwesomeRoleCriteria("freaking excited", happy=True, excited=True)
]
nodes = [
LogicalNode(0, { "happy": True, "excited": True }, role_criterias),
LogicalNode(1, { "happy": True, "excited": True }, role_criterias),
LogicalNode(2, { "happy": False, "excited": False }, role_criterias),
LogicalNode(3, { "happy": True, "excited": False }, role_criterias)
]
if __name__ == '__main__':
    # Wire the nodes into a simulated network; nodes communicate through it
    # during the distributed role assignment.
    network = SimulatedNetwork(nodes)

    # Node 0 initiates the assignment.  A returned token carries the roles
    # that could not be satisfied; a falsy result means complete success.
    token = nodes[0].begin_logical_assignment()
    if token:
        # NOTE: prints parenthesized so the script runs under both
        # Python 2 and Python 3 (the old bare print statements were py2-only).
        print("Error! Some roles couldn't be satisfied")
        for role_id in token.unassigned_roles:
            print("Role %d: %s" % (role_id, role_criterias[role_id].name))
    else:
        print("Success! All roles assigned!")
        for node in nodes:
            if node.assigned_role is not None:
                print("Node %d's role: %s" % (node.node_id, role_criterias[node.assigned_role].name))
print "Node %d's role: %s" % (node.node_id, role_criterias[node.assigned_role].name) | triskadecaepyon/DF_RoleMatrix | example_simple.py | Python | gpl-3.0 | 1,926 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.apps import AppConfig
class ReactorConfig(AppConfig):
    # Django application configuration for the "reactor" app; referenced from
    # INSTALLED_APPS as "reactor.apps.ReactorConfig".
    name = 'reactor'
| cholarajaa/cold-temperature | hotorcold/reactor/apps.py | Python | gpl-3.0 | 154 |
#!/usr/bin/env python
#
# test_copy_residue.py
#
# unit tests for residue duplication functionality
#
__author__ = "Magdalena Musielak, Tomasz Puton, Kristian Rother"
__copyright__ = "Copyright 2008, The Moderna Project"
__credits__ = ["Janusz Bujnicki"]
__license__ = "GPL"
__maintainer__ = "Magdalena Musielak"
__email__ = "[email protected]"
__status__ = "Prototype"
from unittest import main, TestCase
from moderna.RNAResidue import RNAResidue
from moderna.analyze.BaseRecognizer import BaseRecognizer, BaseRecognitionError
from Bio.PDB import PDBParser
from moderna.util.Errors import RNAResidueError
from moderna.sequence.ModernaAlphabet import Alphabet
from test_data import *
class RNAResidueTests(TestCase):
    """Tests for RNAResidue: equality, atom access, numbering and base typing."""
    def setUp(self):
        """Loads the A residue to start with."""
        # model[0] -> first chain -> first residue of the single-residue file.
        self.a=PDBParser().get_structure('test_struc',A_RESIDUE)[0].child_list[0].child_list[0]
        # Two independent parses of the same template file: residues from them
        # look identical but must not compare equal (see test_residue_identity).
        self.chain=PDBParser().get_structure('test_struc',MINI_TEMPLATE)[0].child_list[0]
        self.chain2=PDBParser().get_structure('test_struc',MINI_TEMPLATE)[0].child_list[0]
    def tearDown(self):
        # Release the parsed structures; setUp re-creates them for every test.
        self.a = None
        self.chain = None
        self.chain2 = None
    def test_residue_identity(self):
        """Moderna residues need to be distinct by __eq__ unless they are the same object."""
        r1 = RNAResidue(self.chain.child_list[2])
        r2 = RNAResidue(self.chain2.child_list[2])
        r3 = RNAResidue(self.chain.child_list[3])
        r4 = RNAResidue(self.chain.child_list[2])
        self.assertEqual(r1, r1)
        # Same position, but from a separate parse -> not equal.
        self.assertNotEqual(r1, r2)
        self.assertNotEqual(r1, r3)
        # Even two wrappers around the same underlying residue are distinct.
        self.assertNotEqual(r1, r4)
    def test_atom_parent(self):
        """Atoms should link back to their parent."""
        resi = RNAResidue(self.a)
        for atom in resi:
            self.assertEqual(atom.get_parent(),resi)
    def test_atom_name(self):
        """Atoms should have the right names."""
        resi = RNAResidue(self.a)
        self.assertEqual(resi["C4'"].name,"C4'")
        # fullname keeps the PDB column padding; element is derived from it.
        self.assertEqual(resi["C4'"].fullname," C4'")
        self.assertEqual(resi["C4'"].element,'C')
    def test_init(self):
        """Residues should be initializable."""
        a = RNAResidue(self.a)
        self.assertEqual(a.identifier,'1')
        a = RNAResidue(self.chain.child_list[0])
        self.assertEqual(a.identifier,'1')
        a = RNAResidue(self.chain.child_list[-1])
        self.assertEqual(a.identifier,'15')
    def test_init_recog_base(self):
        """Base recognition should succeed regardless of parameter"""
        alphabet = Alphabet()
        # recognition with base recognizer
        a = RNAResidue(self.chain.child_list[9])
        self.assertEqual(a.long_abbrev,'m2G')
        # assignment by alphabet entry
        # IMPORTANT FOR BASE RECOGNIZER BYPASS
        a = RNAResidue(self.chain.child_list[9], alphabet['m2G'])
        self.assertEqual(a.long_abbrev,'m2G')
        # assignment by wrong alphabet entry should succeed!
        a = RNAResidue(self.chain.child_list[9], alphabet['m7G'])
        self.assertEqual(a.long_abbrev,'m7G')
    def test_renumber(self):
        # change_number must keep the string identifier and the underlying
        # Bio.PDB id tuple (hetero flag, number, insertion code) in sync.
        res = RNAResidue(self.chain.child_list[2])
        res.change_number('64')
        self.assertEqual(res.identifier, '64')
        self.assertEqual(res.id, (' ',64, ' '))
        res.change_number('133E')
        self.assertEqual(res.identifier, '133E')
        self.assertEqual(res.id, (' ',133, 'E'))
        res.change_number('2')
        self.assertEqual(res.identifier, '2')
        self.assertEqual(res.id, (' ',2, ' '))
    def test_glycosidic_n(self):
        """Finds N* in tough cases."""
        chain = PDBParser().get_structure('test_struc', 'test_data/gaps/1h3e_B.pdb')[0].child_list[0]
        resi1 = RNAResidue(chain[(' ', 15, ' ')])
        self.assertTrue(resi1['N*'])
        # Residue 16 lacks the glycosidic nitrogen -> the lookup must raise.
        resi2 = RNAResidue(chain[(' ', 16, ' ')])
        self.assertRaises(RNAResidueError, resi2.__getitem__, 'N*')
    def test_purine(self):
        """RNAResidue recognizes purines."""
        res = RNAResidue(self.chain.child_list[0])
        self.assertTrue(res.purine)
        res =RNAResidue(self.chain.child_list[9])
        self.assertTrue(res.purine)
        res =RNAResidue(self.chain.child_list[1])
        self.assertFalse(res.purine)
        res = RNAResidue(self.chain.child_list[11])
        self.assertFalse(res.purine)
    def test_pyrimidine(self):
        """ModernaResidue recognizes pyrimidines."""
        res = RNAResidue(self.chain.child_list[1])
        self.assertTrue(res.pyrimidine)
        res = RNAResidue(self.chain.child_list[11])
        self.assertTrue(res.pyrimidine)
        res = RNAResidue(self.chain.child_list[0])
        self.assertFalse(res.pyrimidine)
        res = RNAResidue(self.chain.child_list[9])
        self.assertFalse(res.pyrimidine)
if __name__ == '__main__':
main()
| lenarother/moderna | tests/test_rna_residue.py | Python | gpl-3.0 | 4,920 |
from lettuce import step, world
from nose.tools import assert_equals, assert_true, assert_false
import utils
import os
import bunch.special
path = os.path.abspath(__file__)
dir_path = os.path.dirname(path)
utils.init(dir_path)
config_file = os.path.join(dir_path, "config.yaml")
config = utils.load_yaml_config(config_file)
bunch_working_dir = dir_path
def dump(obj):
    """Debug helper: print every attribute of *obj* and its current value.

    Uses the parenthesized print form so the module stays importable under
    both Python 2 and Python 3 (the old bare print statement was py2-only).
    """
    for attr in dir(obj):
        print("obj.%s = %s" % (attr, getattr(obj, attr)))
mysql_admin = config['db']['admin']
mysql_admin_pwd = config['db']['admin_pwd']
class step_assert(object):
    """Wraps nose assertions so failures report the failing lettuce step."""

    def __init__(self, step):
        self.step = step

    def _failure_message(self):
        # Trailing space kept for parity with the historical message format.
        return 'Step "%s" failed ' % self.step.sentence

    def assert_true(self, expr):
        assert_true(expr, self._failure_message())

    def assert_false(self, expr):
        assert_false(expr, self._failure_message())
@step(u'current user can execute sudo without password')
def check_current_user_sudo_nopwd(step):
step_assert(step).assert_true(utils.misc.can_execute_sudo_without_pwd())
@step(u'every RPM package available:')
def check_rpm_available(step):
for data in step.hashes:
step_assert(step).assert_true(utils.rpm.available(data['PackageName']))
@step(u'I clean yum cached data')
def clean_yum_caches(step):
step_assert(step).assert_true(utils.rpm.clean_all_cached_data())
@step(u'I setup OpenStack repository "(.*)" for environment "(.*)"')
def install_build_env_repo(step, repo, env_name):
step_assert(step).assert_true(utils.misc.install_build_env_repo(repo, env_name))
@step(u'yum repository with id "(.*)" is configured')
def check_yum_repository_with_id_exists(step, id):
step_assert(step).assert_true(utils.rpm.yum_repo_exists(id))
@step(u'I install RPM package\(s\):')
def install_rpm(step):
utils.rpm.clean_all_cached_data()
for data in step.hashes:
step_assert(step).assert_true(utils.rpm.install(data['PackageName']))
@step(u'every RPM package is installed:')
def check_rpm_installed(step):
for data in step.hashes:
step_assert(step).assert_true(utils.rpm.installed(data['PackageName']))
@step(u'I remove RPM package\(s\):')
def remove_rpm(step):
utils.rpm.clean_all_cached_data()
for data in step.hashes:
step_assert(step).assert_true(utils.rpm.remove(data['PackageName']))
@step(u'every RPM package is not installed:')
def check_rpm_not_installed(step):
for data in step.hashes:
step_assert(step).assert_false(utils.rpm.installed(data['PackageName']))
@step(u'I create MySQL database "(.*)"')
def create_mysql_db(step, db_name):
step_assert(step).assert_true(utils.mysql_cli.create_db(db_name, mysql_admin, mysql_admin_pwd))
@step(u'I grant all privileges on database "(.*)" to user "(.*)" identified by password "(.*)" at hosts:')
def setup_mysql_access_for_hosts(step, db_name, db_user, db_pwd):
for data in step.hashes:
step_assert(step).assert_true(utils.mysql_cli.grant_db_access_for_hosts(data['Hostname'],db_name, db_user, db_pwd, mysql_admin, mysql_admin_pwd))
@step(u'I grant all privileges on database "(.*)" to user "(.*)" identified by password "(.*)" locally')
def setup_mysql_access_local(step, db_name, db_user, db_pwd):
step_assert(step).assert_true(utils.mysql_cli.grant_db_access_local(db_name, db_user, db_pwd, mysql_admin, mysql_admin_pwd))
step_assert(step).assert_true(utils.mysql_cli.grant_db_access_local(db_name, mysql_admin, mysql_admin_pwd, mysql_admin, mysql_admin_pwd))
@step(u'every service is running:')
def every_service_is_running(step):
for data in step.hashes:
step_assert(step).assert_true(utils.service(data['ServiceName']).running())
@step(u'I start services:')
def start_services(step):
for data in step.hashes:
step_assert(step).assert_true(utils.service(data['ServiceName']).start())
@step(u'MySQL database "(.*)" exists')
def mysql_db_exists(step, db_name):
step_assert(step).assert_true(utils.mysql_cli.db_exists(db_name, mysql_admin, mysql_admin_pwd))
@step(u'user "(.*)" has all privileges on database "(.*)"')
def mysql_user_has_all_privileges(step, user, db_name):
step_assert(step).assert_true(utils.mysql_cli.user_has_all_privileges_on_db(user, db_name, mysql_admin, mysql_admin_pwd))
@step(u'I perform Nova DB sync')
def perform_nova_db_sync(step):
step_assert(step).assert_true(utils.nova_cli.db_sync())
@step(u'I stop services:')
def stop_services(step):
for data in step.hashes:
step_assert(step).assert_true(utils.service(data['ServiceName']).stop())
@step(u'every service is stopped:')
def every_service_is_stopped(step):
for data in step.hashes:
step_assert(step).assert_true(utils.service(data['ServiceName']).stopped())
@step(u'I clean state files:')
def clean_state_files(step):
for data in step.hashes:
step_assert(step).assert_true(utils.misc.remove_files_recursively_forced(data['PathWildCard']))
@step(u'no files exist:')
def no_files_exist(step):
for data in step.hashes:
step_assert(step).assert_true(utils.misc.no_files_exist(data['PathWildCard']))
@step(u'I change flag file "(.*)" by setting flag values:')
def change_flag_file(step,flag_file):
flags = [(flag['Name'],flag['Value']) for flag in step.hashes ]
step_assert(step).assert_true(utils.FlagFile(flag_file).apply_flags(flags).overwrite(flag_file))
@step(u'the following flags in file "(.*)" are set to:')
def verify_flag_file(step,flag_file):
flags = [(flag['Name'],flag['Value']) for flag in step.hashes ]
step_assert(step).assert_true(utils.FlagFile(flag_file).verify(flags))
@step(u'I create nova admin user "(.*)"')
def create_nova_admin(step, username):
step_assert(step).assert_true(utils.nova_cli.create_admin(username))
@step(u'nova user "(.*)" exists')
def nova_user_exists(step, user):
step_assert(step).assert_true(utils.nova_cli.user_exists(user))
@step(u'I create nova project "(.*)" for user "(.*)"')
def create_nova_project(step, name, user):
step_assert(step).assert_true(utils.nova_cli.create_project(name, user))
@step(u'nova project "(.*)" exists')
def nova_project_exists(step, project):
step_assert(step).assert_true(utils.nova_cli.project_exists(project))
@step(u'nova user "(.*)" is the manager of the nova project "(.*)"')
def nova_user_is_project_manager(step, user, project):
step_assert(step).assert_true(utils.nova_cli.user_is_project_admin(user, project))
@step(u'I create nova network "(.*)" with "(.*)" nets, "(.*)" IPs per network')
def create_nova_network(step, cidr, nets, ips):
step_assert(step).assert_true(utils.nova_cli.create_network(cidr, nets, ips))
@step(u'nova network "(.*)" exists')
def nova_network_exists(step, cidr):
step_assert(step).assert_true(utils.nova_cli.network_exists(cidr))
@step(u'novarc for project "(.*)", user "(.*)" is available')
def novarc_is_available(step, project, user):
utils.nova_cli.set_novarc(project, user, bunch_working_dir)
step_assert(step).assert_true(utils.nova_cli.novarc_available())
@step(u'VM image tarball is available at "(.*)"')
def http_resource_is_availaable(step, url):
step_assert(step).assert_true(utils.networking.http.probe(url))
@step(u'I download VM image tarball at "(.*)" and unpack it')
def download_tarball_then_unpack(step, url):
step_assert(step).assert_true(utils.networking.http.get(url, bunch_working_dir))
file = os.path.join(bunch_working_dir, utils.networking.http.basename(url))
step_assert(step).assert_true(utils.misc.extract_targz(file, bunch_working_dir))
@step(u'I register VM image "(.*)" for owner "(.*)" using disk "(.*)", ram "(.*)", kernel "(.*)"')
def register_all_images(step, name, owner, disk, ram, kernel):
step_assert(step).assert_true(utils.nova_cli.vm_image_register(name, owner,
os.path.join(bunch_working_dir,disk),
os.path.join(bunch_working_dir,ram),
os.path.join(bunch_working_dir, kernel)))
@step(u'VM image "(.*)" is registered')
def image_registered(step, name):
step_assert(step).assert_true(utils.nova_cli.vm_image_registered(name))
@step(u'I add keypair with name "(.*)" from file "(.*)"')
def add_keypair(step, name, file):
key_path = os.path.join(bunch_working_dir,file)
step_assert(step).assert_true(utils.nova_cli.add_keypair(name, key_path))
@step(u'keypair with name "(.*)" exists')
def keypair_exists(step, name):
step_assert(step).assert_true(utils.nova_cli.keypair_exists(name))
@step(u'I start VM instance "(.*)" using image "(.*)", flavor "(.*)" and keypair "(.*)"')
def start_vm_instance(step, name,image, flavor, keyname):
    # Resolve the human-readable image and flavor names to IDs; each lookup
    # must be unambiguous (exactly one match) before the instance is booted.
    id_image_list = utils.nova_cli.get_image_id_list(image)
    assert_equals(len(id_image_list), 1, "There are %s images with name %s: %s" % (len(id_image_list), name, str(id_image_list)))
    id_flavor_list = utils.nova_cli.get_flavor_id_list(flavor)
    assert_equals(len(id_flavor_list), 1, "There are %s flavors with name %s: %s" % (len(id_flavor_list), name, str(id_flavor_list)))
    image_id = id_image_list[0]
    flavor_id = id_flavor_list[0]
    # Guard against empty ID strings coming back from the CLI wrapper.
    assert_true(image_id != '', image_id)
    assert_true(flavor_id != '', flavor_id)
    step_assert(step).assert_true(utils.nova_cli.start_vm_instance(name, image_id, flavor_id, keyname))
@step(u'I kill all processes:')
def kill_all_processes(step):
for data in step.hashes:
step_assert(step).assert_true(utils.misc.kill_process(data['Process']))
@step(u'VM instance "(.*)" comes up within "(.*)" seconds')
def wait_instance_comes_up_within(step, name, timeout):
step_assert(step).assert_true(utils.nova_cli.wait_instance_comes_up(name, int(timeout)))
@step(u'VM instance "(.*)" is pingable within "(.*)" seconds')
def vm_is_pingable(step, name, timeout):
ip = utils.nova_cli.get_instance_ip(name)
assert_true(ip != '', name)
step_assert(step).assert_true(utils.networking.icmp.probe(ip, int(timeout)))
@step(u'I see that "(.*)" port of VM instance "(.*)" is open and serves "(.*)" protocol')
def check_port_protocol(step, port, name, protocol):
ip = utils.nova_cli.get_instance_ip(name)
assert_true(ip != '', name)
step_assert(step).assert_true(utils.networking.nmap.open_port_serves_protocol(ip, port, protocol))
@step(u'I can log into VM "(.*)" via SSH as "(.*)"')
def check_can_log_via_ssh(step, name, user):
ip = utils.nova_cli.get_instance_ip(name)
assert_true(ip != '', name)
step_assert(step).assert_true(utils.ssh(ip, "exit", user).successful()) | griddynamics/bunch | samples/openstack-smoke/__init__.py | Python | gpl-3.0 | 10,675 |
import sys
from PIL import Image, ImageStat
# Covert image to greyscale, return average pixel brightness.
def brightness_method1(im_file):
    """Average brightness: convert the image to greyscale, return the mean."""
    greyscale = Image.open(im_file).convert('L')
    return ImageStat.Stat(greyscale).mean[0]
# Covert image to greyscale, return RMS pixel brightness.
def brightness_method2(im_file):
    """RMS brightness: convert the image to greyscale, return the RMS value."""
    greyscale = Image.open(im_file).convert('L')
    return ImageStat.Stat(greyscale).rms[0]
# Return results of all methods
def brightness_all_method(im_file):
    """Print the brightness of *im_file* as computed by every method, in order."""
    for method in (brightness_method1, brightness_method2):
        print(method(im_file))
# Used with arguments
# Command-line entry point: compute the brightness of the image whose path is
# given as the first argument.  The original guard ``len(sys.argv) != 0`` was
# always true (argv[0] is the script name), so running the script with no
# argument crashed with an IndexError; require a real argument instead.
if len(sys.argv) > 1:
    brightness_all_method(sys.argv[1])
| mikehankey/fireball_camera | img/get_brightness.py | Python | gpl-3.0 | 669 |
#!/usr/bin/env python
# -*- coding: utf-8; py-indent-offset:4 -*-
###############################################################################
#
# Copyright (C) 2015-2020 Daniel Rodriguez
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###############################################################################
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import backtrader as bt
class DataFilter(bt.AbstractDataBase):
    '''
    This class filters out bars from a given data source. In addition to the
    standard parameters of a DataBase it takes a ``funcfilter`` parameter which
    can be any callable

    Logic:
      - ``funcfilter`` will be called with the underlying data source
        It can be any callable
      - Return value ``True``: current data source bar values will be used
      - Return value ``False``: current data source bar values will be discarded
    '''
    # funcfilter: callable(data) -> bool; decides per-bar acceptance.
    params = (('funcfilter', None),)
    def preload(self):
        # self.p.dataname is the wrapped data feed.  When its current length
        # equals its buffer length it is already fully preloaded.
        if len(self.p.dataname) == self.p.dataname.buflen():
            # if data is not preloaded .... do it
            self.p.dataname.start()
            self.p.dataname.preload()
            self.p.dataname.home()
        # Copy timeframe from data after start (some sources do autodetection)
        self.p.timeframe = self._timeframe = self.p.dataname._timeframe
        self.p.compression = self._compression = self.p.dataname._compression
        super(DataFilter, self).preload()
    def _load(self):
        # Framework hook: produce the next bar for this feed, pulling bars
        # from the underlying feed until one passes the filter.
        if not len(self.p.dataname):
            self.p.dataname.start() # start data if not done somewhere else
        # Tell underlying source to get next data
        while self.p.dataname.next():
            # Try to load the data from the underlying source
            if not self.p.funcfilter(self.p.dataname):
                continue  # bar rejected by the filter -> skip it
            # Data is allowed - Copy size which is "number of lines"
            for i in range(self.p.dataname.size()):
                self.lines[i][0] = self.p.dataname.lines[i][0]
            return True
        return False # no more data from underlying source
| mementum/backtrader | backtrader/filters/datafilter.py | Python | gpl-3.0 | 2,743 |
# -*- coding: utf-8 -*-
from gettext import gettext as _
ACTIVITY_NAME = _('I know America')
PRESENTATION = [
_('Hello friends...'),
_('...tomorrow I have a\ntest about America...'),
_('...and I know nothing!!!'),
_('...what do I do???'),
_('Can I ask you something?'),
_('Will you help me?')
]
PREFIX = [
_('We have to find'),
_('Now we have to find'),
_('We need to find')
]
SUFIX = [
_('Can you tell me where it is?'),
_('Where is it?'),
_('Can you show me where it is?'),
_('Can you find it?')
]
CORRECT = [
_('Very well!'),
_('Brilliant!'),
_('You found it!'),
_('Yes!')
]
WRONG = [
_('No, that is not correct'),
_('No, it is not there'),
_('No, that seems to be wrong'),
_('That is not correct')
]
BYE_C = [
_("Now I'm ready\nfor tomorrow...\nI'm going to play...\nThanks for your help!"),
_("We made it!\nI'm going to play ball...\nBye and thanks\nfor helping me!"),
_("We did it!\nI'm ready for tomorrow...\nI'm playing a while...\nUntil next time!")
]
BYE_W = [
_("Better luck next time\nI'll play a while...\nThanks!"),
_("The next time will be better\nI'm going to play ball...\nThanks!"),
_("Please try again...\nI'm going to play...\nThanks!")
]
CREDITS = [
_("Author: Alan Aguiar"),
_("Send corrections, comments or suggestions to: [email protected]"),
"",
_("This program is free software developed by the community"),
"",
_("This program is based on 'Conozco Uruguay' (Author: Gabriel Eirea)"),
_("Sounds downloaded from freesound.org: btn117.wav courtesy of junggle."),
_("Font: Share-Regular.ttf of urbanfonts.com")
]
| AlanJAS/iknowAmerica | recursos/comun/datos/commons.py | Python | gpl-3.0 | 1,689 |
from hashlib import sha256
from django.test import Client
from requests.auth import AuthBase
SIG_KEY = "HTTP_X_SIGNATURE"
def _generate_signature(secret, path, post_data):
path = bytes(path, "utf-8")
body = post_data
secret = bytes(secret, "utf-8")
if isinstance(body, str):
body = bytes(body, "utf-8")
return sha256(path + body + secret).hexdigest()
class AliceClient(Client):
    """
    Django test client that transparently signs outgoing requests, since
    adding the X-Signature header by hand on every request is error-prone.
    """

    SECRET = "alice_client_test_secret"

    def generic(self, method, path, data='',
                content_type='application/octet-stream', secure=False,
                **extra):
        # Only attach a signature when the caller has not supplied one.
        extra.setdefault(SIG_KEY, self.sign(path, data))
        return Client.generic(
            self, method, path, data=data, content_type=content_type,
            secure=secure, **extra)

    def sign(self, path, post_data):
        """Compute the signature for *path* + *post_data* with the test secret."""
        return _generate_signature(self.SECRET, path, post_data)
class AliceAuthenticator(AuthBase):
    """
    Alice authenticator that can be used with `requests`.

    >>> from alice.tests.client import AliceAuthenticator
    >>> import requests
    >>> requests.get('http://localhost:8000/some_path/', auth=AliceAuthenticator('SECRET!!!'))
    <Response [200]>
    """

    def __init__(self, secret, header='X-Signature'):
        super().__init__()
        self.secret = secret
        self.header = header

    def __call__(self, r):
        # Sign path + body (empty string when the request has no body) and
        # attach the digest under the configured header.
        body = r.body if r.body else ''
        r.headers[self.header] = _generate_signature(self.secret, r.path_url, body)
        return r
| UKTradeInvestment/export-wins-data | alice/tests/client.py | Python | gpl-3.0 | 1,802 |
########################################################################
# File : PilotStatusAgent.py
# Author : Stuart Paterson
########################################################################
""" The Pilot Status Agent updates the status of the pilot jobs in the
PilotAgents database.
"""
__RCSID__ = "$Id$"
from DIRAC import S_OK, S_ERROR, gConfig
from DIRAC.Core.Base.AgentModule import AgentModule
from DIRAC.Core.Utilities import Time
from DIRAC.ConfigurationSystem.Client.Helpers import Registry
from DIRAC.Core.Utilities.SiteCEMapping import getSiteForCE
from DIRAC.Interfaces.API.DiracAdmin import DiracAdmin
from DIRAC.AccountingSystem.Client.Types.Pilot import Pilot as PilotAccounting
from DIRAC.AccountingSystem.Client.DataStoreClient import gDataStoreClient
from DIRAC.WorkloadManagementSystem.Client.PilotManagerClient import PilotManagerClient
from DIRAC.WorkloadManagementSystem.DB.PilotAgentsDB import PilotAgentsDB
from DIRAC.WorkloadManagementSystem.DB.JobDB import JobDB
# NOTE(review): MAX_JOBS_QUERY is not referenced in this module -- confirm use.
MAX_JOBS_QUERY = 10
# Hours a pilot may remain 'Waiting' before clearWaitingPilots() declares it
# Stalled (multiplied by Time.hour below).
MAX_WAITING_STATE_LENGTH = 3
class PilotStatusAgent(AgentModule):
  """Agent keeping the status of pilot jobs in the PilotAgentsDB up to date.

  The specific agents must provide the following methods:
    - initialize() for initial settings
    - beginExecution()
    - execute() - the main method called in the agent cycle
    - endExecution()
    - finalize() - the graceful exit of the method, this one is usually used
      for the agent restart
  """

  # Pilot states still subject to ageing checks in handleOldPilots()
  queryStateList = ['Ready', 'Submitted', 'Running', 'Waiting', 'Scheduled']
  # Terminal pilot states: accountPilots() never overwrites these in the DB data
  finalStateList = ['Done', 'Aborted', 'Cleared', 'Deleted', 'Failed']

  def __init__(self, *args, **kwargs):
    """ c'tor
    """
    AgentModule.__init__(self, *args, **kwargs)

    # DB/client handles are created in initialize(), not here
    self.jobDB = None
    self.pilotDB = None
    self.diracadmin = None

  #############################################################################
  def initialize(self):
    """Set option defaults and instantiate the DB / client helpers.

    :return: S_OK()
    """
    self.am_setOption('PollingTime', 120)
    self.am_setOption('GridEnv', '')
    self.am_setOption('PilotStalledDays', 3)
    self.pilotDB = PilotAgentsDB()
    self.diracadmin = DiracAdmin()
    self.jobDB = JobDB()
    # Retention in days before pilots are cleared: all pilots / aborted pilots
    self.clearPilotsDelay = self.am_getOption('ClearPilotsDelay', 30)
    self.clearAbortedDelay = self.am_getOption('ClearAbortedPilotsDelay', 7)
    self.pilots = PilotManagerClient()

    return S_OK()

  #############################################################################
  def execute(self):
    """The PilotAgent execution method: age out stalled pilots and clear old ones.
    """
    self.pilotStalledDays = self.am_getOption('PilotStalledDays', 3)
    self.gridEnv = self.am_getOption('GridEnv')
    if not self.gridEnv:
      # No specific option found, try a general one: resolve the
      # WorkloadManagement instance of the current setup and use its GridEnv
      setup = gConfig.getValue('/DIRAC/Setup', '')
      if setup:
        instance = gConfig.getValue('/DIRAC/Setups/%s/WorkloadManagement' % setup, '')
        if instance:
          self.gridEnv = gConfig.getValue('/Systems/WorkloadManagement/%s/GridEnv' % instance, '')
    result = self.pilotDB._getConnection()
    if result['OK']:
      connection = result['Value']
    else:
      return result

    # Now handle pilots not updated in the last N days (most likely the Broker is no
    # longer available) and declare them Deleted.
    result = self.handleOldPilots(connection)

    connection.close()

    result = self.pilots.clearPilots(self.clearPilotsDelay, self.clearAbortedDelay)
    if not result['OK']:
      self.log.warn('Failed to clear old pilots in the PilotAgentsDB')

    return S_OK()

  def clearWaitingPilots(self, condDict):
    """ Clear pilots in the faulty Waiting state.

        Pilots matching *condDict* (OwnerDN, OwnerGroup, GridType, Broker)
        that have been Waiting longer than MAX_WAITING_STATE_LENGTH hours
        are set to Stalled.

        :param dict condDict: selection attributes for the pilots
        :return: S_OK() / error structure from selectPilots
    """
    last_update = Time.dateTime() - MAX_WAITING_STATE_LENGTH * Time.hour
    clearDict = {'Status': 'Waiting',
                 'OwnerDN': condDict['OwnerDN'],
                 'OwnerGroup': condDict['OwnerGroup'],
                 'GridType': condDict['GridType'],
                 'Broker': condDict['Broker']}
    result = self.pilotDB.selectPilots(clearDict, older=last_update)
    if not result['OK']:
      self.log.warn('Failed to get the Pilot Agents for Waiting state')
      return result
    if not result['Value']:
      return S_OK()
    refList = result['Value']

    for pilotRef in refList:
      self.log.info('Setting Waiting pilot to Stalled: %s' % pilotRef)
      result = self.pilotDB.setPilotStatus(pilotRef, 'Stalled', statusReason='Exceeded max waiting time')

    return S_OK()

  def clearParentJob(self, pRef, pDict, connection):
    """ Clear the parameteric parent job from the PilotAgentsDB.

        If at least one child pilot is already registered, the parent entry is
        simply deleted.  Otherwise the children are first registered with the
        parent's task-queue/owner/broker information, their statuses copied
        over, and only then is the parent removed.
    """
    childList = pDict['ChildRefs']

    # Check that at least one child is in the database
    children_ok = False
    for child in childList:
      result = self.pilotDB.getPilotInfo(child, conn=connection)
      if result['OK']:
        if result['Value']:
          children_ok = True

    if children_ok:
      # Children already registered: the parent record is redundant
      return self.pilotDB.deletePilot(pRef, conn=connection)
    else:
      # No children yet: propagate the parent's attributes onto each child
      self.log.verbose('Adding children for parent %s' % pRef)
      result = self.pilotDB.getPilotInfo(pRef)
      parentInfo = result['Value'][pRef]
      tqID = parentInfo['TaskQueueID']
      ownerDN = parentInfo['OwnerDN']
      ownerGroup = parentInfo['OwnerGroup']
      broker = parentInfo['Broker']
      gridType = parentInfo['GridType']
      result = self.pilotDB.addPilotTQReference(childList, tqID, ownerDN, ownerGroup,
                                                broker=broker, gridType=gridType)
      if not result['OK']:
        return result
      children_added = True
      for chRef, chDict in pDict['ChildDicts'].items():
        result = self.pilotDB.setPilotStatus(chRef, chDict['Status'],
                                             destination=chDict['DestinationSite'],
                                             conn=connection)
        if not result['OK']:
          children_added = False
      if children_added:
        # Parent only removed once every child status was stored successfully
        result = self.pilotDB.deletePilot(pRef, conn=connection)
      else:
        return S_ERROR('Failed to add children')
    return S_OK()

  def handleOldPilots(self, connection):
    """
      select all pilots that have not been updated in the last N days and declared them
      Deleted, accounting for them.

      Pilots that still have at least one recently-updated job attached are
      left alone.  Accounting/killing is flushed in batches of 100.
    """
    pilotsToAccount = {}
    timeLimitToConsider = Time.toString(Time.dateTime() - Time.day * self.pilotStalledDays)
    result = self.pilotDB.selectPilots({'Status': self.queryStateList},
                                       older=timeLimitToConsider,
                                       timeStamp='LastUpdateTime')
    if not result['OK']:
      self.log.error('Failed to get the Pilot Agents')
      return result
    if not result['Value']:
      return S_OK()

    refList = result['Value']
    result = self.pilotDB.getPilotInfo(refList)
    if not result['OK']:
      self.log.error('Failed to get Info for Pilot Agents')
      return result

    pilotsDict = result['Value']

    for pRef in pilotsDict:
      # Skip pilots whose jobs were updated within the stalled window
      if pilotsDict[pRef].get('Jobs') and self._checkJobLastUpdateTime(pilotsDict[pRef]['Jobs'], self.pilotStalledDays):
        self.log.debug('%s should not be deleted since one job of %s is running.' %
                       (str(pRef), str(pilotsDict[pRef]['Jobs'])))
        continue

      deletedJobDict = pilotsDict[pRef]
      deletedJobDict['Status'] = 'Deleted'
      deletedJobDict['StatusDate'] = Time.dateTime()
      pilotsToAccount[pRef] = deletedJobDict
      # Flush in batches to bound the size of accounting/kill requests
      if len(pilotsToAccount) > 100:
        self.accountPilots(pilotsToAccount, connection)
        self._killPilots(pilotsToAccount)
        pilotsToAccount = {}

    # Flush the final (possibly empty) batch
    self.accountPilots(pilotsToAccount, connection)
    self._killPilots(pilotsToAccount)

    return S_OK()

  def accountPilots(self, pilotsToAccount, connection):
    """ account for pilots

        Sends accounting records for *pilotsToAccount* (unless disabled via
        the PilotAccountingEnabled option) and, if sending succeeded or
        accounting is disabled, stores the new statuses in the PilotAgentsDB.
    """
    accountingFlag = False
    pae = self.am_getOption('PilotAccountingEnabled', 'yes')
    if pae.lower() == "yes":
      accountingFlag = True

    if not pilotsToAccount:
      self.log.info('No pilots to Account')
      return S_OK()

    accountingSent = False
    if accountingFlag:
      retVal = self.pilotDB.getPilotInfo(pilotsToAccount.keys(), conn=connection)
      if not retVal['OK']:
        self.log.error('Fail to retrieve Info for pilots', retVal['Message'])
        return retVal
      dbData = retVal['Value']
      for pref in dbData:
        if pref in pilotsToAccount:
          # Never downgrade a pilot that already reached a final state
          if dbData[pref]['Status'] not in self.finalStateList:
            dbData[pref]['Status'] = pilotsToAccount[pref]['Status']
            dbData[pref]['DestinationSite'] = pilotsToAccount[pref]['DestinationSite']
            dbData[pref]['LastUpdateTime'] = pilotsToAccount[pref]['StatusDate']

      retVal = self.__addPilotsAccountingReport(dbData)
      if not retVal['OK']:
        self.log.error('Fail to retrieve Info for pilots', retVal['Message'])
        return retVal

      self.log.info("Sending accounting records...")
      retVal = gDataStoreClient.commit()
      if not retVal['OK']:
        self.log.error("Can't send accounting reports", retVal['Message'])
      else:
        self.log.info("Accounting sent for %s pilots" % len(pilotsToAccount))
        accountingSent = True

    if not accountingFlag or accountingSent:
      for pRef in pilotsToAccount:
        pDict = pilotsToAccount[pRef]
        self.log.verbose('Setting Status for %s to %s' % (pRef, pDict['Status']))
        self.pilotDB.setPilotStatus(pRef,
                                    pDict['Status'],
                                    pDict['DestinationSite'],
                                    pDict['StatusDate'],
                                    conn=connection)

    return S_OK()

  def __addPilotsAccountingReport(self, pilotsData):
    """ fill accounting data

        Registers one PilotAccounting record per entry of *pilotsData* with
        gDataStoreClient (commit is done by the caller).
    """
    for pRef in pilotsData:
      pData = pilotsData[pRef]
      pA = PilotAccounting()
      pA.setEndTime(pData['LastUpdateTime'])
      pA.setStartTime(pData['SubmissionTime'])
      retVal = Registry.getUsernameForDN(pData['OwnerDN'])
      if not retVal['OK']:
        userName = 'unknown'
        self.log.error("Can't determine username for dn:", pData['OwnerDN'])
      else:
        userName = retVal['Value']
      pA.setValueByKey('User', userName)
      pA.setValueByKey('UserGroup', pData['OwnerGroup'])
      # Map the CE back to a site name; fall back to 'Unknown'
      result = getSiteForCE(pData['DestinationSite'])
      if result['OK'] and result['Value'].strip():
        pA.setValueByKey('Site', result['Value'].strip())
      else:
        pA.setValueByKey('Site', 'Unknown')
      pA.setValueByKey('GridCE', pData['DestinationSite'])
      pA.setValueByKey('GridMiddleware', pData['GridType'])
      pA.setValueByKey('GridResourceBroker', pData['Broker'])
      pA.setValueByKey('GridStatus', pData['Status'])
      if 'Jobs' not in pData:
        pA.setValueByKey('Jobs', 0)
      else:
        pA.setValueByKey('Jobs', len(pData['Jobs']))
      self.log.verbose("Added accounting record for pilot %s" % pData['PilotID'])
      retVal = gDataStoreClient.addRegister(pA)
      if not retVal['OK']:
        return retVal
    return S_OK()

  def _killPilots(self, acc):
    """Kill every pilot in *acc* (dict keyed by pilot reference) via DiracAdmin.

    Failures are logged; no value is returned.
    """
    for i in sorted(acc.keys()):
      # Only attempt the kill if the pilot is known and has a Status
      result = self.diracadmin.getPilotInfo(i)
      if result['OK'] and i in result['Value'] and 'Status' in result['Value'][i]:
        ret = self.diracadmin.killPilot(str(i))
        if ret['OK']:
          self.log.info("Successfully deleted: %s (Status : %s)" % (i, result['Value'][i]['Status']))
        else:
          self.log.error("Failed to delete pilot: ", "%s : %s" % (i, ret['Message']))
      else:
        self.log.error("Failed to get pilot info", "%s : %s" % (i, str(result)))

  def _checkJobLastUpdateTime(self, joblist, StalledDays):
    """Return True if any job in *joblist* was updated within *StalledDays* days.

    Used to protect pilots with recently active jobs from deletion.
    """
    timeLimitToConsider = Time.dateTime() - Time.day * StalledDays
    ret = False
    for jobID in joblist:
      result = self.jobDB.getJobAttributes(int(jobID))
      if result['OK']:
        if 'LastUpdateTime' in result['Value']:
          lastUpdateTime = result['Value']['LastUpdateTime']
          if Time.fromString(lastUpdateTime) > timeLimitToConsider:
            ret = True
            self.log.debug(
                'Since %s updates LastUpdateTime on %s this does not to need to be deleted.' %
                (str(jobID), str(lastUpdateTime)))
            break
      else:
        self.log.error("Error taking job info from DB", result['Message'])
    return ret
| andresailer/DIRAC | WorkloadManagementSystem/Agent/PilotStatusAgent.py | Python | gpl-3.0 | 12,315 |
#!/usr/bin/env python3
# "Poorly-Coded-Dungeon": a console text adventure.  All game state lives in
# the module-level variables below; the main loop (further down) reads and
# mutates them directly.

goblin_attack = 40      # NOTE(review): not referenced in the visible code -- confirm use
goblin1_alive = True
goblin2_alive = True
goblins_alive = 2       # number of goblins still alive (0-2)
goblin1_stunned = False
goblin2_stunned = False
goblin1_health = 30
goblin2_health = 30
rat_alive = True
rat_health = 15
fists = True            # NOTE(review): not referenced in the visible code -- confirm use
sword = False           # picking up the sword adds 5 to `weapon`
shield = False          # picking up the shield adds 10 to `defense`
armor = False           # picking up the armor adds 20 to `defense`
end = False             # main-loop sentinel; set True on death / game over
room = 1                # current room number (1 = starting room)
defense = 0
health = 50
weapon = 10             # attack value: 10 bare-handed, +5 with the sword
emerald = False
emerald_bribe = False   # NOTE(review): not referenced in the visible code -- confirm use
rope_cut = False

print("Welcome to the Poorly-Coded-Dungeon!")
print("What is your name, player?")
player = input("> ")
print("Well,", player, "you start...IN A DUNGEON!! (big surprise)")
while not end:
#starting room
if room == 1:
print("What do you want to do?")
action = input("> ")
if action == "look around":
if sword:
print("Looking around, there is nothing else of interest; just a way in either direction.")
else:
print("Looking around, you spot a sword. There are also ways out going in any direction.")
elif action == "grab sword":
print("You grab the sword and equip it.")
sword = True
weapon += 5
elif action == "take sword":
print("You take and equip the sword.")
sword = True
weapon += 5
elif action == "help":
print("Your commands are 'look around', 'move forward', 'move left', 'move right', 'move back', 'stats', 'take *item*', 'inspect *item and/or monster*, and 'attack *monster name*'")
elif action == "stats":
print("Health:", health, "Attack:", weapon, "Defense:", defense)
elif action == "move forward":
print("You move forward.")
print("This room is empty. There is only a way forwards and the way you came.")
print("The way forwards leads outside!")
print("You hear some chattering in the distance.")
room = 2
elif action == "move back":
print("You move back.")
print("This room is filled with a lot of discarded items.")
room = 3
elif action == "move left":
print("You move to the left.")
print("You spring a trap!")
health -= 5
print("Don't you know anything about scrolling videogames?")
print("Always go to the RIGHT!")
print("Also, you're not alone in here.")
room = 4
elif action == "move right":
print("You move to the right.")
room = 5
if emerald == False:
if rope_cut == True:
print("This room is mostly empty, save for an emerald that you left in the dead-center of the room")
print("You could easily grab this emerald now.")
else:
print("This room is mostly empty, save for an emerald in the dead-center of the room.")
else:
print("This room is completely empty now. Nothing left of interest.")
else:
print("Sorry, try a different command.")
#Room 2
if room == 2:
print("What do you want to do?")
action = input("> ")
if action == "move back":
print("You move back to the starting room")
room = 1
elif action == "move forward":
print("You move forwards, outside of the dungeon! It's dark outside.")
room = 6
if goblins_alive == 2:
if emerald == True:
print("It seems the chattering you heard was two goblins! They spot you and wield their weapons!")
print("They are about to step forward to attack, but suddenly stop, noticing your shiny emerald.")
else:
print("It seems the chattering you heard was two goblins! They spot you and wield their weapons!")
else:
print("There is nothing left in this room.")
else:
print("Sorry, try a different command.")
#Room 3
if room == 3:
print("What do you want to do?")
action = input("> ")
if action == "search items":
print("Searching through the rabble, you find a shield!")
print("Take shield with you?")
action = input("(yes/no) ")
if action == "yes":
print("You take the shield and put it on")
defense += 10
shield = True
else:
print("You leave the shield as it is.")
elif action == "inspect items":
print("Searching through the rabble, you find a shield!")
print("Take shield with you?")
action = input("> ")
if action == "yes":
print("You take the shield and put it on")
defense += 10
shield = True
else:
print("You leave the shield as it is.")
elif action == "move forward":
print("You go back to the starting room.")
room = 1
elif action == "look around":
print("Several unuseable items are thrown about all over the place.")
print("Who knows why for.")
print("There may be a chance some of it may be useable, but you'd have to get your hands dirty and search.")
else:
print("I'm sorry, please try a different command.")
# Room 4
if room == 4:
print("What do you want to do?")
action = input("> ")
if action == "look around":
if rat_alive == True:
print("You spot a giant rat!")
print("Or rather, he spots you!")
print("Why, oh WHY does EVERY MMORPG start you off killing rats for experience!?")
print("This rat has", rat_health, "health")
else:
print("With the rat dead, you see he was guarding something shiny")
print("Inspect shiny?")
action = input("> ")
if action == "yes":
print("It's a plate of armor!")
print("Take with you?")
action = input("> ")
if action == "yes":
print("You take the armor with you!")
armor = True
defense += 20
else:
print("You leave the armor be")
else:
print("You leave shiny be. Could have been something heavy anyway.")
elif action == "attack rat":
print("You strike at the rat!")
if sword == True:
print("In one mighty blow, you take off it's head!")
rat_alive = False
rat_health = 0
print("You get the sense that the rat was guarding something.")
else:
print("You hit the rat with your fist! The rat takes damage!")
rat_health -= 5
if rat_health == 0:
print("The rat is dead!")
rat_alive = False
print("You get the sense that the rat was guarding something.")
else:
print("The rat fights back! Gnawing at your injured foot!")
health -= 5
if health <= 0:
print("You have died!")
print("---GAME OVER---")
end = True
elif action == "inspect rat":
print("The rat's health is", rat_health)
elif action == "stats":
print("Health:", health, "Attack:", weapon, "Defense:", defense)
elif action == "move back":
if rat_alive == True:
print("You start to move back, but the rat prevents escape and attacks!")
health -=10
if health <= 0:
print("You have died!")
print("---GAME OVER---")
end = True
else:
print("You move back.")
room = 1
else:
print("Sorry, please try a different command.")
#Room 5
if room == 5:
print("What would you like to do?")
action = input("> ")
if action == "take emerald":
if emerald == True:
print("There is no emerald, you took it already.")
print("Greedy.")
else:
if rope_cut == True:
print("You easily take the emerald you left here earlier")
emerald = True
else:
print("You attempt to take the emerald, but find that it pulls a rope in the ground.")
print("The floor gives way and you fall down an endless pit")
print("Congratulations, you now have eternity to think on how your greed was the end of you!")
print("---GAME OVER---")
end = True
elif action == "inspect emerald":
if emerald == True:
print("There is no emerald, you took it already.")
print("You're just full of greed, aren't ya?")
else:
if rope_cut == True:
print("What's there to inspect? It's just the emerald you left here earlier.")
else:
print("Upon inspection, you see that there is a rope attatched to the bottom of the emerald.")
print("Try to cut rope?")
answer = input("(yes/no) ")
if answer == "yes":
if sword == True:
print("You take your sword and Cut the Rope! The emerald is free!")
rope_cut = True
print("Take emerald?")
choice = input("(yes/no) ")
if choice == "no":
print("Well, that seemed rather pointless then.")
elif choice == "yes":
print("You take the emerald with you.")
emerald = True
else:
print("How difficult is it to type in 'yes' or 'no'?")
else:
print("You try with all your might, but you can't Cut the Rope!")
elif answer == "no":
print("Alright....you leave it as it is.")
else:
print("Your options are 'yes' and 'no'. No more have been coded in")
elif action == "move back":
print("You move back")
room = 1
elif action== "stats":
print("Health:", health, "Attack", attack, "Defense", defense)
else:
print("Sorry, try another command.")
#Room 6
if room == 6:
print("What do you want to do?")
action = input("> ")
if action == "attack goblin":
if goblin1_alive == False:
if goblin2_alive == False:
print("There are no more goblins alive.")
print("You killed them both.")
goblins_alive = 0
room = 7
else:
print("You attack the second goblin!")
room = 9
elif goblin2_alive == False:
if goblin1_alive == False:
print("There are no more goblins alive.")
print("You killed them both.")
goblins_alive = 0
room = 7
else:
print("You attack the first goblin!")
room = 10
else:
print("Which Goblin?")
choice = input("1/2: ")
if choice == "1":
print("Attack Goblin", choice, "with what?")
choice2 = input("shield/weapon: ")
if choice2 == "shield":
if shield == True:
print("You bash the first goblin with your shield!")
print("Goblin", choice, "takes 1 damage!")
goblin1_health -=1
print("Goblin", choice, "is stunned!")
goblin1_stunned = True
if goblin2_alive == True:
if goblin2_stunned == True:
print("The second goblin skips out on attacking and regains control!")
print("The second goblin is no longer stunned!")
goblin2_stunned = False
else:
print("The second goblin attacks!")
if defense == 30:
print("The goblin hits you for 10 damage!")
health -=10
if health <= 0:
print("The goblin killed you!")
print("You have died!")
print("---GAME OVER---")
end = True
elif defense == 20:
print("The goblin hits you for 20 damage!")
health -= 20
if health <= 0:
print("The goblin killed you!")
print("You have died!")
print("---GAME OVER---")
end = True
elif defense == 10:
print("The goblin hits you for 30 damage!")
health -= 30
if health <= 0:
print("The goblin killed you!")
print("You have died!")
print("---GAME OVER---")
end = True
else:
print("The goblin hits you full force!")
health -= 40
if health <= 0:
print("The goblin killed you!")
print("You have died!")
print("---GAME OVER---")
end = True
else:
print("Get ready for the next round!")
else:
print("You don't have the shield!")
elif choice2 == "weapon":
print("You take your weapon and strike at the first goblin!")
goblin1_health -= weapon
if goblin1_health <= 0:
print("The first goblin died!")
goblin1_alive = False
if goblin2_alive == False:
print("You've killed both goblins!")
goblins_alive = 0
room = 7
else:
goblins_alive = 1
elif goblin1_stunned == False:
print("The goblin fights back!")
if defense == 30:
print("The goblin hits you for 10 damage!")
health -=10
if health <= 0:
print("The goblin killed you!")
print("You have died!")
print("---GAME OVER---")
end = True
elif goblin2_alive == False:
print("Get ready for the next round!")
else:
if goblin2_stunned == True:
print("The second goblin skips out on attacking and regains control!")
print("The second goblin is no longer stunned!")
else:
print("The second goblin attacks!")
if defense == 30:
print("The goblin hits you for 10 damage!")
health -=10
if health <= 0:
print("The goblin killed you!")
print("You have died!")
print("---GAME OVER---")
end = True
elif defense == 20:
print("The goblin hits you for 20 damage!")
health -= 20
if health <= 0:
print("The goblin killed you!")
print("You have died!")
print("---GAME OVER---")
end = True
elif defense == 10:
print("The goblin hits you for 30 damage!")
health -= 30
if health <= 0:
print("The goblin killed you!")
print("You have died!")
print("---GAME OVER---")
end = True
else:
print("The goblin hits you full force!")
health -= 40
if health <= 0:
print("The goblin killed you!")
print("You have died!")
print("---GAME OVER---")
end = True
elif defense == 20:
print("The goblin hits you for 20 damage!")
health -= 20
if health <= 0:
print("The goblin killed you!")
print("You have died!")
print("---GAME OVER---")
end = True
elif goblin2_alive == False:
print("Get ready for the next round!")
else:
if goblin2_stunned == True:
print("The second goblin skips out on attacking and regains control!")
print("The second goblin is no longer stunned!")
else:
print("The second goblin attacks!")
if defense == 30:
print("The goblin hits you for 10 damage!")
health -=10
if health <= 0:
print("The goblin killed you!")
print("You have died!")
print("---GAME OVER---")
end = True
elif defense == 20:
print("The goblin hits you for 20 damage!")
health -= 20
if health <= 0:
print("The goblin killed you!")
print("You have died!")
print("---GAME OVER---")
end = True
elif defense == 10:
print("The goblin hits you for 30 damage!")
health -= 30
if health <= 0:
print("The goblin killed you!")
print("You have died!")
print("---GAME OVER---")
end = True
else:
print("The goblin hits you full force!")
health -= 40
if health <= 0:
print("The goblin killed you!")
print("You have died!")
print("---GAME OVER---")
end = True
elif defense == 10:
print("The goblin hits you for 30 damage!")
health -= 30
if health <= 0:
print("The goblin killed you!")
print("You have died!")
print("---GAME OVER---")
end = True
elif goblin2_alive == False:
print("Get ready for the next round!")
else:
if goblin2_stunned == True:
print("The second goblin skips out on attacking and regains control!")
print("The second goblin is no longer stunned!")
else:
print("The second goblin attacks!")
if defense == 30:
print("The goblin hits you for 10 damage!")
health -=10
if health <= 0:
print("The goblin killed you!")
print("You have died!")
print("---GAME OVER---")
end = True
elif defense == 20:
print("The goblin hits you for 20 damage!")
health -= 20
if health <= 0:
print("The goblin killed you!")
print("You have died!")
print("---GAME OVER---")
end = True
elif defense == 10:
print("The goblin hits you for 30 damage!")
health -= 30
if health <= 0:
print("The goblin killed you!")
print("You have died!")
print("---GAME OVER---")
end = True
else:
print("The goblin hits you full force!")
health -= 40
if health <= 0:
print("The goblin killed you!")
print("You have died!")
print("---GAME OVER---")
end = True
else:
print("The goblin hits you full force!")
health -= 40
if health <= 0:
print("The goblin killed you!")
print("You have died!")
print("---GAME OVER---")
end = True
elif goblin2_alive == False:
print("Get ready for the next round!")
else:
if goblin2_stunned == True:
print("The second goblin skips out on attacking and regains control!")
print("The second goblin is no longer stunned!")
else:
print("The second goblin attacks!")
if defense == 30:
print("The goblin hits you for 10 damage!")
health -=10
if health <= 0:
print("The goblin killed you!")
print("You have died!")
print("---GAME OVER---")
end = True
elif defense == 20:
print("The goblin hits you for 20 damage!")
health -= 20
if health <= 0:
print("The goblin killed you!")
print("You have died!")
print("---GAME OVER---")
end = True
elif defense == 10:
print("The goblin hits you for 30 damage!")
health -= 30
if health <= 0:
print("The goblin killed you!")
print("You have died!")
print("---GAME OVER---")
end = True
else:
print("The goblin hits you full force!")
health -= 40
if health <= 0:
print("The goblin killed you!")
print("You have died!")
print("---GAME OVER---")
end = True
else:
print("The first goblin skips out on attacking and regains control!")
print("The first goblin is no longer stunned!")
goblin1_stunned = False
if goblin2_alive == False:
print("Get ready for the next round!")
else:
if goblin2_stunned == True:
print("The second goblin skips out on attacking and regains control!")
print("The second goblin is no longer stunned!")
else:
print("The second goblin attacks!")
if defense == 30:
print("The goblin hits you for 10 damage!")
health -=10
if health <= 0:
print("The goblin killed you!")
print("You have died!")
print("---GAME OVER---")
end = True
elif defense == 20:
print("The goblin hits you for 20 damage!")
health -= 20
if health <= 0:
print("The goblin killed you!")
print("You have died!")
print("---GAME OVER---")
end = True
elif defense == 10:
print("The goblin hits you for 30 damage!")
health -= 30
if health <= 0:
print("The goblin killed you!")
print("You have died!")
print("---GAME OVER---")
end = True
else:
print("The goblin hits you full force!")
health -= 40
if health <= 0:
print("The goblin killed you!")
print("You have died!")
print("---GAME OVER---")
end = True
elif choice == "2":
print("Attack Goblin", choice, "with what?")
choice2 = input("shield/weapon: ")
if choice2 == "shield":
if shield == True:
print("You bash the second goblin with your shield!")
print("Goblin", choice, "takes 1 damage!")
goblin2_health -=1
print("Goblin", choice, "is stunned!")
goblin2_stunned = True
if goblin1_alive == True:
if goblin1_stunned == True:
print("The first goblin skips out on attacking and regains control!")
print("The first goblin is no longer stunned!")
goblin1_stunned = False
else:
print("The first goblin attacks!")
if defense == 30:
print("The goblin hits you for 10 damage!")
health -=10
if health <= 0:
print("The goblin killed you!")
print("You have died!")
print("---GAME OVER---")
end = True
elif defense == 20:
print("The goblin hits you for 20 damage!")
health -= 20
if health <= 0:
print("The goblin killed you!")
print("You have died!")
print("---GAME OVER---")
end = True
elif defense == 10:
print("The goblin hits you for 30 damage!")
health -= 30
if health <= 0:
print("The goblin killed you!")
print("You have died!")
print("---GAME OVER---")
end = True
else:
print("The goblin hits you full force!")
health -= 40
if health <= 0:
print("The goblin killed you!")
print("You have died!")
print("---GAME OVER---")
end = True
else:
print("Get ready for the next round!")
else:
print("You don't have the shield!")
elif choice2 == "weapon":
print("You take your weapon and strike at the second goblin!")
goblin2_health -= weapon
if goblin2_health <= 0:
print("The second goblin died!")
goblin2_alive = False
if goblin1_alive == False:
print("You've killed both goblins!")
goblins_alive = 0
room = 7
else:
goblins_alive = 1
elif goblin2_stunned == False:
print("The goblin fights back!")
if defense == 30:
print("The goblin hits you for 10 damage!")
health -=10
if health <= 0:
print("The goblin killed you!")
print("You have died!")
print("---GAME OVER---")
end = True
elif goblin1_alive == False:
print("Get ready for the next round!")
else:
if goblin1_stunned == True:
print("The first goblin skips out on attacking and regains control!")
print("The first goblin is no longer stunned!")
else:
print("The first goblin attacks!")
if defense == 30:
print("The goblin hits you for 10 damage!")
health -=10
if health <= 0:
print("The goblin killed you!")
print("You have died!")
print("---GAME OVER---")
end = True
elif defense == 20:
print("The goblin hits you for 20 damage!")
health -= 20
if health <= 0:
print("The goblin killed you!")
print("You have died!")
print("---GAME OVER---")
end = True
elif defense == 10:
print("The goblin hits you for 30 damage!")
health -= 30
if health <= 0:
print("The goblin killed you!")
print("You have died!")
print("---GAME OVER---")
end = True
else:
print("The goblin hits you full force!")
health -= 40
if health <= 0:
print("The goblin killed you!")
print("You have died!")
print("---GAME OVER---")
end = True
elif defense == 20:
print("The goblin hits you for 20 damage!")
health -= 20
if health <= 0:
print("The goblin killed you!")
print("You have died!")
print("---GAME OVER---")
end = True
elif goblin1_alive == False:
print("Get ready for the next round!")
else:
if goblin1_stunned == True:
print("The first goblin skips out on attacking and regains control!")
print("The first goblin is no longer stunned!")
else:
print("The first goblin attacks!")
if defense == 30:
print("The goblin hits you for 10 damage!")
health -=10
if health <= 0:
print("The goblin killed you!")
print("You have died!")
print("---GAME OVER---")
end = True
elif defense == 20:
print("The goblin hits you for 20 damage!")
health -= 20
if health <= 0:
print("The goblin killed you!")
print("You have died!")
print("---GAME OVER---")
end = True
elif defense == 10:
print("The goblin hits you for 30 damage!")
health -= 30
if health <= 0:
print("The goblin killed you!")
print("You have died!")
print("---GAME OVER---")
end = True
else:
print("The goblin hits you full force!")
health -= 40
if health <= 0:
print("The goblin killed you!")
print("You have died!")
print("---GAME OVER---")
end = True
elif defense == 10:
print("The goblin hits you for 30 damage!")
health -= 30
if health <= 0:
print("The goblin killed you!")
print("You have died!")
print("---GAME OVER---")
end = True
elif goblin1_alive == False:
print("Get ready for the next round!")
else:
if goblin1_stunned == True:
print("The first goblin skips out on attacking and regains control!")
print("The first goblin is no longer stunned!")
else:
print("The first goblin attacks!")
if defense == 30:
print("The goblin hits you for 10 damage!")
health -=10
if health <= 0:
print("The goblin killed you!")
print("You have died!")
print("---GAME OVER---")
end = True
elif defense == 20:
print("The goblin hits you for 20 damage!")
health -= 20
if health <= 0:
print("The goblin killed you!")
print("You have died!")
print("---GAME OVER---")
end = True
elif defense == 10:
print("The goblin hits you for 30 damage!")
health -= 30
if health <= 0:
print("The goblin killed you!")
print("You have died!")
print("---GAME OVER---")
end = True
else:
print("The goblin hits you full force!")
health -= 40
if health <= 0:
print("The goblin killed you!")
print("You have died!")
print("---GAME OVER---")
end = True
else:
print("The goblin hits you full force!")
health -= 40
if health <= 0:
print("The goblin killed you!")
print("You have died!")
print("---GAME OVER---")
end = True
elif goblin1_alive == False:
print("Get ready for the next round!")
else:
if goblin1_stunned == True:
print("The first goblin skips out on attacking and regains control!")
print("The first goblin is no longer stunned!")
else:
print("The first goblin attacks!")
if defense == 30:
print("The goblin hits you for 10 damage!")
health -=10
if health <= 0:
print("The goblin killed you!")
print("You have died!")
print("---GAME OVER---")
end = True
elif defense == 20:
print("The goblin hits you for 20 damage!")
health -= 20
if health <= 0:
print("The goblin killed you!")
print("You have died!")
print("---GAME OVER---")
end = True
elif defense == 10:
print("The goblin hits you for 30 damage!")
health -= 30
if health <= 0:
print("The goblin killed you!")
print("You have died!")
print("---GAME OVER---")
end = True
else:
print("The goblin hits you full force!")
health -= 40
if health <= 0:
print("The goblin killed you!")
print("You have died!")
print("---GAME OVER---")
end = True
else:
print("The second goblin skips out on attacking and regains control!")
print("The second goblin is no longer stunned!")
goblin2_stunned = False
if goblin1_alive == False:
print("Get ready for the next round!")
else:
if goblin1_stunned == True:
print("The first goblin skips out on attacking and regains control!")
print("The first goblin is no longer stunned!")
else:
print("The first goblin attacks!")
if defense == 30:
print("The goblin hits you for 10 damage!")
health -=10
if health <= 0:
print("The goblin killed you!")
print("You have died!")
print("---GAME OVER---")
end = True
elif defense == 20:
print("The goblin hits you for 20 damage!")
health -= 20
if health <= 0:
print("The goblin killed you!")
print("You have died!")
print("---GAME OVER---")
end = True
elif defense == 10:
print("The goblin hits you for 30 damage!")
health -= 30
if health <= 0:
print("The goblin killed you!")
print("You have died!")
print("---GAME OVER---")
end = True
else:
print("The goblin hits you full force!")
health -= 40
if health <= 0:
print("The goblin killed you!")
print("You have died!")
print("---GAME OVER---")
end = True
else:
print("Sorry, please type 'weapon' or 'shield'")
else:
print("Sorry, please type '1' or '2'")
elif action == "inspect goblin":
print("Which goblin?")
choice = input("1/2 ")
if choice == "1":
print("The first goblin's health is:", goblin1_health)
elif choice == "2":
print("The second goblin's health is:", goblin2_health)
else:
print("Sorry, please choose '1' or '2'")
elif action == "stats":
print("Health:", health, "Attack:", weapon, "Defense:", defense)
elif action == "inspect goblin 1":
print("The first goblin's health is:", goblin1_health)
elif action == "inspect goblin 2":
print("The second goblin's health is:", goblin2_health)
elif action == "bribe goblins":
if emerald == True:
print("You wave your pretty emerald in front of their greedy little faces, they are entranced by it's beauty.")
print("Throw emerald at them?")
action2 = input("(yes/no) ")
if action2 == "yes":
print("You throw the emerald at them and they immediately jump for it!")
print("One of them grabs it and the other starts fighting for it.")
print("It seems whoever has the emerald tries running from the other.")
print("Soon enough, both goblins are gone, and the fighting continues elsewhere.")
print("You are safe, and both goblins are gone!")
goblins_alive = 0
goblin1_alive = False
goblin2_alive = False
emerald = False
emerald_bribe = True
room = 7
elif action2 == "no":
print("Turns out, flashing the emerald at them wasn't such a good idea.")
print("They jump for it, scratching, clawing, biting, and slicing at you until it's in their grasp.")
print("Through their determination, speed, and the fact that they are both on you, you are unable to attack or push them back.")
print("They soon realize that you are the one holding the emerald up and slice your throat.")
print("With you dead, the goblins take the emerald and fight over it themselves, running from one another when it is in their grasp.")
print("The goblins have killed you!")
print("You have died!")
print("---GAME OVER---")
end = True
else:
print("I don't know what you typed in...the coder didn't put any other options, sooo...")
else:
print("You don't have anything they want!")
elif action == "look around":
print("What do you want? There are goblins in front of you! Nothing to aid your combat either!")
elif action == "move back":
print("You can't escape!")
else:
print("You don't have any other options, just follow the script.")
#Room 7
    # Room 7: the clearing after the goblin fight.  The flavour text depends
    # on how the player got here (emerald bribe / emerald kept / plain win),
    # but every branch offers the same choice: go on to room 8 or stand still
    # and die.  Any answer other than "yes"/"no" is silently ignored here.
    if room == 7:
        if emerald_bribe == True:
            # Player threw the emerald at the goblins instead of fighting.
            print("Well, you survived the cave, got a cool sword, bribed the goblins, and are here alive!")
            print("But is that really all there is to this game? Is there any more? What happens next?")
            print("Just as you ask yourself these questions, you hear leaves rustling in the distance.")
            print("Investigate?")
            action = input("(yes/no) ")
            if action == "yes":
                print("You move forward to investigate...")
                room = 8
            elif action == "no":
                print("Okay....there's nothing left to do here...soo.....The end I guess?")
                print("You decide to stand around for eternity!")
                print("Turns out that you need things like food, water, movement, etc. to live!")
                print("You have died!")
                print("---GAME OVER---")
                end = True
        elif emerald == True:
            # Player still carries the emerald and killed the goblins.
            print("Well, you survived the cave, got a shiny emerald, defeated two goblins, and are here alive!")
            print("But is that really all there is to this game? Is there any more? What happens next?")
            print("Just as you ask yourself these questions, you hear leaves rustling in the distance.")
            print("Investigate?")
            action = input("(yes/no) ")
            if action == "yes":
                print("You move forward to investigate...")
                room = 8
            elif action == "no":
                print("Okay....there's nothing left to do here...soo.....The end I guess?")
                print("You decide to stand around for eternity!")
                print("Turns out that you need things like food, water, movement, etc. to live!")
                print("You have died!")
                print("---GAME OVER---")
                end = True
        else:
            # No emerald at all: plain victory text, same choice.
            print("Well, you survived the cave, defeated two goblins, and are here alive!")
            print("But is that really all there is to this game? Is there any more? What happens next?")
            print("Just as you ask yourself these questions, you hear leaves rustling in the distance.")
            print("Investigate?")
            action = input("(yes/no) ")
            if action == "yes":
                print("You move forward to investigate...")
                room = 8
            elif action == "no":
                print("Okay....there's nothing left to do here...soo.....The end I guess?")
                print("You decide to stand around for eternity!")
                print("Turns out that you need things like food, water, movement, etc. to live!")
                print("You have died!")
                print("---GAME OVER---")
                end = True
#Room 8 Final room
    # Room 8: the finale with the Light doppleganger.  Answering "no" is the
    # only way to win the game; "yes" and any unrecognised answer both end in
    # death.  Every branch sets end = True, so this room always ends the game.
    if room == 8:
        print("The path goes on and it gets brighter and brighter. Before long, you have to shield your eyes.")
        print("By the time the path stops you can barely see anything. Suddenly, all the lights turn dim enough for you to be able to see.")
        print("It takes a few moments, of course, for your eyes to adjust. When they do, you finally see that...")
        print("before you stands a creature of Light! It is completely white, save for it's golden-yellow eyes!")
        print("It takes you a few moments more to realize that this creature looks exactly like you!")
        print("Save for the fact that it's made out of Light, of course.")
        print("It simply stares menacingly at you. Almost as if expecting you to do something.")
        print("Provoke the creature?")
        answer = input("(yes/no) ")
        if answer == "yes":
            # Bad ending: provoking the creature is instant death.
            print("Immediately, you are lifted in the air; your Light doppleganger simply stares at you as you flail around.")
            print("Suddenly, everything ends. The last thing you hear is a deep male voice coming from behind you, saying only...")
            print("'You shouldn't have done that.'")
            print("---GAME OVER---")
            end = True
        elif answer == "no":
            # Good ending: the only victory path in the game.
            print("You decide against attacking the Light creature; it /is/ quite the good looking person, if you do say so yourself.")
            print("A deep male voice speaks from behind you; saying 'You are very wise,", player,"'")
            print("Turning around, you see a tall, pale man dressed in a formal, victorian era, suit, wearing his overcoat like a cape.")
            print("The man speaks: 'The final battle with the goblins already got this poorly-coded game past 1000 lines of code.'")
            print("'If The Creator had simply known what he was doing, we may have had a good run, this game.'")
            print("'But there is still a chance that The Creator will make another, more decently coded, game...'")
            print("'Should that occur, we may meet again. Until then...consider this game 'won' '")
            print("The man lifts his arm, saying: 'Congratulations,", player,"'")
            print("The man snaps his fingers. Both him and your doppleganger vanish!")
            print("Congratulations! You have beaten the game!!")
            print("---GAME OVER---")
            end = True
        else:
            # Any other input is also fatal.
            print("A deep male voice comes from behind you, saying 'I will take none of your nonsense!'")
            print("Suddenly, everything stops.")
            print("The last thing you hear is that voice, saying 'You shouldn't have done that!'")
            print("You have died!")
            print("---GAME OVER---")
            end = True
#Room 9 (goblin 2 attack)
    # Room 9: resolve an attack against the SECOND goblin.  A shield bash
    # deals 1 damage and stuns (and, as written, never draws a counterattack);
    # a weapon strike deals `weapon` damage, and if the goblin survives and is
    # not stunned it retaliates with damage scaled inversely to `defense`.
    if room == 9:
        print("Attack the second goblin with what?")
        choice2 = input("shield/weapon: ")
        if choice2 == "shield":
            if shield == True:
                print("You bash the second goblin with your shield!")
                print("Goblin 2 takes 1 damage!")
                goblin2_health -=1
                print("Goblin 2 is stunned!")
                goblin2_stunned = True
            else:
                print("You don't have the shield!")
        elif choice2 == "weapon":
            print("You take your weapon and strike at the second goblin!")
            goblin2_health -= weapon
            if goblin2_health <= 0:
                print("The second goblin died!")
                goblin2_alive = False
                # Returning to room 7 only once both goblins are down.
                if goblin1_alive == False:
                    print("You've killed both goblins!")
                    goblins_alive = 0
                    room = 7
                else:
                    goblins_alive = 1
            elif goblin2_stunned == False:
                # Goblin survived and can act: counterattack damage is
                # 40 minus the player's defense tier (30/20/10/other).
                print("The goblin fights back!")
                if defense == 30:
                    print("The goblin hits you for 10 damage!")
                    health -=10
                    if health <= 0:
                        print("The goblin killed you!")
                        print("You have died!")
                        print("---GAME OVER---")
                        end = True
                elif defense == 20:
                    print("The goblin hits you for 20 damage!")
                    health -= 20
                    if health <= 0:
                        print("The goblin killed you!")
                        print("You have died!")
                        print("---GAME OVER---")
                        end = True
                elif defense == 10:
                    print("The goblin hits you for 30 damage!")
                    health -= 30
                    if health <= 0:
                        print("The goblin killed you!")
                        print("You have died!")
                        print("---GAME OVER---")
                        end = True
                else:
                    print("The goblin hits you full force!")
                    health -= 40
                    if health <= 0:
                        print("The goblin killed you!")
                        print("You have died!")
                        print("---GAME OVER---")
                        end = True
            elif goblin2_stunned == True:
                # A stunned goblin loses its turn instead of attacking.
                print("The goblin skips out on attacking and regains control!")
                print("The goblin is no longer stunned!")
                goblin2_stunned = False
        else:
            print("Sorry, please type 'weapon' or 'shield'")
#Room 10 (goblin 1 attack)
    # Room 10: resolve an attack against the FIRST goblin.  Mirror image of
    # room 9: shield bash = 1 damage + stun with no counterattack; weapon
    # strike may kill, otherwise a non-stunned goblin retaliates with damage
    # scaled inversely to `defense`.
    if room == 10:
        print("Attack the first goblin with what?")
        choice2 = input("shield/weapon: ")
        if choice2 == "shield":
            if shield == True:
                print("You bash the first goblin with your shield!")
                print("Goblin 1 takes 1 damage!")
                goblin1_health -=1
                print("Goblin 1 is stunned!")
                goblin1_stunned = True
            else:
                print("You don't have the shield!")
        elif choice2 == "weapon":
            print("You take your weapon and strike at the first goblin!")
            goblin1_health -= weapon
            if goblin1_health <= 0:
                print("The first goblin died!")
                goblin1_alive = False
                # Returning to room 7 only once both goblins are down.
                if goblin2_alive == False:
                    print("You've killed both goblins!")
                    goblins_alive = 0
                    room = 7
                else:
                    goblins_alive = 1
            elif goblin1_stunned == False:
                # Goblin survived and can act: counterattack damage is
                # 40 minus the player's defense tier (30/20/10/other).
                print("The goblin fights back!")
                if defense == 30:
                    print("The goblin hits you for 10 damage!")
                    health -=10
                    if health <= 0:
                        print("The goblin killed you!")
                        print("You have died!")
                        print("---GAME OVER---")
                        end = True
                elif defense == 20:
                    print("The goblin hits you for 20 damage!")
                    health -= 20
                    if health <= 0:
                        print("The goblin killed you!")
                        print("You have died!")
                        print("---GAME OVER---")
                        end = True
                elif defense == 10:
                    print("The goblin hits you for 30 damage!")
                    health -= 30
                    if health <= 0:
                        print("The goblin killed you!")
                        print("You have died!")
                        print("---GAME OVER---")
                        end = True
                else:
                    print("The goblin hits you full force!")
                    health -= 40
                    if health <= 0:
                        print("The goblin killed you!")
                        print("You have died!")
                        print("---GAME OVER---")
                        end = True
            elif goblin1_stunned == True:
                # A stunned goblin loses its turn instead of attacking.
                print("The goblin skips out on attacking and regains control!")
                print("The goblin is no longer stunned!")
                goblin1_stunned = False
        else:
            print("Sorry, please type 'weapon' or 'shield'")
| CoderLune/First-projects | dungeon.py | Python | gpl-3.0 | 50,140 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# ********************************************************************************
# Copyright © 2019 jianglin
# File Name: __init__.py
# Author: jianglin
# Email: [email protected]
# Created: 2019-05-13 16:36:40 (CST)
# Last Update: Wednesday 2019-07-10 19:22:40 (CST)
# By:
# Description:
# ********************************************************************************
from flask import Blueprint
from maple.extension import csrf
from maple.utils import lazyconf
from . import api, config
from .router import FileShowView
# Blueprint serving stored files; FileShowView (see .router) resolves the
# requested filename and returns the file.
site = Blueprint('storage', __name__)
# The blueprint root serves a default index page.
site.add_url_rule(
    "/",
    defaults={"filename": "default/index.html"},
    view_func=FileShowView.as_view("index"),
)
# Every other path is treated as a file path to look up.
site.add_url_rule(
    "/<path:filename>",
    view_func=FileShowView.as_view("show"),
)
def init_app(app):
    """Wire the storage blueprint, its config and its API into *app*."""
    # Pull the STORAGE section of the app settings into this package's
    # config module (presumably what maple.utils.lazyconf does — TODO
    # confirm against its implementation).
    lazyconf(app, config, "STORAGE")
    # File endpoints are exempted from CSRF protection.
    csrf.exempt(site)
    api.init_api(site)
    # register_blueprint reads config.SUBDOMAIN, hence it runs after lazyconf.
    app.register_blueprint(site, subdomain=config.SUBDOMAIN)
| honmaple/maple-blog | maple/storage/__init__.py | Python | gpl-3.0 | 1,001 |
# Visualisation tools
#
# Copyright (C) Andrew Bartlett 2015, 2018
#
# by Douglas Bagnall <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import print_function
import os
import sys
from collections import defaultdict
import tempfile
import samba
import samba.getopt as options
from samba.netcmd import Command, SuperCommand, CommandError, Option
from samba.samdb import SamDB
from samba.graph import dot_graph
from samba.graph import distance_matrix, COLOUR_SETS
from ldb import SCOPE_BASE, SCOPE_SUBTREE, LdbError
import time
from samba.kcc import KCC
from samba.kcc.kcc_utils import KCCError
from samba.compat import text_type
# Command-line options shared by every visualize subcommand: target
# database, output destination, output format (dot vs distance matrix),
# colour handling, name shortening, and whether to query remote DCs.
COMMON_OPTIONS = [
    Option("-H", "--URL", help="LDB URL for database or target server",
           type=str, metavar="URL", dest="H"),
    Option("-o", "--output", help="write here (default stdout)",
           type=str, metavar="FILE", default=None),
    Option("--dot", help="Graphviz dot output", dest='format',
           const='dot', action='store_const'),
    Option("--distance", help="Distance matrix graph output (default)",
           dest='format', const='distance', action='store_const'),
    Option("--utf8", help="Use utf-8 Unicode characters",
           action='store_true'),
    Option("--color", help="use color (yes, no, auto)",
           choices=['yes', 'no', 'auto']),
    Option("--color-scheme", help=("use this colour scheme "
                                   "(implies --color=yes)"),
           choices=list(COLOUR_SETS.keys())),
    Option("-S", "--shorten-names",
           help="don't print long common suffixes",
           action='store_true', default=False),
    Option("-r", "--talk-to-remote", help="query other DCs' databases",
           action='store_true', default=False),
    Option("--no-key", help="omit the explanatory key",
           action='store_false', default=True, dest='key'),
]
# Sentinel accepted by GraphCommand.write() meaning "write to a new tempfile".
TEMP_FILE = '__temp__'
class GraphCommand(Command):
    """Base class for graphing commands"""
    synopsis = "%prog [options]"
    takes_optiongroups = {
        "sambaopts": options.SambaOptions,
        "versionopts": options.VersionOptions,
        "credopts": options.CredentialsOptions,
    }
    takes_options = COMMON_OPTIONS
    takes_args = ()
    def get_db(self, H, sambaopts, credopts):
        """Open and return a SamDB for URL *H* using the command's
        loadparm and credentials options."""
        lp = sambaopts.get_loadparm()
        creds = credopts.get_credentials(lp, fallback_machine=True)
        samdb = SamDB(url=H, credentials=creds, lp=lp)
        return samdb
    def get_kcc_and_dsas(self, H, lp, creds):
        """Get a readonly KCC object and the list of DSAs it knows about."""
        unix_now = int(time.time())
        kcc = KCC(unix_now, readonly=True)
        kcc.load_samdb(H, lp, creds)
        dsa_list = kcc.list_dsas()
        # A set is returned; warn if the DB listed the same DSA twice.
        dsas = set(dsa_list)
        if len(dsas) != len(dsa_list):
            print("There seem to be duplicate dsas", file=sys.stderr)
        return kcc, dsas
    def write(self, s, fn=None, suffix='.dot'):
        """Decide whether we're dealing with a filename, a tempfile, or
        stdout, and write accordingly.
        :param s: the string to write
        :param fn: a destination
        :param suffix: suffix, if destination is a tempfile
        If fn is None or "-", write to stdout.
        If fn is visualize.TEMP_FILE, write to a temporary file
        Otherwise fn should be a filename to write to.
        Returns the filename written to, or None when writing to stdout.
        """
        if fn is None or fn == '-':
            # we're just using stdout (a.k.a self.outf)
            print(s, file=self.outf)
            return
        # NOTE(review): identity (`is`) comparison against the TEMP_FILE
        # string constant — callers must pass visualize.TEMP_FILE itself;
        # an equal-but-distinct string is not guaranteed to match. Confirm
        # this is intended before relying on it.
        if fn is TEMP_FILE:
            fd, fn = tempfile.mkstemp(prefix='samba-tool-visualise',
                                      suffix=suffix)
            f = open(fn, 'w')
            # mkstemp's raw descriptor is not needed; we reopen by name.
            os.close(fd)
        else:
            f = open(fn, 'w')
        f.write(s)
        f.close()
        return fn
    def calc_output_format(self, format, output):
        """Heuristics to work out what output format was wanted."""
        if not format:
            # They told us nothing! We have to work it out for ourselves.
            if output and output.lower().endswith('.dot'):
                return 'dot'
            else:
                return 'distance'
        return format
    def calc_distance_color_scheme(self, color, color_scheme, output):
        """Heuristics to work out the colour scheme for distance matrices.
        Returning None means no colour, otherwise it should be a colour
        from graph.COLOUR_SETS"""
        if color == 'no':
            return None
        if color == 'auto':
            # Only colourise when writing to a real terminal on stdout.
            if isinstance(output, str) and output != '-':
                return None
            if not hasattr(self.outf, 'isatty'):
                # not a real file, perhaps cStringIO in testing
                return None
            if not self.outf.isatty():
                return None
        if color_scheme is None:
            if '256color' in os.environ.get('TERM', ''):
                return 'xterm-256color-heatmap'
            return 'ansi'
        return color_scheme
def colour_hash(x):
    """Generate a randomish but consistent darkish colour based on the
    given object.

    The colour comes from the md5 digest of str(x), masked so every RGB
    channel stays below 0x80 (darkish, hence readable on light
    backgrounds).  Equal inputs always produce the same colour.
    """
    from hashlib import md5
    tmp_str = str(x)
    # md5 needs bytes.  Testing for bytes directly is equivalent to the
    # old `isinstance(tmp_str, text_type)` check on both Python 2 (where
    # str(x) already is bytes) and Python 3 (where it never is), and it
    # frees this function from the samba.compat shim.
    if not isinstance(tmp_str, bytes):
        tmp_str = tmp_str.encode('utf8')
    c = int(md5(tmp_str).hexdigest()[:6], base=16) & 0x7f7f7f
    return '#%06x' % c
def get_partition_maps(samdb):
    """Build lookup tables between short partition aliases and full DNs.

    Returns a pair (short_to_long, long_to_short): the first maps the
    well-known upper-case aliases ("DOMAIN", "SCHEMA", ...) to DN
    strings, the second is its exact inverse.
    """
    base_dn = samdb.domain_dn()
    config_dn = str(samdb.get_config_basedn())
    short_to_long = {
        "DOMAIN": base_dn,
        "CONFIGURATION": config_dn,
        "SCHEMA": "CN=Schema,%s" % config_dn,
        "DNSDOMAIN": "DC=DomainDnsZones,%s" % base_dn,
        "DNSFOREST": "DC=ForestDnsZones,%s" % base_dn,
    }
    long_to_short = {dn: alias for alias, dn in short_to_long.items()}
    return short_to_long, long_to_short
class cmd_reps(GraphCommand):
    "repsFrom/repsTo from every DSA"
    takes_options = COMMON_OPTIONS + [
        Option("-p", "--partition", help="restrict to this partition",
               default=None),
    ]
    def run(self, H=None, output=None, shorten_names=False,
            key=True, talk_to_remote=False,
            sambaopts=None, credopts=None, versionopts=None,
            mode='self', partition=None, color=None, color_scheme=None,
            utf8=None, format=None):
        """Collect repsFrom/repsTo edges from every DSA and render them
        either as a distance matrix or as a Graphviz dot graph."""
        # We use the KCC libraries in readonly mode to get the
        # replication graph.
        lp = sambaopts.get_loadparm()
        creds = credopts.get_credentials(lp, fallback_machine=True)
        local_kcc, dsas = self.get_kcc_and_dsas(H, lp, creds)
        unix_now = local_kcc.unix_now
        # Allow people to say "--partition=DOMAIN" rather than
        # "--partition=DC=blah,DC=..."
        short_partitions, long_partitions = get_partition_maps(local_kcc.samdb)
        if partition is not None:
            partition = short_partitions.get(partition.upper(), partition)
            if partition not in long_partitions:
                raise CommandError("unknown partition %s" % partition)
        # nc_reps is an autovivifying dictionary of dictionaries of lists.
        # nc_reps[partition]['current' | 'needed'] is a list of
        # (dsa dn string, repsFromTo object) pairs.
        nc_reps = defaultdict(lambda: defaultdict(list))
        guid_to_dnstr = {}
        # We run a new KCC for each DSA even if we aren't talking to
        # the remote, because after kcc.run (or kcc.list_dsas) the kcc
        # ends up in a messy state.
        for dsa_dn in dsas:
            kcc = KCC(unix_now, readonly=True)
            if talk_to_remote:
                res = local_kcc.samdb.search(dsa_dn,
                                             scope=SCOPE_BASE,
                                             attrs=["dNSHostName"])
                dns_name = res[0]["dNSHostName"][0]
                print("Attempting to contact ldap://%s (%s)" %
                      (dns_name, dsa_dn),
                      file=sys.stderr)
                try:
                    kcc.load_samdb("ldap://%s" % dns_name, lp, creds)
                except KCCError as e:
                    # An unreachable DC is reported but does not abort the run.
                    print("Could not contact ldap://%s (%s)" % (dns_name, e),
                          file=sys.stderr)
                    continue
                kcc.run(H, lp, creds)
            else:
                kcc.load_samdb(H, lp, creds)
                kcc.run(H, lp, creds, forced_local_dsa=dsa_dn)
            # Warn when this DSA's view of the topology disagrees with ours.
            dsas_from_here = set(kcc.list_dsas())
            if dsas != dsas_from_here:
                print("found extra DSAs:", file=sys.stderr)
                for dsa in (dsas_from_here - dsas):
                    print("    %s" % dsa, file=sys.stderr)
                print("missing DSAs (known locally, not by %s):" % dsa_dn,
                      file=sys.stderr)
                for dsa in (dsas - dsas_from_here):
                    print("    %s" % dsa, file=sys.stderr)
            for remote_dn in dsas_from_here:
                if mode == 'others' and remote_dn == dsa_dn:
                    continue
                elif mode == 'self' and remote_dn != dsa_dn:
                    continue
                remote_dsa = kcc.get_dsa('CN=NTDS Settings,' + remote_dn)
                kcc.translate_ntdsconn(remote_dsa)
                guid_to_dnstr[str(remote_dsa.dsa_guid)] = remote_dn
                # get_reps_tables() returns two dictionaries mapping
                # dns to NCReplica objects
                c, n = remote_dsa.get_rep_tables()
                for part, rep in c.items():
                    if partition is None or part == partition:
                        nc_reps[part]['current'].append((dsa_dn, rep))
                for part, rep in n.items():
                    if partition is None or part == partition:
                        nc_reps[part]['needed'].append((dsa_dn, rep))
        # Flatten into (src, dest, short partition name) edge lists, keyed
        # by state ('current'/'needed') and direction ('from'/'to').
        all_edges = {'needed': {'to': [], 'from': []},
                     'current': {'to': [], 'from': []}}
        for partname, part in nc_reps.items():
            for state, edgelists in all_edges.items():
                for dsa_dn, rep in part[state]:
                    short_name = long_partitions.get(partname, partname)
                    for r in rep.rep_repsFrom:
                        edgelists['from'].append(
                            (dsa_dn,
                             guid_to_dnstr[str(r.source_dsa_obj_guid)],
                             short_name))
                    for r in rep.rep_repsTo:
                        edgelists['to'].append(
                            (guid_to_dnstr[str(r.source_dsa_obj_guid)],
                             dsa_dn,
                             short_name))
        # Here we have the set of edges. From now it is a matter of
        # interpretation and presentation.
        if self.calc_output_format(format, output) == 'distance':
            color_scheme = self.calc_distance_color_scheme(color,
                                                           color_scheme,
                                                           output)
            header_strings = {
                'from': "RepsFrom objects for %s",
                'to': "RepsTo objects for %s",
            }
            # One distance matrix per (state, direction, partition) group.
            for state, edgelists in all_edges.items():
                for direction, items in edgelists.items():
                    part_edges = defaultdict(list)
                    for src, dest, part in items:
                        part_edges[part].append((src, dest))
                    for part, edges in part_edges.items():
                        s = distance_matrix(None, edges,
                                            utf8=utf8,
                                            colour=color_scheme,
                                            shorten_names=shorten_names,
                                            generate_key=key)
                        s = "\n%s\n%s" % (header_strings[direction] % part, s)
                        self.write(s, output)
            return
        # Otherwise: dot output.  Colour encodes the partition, line style
        # encodes needed vs current, arrowhead encodes direction.
        edge_colours = []
        edge_styles = []
        dot_edges = []
        dot_vertices = set()
        used_colours = {}
        key_set = set()
        for state, edgelist in all_edges.items():
            for direction, items in edgelist.items():
                for src, dest, part in items:
                    colour = used_colours.setdefault((part),
                                                     colour_hash((part,
                                                                  direction)))
                    linestyle = 'dotted' if state == 'needed' else 'solid'
                    arrow = 'open' if direction == 'to' else 'empty'
                    dot_vertices.add(src)
                    dot_vertices.add(dest)
                    dot_edges.append((src, dest))
                    edge_colours.append(colour)
                    style = 'style="%s"; arrowhead=%s' % (linestyle, arrow)
                    edge_styles.append(style)
                    key_set.add((part, 'reps' + direction.title(),
                                 colour, style))
        key_items = []
        if key:
            for part, direction, colour, linestyle in sorted(key_set):
                key_items.append((False,
                                  'color="%s"; %s' % (colour, linestyle),
                                  "%s %s" % (part, direction)))
            key_items.append((False,
                              'style="dotted"; arrowhead="open"',
                              "repsFromTo is needed"))
            key_items.append((False,
                              'style="solid"; arrowhead="open"',
                              "repsFromTo currently exists"))
        s = dot_graph(dot_vertices, dot_edges,
                      directed=True,
                      edge_colors=edge_colours,
                      edge_styles=edge_styles,
                      shorten_names=shorten_names,
                      key_items=key_items)
        self.write(s, output)
class NTDSConn(object):
    """Collects observation counts for NTDS connections, so we know
    whether all DSAs agree."""
    def __init__(self, src, dest):
        # Endpoints of the connection, plus tallies of who has claimed
        # that it exists.
        self.src = src
        self.dest = dest
        self.observations = 0
        self.src_attests = False
        self.dest_attests = False
    def attest(self, attester):
        """Record that *attester* claims this connection exists, noting
        whether the attester is the source or destination itself."""
        self.observations += 1
        self.src_attests = self.src_attests or (attester == self.src)
        self.dest_attests = self.dest_attests or (attester == self.dest)
class cmd_ntdsconn(GraphCommand):
    "Draw the NTDSConnection graph"
    def run(self, H=None, output=None, shorten_names=False,
            key=True, talk_to_remote=False,
            sambaopts=None, credopts=None, versionopts=None,
            color=None, color_scheme=None,
            utf8=None, format=None):
        """Gather nTDSConnection objects (optionally from every DC) and
        render them as a distance matrix or a Graphviz dot graph."""
        lp = sambaopts.get_loadparm()
        creds = credopts.get_credentials(lp, fallback_machine=True)
        local_kcc, dsas = self.get_kcc_and_dsas(H, lp, creds)
        local_dsa_dn = local_kcc.my_dsa_dnstr.split(',', 1)[1]
        vertices = set()
        # (src, dest, attesting DSA) triples, gathered across DCs.
        attested_edges = []
        for dsa_dn in dsas:
            if talk_to_remote:
                res = local_kcc.samdb.search(dsa_dn,
                                             scope=SCOPE_BASE,
                                             attrs=["dNSHostName"])
                dns_name = res[0]["dNSHostName"][0]
                try:
                    samdb = self.get_db("ldap://%s" % dns_name, sambaopts,
                                        credopts)
                except LdbError as e:
                    # An unreachable DC is skipped, not fatal.
                    print("Could not contact ldap://%s (%s)" % (dns_name, e),
                          file=sys.stderr)
                    continue
                ntds_dn = samdb.get_dsServiceName()
                dn = samdb.domain_dn()
            else:
                samdb = self.get_db(H, sambaopts, credopts)
                ntds_dn = 'CN=NTDS Settings,' + dsa_dn
                dn = dsa_dn
            vertices.add(ntds_dn)
            # XXX we could also look at schedule
            res = samdb.search(dn,
                               scope=SCOPE_SUBTREE,
                               expression="(objectClass=nTDSConnection)",
                               attrs=['fromServer'],
                               # XXX can't be critical for ldif test
                               #controls=["search_options:1:2"],
                               controls=["search_options:0:2"],
                               )
            for msg in res:
                msgdn = str(msg.dn)
                # The connection object's parent is the owning NTDS Settings.
                dest_dn = msgdn[msgdn.index(',') + 1:]
                attested_edges.append((msg['fromServer'][0],
                                       dest_dn, ntds_dn))
        # now we overlay all the graphs and generate styles accordingly
        edges = {}
        for src, dest, attester in attested_edges:
            k = (src, dest)
            if k in edges:
                e = edges[k]
            else:
                e = NTDSConn(*k)
                edges[k] = e
            e.attest(attester)
        if self.calc_output_format(format, output) == 'distance':
            color_scheme = self.calc_distance_color_scheme(color,
                                                           color_scheme,
                                                           output)
            if not talk_to_remote:
                # If we are not talking to remote servers, we list all
                # the connections.
                graph_edges = edges.keys()
                title = 'NTDS Connections known to %s' % local_dsa_dn
                epilog = ''
            else:
                # If we are talking to the remotes, there are
                # interesting cases we can discover. What matters most
                # is that the destination (i.e. owner) knowns about
                # the connection, but it would be worth noting if the
                # source doesn't. Another strange situation could be
                # when a DC thinks there is a connection elsewhere,
                # but the computers allegedly involved don't believe
                # it exists.
                #
                # With limited bandwidth in the table, we mark the
                # edges known to the destination, and note the other
                # cases in a list after the diagram.
                graph_edges = []
                source_denies = []
                dest_denies = []
                both_deny = []
                for e, conn in edges.items():
                    if conn.dest_attests:
                        graph_edges.append(e)
                        if not conn.src_attests:
                            source_denies.append(e)
                    elif conn.src_attests:
                        dest_denies.append(e)
                    else:
                        both_deny.append(e)
                title = 'NTDS Connections known to each destination DC'
                epilog = []
                if both_deny:
                    epilog.append('The following connections are alleged by '
                                  'DCs other than the source and '
                                  'destination:\n')
                    for e in both_deny:
                        epilog.append('  %s -> %s\n' % e)
                if dest_denies:
                    epilog.append('The following connections are alleged by '
                                  'DCs other than the destination but '
                                  'including the source:\n')
                    for e in dest_denies:
                        epilog.append('  %s -> %s\n' % e)
                if source_denies:
                    epilog.append('The following connections '
                                  '(included in the chart) '
                                  'are not known to the source DC:\n')
                    for e in source_denies:
                        epilog.append('  %s -> %s\n' % e)
                epilog = ''.join(epilog)
            s = distance_matrix(sorted(vertices), graph_edges,
                                utf8=utf8,
                                colour=color_scheme,
                                shorten_names=shorten_names,
                                generate_key=key)
            self.write('\n%s\n%s\n%s' % (title, s, epilog), output)
            return
        # Otherwise: dot output.  Colour/style encode which DCs attest the
        # edge: black = unanimous, blue/magenta = partially known,
        # red dashed/dotted = unknown to the owner(s).
        dot_edges = []
        edge_colours = []
        edge_styles = []
        edge_labels = []
        n_servers = len(dsas)
        for k, e in sorted(edges.items()):
            dot_edges.append(k)
            if e.observations == n_servers or not talk_to_remote:
                edge_colours.append('#000000')
                edge_styles.append('')
            elif e.dest_attests:
                edge_styles.append('')
                if e.src_attests:
                    edge_colours.append('#0000ff')
                else:
                    edge_colours.append('#cc00ff')
            elif e.src_attests:
                edge_colours.append('#ff0000')
                edge_styles.append('style=dashed')
            else:
                edge_colours.append('#ff0000')
                edge_styles.append('style=dotted')
        key_items = []
        if key:
            key_items.append((False,
                              'color="#000000"',
                              "NTDS Connection"))
            # Only include key entries for styles actually present.
            for colour, desc in (('#0000ff', "missing from some DCs"),
                                 ('#cc00ff', "missing from source DC")):
                if colour in edge_colours:
                    key_items.append((False, 'color="%s"' % colour, desc))
            for style, desc in (('style=dashed', "unknown to destination"),
                                ('style=dotted',
                                 "unknown to source and destination")):
                if style in edge_styles:
                    key_items.append((False,
                                      'color="#ff0000; %s"' % style,
                                      desc))
        if talk_to_remote:
            title = 'NTDS Connections'
        else:
            title = 'NTDS Connections known to %s' % local_dsa_dn
        s = dot_graph(sorted(vertices), dot_edges,
                      directed=True,
                      title=title,
                      edge_colors=edge_colours,
                      edge_labels=edge_labels,
                      edge_styles=edge_styles,
                      shorten_names=shorten_names,
                      key_items=key_items)
        self.write(s, output)
class cmd_visualize(SuperCommand):
    """Produces graphical representations of Samba network state"""
    subcommands = {}
    # Register every cmd_* class in this module as a subcommand, keyed by
    # its name minus the "cmd_" prefix (cmd_reps -> "reps").  This runs at
    # class-definition time, so it picks up the classes defined above; note
    # the loop variables k and v also remain as class attributes.
    for k, v in globals().items():
        if k.startswith('cmd_'):
            subcommands[k[4:]] = v()
| sathieu/samba | python/samba/netcmd/visualize.py | Python | gpl-3.0 | 23,538 |
# =======================================================================
# Copyright 2013 Christos Sioutis <[email protected]>
# =======================================================================
# This file is part of indicator-internode.
#
# indicator-internode is free software: you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation, either version 3 of
# the License, or (at your option) any later version.
#
# indicator-internode is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public
# Licensse along with indicator-internode.
# If not, see <http://www.gnu.org/licenses/>.
# =======================================================================
import gconf
import appindicator as appi
import gtk
GCONF_ROOT = "/apps"
class IndicatorBase(appi.Indicator):
    """Convenience wrapper around an appindicator (Python 2 / GTK 2).

    Keeps a registry of named GTK menus and labels, and persists checkbox
    state as strings under gconf at /apps/<app-id>/<label>.
    """
    def __init__(self,strAppId, strIconName, strIconThemePath):
        self.ind = appi.Indicator(strAppId, strIconName, appi.CATEGORY_APPLICATION_STATUS, strIconThemePath)
        self.gc = gconf.client_get_default()
        self.ind.set_status (appi.STATUS_ACTIVE)
        # Menus are looked up by label; "root" is the indicator's top menu.
        self.menus = {"root":gtk.Menu()}
        # Labels are looked up by caller-chosen ID (see add_lbl_menuitem).
        self.labels = {}
        # gconf prefix for this app's settings, e.g. "/apps/myapp/".
        self.cfgpath = GCONF_ROOT + "/" + strAppId + "/"
    def finalize_menu(self):
        # Attach the root menu; call once after all items have been added.
        self.ind.set_menu(self.menus["root"])
    def add_submenu(self,strParent,strLabel):
        # Create a submenu under menu *strParent*, registered as *strLabel*.
        item = gtk.MenuItem(strLabel)
        submenu = gtk.Menu()
        item.set_submenu(submenu)
        self.menus[strLabel] = submenu
        self.menus[strParent].append(item)
        item.show()
        return submenu
    def add_btn_menuitem(self,strParent,strLabel):
        # Clickable item; activation calls on_btn_menuitem_activated
        # (subclasses are expected to override that handler).
        item = gtk.MenuItem(strLabel)
        self.menus[strParent].append(item)
        item.connect("activate",self.on_btn_menuitem_activated,strLabel)
        item.show()
        return item
    def on_btn_menuitem_activated(self,gtkMenuItem,strSelection):
        # Default handler: just log the selection.
        print "IndicatorBase.on_cmd_menuitem_activated selection="+strSelection
    def add_chk_menuitem(self,strParent,strLabel,boolActive):
        # Checkbox item whose state is persisted in gconf under strLabel;
        # boolActive is only the default for a first run.
        item = gtk.CheckMenuItem(strLabel)
        if self.get_config(strLabel) == None:
            item.set_active(boolActive)
            self.set_config(strLabel,str(boolActive))
        else:
            item.set_active(self.get_config_bool(strLabel))
        self.menus[strParent].append(item)
        item.connect("toggled",self.on_chk_menuitem_toggled,strLabel)
        item.show()
        return item
    def on_chk_menuitem_toggled(self,gtkMenuItem,strSelection):
        # Persist the new checkbox state as "True"/"False".
        self.set_config(strSelection, str(gtkMenuItem.get_active()))
    def add_separator(self,strParent):
        separator = gtk.SeparatorMenuItem()
        separator.show()
        self.menus[strParent].append(separator)
    def add_lbl_menuitem(self,strParent,strID,strLabel):
        # Non-interactive text item whose text can later be changed via
        # set_lbl_menuitem(strID, ...).
        item = gtk.MenuItem()
        lbl = gtk.Label(strLabel)
        self.labels[strID] = lbl
        item.add(lbl)
        item.show()
        self.menus[strParent].append(item)
        return item
    def set_lbl_menuitem(self,strID,strLabel):
        self.labels[strID].set_text(strLabel)
    def set_lbl_main(self,strLabel):
        # Text shown next to the indicator icon in the panel.
        self.ind.set_label(strLabel)
    def set_icn_main(self,strIconPath):
        self.ind.set_icon(strIconPath)
    def set_config(self,strKey,strValue):
        # Store a string value under this app's gconf prefix.
        return self.gc.set_string(str(self.cfgpath)+strKey,strValue)
    def get_config(self,strKey):
        # Returns the stored string, or None when the key is unset.
        return self.gc.get_string(str(self.cfgpath)+strKey)
    def get_config_bool(self,strKey):
        # Interpret a stored value as a boolean: only "True" counts as True.
        val = self.get_config(strKey)
        if val == "True":
            return True;
        return False
| sioutisc/indicator-internode | src/pynode/indicatorbase.py | Python | gpl-3.0 | 3,516 |
""" DIRAC FileCatalog Security Manager mix-in class
"""
__RCSID__ = "$Id$"
import os
from DIRAC import S_OK, S_ERROR
from DIRAC.Core.Security.Properties import FC_MANAGEMENT
_readMethods = ['exists', 'isFile', 'getFileSize', 'getFileMetadata',
'getReplicas','getReplicaStatus','getFileAncestors',
'getFileDescendents','listDirectory','isDirectory',
'getDirectoryReplicas', 'getDirectorySize', 'getDirectoryMetadata']
_writeMethods = ['changePathOwner', 'changePathGroup', 'changePathMode',
'addFile','setFileStatus','removeFile','addReplica',
'removeReplica','setReplicaStatus','setReplicaHost',
'setFileOwner','setFileGroup','setFileMode',
'addFileAncestors','createDirectory','removeDirectory',
'setMetadata','__removeMetadata']
class SecurityManagerBase( object ):
    """Base class for File Catalog security policies.

    Subclasses implement getPathPermissions(); hasAccess() maps catalog
    method names onto Read/Write operation types and evaluates the
    per-path permissions. NOTE(review): this file targets Python 2
    (``has_key`` and old ``except`` syntax are used elsewhere in it).
    """

    def __init__( self, database = None ):
        self.db = database

    def setDatabase( self, database ):
        self.db = database

    def getPathPermissions( self, paths, credDict ):
        """ Get path permissions according to the policy
        """
        return S_ERROR( 'The getPathPermissions method must be implemented in the inheriting class' )

    def hasAccess(self, opType, paths, credDict):
        """Evaluate access of type opType to each path for the given
        credentials.

        Returns S_OK({'Successful': {path: bool}, 'Failed': {path: error}});
        denied paths appear in 'Successful' with value False, 'Failed' is
        reserved for lookup errors.
        """
        # Map the method name to Read/Write
        if opType in _readMethods:
            opType = 'Read'
        elif opType in _writeMethods:
            opType = 'Write'

        # Check if admin access is granted first
        result = self.hasAdminAccess( credDict )
        if not result['OK']:
            return result
        if result['Value']:
            # We are admins, allow everything
            permissions = {}
            for path in paths:
                permissions[path] = True
            return S_OK( {'Successful':permissions, 'Failed':{}} )

        successful = {}
        failed = {}
        if not opType.lower() in ['read', 'write', 'execute']:
            return S_ERROR( "Operation type not known" )
        if self.db.globalReadAccess and ( opType.lower() == 'read' ):
            # Global read switch: everybody may read everything.
            for path in paths:
                successful[path] = True
            resDict = {'Successful':successful, 'Failed':{}}
            return S_OK( resDict )

        result = self.getPathPermissions( paths, credDict )
        if not result['OK']:
            return result

        permissions = result['Value']['Successful']
        for path, permDict in permissions.items():
            if permDict[opType]:
                successful[path] = True
            else:
                successful[path] = False
        failed.update( result['Value']['Failed'] )
        resDict = {'Successful':successful, 'Failed':failed}
        return S_OK( resDict )

    def hasAdminAccess( self, credDict ):
        # The FC_MANAGEMENT property grants full administrative access.
        if FC_MANAGEMENT in credDict['properties']:
            return S_OK( True )
        return S_OK( False )
class NoSecurityManager( SecurityManagerBase ):
    """Trivial policy: every user may do anything with any path."""

    def getPathPermissions( self, paths, credDict ):
        """Report full Read/Write/Execute permission for every path."""
        permissions = dict( ( path, {'Read': True, 'Write': True, 'Execute': True} )
                            for path in paths )
        return S_OK( {'Successful': permissions, 'Failed': {}} )

    def hasAccess( self, opType, paths, credDict ):
        """Grant any operation type on all requested paths."""
        successful = dict.fromkeys( paths, True )
        return S_OK( {'Successful': successful, 'Failed': {}} )

    def hasAdminAccess( self, credDict ):
        """Everybody is an administrator under this policy."""
        return S_OK( True )
class DirectorySecurityManager( SecurityManagerBase ):
    """Policy that derives all permissions from the directory tree."""

    def getPathPermissions( self, paths, credDict ):
        """ Get path permissions according to the policy
        """
        # toGet maps each directory still to be resolved to the list of
        # originally requested paths that resolve to it; unknown paths are
        # retried with their parent directory until the root is reached.
        toGet = dict( zip( paths, [ [path] for path in paths ] ) )
        permissions = {}
        failed = {}
        while toGet:
            res = self.db.dtree.getPathPermissions( toGet.keys(), credDict )
            if not res['OK']:
                return res
            for path, mode in res['Value']['Successful'].items():
                for resolvedPath in toGet[path]:
                    permissions[resolvedPath] = mode
                toGet.pop( path )
            for path, error in res['Value']['Failed'].items():
                # "No such file or directory" is retried with the parent
                # below; any other error is final.
                if error != 'No such file or directory':
                    for resolvedPath in toGet[path]:
                        failed[resolvedPath] = error
                    toGet.pop( path )
            for path, resolvedPaths in toGet.items():
                if path == '/':
                    # Root always carries full permissions.
                    # NOTE(review): this assigns permissions[path] inside the
                    # resolvedPath loop; permissions[resolvedPath] looks
                    # intended - confirm against upstream DIRAC before changing.
                    for resolvedPath in resolvedPaths:
                        permissions[path] = {'Read':True, 'Write':True, 'Execute':True}
                if not toGet.has_key( os.path.dirname( path ) ):
                    toGet[os.path.dirname( path )] = []
                toGet[os.path.dirname( path )] += resolvedPaths
                toGet.pop( path )
        if self.db.globalReadAccess:
            # Global read switch overrides per-path read permission.
            for path in permissions:
                permissions[path]['Read'] = True
        return S_OK( {'Successful':permissions, 'Failed':failed} )
class FullSecurityManager( SecurityManagerBase ):
    """Policy that consults file-level permissions first and falls back to
    the directory tree for anything the file manager did not resolve."""

    def getPathPermissions( self, paths, credDict ):
        """ Get path permissions according to the policy
        """
        # toGet maps each path still to be resolved to the list of
        # originally requested paths that resolve to it.
        toGet = dict( zip( paths, [ [path] for path in paths ] ) )
        permissions = {}
        failed = {}
        # First pass: ask the file manager directly.
        res = self.db.fileManager.getPathPermissions( paths, credDict )
        if not res['OK']:
            return res
        for path, mode in res['Value']['Successful'].items():
            for resolvedPath in toGet[path]:
                permissions[resolvedPath] = mode
            toGet.pop( path )
        for path, resolvedPaths in toGet.items():
            if path == '/':
                # Root always carries full permissions.
                # NOTE(review): assigns permissions[path], not
                # permissions[resolvedPath] - same pattern as in
                # DirectorySecurityManager; confirm against upstream.
                for resolvedPath in resolvedPaths:
                    permissions[path] = {'Read':True, 'Write':True, 'Execute':True}
            if not toGet.has_key( os.path.dirname( path ) ):
                toGet[os.path.dirname( path )] = []
            toGet[os.path.dirname( path )] += resolvedPaths
            toGet.pop( path )
        # Second pass: walk up the directory tree for unresolved paths.
        while toGet:
            paths = toGet.keys()
            res = self.db.dtree.getPathPermissions( paths, credDict )
            if not res['OK']:
                return res
            for path, mode in res['Value']['Successful'].items():
                for resolvedPath in toGet[path]:
                    permissions[resolvedPath] = mode
                toGet.pop( path )
            for path, error in res['Value']['Failed'].items():
                if error != 'No such file or directory':
                    for resolvedPath in toGet[path]:
                        failed[resolvedPath] = error
                    toGet.pop( path )
            for path, resolvedPaths in toGet.items():
                if path == '/':
                    for resolvedPath in resolvedPaths:
                        permissions[path] = {'Read':True, 'Write':True, 'Execute':True}
                if not toGet.has_key( os.path.dirname( path ) ):
                    toGet[os.path.dirname( path )] = []
                toGet[os.path.dirname( path )] += resolvedPaths
                toGet.pop( path )
        if self.db.globalReadAccess:
            # Global read switch overrides per-path read permission.
            for path in permissions:
                permissions[path]['Read'] = True
        return S_OK( {'Successful':permissions, 'Failed':failed} )
class DirectorySecurityManagerWithDelete( DirectorySecurityManager ):
    """ This security manager implements a Delete operation.
        For Read, Write, Execute, it's behavior is the one of DirectorySecurityManager.
        For Delete, if the directory does not exist, we return True.
        If the directory exists, then we test the Write permission
    """

    def hasAccess( self, opType, paths, credDict ):
        # The other SecurityManager do not support the Delete operation,
        # and it is transformed in Write
        # so we keep the original one
        # NOTE(review): stashing opType on self is not safe if a single
        # instance serves concurrent requests - confirm service threading.
        if opType in ['removeFile', 'removeReplica', 'removeDirectory']:
            self.opType = 'Delete'
        elif opType in _readMethods:
            self.opType = 'Read'
        elif opType in _writeMethods:
            self.opType = 'Write'

        res = super( DirectorySecurityManagerWithDelete, self ).hasAccess( opType, paths, credDict )

        # We reinitialize self.opType in case someone would call getPathPermissions directly
        self.opType = ''

        return res

    def getPathPermissions( self, paths, credDict ):
        """ Get path permissions according to the policy
        """
        # If we are testing in anything else than a Delete, just return the parent methods
        if hasattr( self, 'opType' ) and self.opType.lower() != 'delete':
            return super( DirectorySecurityManagerWithDelete, self ).getPathPermissions( paths, credDict )

        # If the object (file or dir) does not exist, we grant the permission
        res = self.db.dtree.exists( paths )
        if not res['OK']:
            return res
        nonExistingDirectories = set( path for path in res['Value']['Successful'] if not res['Value']['Successful'][path] )
        res = self.db.fileManager.exists( paths )
        if not res['OK']:
            return res
        nonExistingFiles = set( path for path in res['Value']['Successful'] if not res['Value']['Successful'][path] )
        # Paths that exist neither as directory nor as file may be freely
        # "deleted": grant full permissions on them.
        nonExistingObjects = nonExistingDirectories & nonExistingFiles
        permissions = {}
        failed = {}
        for path in nonExistingObjects:
            permissions[path] = {'Read':True, 'Write':True, 'Execute':True}
            # The try catch is just to protect in case there are duplicate in the paths
            try:
                paths.remove( path )
            except Exception, _e:
                pass
        # For all the paths that exist, check the write permission
        if paths:
            res = super( DirectorySecurityManagerWithDelete, self ).getPathPermissions( paths, credDict )
            if not res['OK']:
                return res
            failed = res['Value']['Failed']
            permissions.update( res['Value']['Successful'] )
        return S_OK( {'Successful':permissions, 'Failed':failed} )
class PolicyBasedSecurityManager( SecurityManagerBase ):
    """ This security manager loads a python plugin and forwards the
        calls to it. The python plugin has to be defined in the CS under
        /Systems/DataManagement/YourSetup/FileCatalog/SecurityPolicy
    """

    def __init__( self, database = False ):
        super( PolicyBasedSecurityManager, self ).__init__( database )

        # Imported lazily so the module can be loaded without a full
        # DIRAC configuration present.
        from DIRAC.ConfigurationSystem.Client.PathFinder import getServiceSection
        from DIRAC import gConfig
        from DIRAC.ConfigurationSystem.Client.Helpers.Path import cfgPath

        serviceSection = getServiceSection( 'DataManagement/FileCatalog' )

        pluginPath = gConfig.getValue( cfgPath( serviceSection, 'SecurityPolicy' ) )

        if not pluginPath:
            raise Exception( "SecurityPolicy not defined in service options" )

        pluginCls = self.__loadPlugin( pluginPath )
        self.policyObj = pluginCls( database = database )

    @staticmethod
    def __loadPlugin( pluginPath ):
        """ Create an instance of requested plugin class, loading and importing it when needed.
        This function could raise ImportError when plugin cannot be found or TypeError when
        loaded class object isn't inherited from SecurityManagerBase class.
        :param str pluginName: dotted path to plugin, specified as in import statement, i.e.
        "DIRAC.CheesShopSystem.private.Cheddar" or alternatively in 'normal' path format
        "DIRAC/CheesShopSystem/private/Cheddar"

        :return: object instance
        This function try to load and instantiate an object from given path. It is assumed that:
        - :pluginPath: is pointing to module directory "importable" by python interpreter, i.e.: it's
        package's top level directory is in $PYTHONPATH env variable,
        - the module should consist a class definition following module name,
        - the class itself is inherited from SecurityManagerBase
        If above conditions aren't meet, function is throwing exceptions:
        - ImportError when class cannot be imported
        - TypeError when class isn't inherited from SecurityManagerBase
        """
        # Accept slash-separated paths too; normalise to dotted notation.
        if "/" in pluginPath:
            pluginPath = ".".join( [ chunk for chunk in pluginPath.split( "/" ) if chunk ] )
        pluginName = pluginPath.split( "." )[-1]
        if pluginName not in globals():
            mod = __import__( pluginPath, globals(), fromlist = [ pluginName ] )
            pluginClassObj = getattr( mod, pluginName )
        else:
            pluginClassObj = globals()[pluginName]
        if not issubclass( pluginClassObj, SecurityManagerBase ):
            raise TypeError( "Security policy '%s' isn't inherited from SecurityManagerBase class" % pluginName )
        return pluginClassObj

    def hasAccess( self, opType, paths, credDict ):
        # Delegate straight to the loaded policy plugin.
        return self.policyObj.hasAccess( opType, paths, credDict )

    def getPathPermissions( self, paths, credDict ):
        # Delegate straight to the loaded policy plugin.
        return self.policyObj.getPathPermissions( paths, credDict )
| marcelovilaca/DIRAC | DataManagementSystem/DB/FileCatalogComponents/SecurityManager.py | Python | gpl-3.0 | 12,106 |
import asyncio
class MyProtocol(asyncio.Protocol):
    """Simple client protocol: send a greeting, print the reply, then quit."""

    def __init__(self, loop):
        # Event loop reference, stopped once the connection is closed.
        self.loop = loop

    def connection_made(self, transport):
        # Send the greeting as soon as the connection is established.
        self.transport = transport
        self.transport.write(b"Ciao!")

    def data_received(self, data):
        # Print the server's reply and start shutting the connection down.
        print(data.decode())
        self.transport.close()

    def connection_lost(self, exc):
        # Closing the transport lands here; stop the loop so main() exits.
        self.loop.stop()
def main():
    """Connect to localhost:8000 and run the protocol until the loop stops."""
    loop = asyncio.get_event_loop()
    coro = loop.create_connection(lambda: MyProtocol(loop), "localhost", 8000)
    # Establish the connection first, then serve events until
    # MyProtocol.connection_lost() calls loop.stop().
    loop.run_until_complete(coro)
    loop.run_forever()
    loop.close()


if __name__ == "__main__":
    main()
| DavideCanton/Python3 | prove_aio/prova_client.py | Python | gpl-3.0 | 626 |
#!/usr/bin/python
"""Run a Machination update cycle
"""
import itertools
import os
import sys

from lxml import etree
import topsort

from machination import utils, xmltools, fetcher, hierarchy, context
workers = {}
class OLWorker:
def __init__(self, wid):
self.wid = wid
self.progdir = context.config.xpath("/config/workers")[0].get("progdir")
for f in os.listdir(self.progdir):
if f == wid or f.startswith(wid + "."):
self.progfile = f
self.progpath = os.path.join(self.progdir, self.progfile)
break
if self.progfile == None:
raise Exception("No worker named " + wid)
def generate_status(self):
pass
def do_work(self, wus):
pass
def main(args):
    """Run one Machination update cycle: compile the desired status from
    the hierarchy, gather the current status from the workers, diff the
    two and dispatch the resulting work units in dependency order.

    NOTE(review): this function references several names that are not
    defined in this module (workerdesc, get_previous_status, find_nodeps,
    parcel_work, results.failures) - it appears to be work in progress
    and cannot run as-is.
    """
    # initialise from config
    logger = context.logger

    # get config assertions and compile into desired_status.xml
    ca_list = hierarchy.fetch_calist()
    dst_elt = hierarchy.compile_calist(ca_list)

    cst_elt = generate_base_status()
    # workers: generate_status
    for welt in dst_elt.xpath("/status/worker"):
        w = get_worker(welt)
        wcst_elt = w.generate_status()
        # workers might not implement generate_status() - better be
        # prepared for no status
        if wcst_elt == None:
            wcst_elt = get_previous_status(welt.get("id"))
        # stitch them together into a big status document for later
        # comparison
        cst_elt.append(wcst_elt)

    # find work
    xmlcmp = xmltools.XMLCompare(dst_elt, cst_elt, workerdesc)
    xmlcmp.compare()
    xmlcmp.find_work()
    stdeps = dst_elt.xpath("/status/deps/dep")
    wudeps = xmlcmp.dependencies_state_to_wu(stdeps, xmlcmp.worklist, xmlcmp.byxpath)

    first = True
    previous_failures = set()
    # topsort_levels yields sets of mutually independent work units in
    # dependency order.
    for i_workset in iter(topsort.topsort_levels(wudeps)):
        # wuwus within an 'i_workset' are independent of each other
        # wuwus that aren't mentioned in wudeps should be in the
        # first i_workset
        if first:
            first = False
            i_workset = i_workset.union(find_nodeps(xmlcmp.worklist, wudeps))
        # fetcher: downloads and workers: do_work
        # parallelisation perhaps?
        results = spawn_work(parcel_work(i_workset, previous_failures))
        # mark any failures
        previous_failures = previous_failures.union(results.failures())

    # gather resultant_status
def spawn_work(parcels):
    """Take a work parcels dictionary and do the work"""
    # NOTE(review): only constructs a WorkerDescription per worker so far;
    # the dispatch logic sketched in the comments below is not implemented
    # and nothing is returned yet (callers expect a results object).
    for workername in parcels:
        workerdesc = xmltools.WorkerDescription(os.path.join(context.status_dir(), "workers", workername, "description.xml"))
        # if the worker is ordered:
        #   get copy of worker's current status (working_status)
        #   apply removes and mods to working status
        #   copy final desired_status to cur_des_status
        #   loop over siblings at wu level in cur_des_status:
        #     if sibling not in cur_des_st and is not to be added:
        #       drop from cur_des_st
        #     if sibling not in cur_des_st but is to be added:
        #       find position arg for add
        #     if sibling in both but wrong position:
        #       find correct move/reorder instruction
def get_worker(welt):
    """Return the worker object for worker element ``welt``, caching it in
    the module-level ``workers`` registry.

    A Python worker module ``workers.<id>`` is tried first; if it cannot
    be imported, an "other language" worker program (OLWorker) is tried.

    Fixes over the previous version: the worker is now actually returned
    and cached (it used to fall off the end and return None), ``__import__``
    is given a ``fromlist`` so the submodule - not the top ``workers``
    package - is bound, and the Python-3-incompatible ``e.message`` access
    is gone.

    Raises
    ------
    Exception
        If neither a Python module nor an OL worker program exists.
    """
    wid = welt.get("id")
    if wid in workers:
        return workers[wid]
    try:
        w = __import__("workers." + wid, globals(), fromlist=[wid])
    except ImportError:
        # No python module for this worker: try to find an OL worker.
        try:
            w = OLWorker(wid)
        except Exception:
            context.logger.emsg("No worker %s, giving up!" % wid)
            raise
    workers[wid] = w
    return w
def generate_base_status():
    """Create the empty root ``<status>`` element for a status document."""
    return etree.Element("status")
# Script entry point: run one update cycle with the command-line arguments.
if __name__ == '__main__':
    main(sys.argv[1:])
| machination/machination | bin/update.py | Python | gpl-3.0 | 3,871 |
# -*- coding: utf-8 -*-
from __future__ import division
from otree.common import Currency as c, currency_range, safe_json
from . import models
from ._builtin import Page, WaitPage
from .models import Constants
class FinalPage(Page):
    """Final page of the app: shown only in the last round, displays the
    participant's cumulated score, rank and payment information."""

    def is_displayed(self):
        # Only show once, in the last round.
        return self.round_number == Constants.num_rounds

    def vars_for_template(self):
        # Values were accumulated in participant.vars by earlier apps/pages.
        return {
            'cumulated_score': int(self.participant.vars['app_cumulated_score']),
            'rank': self.participant.vars['app_rank'],
            'session': self.session.code,
            'paid': self.participant.vars['app_paid'],
            'ID': self.participant.label
        }
    # NOTE(review): stray 'pass' left over from editing; harmless.
    pass
class SurveyPage(Page):
    """Demographic / feedback questionnaire filled in by the player."""

    form_model = models.Player
    # Several questionnaire fields are deliberately disabled (commented out).
    form_fields = ['sex', #'sisters_brothers',
                   #'religion', 'religion_practice',
                   'student', 'field_of_studies',
                   #'couple',
                   #'boring_task',
                   #'risk_aversion'
                   'check_partners','check_matching',
                   'comments']
class MerciPage(Page):
    """Closing thank-you page (currently excluded from page_sequence)."""
    pass
# Order in which pages are shown; MerciPage is currently disabled.
page_sequence = [
    SurveyPage,
    FinalPage
    # MerciPage
]
| anthropo-lab/XP | UIMM_project/atl_Survey/views.py | Python | gpl-3.0 | 1,142 |
from django.contrib import admin
from sentences.models import Sentence
from .models import Tweet
# Register Tweet with the default ModelAdmin.
admin.site.register(Tweet)

# NOTE(review): alternative registration with read-only Sentence inlines,
# kept commented out for reference; delete once a decision is made.
# class SentenceInline(admin.TabularInline):
#     model = Sentence
#     readonly_fields = ('content', 'question',)
#     max_num = 0
#     can_delete = False
#
#
# class TweetAdmin(admin.ModelAdmin):
#     inlines = [
#         SentenceInline,
#     ]
#
# admin.site.register(Tweet, TweetAdmin)
| efe/lesib | twitter/admin.py | Python | gpl-3.0 | 425 |
# -*- coding: utf-8 -*-
from models.user import User
class Authenticator(object):
    """Credential holder whose base policy is to accept everyone.

    Subclasses override :meth:`authenticate` with a real check.
    """

    def __init__(self, username, password):
        # Credentials are stored verbatim for subclasses to verify.
        self.username = username
        self.password = password

    def authenticate(self):
        """Return True unconditionally (no verification at this level)."""
        return True
class StudentAuthenticator(Authenticator):
    """Authenticates a student identified by grade / class / seat number.

    The username is synthesised as "<grade>_<class>_<number>" with each
    field padded to width 2 by the '%02s' format (space-padded; kept
    as-is for backward compatibility with existing usernames).
    """

    def __init__(self,
                 student_grade=0,
                 student_class=0,
                 student_number=0,
                 # student_name='',
                 password='',
                 **kwargs):
        # Grade
        self.student_grade = student_grade
        # Class
        self.student_class = student_class
        # Seat number
        self.student_number = student_number
        # Student name
        ''' omit the name field
        self.username = student_name
        '''
        self.username = "%02s_%02s_%02s" % (student_grade, student_class, student_number)
        # Password
        self.password = password
        super(StudentAuthenticator, self).__init__(
            self.username, self.password, **kwargs)

    def authenticate(self):
        """Return True iff a user with this (grade, class, number) exists
        and the supplied password equals the stored default password.
        Any lookup error results in a denial (False)."""
        try:
            # (grade, class, number) is the unique lookup key.
            user = User.query.filter_by(
                student_grade=self.student_grade,
                student_class=self.student_class,
                student_number=self.student_number
            ).first()
            if user is None:
                # user not found
                return False
            ''' omit the name field
            # password are 'student_name' and 'default_password'
            if (user.student_name == self.username) and (user.default_password == self.password):
            '''
            return user.default_password == self.password
        except Exception:
            # Fix: was a bare 'except:', which also swallowed SystemExit and
            # KeyboardInterrupt. Database errors still mean "deny access".
            return False
| lyshie/afc-github | app/authenticator.py | Python | gpl-3.0 | 1,848 |
"""Collection of function that may be used by JCM template files (*.jcmt) to
create the project or that may be useful/necessary to process the results.
Contains a default processing function (`processing_default`).
Authors : Carlo Barth
Credit: Partly based on MATLAB-versions written by Sven Burger and Martin
Hammerschmidt.
"""
import numpy as np
from scipy import constants
c0 = constants.speed_of_light
mu0 = constants.mu_0
eps0 = constants.epsilon_0
Z0 = np.sqrt(mu0/eps0)
# =============================================================================
def processing_default(pps, keys):
    """Compute Mie efficiencies from JCMsuite post-process results.

    Calculates the scattering efficiency `qsca` and the absorption
    efficiency `qabs` by normalizing the `ElectromagneticFieldEnergyFlux`
    calculated in the JCMsuite post process to the incident flux. Also
    returns the extinction efficiency `qext`, the sum of `qsca` and `qabs`.

    Parameters
    ----------
    pps : list
        Exactly two post-process result dicts: the first must contain
        'ElectricFieldEnergy', the second 'ElectromagneticFieldEnergyFlux'.
    keys : dict
        Template keys; must contain 'radius' (sphere radius in um).
        Missing solver keys are filled in with defaults (mutated in place).

    Returns
    -------
    dict
        With keys 'qsca', 'qabs' and 'qext'.

    Raises
    ------
    ValueError
        If `pps` does not hold exactly 2 post processes.
    """
    results = {}

    # Check if the correct number of post processes was passed
    # (fix: removed the unreachable `return` that followed this raise).
    if len(pps) != 2:
        raise ValueError('This processing function is designed for a list of 2' +
                         ' post processes, but these are {}'.format(len(pps)))

    # Hard coded values
    uol = 1e-6  # unit of length (meters per layout unit)
    vacuum_wavelength = 5.5e-7

    # Fill in defaults for any solver keys the caller did not provide
    default_keys = {'info_level': 10,
                    'storage_format': 'Binary',
                    'initial_p_adaption': True,
                    'n_refinement_steps': 0,
                    'refinement_strategy': 'HAdaptive'}
    for dkey, dval in default_keys.items():
        keys.setdefault(dkey, dval)

    # Calculate the energy flux normalization factor from the geometric
    # cross section of the sphere
    geo_cross_section = np.pi * np.square(keys['radius'] * uol)
    p_in = 0.5 / Z0 * geo_cross_section

    # Read the field energy and calculate the absorption in the sphere
    # (should be 0 for a non-absorbing sphere)
    omega = 2. * np.pi * c0 / vacuum_wavelength
    field_energy = pps[0]['ElectricFieldEnergy'][0]
    results['qabs'] = -2. * omega * field_energy[1].imag / p_in

    # Calculate the scattering cross section from the
    # ElectromagneticFieldEnergyFlux-post process
    results['qsca'] = pps[1]['ElectromagneticFieldEnergyFlux'][0][0].real / p_in

    # Calculate the extinction efficiency
    results['qext'] = results['qsca'] + results['qabs']
    return results
def mie_analytical(radii, vacuum_wavelength, out_param='qsca',
                   cross_section=False, **mie_params):
    """Returns the analytical values for the efficiencies using the
    `pymiecoated`-package. Pass additional parameters to the `Mie`-class
    using the `mie_params`, e.g. by writing `m=1.52` to pass the refractive
    index of the sphere. `out_param` can be each method of the `Mie`-class,
    e.g. 'qext', 'qsca' or 'qabs'. Use `cross_section=True` to return the
    related cross section instead of the efficiency."""
    from pymiecoated import Mie
    # Fix: `collections.Iterable` was deprecated since Python 3.3 and
    # removed in Python 3.10; the ABC lives in `collections.abc`.
    try:
        from collections.abc import Iterable
    except ImportError:  # Python 2 fallback
        from collections import Iterable

    # Accept scalars as well as sequences; remember which was passed so a
    # scalar input yields a scalar output.
    _is_iter = True
    if not isinstance(radii, Iterable):
        _is_iter = False
        radii = np.array([radii])

    out_vals = []
    for radius in radii:
        # Size parameter x = 2*pi*r / lambda
        x = 2 * np.pi * radius / vacuum_wavelength
        mie = Mie(x=x, **mie_params)
        out_func = getattr(mie, out_param)
        out_val = out_func()
        if cross_section:
            # Convert efficiency -> cross section via the geometric area
            out_val = np.pi * np.square(radius) * out_val
        out_vals.append(out_val)
    if not _is_iter:
        return out_vals[0]
    return np.array(out_vals)
if __name__ == '__main__':
pass
| cbpygit/pypmj | projects/scattering/mie/mie3D/project_utils.py | Python | gpl-3.0 | 3,547 |
from tests.base_widget_testcase import BaseWidgetTest
| RedFantom/ttkwidgets | tests/__init__.py | Python | gpl-3.0 | 54 |
# -*- coding: utf-8 -*-
# EDIS - a simple cross-platform IDE for C
#
# This file is part of Edis
# Copyright 2014-2015 - Gabriel Acosta <acostadariogabriel at gmail>
# License: GPLv3 (see http://www.gnu.org/licenses/gpl.html)
"""
Éste módulo tiene información acerca de los directorios necesarios para
la aplicación.
"""
import sys
import os
# Home directory of the current user
HOME = os.path.expanduser("~")
# Source code location
if getattr(sys, 'frozen', ''):
    # Frozen executable (cx_Freeze): next to the binary
    PATH = os.path.realpath(os.path.dirname(sys.argv[0]))
else:
    # Running from source: parent directory of this package
    PATH = os.path.join(os.path.realpath(os.path.dirname(__file__)), "..")

# Per-user settings directory
EDIS = os.path.join(HOME, ".edis")
# Configuration file
CONFIGURACION = os.path.join(EDIS, "edis_config.ini")
# Log file
LOG = os.path.join(EDIS, "edis_log.log")
# Default projects directory
PROJECT_DIR = os.path.join(HOME, "EdisProjects")
# Create the .edis directory in HOME (import-time side effect)
if not os.path.isdir(EDIS):
    os.mkdir(EDIS)
| centaurialpha/edis | src/core/paths.py | Python | gpl-3.0 | 927 |
from collections import namedtuple
# Immutable record of one object's comparative-scoring state: its key and
# score, two algorithm-specific variables, and its round/win/loss counters.
ScoredObject = namedtuple(
    'ScoredObject',
    'key score variable1 variable2 rounds opponents wins loses')
'''
Created on 05.01.2018
@author: michael
'''
from alexplugins.systematic.tkgui import SystematicPointSelectionPresenter,\
SystematicPointSelectionDialog
from tkinter.ttk import Button
from alexplugins.systematic.base import SystematicIdentifier, SystematicPoint
from alexandriabase.domain import Tree
from manual.dialogs_test import DialogTest, DialogTestRunner
from alexplugins.cdexporter.tkgui import ChronoDialogPresenter, ChronoDialog,\
ExportInfoWizardPresenter, ExportInfoWizard
from tkgui.Dialogs import GenericStringEditDialog
from alexpresenters.DialogPresenters import GenericInputDialogPresenter
from alexplugins.cdexporter.base import ExportInfo
class SystematicServiceStub():
    """Stand-in for the systematic service with a fixed four-node tree."""

    # Root plus three nodes; the index in this tuple doubles as the
    # document id (see fetch_systematic_entries_for_document).
    systematic_points = (SystematicPoint(SystematicIdentifier(None), "Root"),
                         SystematicPoint(SystematicIdentifier("0"), 'Node 0'),
                         SystematicPoint(SystematicIdentifier("1"), 'Node 1'),
                         SystematicPoint(SystematicIdentifier("1.1"), 'Node 1.1'),
                         )

    def get_systematic_tree(self):
        # Tree built from the static points above.
        return Tree(self.systematic_points)

    def fetch_systematic_entries_for_document(self, document):
        # The document id indexes directly into the stub points.
        return (self.systematic_points[document.id],)
class SystematicPointSelectionTest(DialogTest):
    """Manual UI test for the systematic point selection dialog."""

    def __init__(self, window_manager):
        super().__init__(window_manager)
        self.name = "Systematic node selection"

    def test_component(self, master, message_label):
        # Build the dialog against the stub service and add a launch button.
        self.master = master
        self.message_label = message_label
        presenter = SystematicPointSelectionPresenter(SystematicServiceStub(),
                                                      self.message_broker)
        self.dialog = SystematicPointSelectionDialog(self.window_manager, presenter)
        Button(self.master, text='Start dialog', command=self._start_dialog).pack()

    def _start_dialog(self):
        self.dialog.activate(self._systematic_dialog_callback, label="Test label")

    def _systematic_dialog_callback(self, node):
        # Display the selection so the tester can verify it visually.
        self.message_label.set("Selection: %s Type: %s" % (node, type(node)))
class ChronoDialogTest(DialogTest):
    """Manual UI test for the chronology dialog."""

    def __init__(self, window_manager):
        super().__init__(window_manager)
        self.name = "Chrono dialog"

    def test_component(self, master, message_label):
        # Build the dialog and add a launch button to the test window.
        self.master = master
        self.message_label = message_label
        presenter = ChronoDialogPresenter()
        self.dialog = ChronoDialog(self.window_manager, presenter)
        Button(self.master, text='Start dialog', command=self._start_dialog).pack()

    def _start_dialog(self):
        self.dialog.activate(self._chrono_dialog_callback)

    def _chrono_dialog_callback(self, info):
        # Display the dialog's result for visual verification.
        self.message_label.set(info)
class ExportInfoWizardTest(DialogTest):
    """Manual UI test for the CD-export info wizard."""

    def __init__(self, window_manager):
        super().__init__(window_manager)
        self.name = "Export info dialog"

    def test_component(self, master, message_label):
        # The wizard needs a nested location-edit dialog; wire it up here.
        self.master = master
        self.message_label = message_label
        location_dialog = GenericStringEditDialog(self.window_manager, GenericInputDialogPresenter())
        presenter = ExportInfoWizardPresenter()
        self.dialog = ExportInfoWizard(self.window_manager, presenter, location_dialog)
        Button(self.master, text='Start dialog', command=self._start_dialog).pack()

    def _start_dialog(self):
        # Start from an empty ExportInfo the wizard can populate.
        self.dialog.activate(self._export_info_callback, export_info=ExportInfo())

    def _export_info_callback(self, info):
        # Display the wizard's result for visual verification.
        self.message_label.set(info)
if __name__ == '__main__':
    # Collect all manual dialog tests and hand them to the runner UI.
    test_classes = []
    test_classes.append(SystematicPointSelectionTest)
    test_classes.append(ChronoDialogTest)
    test_classes.append(ExportInfoWizardTest)
    test_runner = DialogTestRunner(test_classes)
    test_runner.run()
"""Abstract base audio mixer."""
from abc import ABCMeta, abstractmethod
import smokesignal
from volcorner import signals
__all__ = ['Mixer']
class Mixer(metaclass=ABCMeta):
    """Abstract base class for audio mixers.

    Concrete mixers implement open/close and the volume property;
    on_volume_changed() broadcasts externally-triggered volume changes
    on the CHANGE_VOLUME signal.
    """

    @abstractmethod
    def open(self):
        """Open the mixer and start monitoring for volume changes."""

    @abstractmethod
    def close(self):
        """Close the mixer."""

    @property
    @abstractmethod
    def volume(self):
        """
        Get the current volume as a float.

        :return: volume, between 0.0 and 1.0
        :rtype: float
        """

    @volume.setter
    @abstractmethod
    def volume(self, value):
        """
        Set the current volume as a float.

        :param float value: the new volume, between 0.0 and 1.0
        """

    def on_volume_changed(self, value):
        """
        Subclasses should call this when the volume is changed outside of this app.

        :param float value: the new volume, between 0.0 and 1.0
        """
        smokesignal.emit(signals.CHANGE_VOLUME, value)
| cknave/volcorner | volcorner/mixer.py | Python | gpl-3.0 | 1,024 |
import unittest
import os.path
from math import pi
from sapphire import corsika
data_file_dir = os.path.dirname(__file__)
DATA_FILE = os.path.join(data_file_dir, 'test_data/1_2/DAT000000')
class CorsikaFileTests(unittest.TestCase):
    """Checks reading of a known CORSIKA data file (DAT000000).

    The expected values below are properties of that specific file.
    """

    def setUp(self):
        self.file = corsika.reader.CorsikaFile(DATA_FILE)

    def tearDown(self):
        self.file.finish()

    def test_validate_file(self):
        """Verify that the data file is valid"""
        self.assertTrue(self.file.check())

    def test_run_header(self):
        """Verify that the Run header is properly read"""
        header = self.file.get_header()
        self.assertIsInstance(header, corsika.blocks.RunHeader)
        self.assertEqual(header.id, b'RUNH')
        self.assertAlmostEqual(header.version, 7.4, 4)

        # thickness_to_height must be the inverse of height_to_thickness
        # over a range of altitudes (meters).
        for h in [10., 5000., 30000., 50000., 110000.]:
            t = header.height_to_thickness(h)
            self.assertAlmostEqual(header.thickness_to_height(t), h, 8)

    def test_run_end(self):
        """Verify that the Run end is properly read"""
        end = self.file.get_end()
        self.assertIsInstance(end, corsika.blocks.RunEnd)
        self.assertEqual(end.id, b'RUNE')
        self.assertEqual(end.n_events_processed, 1)

    def test_events(self):
        """Verify that the Events are properly read"""
        events = self.file.get_events()
        event = next(events)
        self.assertIsInstance(event, corsika.reader.CorsikaEvent)
        self.assertEqual(event.last_particle_index, 1086892)

    def test_event_header(self):
        """Verify that the Event header is properly read"""
        events = self.file.get_events()
        event = next(events)
        header = event.get_header()
        self.assertIsInstance(header, corsika.blocks.EventHeader)
        self.assertEqual(header.id, b'EVTH')
        self.assertEqual(corsika.particles.name(header.particle_id), 'proton')
        self.assertEqual(header.energy, 1e14)
        self.assertEqual(header.azimuth, -pi / 2.)
        self.assertEqual(header.zenith, 0.0)
        self.assertEqual(header.hadron_model_high, 'QGSJET')

    def test_event_end(self):
        """Verify that the Event end is properly read"""
        events = self.file.get_events()
        event = next(events)
        end = event.get_end()
        self.assertIsInstance(end, corsika.blocks.EventEnd)
        self.assertEqual(end.id, b'EVTE')
        self.assertEqual(end.n_muons_output, 1729)

    def test_particles(self):
        """Verify that the Particles are properly read"""
        events = self.file.get_events()
        event = next(events)
        particles = event.get_particles()
        # Each particle is an 11-field tuple; field 6 is the particle id,
        # fields 3/4 are x/y and field 7 the core distance.
        particle = next(particles)
        self.assertIsInstance(particle, tuple)
        self.assertEqual(len(particle), 11)
        self.assertEqual(corsika.particles.name(int(particle[6])), 'muon_p')
        self.assertAlmostEqual(particle[3], -56.2846679688)
        self.assertAlmostEqual(particle[4], -172.535859375)
        self.assertAlmostEqual(particle[7], 181.484397728)
        particle = next(particles)
        self.assertEqual(corsika.particles.name(int(particle[6])), 'muon_m')
if __name__ == '__main__':
unittest.main()
| tomkooij/sapphire | sapphire/tests/corsika/test_corsika.py | Python | gpl-3.0 | 3,194 |
"""
Abstract base classes define the primitives that renderers and
graphics contexts must implement to serve as a matplotlib backend
:class:`RendererBase`
An abstract base class to handle drawing/rendering operations.
:class:`FigureCanvasBase`
The abstraction layer that separates the
:class:`matplotlib.figure.Figure` from the backend specific
details like a user interface drawing area
:class:`GraphicsContextBase`
An abstract base class that provides color, line styles, etc...
:class:`Event`
The base class for all of the matplotlib event
handling. Derived classes suh as :class:`KeyEvent` and
:class:`MouseEvent` store the meta data like keys and buttons
pressed, x and y locations in pixel and
:class:`~matplotlib.axes.Axes` coordinates.
:class:`ShowBase`
The base class for the Show class of each interactive backend;
the 'show' callable is then set to Show.__call__, inherited from
ShowBase.
"""
import os
import warnings
import time
import io
import numpy as np
import matplotlib.cbook as cbook
import matplotlib.colors as colors
import matplotlib.transforms as transforms
import matplotlib.widgets as widgets
#import matplotlib.path as path
from matplotlib import rcParams
from matplotlib import is_interactive
from matplotlib import get_backend
from matplotlib._pylab_helpers import Gcf
from matplotlib.transforms import Bbox, TransformedBbox, Affine2D
import matplotlib.tight_bbox as tight_bbox
import matplotlib.textpath as textpath
from matplotlib.path import Path
from matplotlib.cbook import mplDeprecation
try:
from PIL import Image
_has_pil = True
except ImportError:
_has_pil = False
# Registry mapping a file-format string (e.g. 'pdf', 'svg') to the
# FigureCanvas class that knows how to write that format.
_backend_d = {}


def register_backend(format, backend_class):
    """Register *backend_class* as the canvas class for files of type
    *format*.

    NOTE(review): the parameter name *format* shadows the builtin; kept
    for backward compatibility with keyword callers.
    """
    _backend_d[format] = backend_class
class ShowBase(object):
    """
    Simple base class providing a ``show()`` callable for backends.

    Subclasses must override :meth:`mainloop`.
    """

    def __call__(self, block=None):
        """
        Show all figures.

        If *block* is not None it is a boolean that overrides every
        other factor deciding whether show blocks by calling
        :meth:`mainloop`.  The other factors are: it does not block
        when run inside ``ipython --pylab``, and it does not block in
        interactive mode.
        """
        figure_managers = Gcf.get_all_fig_managers()
        if not figure_managers:
            return

        for fig_manager in figure_managers:
            fig_manager.show()

        # An explicit block argument wins over all heuristics below.
        if block is not None:
            if block:
                self.mainloop()
            return

        # Hack: determine at runtime whether we are
        # inside ipython in pylab mode.
        from matplotlib import pyplot
        try:
            in_pylab_shell = not pyplot.show._needmain
            # IPython versions >= 0.10 tack the _needmain attribute onto
            # pyplot.show and always set it to False when in --pylab mode.
            in_pylab_shell = in_pylab_shell and get_backend() != 'WebAgg'
            # TODO: The above is a hack to get the WebAgg backend
            # working with `ipython --pylab` until proper integration
            # is implemented.
        except AttributeError:
            in_pylab_shell = False

        # Leave the following as a separate step in case we
        # want to control this behavior with an rcParam.
        if in_pylab_shell:
            return

        if not is_interactive() or get_backend() == 'WebAgg':
            self.mainloop()

    def mainloop(self):
        # No-op here; concrete backends start their GUI event loop.
        pass
class RendererBase:
    """An abstract base class to handle drawing/rendering operations.

    The following methods *must* be implemented in the backend:

    * :meth:`draw_path`
    * :meth:`draw_image`
    * :meth:`draw_text`
    * :meth:`get_text_width_height_descent`

    The following methods *should* be implemented in the backend for
    optimization reasons:

    * :meth:`draw_markers`
    * :meth:`draw_path_collection`
    * :meth:`draw_quad_mesh`
    """
    def __init__(self):
        self._texmanager = None  # lazily created by get_texmanager()
        self._text2path = textpath.TextToPath()

    def open_group(self, s, gid=None):
        """
        Open a grouping element with label *s*. If *gid* is given, use
        *gid* as the id of the group. Is only currently used by
        :mod:`~matplotlib.backends.backend_svg`.
        """
        pass

    def close_group(self, s):
        """
        Close a grouping element with label *s*
        Is only currently used by :mod:`~matplotlib.backends.backend_svg`
        """
        pass

    def draw_path(self, gc, path, transform, rgbFace=None):
        """
        Draws a :class:`~matplotlib.path.Path` instance using the
        given affine transform.
        """
        raise NotImplementedError

    def draw_markers(self, gc, marker_path, marker_trans, path,
                     trans, rgbFace=None):
        """
        Draws a marker at each of the vertices in path. This includes
        all vertices, including control points on curves. To avoid
        that behavior, those vertices should be removed before calling
        this function.

        *gc*
            the :class:`GraphicsContextBase` instance

        *marker_trans*
            is an affine transform applied to the marker.

        *trans*
            is an affine transform applied to the path.

        This provides a fallback implementation of draw_markers that
        makes multiple calls to :meth:`draw_path`. Some backends may
        want to override this method in order to draw the marker only
        once and reuse it multiple times.
        """
        for vertices, codes in path.iter_segments(trans, simplify=False):
            if len(vertices):
                # Place the marker at the segment's final vertex.
                x, y = vertices[-2:]
                self.draw_path(gc, marker_path,
                               marker_trans +
                               transforms.Affine2D().translate(x, y),
                               rgbFace)

    def draw_path_collection(self, gc, master_transform, paths, all_transforms,
                             offsets, offsetTrans, facecolors, edgecolors,
                             linewidths, linestyles, antialiaseds, urls,
                             offset_position):
        """
        Draws a collection of paths selecting drawing properties from
        the lists *facecolors*, *edgecolors*, *linewidths*,
        *linestyles* and *antialiaseds*. *offsets* is a list of
        offsets to apply to each of the paths. The offsets in
        *offsets* are first transformed by *offsetTrans* before being
        applied.  *offset_position* may be either "screen" or "data"
        depending on the space that the offsets are in.

        This provides a fallback implementation of
        :meth:`draw_path_collection` that makes multiple calls to
        :meth:`draw_path`.  Some backends may want to override this in
        order to render each set of path data only once, and then
        reference that path multiple times with the different offsets,
        colors, styles etc.  The generator methods
        :meth:`_iter_collection_raw_paths` and
        :meth:`_iter_collection` are provided to help with (and
        standardize) the implementation across backends.  It is highly
        recommended to use those generators, so that changes to the
        behavior of :meth:`draw_path_collection` can be made globally.
        """
        path_ids = []
        for path, transform in self._iter_collection_raw_paths(
                master_transform, paths, all_transforms):
            path_ids.append((path, transform))

        for xo, yo, path_id, gc0, rgbFace in self._iter_collection(
                gc, master_transform, all_transforms, path_ids, offsets,
                offsetTrans, facecolors, edgecolors, linewidths, linestyles,
                antialiaseds, urls, offset_position):
            path, transform = path_id
            transform = transforms.Affine2D(
                transform.get_matrix()).translate(xo, yo)
            self.draw_path(gc0, path, transform, rgbFace)

    def draw_quad_mesh(self, gc, master_transform, meshWidth, meshHeight,
                       coordinates, offsets, offsetTrans, facecolors,
                       antialiased, edgecolors):
        """
        This provides a fallback implementation of
        :meth:`draw_quad_mesh` that generates paths and then calls
        :meth:`draw_path_collection`.
        """
        from matplotlib.collections import QuadMesh
        paths = QuadMesh.convert_mesh_to_paths(
            meshWidth, meshHeight, coordinates)

        if edgecolors is None:
            edgecolors = facecolors
        # np.float64 instead of the np.float_ alias, which was removed
        # in NumPy 2.0 (behavior is identical).
        linewidths = np.array([gc.get_linewidth()], np.float64)

        return self.draw_path_collection(
            gc, master_transform, paths, [], offsets, offsetTrans, facecolors,
            edgecolors, linewidths, [], [antialiased], [None], 'screen')

    def draw_gouraud_triangle(self, gc, points, colors, transform):
        """
        Draw a Gouraud-shaded triangle.

        *points* is a 3x2 array of (x, y) points for the triangle.

        *colors* is a 3x4 array of RGBA colors for each point of the
        triangle.

        *transform* is an affine transform to apply to the points.
        """
        raise NotImplementedError

    def draw_gouraud_triangles(self, gc, triangles_array, colors_array,
                               transform):
        """
        Draws a series of Gouraud triangles.

        *points* is a Nx3x2 array of (x, y) points for the trianglex.

        *colors* is a Nx3x4 array of RGBA colors for each point of the
        triangles.

        *transform* is an affine transform to apply to the points.
        """
        transform = transform.frozen()
        for tri, col in zip(triangles_array, colors_array):
            self.draw_gouraud_triangle(gc, tri, col, transform)

    def _iter_collection_raw_paths(self, master_transform, paths,
                                   all_transforms):
        """
        This is a helper method (along with :meth:`_iter_collection`) to make
        it easier to write a space-efficent :meth:`draw_path_collection`
        implementation in a backend.

        This method yields all of the base path/transform
        combinations, given a master transform, a list of paths and
        list of transforms.

        The arguments should be exactly what is passed in to
        :meth:`draw_path_collection`.

        The backend should take each yielded path and transform and
        create an object that can be referenced (reused) later.
        """
        Npaths = len(paths)
        Ntransforms = len(all_transforms)
        N = max(Npaths, Ntransforms)

        if Npaths == 0:
            return

        transform = transforms.IdentityTransform()
        for i in range(N):
            # Cycle through the shorter of the two lists.
            path = paths[i % Npaths]
            if Ntransforms:
                transform = all_transforms[i % Ntransforms]
            yield path, transform + master_transform

    def _iter_collection(self, gc, master_transform, all_transforms,
                         path_ids, offsets, offsetTrans, facecolors,
                         edgecolors, linewidths, linestyles,
                         antialiaseds, urls, offset_position):
        """
        This is a helper method (along with
        :meth:`_iter_collection_raw_paths`) to make it easier to write
        a space-efficent :meth:`draw_path_collection` implementation in a
        backend.

        This method yields all of the path, offset and graphics
        context combinations to draw the path collection.  The caller
        should already have looped over the results of
        :meth:`_iter_collection_raw_paths` to draw this collection.

        The arguments should be the same as that passed into
        :meth:`draw_path_collection`, with the exception of
        *path_ids*, which is a list of arbitrary objects that the
        backend will use to reference one of the paths created in the
        :meth:`_iter_collection_raw_paths` stage.

        Each yielded result is of the form::

           xo, yo, path_id, gc, rgbFace

        where *xo*, *yo* is an offset; *path_id* is one of the elements of
        *path_ids*; *gc* is a graphics context and *rgbFace* is a color to
        use for filling the path.
        """
        Ntransforms = len(all_transforms)
        Npaths = len(path_ids)
        Noffsets = len(offsets)
        N = max(Npaths, Noffsets)
        Nfacecolors = len(facecolors)
        Nedgecolors = len(edgecolors)
        Nlinewidths = len(linewidths)
        Nlinestyles = len(linestyles)
        Naa = len(antialiaseds)
        Nurls = len(urls)

        # Nothing to draw when there are no paths or no colors at all.
        if (Nfacecolors == 0 and Nedgecolors == 0) or Npaths == 0:
            return
        if Noffsets:
            toffsets = offsetTrans.transform(offsets)

        gc0 = self.new_gc()
        gc0.copy_properties(gc)

        if Nfacecolors == 0:
            rgbFace = None

        if Nedgecolors == 0:
            gc0.set_linewidth(0.0)

        xo, yo = 0, 0
        for i in range(N):
            # Each property list is cycled independently modulo its length.
            path_id = path_ids[i % Npaths]
            if Noffsets:
                xo, yo = toffsets[i % Noffsets]
                if offset_position == 'data':
                    if Ntransforms:
                        transform = (all_transforms[i % Ntransforms] +
                                     master_transform)
                    else:
                        transform = master_transform
                    xo, yo = transform.transform_point((xo, yo))
                    xp, yp = transform.transform_point((0, 0))
                    xo = -(xp - xo)
                    yo = -(yp - yo)
            if not (np.isfinite(xo) and np.isfinite(yo)):
                continue
            if Nfacecolors:
                rgbFace = facecolors[i % Nfacecolors]
            if Nedgecolors:
                if Nlinewidths:
                    gc0.set_linewidth(linewidths[i % Nlinewidths])
                if Nlinestyles:
                    gc0.set_dashes(*linestyles[i % Nlinestyles])
                fg = edgecolors[i % Nedgecolors]
                if len(fg) == 4:
                    if fg[3] == 0.0:
                        # Fully transparent edge: draw no edge at all.
                        gc0.set_linewidth(0)
                    else:
                        gc0.set_foreground(fg)
                else:
                    gc0.set_foreground(fg)
            if rgbFace is not None and len(rgbFace) == 4:
                if rgbFace[3] == 0:
                    # Fully transparent face: skip the fill.
                    rgbFace = None
            gc0.set_antialiased(antialiaseds[i % Naa])
            if Nurls:
                gc0.set_url(urls[i % Nurls])

            yield xo, yo, path_id, gc0, rgbFace
        gc0.restore()

    def get_image_magnification(self):
        """
        Get the factor by which to magnify images passed to :meth:`draw_image`.
        Allows a backend to have images at a different resolution to other
        artists.
        """
        return 1.0

    def draw_image(self, gc, x, y, im):
        """
        Draw the image instance into the current axes;

        *gc*
            a GraphicsContext containing clipping information

        *x*
            is the distance in pixels from the left hand side of the canvas.

        *y*
            the distance from the origin.  That is, if origin is
            upper, y is the distance from top.  If origin is lower, y
            is the distance from bottom

        *im*
            the :class:`matplotlib._image.Image` instance
        """
        raise NotImplementedError

    def option_image_nocomposite(self):
        """
        override this method for renderers that do not necessarily
        want to rescale and composite raster images. (like SVG)
        """
        return False

    def option_scale_image(self):
        """
        override this method for renderers that support arbitrary
        scaling of image (most of the vector backend).
        """
        return False

    def draw_tex(self, gc, x, y, s, prop, angle, ismath='TeX!', mtext=None):
        """
        Draw a TeX instance by converting the string *s* to a path in
        usetex mode (see :meth:`_draw_text_as_path`).
        """
        self._draw_text_as_path(gc, x, y, s, prop, angle, ismath="TeX")

    def draw_text(self, gc, x, y, s, prop, angle, ismath=False, mtext=None):
        """
        Draw the text instance

        *gc*
            the :class:`GraphicsContextBase` instance

        *x*
            the x location of the text in display coords

        *y*
            the y location of the text baseline in display coords

        *s*
            the text string

        *prop*
            a :class:`matplotlib.font_manager.FontProperties` instance

        *angle*
            the rotation angle in degrees

        *mtext*
            a :class:`matplotlib.text.Text` instance

        **backend implementers note**

        When you are trying to determine if you have gotten your bounding box
        right (which is what enables the text layout/alignment to work
        properly), it helps to change the line in text.py::

            if 0: bbox_artist(self, renderer)

        to if 1, and then the actual bounding box will be plotted along with
        your text.
        """
        self._draw_text_as_path(gc, x, y, s, prop, angle, ismath)

    def _get_text_path_transform(self, x, y, s, prop, angle, ismath):
        """
        return the text path and transform

        *prop*
          font property

        *s*
          text to be converted

        *usetex*
          If True, use matplotlib usetex mode.

        *ismath*
          If True, use mathtext parser. If "TeX", use *usetex* mode.
        """
        text2path = self._text2path
        fontsize = self.points_to_pixels(prop.get_size_in_points())

        if ismath == "TeX":
            verts, codes = text2path.get_text_path(prop, s, ismath=False,
                                                   usetex=True)
        else:
            verts, codes = text2path.get_text_path(prop, s, ismath=ismath,
                                                   usetex=False)

        path = Path(verts, codes)
        # Convert degrees to radians with full-precision pi (the previous
        # hand-written constant 3.141592 truncated pi).
        angle = np.deg2rad(angle)
        if self.flipy():
            transform = Affine2D().scale(fontsize / text2path.FONT_SCALE,
                                         fontsize / text2path.FONT_SCALE)
            transform = transform.rotate(angle).translate(x, self.height - y)
        else:
            transform = Affine2D().scale(fontsize / text2path.FONT_SCALE,
                                         fontsize / text2path.FONT_SCALE)
            transform = transform.rotate(angle).translate(x, y)

        return path, transform

    def _draw_text_as_path(self, gc, x, y, s, prop, angle, ismath):
        """
        draw the text by converting them to paths using textpath module.

        *prop*
          font property

        *s*
          text to be converted

        *usetex*
          If True, use matplotlib usetex mode.

        *ismath*
          If True, use mathtext parser. If "TeX", use *usetex* mode.
        """
        path, transform = self._get_text_path_transform(
            x, y, s, prop, angle, ismath)
        color = gc.get_rgb()

        gc.set_linewidth(0.0)
        self.draw_path(gc, path, transform, rgbFace=color)

    def get_text_width_height_descent(self, s, prop, ismath):
        """
        get the width and height, and the offset from the bottom to the
        baseline (descent), in display coords of the string s with
        :class:`~matplotlib.font_manager.FontProperties` prop
        """
        if ismath == 'TeX':
            # todo: handle props
            texmanager = self._text2path.get_texmanager()
            fontsize = prop.get_size_in_points()
            w, h, d = texmanager.get_text_width_height_descent(s, fontsize,
                                                               renderer=self)
            return w, h, d

        dpi = self.points_to_pixels(72)
        if ismath:
            dims = self._text2path.mathtext_parser.parse(s, dpi, prop)
            return dims[0:3]  # return width, height, descent

        flags = self._text2path._get_hinting_flag()
        font = self._text2path._get_font(prop)
        size = prop.get_size_in_points()
        font.set_size(size, dpi)
        # the width and height of unrotated string
        font.set_text(s, 0.0, flags=flags)
        w, h = font.get_width_height()
        d = font.get_descent()
        w /= 64.0  # convert from subpixels
        h /= 64.0
        d /= 64.0
        return w, h, d

    def flipy(self):
        """
        Return true if y small numbers are top for renderer Is used
        for drawing text (:mod:`matplotlib.text`) and images
        (:mod:`matplotlib.image`) only
        """
        return True

    def get_canvas_width_height(self):
        'return the canvas width and height in display coords'
        return 1, 1

    def get_texmanager(self):
        """
        return the :class:`matplotlib.texmanager.TexManager` instance
        """
        if self._texmanager is None:
            from matplotlib.texmanager import TexManager
            self._texmanager = TexManager()
        return self._texmanager

    def new_gc(self):
        """
        Return an instance of a :class:`GraphicsContextBase`
        """
        return GraphicsContextBase()

    def points_to_pixels(self, points):
        """
        Convert points to display units

        *points*
            a float or a numpy array of float

        return points converted to pixels

        You need to override this function (unless your backend
        doesn't have a dpi, eg, postscript or svg).  Some imaging
        systems assume some value for pixels per inch::

            points to pixels = points * pixels_per_inch/72.0 * dpi/72.0
        """
        return points

    def strip_math(self, s):
        return cbook.strip_math(s)

    def start_rasterizing(self):
        """
        Used in MixedModeRenderer. Switch to the raster renderer.
        """
        pass

    def stop_rasterizing(self):
        """
        Used in MixedModeRenderer. Switch back to the vector renderer
        and draw the contents of the raster renderer as an image on
        the vector renderer.
        """
        pass

    def start_filter(self):
        """
        Used in AggRenderer. Switch to a temporary renderer for image
        filtering effects.
        """
        pass

    def stop_filter(self, filter_func):
        """
        Used in AggRenderer. Switch back to the original renderer.
        The contents of the temporary renderer is processed with the
        *filter_func* and is drawn on the original renderer as an
        image.
        """
        pass
class GraphicsContextBase:
    """
    An abstract base class that provides color, line styles, etc...
    """
    # a mapping from dash styles to suggested offset, dash pairs
    dashd = {
        'solid': (None, None),
        'dashed': (0, (6.0, 6.0)),
        'dashdot': (0, (3.0, 5.0, 1.0, 5.0)),
        'dotted': (0, (1.0, 3.0)),
    }

    def __init__(self):
        self._alpha = 1.0
        self._forced_alpha = False  # if True, _alpha overrides A from RGBA
        self._antialiased = 1  # use 0,1 not True, False for extension code
        self._capstyle = 'butt'
        self._cliprect = None
        self._clippath = None
        self._dashes = None, None
        self._joinstyle = 'round'
        self._linestyle = 'solid'
        self._linewidth = 1
        self._rgb = (0.0, 0.0, 0.0, 1.0)
        self._orig_color = (0.0, 0.0, 0.0, 1.0)
        self._hatch = None
        self._url = None
        self._gid = None
        self._snap = None
        self._sketch = None

    def copy_properties(self, gc):
        'Copy properties from gc to self'
        self._alpha = gc._alpha
        self._forced_alpha = gc._forced_alpha
        self._antialiased = gc._antialiased
        self._capstyle = gc._capstyle
        self._cliprect = gc._cliprect
        self._clippath = gc._clippath
        self._dashes = gc._dashes
        self._joinstyle = gc._joinstyle
        self._linestyle = gc._linestyle
        self._linewidth = gc._linewidth
        self._rgb = gc._rgb
        self._orig_color = gc._orig_color
        self._hatch = gc._hatch
        self._url = gc._url
        self._gid = gc._gid
        self._snap = gc._snap
        self._sketch = gc._sketch

    def restore(self):
        """
        Restore the graphics context from the stack - needed only
        for backends that save graphics contexts on a stack
        """
        pass

    def get_alpha(self):
        """
        Return the alpha value used for blending - not supported on
        all backends
        """
        return self._alpha

    def get_antialiased(self):
        "Return true if the object should try to do antialiased rendering"
        return self._antialiased

    def get_capstyle(self):
        """
        Return the capstyle as a string in ('butt', 'round', 'projecting')
        """
        return self._capstyle

    def get_clip_rectangle(self):
        """
        Return the clip rectangle as a :class:`~matplotlib.transforms.Bbox`
        instance
        """
        return self._cliprect

    def get_clip_path(self):
        """
        Return the clip path in the form (path, transform), where path
        is a :class:`~matplotlib.path.Path` instance, and transform is
        an affine transform to apply to the path before clipping.
        """
        if self._clippath is not None:
            return self._clippath.get_transformed_path_and_affine()
        return None, None

    def get_dashes(self):
        """
        Return the dash information as an offset dashlist tuple.

        The dash list is a even size list that gives the ink on, ink
        off in pixels.

        See p107 of to PostScript `BLUEBOOK
        <http://www-cdf.fnal.gov/offline/PostScript/BLUEBOOK.PDF>`_
        for more info.

        Default value is None
        """
        return self._dashes

    def get_forced_alpha(self):
        """
        Return whether the value given by get_alpha() should be used to
        override any other alpha-channel values.
        """
        return self._forced_alpha

    def get_joinstyle(self):
        """
        Return the line join style as one of ('miter', 'round', 'bevel')
        """
        return self._joinstyle

    def get_linestyle(self, style=None):
        """
        Return the linestyle: one of ('solid', 'dashed', 'dashdot',
        'dotted').

        The *style* parameter is unused; it now defaults to None so the
        accessor can be called without arguments like every other getter
        (previously a mandatory-but-ignored argument made
        ``gc.get_linestyle()`` raise a TypeError).
        """
        return self._linestyle

    def get_linewidth(self):
        """
        Return the line width in points as a scalar
        """
        return self._linewidth

    def get_rgb(self):
        """
        returns a tuple of three or four floats from 0-1.
        """
        return self._rgb

    def get_url(self):
        """
        returns a url if one is set, None otherwise
        """
        return self._url

    def get_gid(self):
        """
        Return the object identifier if one is set, None otherwise.
        """
        return self._gid

    def get_snap(self):
        """
        returns the snap setting which may be:

          * True: snap vertices to the nearest pixel center

          * False: leave vertices as-is

          * None: (auto) If the path contains only rectilinear line
            segments, round to the nearest pixel center
        """
        return self._snap

    def set_alpha(self, alpha):
        """
        Set the alpha value used for blending - not supported on all backends.
        If ``alpha=None`` (the default), the alpha components of the
        foreground and fill colors will be used to set their respective
        transparencies (where applicable); otherwise, ``alpha`` will override
        them.
        """
        if alpha is not None:
            self._alpha = alpha
            self._forced_alpha = True
        else:
            self._alpha = 1.0
            self._forced_alpha = False
        # Re-resolve the foreground so the new alpha policy takes effect.
        self.set_foreground(self._orig_color)

    def set_antialiased(self, b):
        """
        True if object should be drawn with antialiased rendering
        """
        # use 0, 1 to make life easier on extension code trying to read the gc
        if b:
            self._antialiased = 1
        else:
            self._antialiased = 0

    def set_capstyle(self, cs):
        """
        Set the capstyle as a string in ('butt', 'round', 'projecting')
        """
        if cs in ('butt', 'round', 'projecting'):
            self._capstyle = cs
        else:
            raise ValueError('Unrecognized cap style.  Found %s' % cs)

    def set_clip_rectangle(self, rectangle):
        """
        Set the clip rectangle with sequence (left, bottom, width, height)
        """
        self._cliprect = rectangle

    def set_clip_path(self, path):
        """
        Set the clip path and transformation.  Path should be a
        :class:`~matplotlib.transforms.TransformedPath` instance.
        """
        assert path is None or isinstance(path, transforms.TransformedPath)
        self._clippath = path

    def set_dashes(self, dash_offset, dash_list):
        """
        Set the dash style for the gc.

        *dash_offset*
            is the offset (usually 0).

        *dash_list*
            specifies the on-off sequence as points.
            ``(None, None)`` specifies a solid line

        """
        if dash_list is not None:
            dl = np.asarray(dash_list)
            if np.any(dl <= 0.0):
                raise ValueError("All values in the dash list must be positive")
        self._dashes = dash_offset, dash_list

    def set_foreground(self, fg, isRGBA=False):
        """
        Set the foreground color.  fg can be a MATLAB format string, a
        html hex color string, an rgb or rgba unit tuple, or a float between 0
        and 1.  In the latter case, grayscale is used.

        If you know fg is rgba, set ``isRGBA=True`` for efficiency.
        """
        self._orig_color = fg
        if self._forced_alpha:
            self._rgb = colors.colorConverter.to_rgba(fg, self._alpha)
        elif isRGBA:
            self._rgb = fg
        else:
            self._rgb = colors.colorConverter.to_rgba(fg)

    def set_graylevel(self, frac):
        """
        Set the foreground color to be a gray level with *frac*
        """
        self._orig_color = frac
        self._rgb = (frac, frac, frac, self._alpha)

    def set_joinstyle(self, js):
        """
        Set the join style to be one of ('miter', 'round', 'bevel')
        """
        if js in ('miter', 'round', 'bevel'):
            self._joinstyle = js
        else:
            raise ValueError('Unrecognized join style.  Found %s' % js)

    def set_linewidth(self, w):
        """
        Set the linewidth in points
        """
        self._linewidth = w

    def set_linestyle(self, style):
        """
        Set the linestyle to be one of ('solid', 'dashed', 'dashdot',
        'dotted'). One may specify customized dash styles by providing
        a tuple of (offset, dash pairs). For example, the predefined
        linestyles have following values.:

         'dashed'  : (0, (6.0, 6.0)),
         'dashdot' : (0, (3.0, 5.0, 1.0, 5.0)),
         'dotted'  : (0, (1.0, 3.0)),
        """
        if style in self.dashd:
            offset, dashes = self.dashd[style]
        elif isinstance(style, tuple):
            offset, dashes = style
        else:
            raise ValueError('Unrecognized linestyle: %s' % str(style))
        self._linestyle = style
        # Keep the dash pattern in sync with the named style.
        self.set_dashes(offset, dashes)

    def set_url(self, url):
        """
        Sets the url for links in compatible backends
        """
        self._url = url

    def set_gid(self, id):
        """
        Sets the id.
        """
        self._gid = id

    def set_snap(self, snap):
        """
        Sets the snap setting which may be:

          * True: snap vertices to the nearest pixel center

          * False: leave vertices as-is

          * None: (auto) If the path contains only rectilinear line
            segments, round to the nearest pixel center
        """
        self._snap = snap

    def set_hatch(self, hatch):
        """
        Sets the hatch style for filling
        """
        self._hatch = hatch

    def get_hatch(self):
        """
        Gets the current hatch style
        """
        return self._hatch

    def get_hatch_path(self, density=6.0):
        """
        Returns a Path for the current hatch.
        """
        if self._hatch is None:
            return None
        return Path.hatch(self._hatch, density)

    def get_sketch_params(self):
        """
        Returns the sketch parameters for the artist.

        Returns
        -------
        sketch_params : tuple or `None`

            A 3-tuple with the following elements:

            * `scale`: The amplitude of the wiggle perpendicular to the
              source line.

            * `length`: The length of the wiggle along the line.

            * `randomness`: The scale factor by which the length is
              shrunken or expanded.

            May return `None` if no sketch parameters were set.
        """
        return self._sketch

    def set_sketch_params(self, scale=None, length=None, randomness=None):
        """
        Sets the the sketch parameters.

        Parameters
        ----------

        scale : float, optional
            The amplitude of the wiggle perpendicular to the source
            line, in pixels.  If scale is `None`, or not provided, no
            sketch filter will be provided.

        length : float, optional
             The length of the wiggle along the line, in pixels
             (default 128.0)

        randomness : float, optional
            The scale factor by which the length is shrunken or
            expanded (default 16.0)
        """
        if scale is None:
            self._sketch = None
        else:
            self._sketch = (scale, length or 128.0, randomness or 16.0)
class TimerBase(object):
    '''
    A base class for providing timer events, useful for things animations.
    Backends need to implement a few specific methods in order to use their
    own timing mechanisms so that the timer events are integrated into their
    event loops.

    Mandatory functions that must be implemented:

        * `_timer_start`: Contains backend-specific code for starting
          the timer

        * `_timer_stop`: Contains backend-specific code for stopping
          the timer

    Optional overrides:

        * `_timer_set_single_shot`: Code for setting the timer to
          single shot operating mode, if supported by the timer
          object. If not, the `Timer` class itself will store the flag
          and the `_on_timer` method should be overridden to support
          such behavior.

        * `_timer_set_interval`: Code for setting the interval on the
          timer, if there is a method for doing so on the timer
          object.

        * `_on_timer`: This is the internal function that any timer
          object should call, which will handle the task of running
          all callbacks that have been set.

    Attributes:

        * `interval`: The time between timer events in
          milliseconds. Default is 1000 ms.

        * `single_shot`: Boolean flag indicating whether this timer
          should operate as single shot (run once and then
          stop). Defaults to `False`.

        * `callbacks`: Stores list of (func, args) tuples that will be
          called upon timer events. This list can be manipulated
          directly, or the functions `add_callback` and
          `remove_callback` can be used.
    '''
    def __init__(self, interval=None, callbacks=None):
        # Initialize empty callbacks list and setup default settings if
        # necessary
        if callbacks is None:
            self.callbacks = []
        else:
            self.callbacks = callbacks[:]  # Create a copy

        if interval is None:
            self._interval = 1000
        else:
            self._interval = interval

        self._single = False

        # Default attribute for holding the GUI-specific timer object
        self._timer = None

    def __del__(self):
        'Need to stop timer and possibly disconnect timer.'
        self._timer_stop()

    def start(self, interval=None):
        '''
        Start the timer object. `interval` is optional and will be used
        to reset the timer interval first if provided.
        '''
        if interval is not None:
            self._set_interval(interval)
        self._timer_start()

    def stop(self):
        '''
        Stop the timer.
        '''
        self._timer_stop()

    def _timer_start(self):
        pass

    def _timer_stop(self):
        pass

    def _get_interval(self):
        return self._interval

    def _set_interval(self, interval):
        # Force to int since none of the backends actually support fractional
        # milliseconds, and some error or give warnings.
        interval = int(interval)
        self._interval = interval
        self._timer_set_interval()

    interval = property(_get_interval, _set_interval)

    def _get_single_shot(self):
        return self._single

    def _set_single_shot(self, ss=True):
        self._single = ss
        self._timer_set_single_shot()

    single_shot = property(_get_single_shot, _set_single_shot)

    def add_callback(self, func, *args, **kwargs):
        '''
        Register `func` to be called by timer when the event fires. Any
        additional arguments provided will be passed to `func`.
        '''
        self.callbacks.append((func, args, kwargs))

    def remove_callback(self, func, *args, **kwargs):
        '''
        Remove `func` from list of callbacks. `args` and `kwargs` are optional
        and used to distinguish between copies of the same function registered
        to be called with different arguments.
        '''
        if args or kwargs:
            self.callbacks.remove((func, args, kwargs))
        else:
            funcs = [c[0] for c in self.callbacks]
            if func in funcs:
                self.callbacks.pop(funcs.index(func))

    def _timer_set_interval(self):
        'Used to set interval on underlying timer object.'
        pass

    def _timer_set_single_shot(self):
        'Used to set single shot on underlying timer object.'
        pass

    def _on_timer(self):
        '''
        Runs all function that have been registered as callbacks. Functions
        can return False (or 0) if they should not be called any more. If there
        are no callbacks, the timer is automatically stopped.
        '''
        # Iterate over a snapshot: callbacks returning False are removed
        # from self.callbacks below, and mutating the list while iterating
        # it would silently skip the callback following each removed one.
        for func, args, kwargs in list(self.callbacks):
            ret = func(*args, **kwargs)
            # docstring above explains why we use `if ret == False` here,
            # instead of `if not ret`.
            if ret == False:
                self.callbacks.remove((func, args, kwargs))

        if len(self.callbacks) == 0:
            self.stop()
class Event:
    """
    Base class for all matplotlib events.

    Additional attributes are attached as described in
    :meth:`FigureCanvasBase.mpl_connect`.  The attributes below are
    always defined, with the defaults shown:

    *name*
        the event name

    *canvas*
        the FigureCanvas instance that generated the event

    *guiEvent*
        the GUI event that triggered this matplotlib event
    """
    def __init__(self, name, canvas, guiEvent=None):
        # Record the identifying data common to every event type.
        self.name, self.canvas, self.guiEvent = name, canvas, guiEvent
class IdleEvent(Event):
    """
    An event triggered by the GUI backend when it is idle -- useful
    for passive animation
    """
    # Carries no attributes beyond those of the Event base class.
    pass
class DrawEvent(Event):
    """
    Event fired whenever the canvas is drawn.

    Beyond the base :class:`Event` attributes it defines:

    *renderer*
        the :class:`RendererBase` instance used for this draw
    """
    def __init__(self, name, canvas, renderer):
        # Stash the renderer, then fill in the common Event fields.
        self.renderer = renderer
        Event.__init__(self, name, canvas)
class ResizeEvent(Event):
    """
    Event fired when the canvas changes size.

    Beyond the base :class:`Event` attributes it defines:

    *width*
        width of the canvas in pixels

    *height*
        height of the canvas in pixels
    """
    def __init__(self, name, canvas):
        Event.__init__(self, name, canvas)
        # Snapshot the canvas dimensions at the moment of the resize.
        size = canvas.get_width_height()
        self.width = size[0]
        self.height = size[1]
class CloseEvent(Event):
    """
    An event triggered by a figure being closed

    In addition to the :class:`Event` attributes, the following event
    attributes are defined:
    """
    # Pure pass-through: adds no attributes beyond the Event base class.
    def __init__(self, name, canvas, guiEvent=None):
        Event.__init__(self, name, canvas, guiEvent)
class LocationEvent(Event):
    """
    An event that has a screen location

    The following additional attributes are defined and shown with
    their default values.

    In addition to the :class:`Event` attributes, the following
    event attributes are defined:

    *x*
        x position - pixels from left of canvas

    *y*
        y position - pixels from bottom of canvas

    *inaxes*
        the :class:`~matplotlib.axes.Axes` instance if mouse is over axes

    *xdata*
        x coord of mouse in data coords

    *ydata*
        y coord of mouse in data coords
    """
    x = None       # x position - pixels from left of canvas
    y = None       # y position - pixels from right of canvas
    inaxes = None  # the Axes instance if mouse us over axes
    xdata = None   # x coord of mouse in data coords
    ydata = None   # y coord of mouse in data coords

    # the last event that was triggered before this one
    # (class-level state shared by all LocationEvents, used by
    # _update_enter_leave to detect axes/figure transitions)
    lastevent = None

    def __init__(self, name, canvas, x, y, guiEvent=None):
        """
        *x*, *y* in figure coords, 0,0 = bottom, left
        """
        Event.__init__(self, name, canvas, guiEvent=guiEvent)
        self.x = x
        self.y = y

        if x is None or y is None:
            # cannot check if event was in axes if no x,y info
            self.inaxes = None
            self._update_enter_leave()
            return

        # Find all axes containing the mouse
        if self.canvas.mouse_grabber is None:
            axes_list = [a for a in self.canvas.figure.get_axes()
                         if a.in_axes(self)]
        else:
            # a widget has grabbed the mouse: it is the only candidate
            axes_list = [self.canvas.mouse_grabber]

        if len(axes_list) == 0:  # None found
            self.inaxes = None
            self._update_enter_leave()
            return
        elif (len(axes_list) > 1):  # Overlap, get the highest zorder
            axes_list.sort(key=lambda x: x.zorder)
            self.inaxes = axes_list[-1]  # Use the highest zorder
        else:  # Just found one hit
            self.inaxes = axes_list[0]

        try:
            # map the pixel position into the hit axes' data coordinates
            trans = self.inaxes.transData.inverted()
            xdata, ydata = trans.transform_point((x, y))
        except ValueError:
            # non-invertible transform: leave data coords undefined
            self.xdata = None
            self.ydata = None
        else:
            self.xdata = xdata
            self.ydata = ydata

        self._update_enter_leave()

    def _update_enter_leave(self):
        'process the figure/axes enter leave events'
        if LocationEvent.lastevent is not None:
            last = LocationEvent.lastevent
            if last.inaxes != self.inaxes:
                # process axes enter/leave events
                try:
                    if last.inaxes is not None:
                        last.canvas.callbacks.process('axes_leave_event', last)
                except:
                    pass
                    # See ticket 2901582.
                    # I think this is a valid exception to the rule
                    # against catching all exceptions; if anything goes
                    # wrong, we simply want to move on and process the
                    # current event.
                if self.inaxes is not None:
                    self.canvas.callbacks.process('axes_enter_event', self)

        else:
            # process a figure enter event
            if self.inaxes is not None:
                self.canvas.callbacks.process('axes_enter_event', self)

        # remember this event so the next one can detect transitions
        LocationEvent.lastevent = self
class MouseEvent(LocationEvent):
    """
    A mouse event ('button_press_event',
    'button_release_event',
    'scroll_event',
    'motion_notify_event').
    In addition to the :class:`Event` and :class:`LocationEvent`
    attributes, the following attributes are defined:
    *button*
        button pressed None, 1, 2, 3, 'up', 'down' (up and down are used
        for scroll events)
    *key*
        the key depressed when the mouse event triggered (see
        :class:`KeyEvent`)
    *step*
        number of scroll steps (positive for 'up', negative for 'down')
    Example usage::
        def on_press(event):
            print('you pressed', event.button, event.xdata, event.ydata)
        cid = fig.canvas.mpl_connect('button_press_event', on_press)
    """
    x = None         # x position - pixels from left of canvas
    y = None         # y position - pixels from bottom of canvas
    button = None    # button pressed: None, 1, 2, 3, or 'up'/'down'
    dblclick = None  # whether or not the event is the result of a double click
    inaxes = None    # the Axes instance the mouse is over, if any
    xdata = None     # x coord of mouse in data coords
    ydata = None     # y coord of mouse in data coords
    step = None      # scroll steps for scroll events
    def __init__(self, name, canvas, x, y, button=None, key=None,
                 step=0, dblclick=False, guiEvent=None):
        """
        x, y in figure coords, 0,0 = bottom, left
        button pressed None, 1, 2, 3, 'up', 'down'
        """
        LocationEvent.__init__(self, name, canvas, x, y, guiEvent=guiEvent)
        self.button = button
        self.key = key
        self.step = step
        self.dblclick = dblclick
    def __str__(self):
        # Format *button* with %s, not %d: it may be None (motion events)
        # or the strings 'up'/'down' (scroll events), and %d would raise
        # TypeError for those.  For integer buttons the output is unchanged.
        return ("MPL MouseEvent: xy=(%d,%d) xydata=(%s,%s) button=%s " +
                "dblclick=%s inaxes=%s") % (self.x, self.y, self.xdata,
                                            self.ydata, self.button,
                                            self.dblclick, self.inaxes)
class PickEvent(Event):
    """
    A pick event, fired when the user clicks a canvas location that lies
    within the picker tolerance of an artist.
    Attrs: all the :class:`Event` attributes plus
    *mouseevent*
        the :class:`MouseEvent` that generated the pick
    *artist*
        the :class:`~matplotlib.artist.Artist` picked
    other
        extra class dependent attrs -- eg a
        :class:`~matplotlib.lines.Line2D` pick may define different
        extra attributes than a
        :class:`~matplotlib.collections.PatchCollection` pick event
    Example usage::
        line, = ax.plot(rand(100), 'o', picker=5)  # 5 points tolerance
        def on_pick(event):
            thisline = event.artist
            xdata, ydata = thisline.get_data()
            ind = event.ind
            print('on pick line:', zip(xdata[ind], ydata[ind]))
        cid = fig.canvas.mpl_connect('pick_event', on_pick)
    """
    def __init__(self, name, canvas, mouseevent, artist,
                 guiEvent=None, **kwargs):
        Event.__init__(self, name, canvas, guiEvent)
        self.artist = artist
        self.mouseevent = mouseevent
        # Attach picker-specific extras (e.g. ``ind`` for a Line2D pick)
        # directly as attributes of this event.
        vars(self).update(kwargs)
class KeyEvent(LocationEvent):
    """
    A key event (key press, key release).
    Attach additional attributes as defined in
    :meth:`FigureCanvasBase.mpl_connect`.
    In addition to the :class:`Event` and :class:`LocationEvent`
    attributes, the following attributes are defined:
    *key*
        the key(s) pressed.  May be **None**, a single case-sensitive
        ascii character ("g", "G", "#", etc.), a special key name
        ("control", "shift", "f1", "up", etc.) or a combination of the
        two (e.g., "ctrl+alt+g", "ctrl+alt+G").
    .. note::
        Modifier keys will be prefixed to the pressed key and will be in the
        order "ctrl", "alt", "super". The exception to this rule is when the
        pressed key is itself a modifier key, therefore "ctrl+alt" and
        "alt+control" can both be valid key values.
    Example usage::
        def on_key(event):
            print('you pressed', event.key, event.xdata, event.ydata)
        cid = fig.canvas.mpl_connect('key_press_event', on_key)
    """
    def __init__(self, name, canvas, key, x=0, y=0, guiEvent=None):
        # Key events carry a location too (default 0, 0) so handlers can
        # inspect xdata/ydata just like mouse events.
        LocationEvent.__init__(self, name, canvas, x, y, guiEvent=guiEvent)
        self.key = key
class FigureCanvasBase(object):
    """
    The canvas the figure renders into.
    Public attributes
    *figure*
        A :class:`matplotlib.figure.Figure` instance
    """
    # Names of all mpl events that handlers can be connected to via
    # :meth:`mpl_connect`.
    events = [
        'resize_event',
        'draw_event',
        'key_press_event',
        'key_release_event',
        'button_press_event',
        'button_release_event',
        'scroll_event',
        'motion_notify_event',
        'pick_event',
        'idle_event',
        'figure_enter_event',
        'figure_leave_event',
        'axes_enter_event',
        'axes_leave_event',
        'close_event'
        ]
    # Whether this backend supports blitting (fast partial redraws).
    supports_blit = True
    def __init__(self, figure):
        figure.set_canvas(self)
        self.figure = figure
        # a dictionary from event name to a dictionary that maps cid->func
        self.callbacks = cbook.CallbackRegistry()
        self.widgetlock = widgets.LockDraw()
        self._button = None  # the button pressed
        self._key = None  # the key pressed
        self._lastx, self._lasty = None, None  # last known pointer position
        self.button_pick_id = self.mpl_connect('button_press_event', self.pick)
        self.scroll_pick_id = self.mpl_connect('scroll_event', self.pick)
        self.mouse_grabber = None  # the axes currently grabbing mouse
        self.toolbar = None  # NavigationToolbar2 will set me
        self._is_saving = False
        # Dead debugging code, kept for reference: hooks up onHilite /
        # onRemove demo handlers.
        if False:
            ## highlight the artists that are hit
            self.mpl_connect('motion_notify_event', self.onHilite)
            ## delete the artists that are clicked on
            #self.mpl_disconnect(self.button_pick_id)
            #self.mpl_connect('button_press_event',self.onRemove)
    def is_saving(self):
        """
        Returns `True` when the renderer is in the process of saving
        to a file, rather than rendering for an on-screen buffer.
        """
        return self._is_saving
    def onRemove(self, ev):
        """
        Mouse event processor which removes the top artist
        under the cursor.  Connect this to the 'mouse_press_event'
        using::
            canvas.mpl_connect('mouse_press_event',canvas.onRemove)
        """
        def sort_artists(artists):
            # This depends on stable sort and artists returned
            # from get_children in z order.
            L = [(h.zorder, h) for h in artists]
            L.sort()
            return [h for zorder, h in L]
        # Find the top artist under the cursor
        under = sort_artists(self.figure.hitlist(ev))
        h = None
        if under:
            h = under[-1]
        # Try deleting that artist, or its parent if you
        # can't delete the artist
        while h:
            if h.remove():
                self.draw_idle()
                break
            parent = None
            for p in under:
                if h in p.get_children():
                    parent = p
                    break
            h = parent
    def onHilite(self, ev):
        """
        Mouse event processor which highlights the artists
        under the cursor.  Connect this to the 'motion_notify_event'
        using::
            canvas.mpl_connect('motion_notify_event',canvas.onHilite)
        """
        # _active maps artist -> original color(s), captured on enter.
        if not hasattr(self, '_active'):
            self._active = dict()
        under = self.figure.hitlist(ev)
        enter = [a for a in under if a not in self._active]
        leave = [a for a in self._active if a not in under]
        #print "within:"," ".join([str(x) for x in under])
        #print "entering:",[str(a) for a in enter]
        #print "leaving:",[str(a) for a in leave]
        # On leave restore the captured colour
        for a in leave:
            if hasattr(a, 'get_color'):
                a.set_color(self._active[a])
            elif hasattr(a, 'get_edgecolor'):
                a.set_edgecolor(self._active[a][0])
                a.set_facecolor(self._active[a][1])
            del self._active[a]
        # On enter, capture the color and repaint the artist
        # with the highlight colour.  Capturing colour has to
        # be done first in case the parent recolouring affects
        # the child.
        for a in enter:
            if hasattr(a, 'get_color'):
                self._active[a] = a.get_color()
            elif hasattr(a, 'get_edgecolor'):
                self._active[a] = (a.get_edgecolor(), a.get_facecolor())
            else:
                self._active[a] = None
        for a in enter:
            if hasattr(a, 'get_color'):
                a.set_color('red')
            elif hasattr(a, 'get_edgecolor'):
                a.set_edgecolor('red')
                a.set_facecolor('lightblue')
            else:
                self._active[a] = None
        self.draw_idle()
    def pick(self, mouseevent):
        # Forward mouse events to the figure's picking machinery unless a
        # widget (e.g. zoom rectangle) currently holds the lock.
        if not self.widgetlock.locked():
            self.figure.pick(mouseevent)
    def blit(self, bbox=None):
        """
        blit the canvas in bbox (default entire canvas)
        """
        pass
    def resize(self, w, h):
        """
        set the canvas size in pixels
        """
        pass
    def draw_event(self, renderer):
        """
        This method will call all functions connected to the
        'draw_event' with a :class:`DrawEvent`
        """
        s = 'draw_event'
        event = DrawEvent(s, self, renderer)
        self.callbacks.process(s, event)
    def resize_event(self):
        """
        This method will call all functions connected to the
        'resize_event' with a :class:`ResizeEvent`
        """
        s = 'resize_event'
        event = ResizeEvent(s, self)
        self.callbacks.process(s, event)
    def close_event(self, guiEvent=None):
        """
        This method will be called by all functions connected to the
        'close_event' with a :class:`CloseEvent`
        """
        s = 'close_event'
        try:
            event = CloseEvent(s, self, guiEvent=guiEvent)
            self.callbacks.process(s, event)
        except (TypeError, AttributeError):
            pass
            # Suppress the TypeError when the python session is being killed.
            # It may be that a better solution would be a mechanism to
            # disconnect all callbacks upon shutdown.
            # AttributeError occurs on OSX with qt4agg upon exiting
            # with an open window; 'callbacks' attribute no longer exists.
    def key_press_event(self, key, guiEvent=None):
        """
        This method will call all functions connected to the
        'key_press_event' with a :class:`KeyEvent`
        """
        # Remember the key so concurrent mouse events can report it.
        self._key = key
        s = 'key_press_event'
        event = KeyEvent(
            s, self, key, self._lastx, self._lasty, guiEvent=guiEvent)
        self.callbacks.process(s, event)
    def key_release_event(self, key, guiEvent=None):
        """
        This method will call all functions connected to the
        'key_release_event' with a :class:`KeyEvent`
        """
        s = 'key_release_event'
        event = KeyEvent(
            s, self, key, self._lastx, self._lasty, guiEvent=guiEvent)
        self.callbacks.process(s, event)
        self._key = None
    def pick_event(self, mouseevent, artist, **kwargs):
        """
        This method will be called by artists who are picked and will
        fire off :class:`PickEvent` callbacks registered listeners
        """
        s = 'pick_event'
        event = PickEvent(s, self, mouseevent, artist, **kwargs)
        self.callbacks.process(s, event)
    def scroll_event(self, x, y, step, guiEvent=None):
        """
        Backend derived classes should call this function on any
        scroll wheel event.  x,y are the canvas coords: 0,0 is lower,
        left.  button and key are as defined in MouseEvent.
        This method will call all functions connected to the
        'scroll_event' with a :class:`MouseEvent` instance.
        """
        # Map the scroll direction onto the synthetic 'up'/'down' buttons.
        if step >= 0:
            self._button = 'up'
        else:
            self._button = 'down'
        s = 'scroll_event'
        mouseevent = MouseEvent(s, self, x, y, self._button, self._key,
                                step=step, guiEvent=guiEvent)
        self.callbacks.process(s, mouseevent)
    def button_press_event(self, x, y, button, dblclick=False, guiEvent=None):
        """
        Backend derived classes should call this function on any mouse
        button press.  x,y are the canvas coords: 0,0 is lower, left.
        button and key are as defined in :class:`MouseEvent`.
        This method will call all functions connected to the
        'button_press_event' with a :class:`MouseEvent` instance.
        """
        self._button = button
        s = 'button_press_event'
        mouseevent = MouseEvent(s, self, x, y, button, self._key,
                                dblclick=dblclick, guiEvent=guiEvent)
        self.callbacks.process(s, mouseevent)
    def button_release_event(self, x, y, button, guiEvent=None):
        """
        Backend derived classes should call this function on any mouse
        button release.
        *x*
            the canvas coordinates where 0=left
        *y*
            the canvas coordinates where 0=bottom
        *guiEvent*
            the native UI event that generated the mpl event
        This method will call all functions connected to the
        'button_release_event' with a :class:`MouseEvent` instance.
        """
        s = 'button_release_event'
        event = MouseEvent(s, self, x, y, button, self._key, guiEvent=guiEvent)
        self.callbacks.process(s, event)
        self._button = None
    def motion_notify_event(self, x, y, guiEvent=None):
        """
        Backend derived classes should call this function on any
        motion-notify-event.
        *x*
            the canvas coordinates where 0=left
        *y*
            the canvas coordinates where 0=bottom
        *guiEvent*
            the native UI event that generated the mpl event
        This method will call all functions connected to the
        'motion_notify_event' with a :class:`MouseEvent` instance.
        """
        # Track the pointer so key events can be given a location.
        self._lastx, self._lasty = x, y
        s = 'motion_notify_event'
        event = MouseEvent(s, self, x, y, self._button, self._key,
                           guiEvent=guiEvent)
        self.callbacks.process(s, event)
    def leave_notify_event(self, guiEvent=None):
        """
        Backend derived classes should call this function when leaving
        canvas
        *guiEvent*
            the native UI event that generated the mpl event
        """
        self.callbacks.process('figure_leave_event', LocationEvent.lastevent)
        # Reset the enter/leave bookkeeping; the pointer is off-canvas now.
        LocationEvent.lastevent = None
        self._lastx, self._lasty = None, None
    def enter_notify_event(self, guiEvent=None, xy=None):
        """
        Backend derived classes should call this function when entering
        canvas
        *guiEvent*
            the native UI event that generated the mpl event
        *xy*
            the coordinate location of the pointer when the canvas is
            entered
        """
        if xy is not None:
            x, y = xy
            self._lastx, self._lasty = x, y
        event = Event('figure_enter_event', self, guiEvent)
        self.callbacks.process('figure_enter_event', event)
    def idle_event(self, guiEvent=None):
        """Called when GUI is idle."""
        s = 'idle_event'
        event = IdleEvent(s, self, guiEvent=guiEvent)
        self.callbacks.process(s, event)
    def grab_mouse(self, ax):
        """
        Set the child axes which are currently grabbing the mouse events.
        Usually called by the widgets themselves.
        It is an error to call this if the mouse is already grabbed by
        another axes.
        """
        if self.mouse_grabber not in (None, ax):
            # NOTE(review): this message looks garbled ("two different
            # attempted"); kept as-is since callers may match on it.
            raise RuntimeError('two different attempted to grab mouse input')
        self.mouse_grabber = ax
    def release_mouse(self, ax):
        """
        Release the mouse grab held by the axes, ax.
        Usually called by the widgets.
        It is ok to call this even if you ax doesn't have the mouse
        grab currently.
        """
        if self.mouse_grabber is ax:
            self.mouse_grabber = None
    def draw(self, *args, **kwargs):
        """
        Render the :class:`~matplotlib.figure.Figure`
        """
        pass
    def draw_idle(self, *args, **kwargs):
        """
        :meth:`draw` only if idle; defaults to draw but backends can override
        """
        self.draw(*args, **kwargs)
    def draw_cursor(self, event):
        """
        Draw a cursor in the event.axes if inaxes is not None.  Use
        native GUI drawing for efficiency if possible
        """
        pass
    def get_width_height(self):
        """
        Return the figure width and height in points or pixels
        (depending on the backend), truncated to integers
        """
        return int(self.figure.bbox.width), int(self.figure.bbox.height)
    # Mapping of supported file extensions to human-readable format names;
    # extended below when PIL is available.
    filetypes = {
        'eps': 'Encapsulated Postscript',
        'pdf': 'Portable Document Format',
        'pgf': 'LaTeX PGF Figure',
        'png': 'Portable Network Graphics',
        'ps': 'Postscript',
        'raw': 'Raw RGBA bitmap',
        'rgba': 'Raw RGBA bitmap',
        'svg': 'Scalable Vector Graphics',
        'svgz': 'Scalable Vector Graphics'}
    # All of these print_* functions do a lazy import because
    # a) otherwise we'd have cyclical imports, since all of these
    #    classes inherit from FigureCanvasBase
    # b) so we don't import a bunch of stuff the user may never use
    # TODO: these print_* throw ImportErrror when called from
    # compare_images_decorator (decorators.py line 112)
    # if the backend has not already been loaded earlier on.  Simple trigger:
    # >>> import matplotlib.tests.test_spines
    # >>> list(matplotlib.tests.test_spines.test_spines_axes_positions())[0][0]()
    def print_eps(self, *args, **kwargs):
        from .backends.backend_ps import FigureCanvasPS  # lazy import
        ps = self.switch_backends(FigureCanvasPS)
        return ps.print_eps(*args, **kwargs)
    def print_pdf(self, *args, **kwargs):
        from .backends.backend_pdf import FigureCanvasPdf  # lazy import
        pdf = self.switch_backends(FigureCanvasPdf)
        return pdf.print_pdf(*args, **kwargs)
    def print_pgf(self, *args, **kwargs):
        from .backends.backend_pgf import FigureCanvasPgf  # lazy import
        pgf = self.switch_backends(FigureCanvasPgf)
        return pgf.print_pgf(*args, **kwargs)
    def print_png(self, *args, **kwargs):
        from .backends.backend_agg import FigureCanvasAgg  # lazy import
        agg = self.switch_backends(FigureCanvasAgg)
        return agg.print_png(*args, **kwargs)
    def print_ps(self, *args, **kwargs):
        from .backends.backend_ps import FigureCanvasPS  # lazy import
        ps = self.switch_backends(FigureCanvasPS)
        return ps.print_ps(*args, **kwargs)
    def print_raw(self, *args, **kwargs):
        from .backends.backend_agg import FigureCanvasAgg  # lazy import
        agg = self.switch_backends(FigureCanvasAgg)
        return agg.print_raw(*args, **kwargs)
    print_bmp = print_rgba = print_raw
    def print_svg(self, *args, **kwargs):
        from .backends.backend_svg import FigureCanvasSVG  # lazy import
        svg = self.switch_backends(FigureCanvasSVG)
        return svg.print_svg(*args, **kwargs)
    def print_svgz(self, *args, **kwargs):
        from .backends.backend_svg import FigureCanvasSVG  # lazy import
        svg = self.switch_backends(FigureCanvasSVG)
        return svg.print_svgz(*args, **kwargs)
    # JPEG and TIFF output require PIL for the final encode step.
    if _has_pil:
        filetypes['jpg'] = 'Joint Photographic Experts Group'
        filetypes['jpeg'] = filetypes['jpg']
        def print_jpg(self, filename_or_obj, *args, **kwargs):
            """
            Supported kwargs:
            *quality*: The image quality, on a scale from 1 (worst) to
                95 (best). The default is 95, if not given in the
                matplotlibrc file in the savefig.jpeg_quality parameter.
                Values above 95 should be avoided; 100 completely
                disables the JPEG quantization stage.
            *optimize*: If present, indicates that the encoder should
                make an extra pass over the image in order to select
                optimal encoder settings.
            *progressive*: If present, indicates that this image
                should be stored as a progressive JPEG file.
            """
            from .backends.backend_agg import FigureCanvasAgg  # lazy import
            # Render with Agg first, then hand the RGBA buffer to PIL.
            agg = self.switch_backends(FigureCanvasAgg)
            buf, size = agg.print_to_buffer()
            if kwargs.pop("dryrun", False):
                return
            image = Image.frombuffer('RGBA', size, buf, 'raw', 'RGBA', 0, 1)
            options = cbook.restrict_dict(kwargs, ['quality', 'optimize',
                                                   'progressive'])
            if 'quality' not in options:
                options['quality'] = rcParams['savefig.jpeg_quality']
            return image.save(filename_or_obj, format='jpeg', **options)
        print_jpeg = print_jpg
        filetypes['tif'] = filetypes['tiff'] = 'Tagged Image File Format'
        def print_tif(self, filename_or_obj, *args, **kwargs):
            from .backends.backend_agg import FigureCanvasAgg  # lazy import
            agg = self.switch_backends(FigureCanvasAgg)
            buf, size = agg.print_to_buffer()
            if kwargs.pop("dryrun", False):
                return
            image = Image.frombuffer('RGBA', size, buf, 'raw', 'RGBA', 0, 1)
            dpi = (self.figure.dpi, self.figure.dpi)
            return image.save(filename_or_obj, format='tiff',
                              dpi=dpi)
        print_tiff = print_tif
    def get_supported_filetypes(self):
        """Return dict of savefig file formats supported by this backend"""
        return self.filetypes
    def get_supported_filetypes_grouped(self):
        """Return a dict of savefig file formats supported by this backend,
        where the keys are a file type name, such as 'Joint Photographic
        Experts Group', and the values are a list of filename extensions used
        for that filetype, such as ['jpg', 'jpeg']."""
        groupings = {}
        for ext, name in self.filetypes.items():
            groupings.setdefault(name, []).append(ext)
            groupings[name].sort()
        return groupings
    def _get_print_method(self, format):
        # Resolve the print_<format> callable for *format*, preferring
        # externally registered backend classes over our own methods.
        method_name = 'print_%s' % format
        # check for registered backends
        if format in _backend_d:
            backend_class = _backend_d[format]
            def _print_method(*args, **kwargs):
                backend = self.switch_backends(backend_class)
                print_method = getattr(backend, method_name)
                return print_method(*args, **kwargs)
            return _print_method
        formats = self.get_supported_filetypes()
        if (format not in formats or not hasattr(self, method_name)):
            formats = sorted(formats)
            raise ValueError(
                'Format "%s" is not supported.\n'
                'Supported formats: '
                '%s.' % (format, ', '.join(formats)))
        return getattr(self, method_name)
    def print_figure(self, filename, dpi=None, facecolor='w', edgecolor='w',
                     orientation='portrait', format=None, **kwargs):
        """
        Render the figure to hardcopy. Set the figure patch face and edge
        colors.  This is useful because some of the GUIs have a gray figure
        face color background and you'll probably want to override this on
        hardcopy.
        Arguments are:
        *filename*
            can also be a file object on image backends
        *orientation*
            only currently applies to PostScript printing.
        *dpi*
            the dots per inch to save the figure in; if None, use savefig.dpi
        *facecolor*
            the facecolor of the figure
        *edgecolor*
            the edgecolor of the figure
        *orientation*
            'landscape' | 'portrait' (not supported on all backends)
        *format*
            when set, forcibly set the file format to save to
        *bbox_inches*
            Bbox in inches. Only the given portion of the figure is
            saved. If 'tight', try to figure out the tight bbox of
            the figure. If None, use savefig.bbox
        *pad_inches*
            Amount of padding around the figure when bbox_inches is
            'tight'. If None, use savefig.pad_inches
        *bbox_extra_artists*
            A list of extra artists that will be considered when the
            tight bbox is calculated.
        """
        if format is None:
            # get format from filename, or from backend's default filetype
            if cbook.is_string_like(filename):
                format = os.path.splitext(filename)[1][1:]
            if format is None or format == '':
                format = self.get_default_filetype()
                if cbook.is_string_like(filename):
                    filename = filename.rstrip('.') + '.' + format
        format = format.lower()
        print_method = self._get_print_method(format)
        if dpi is None:
            dpi = rcParams['savefig.dpi']
        # Save current figure state so it can be restored after printing.
        origDPI = self.figure.dpi
        origfacecolor = self.figure.get_facecolor()
        origedgecolor = self.figure.get_edgecolor()
        self.figure.dpi = dpi
        self.figure.set_facecolor(facecolor)
        self.figure.set_edgecolor(edgecolor)
        bbox_inches = kwargs.pop("bbox_inches", None)
        if bbox_inches is None:
            bbox_inches = rcParams['savefig.bbox']
        if bbox_inches:
            # call adjust_bbox to save only the given area
            if bbox_inches == "tight":
                # when bbox_inches == "tight", it saves the figure
                # twice.  The first save command is just to estimate
                # the bounding box of the figure. A stringIO object is
                # used as a temporary file object, but it causes a
                # problem for some backends (ps backend with
                # usetex=True) if they expect a filename, not a
                # file-like object. As I think it is best to change
                # the backend to support file-like object, i'm going
                # to leave it as it is. However, a better solution
                # than stringIO seems to be needed. -JJL
                #result = getattr(self, method_name)
                result = print_method(
                    io.BytesIO(),
                    dpi=dpi,
                    facecolor=facecolor,
                    edgecolor=edgecolor,
                    orientation=orientation,
                    dryrun=True,
                    **kwargs)
                renderer = self.figure._cachedRenderer
                bbox_inches = self.figure.get_tightbbox(renderer)
                bbox_artists = kwargs.pop("bbox_extra_artists", None)
                if bbox_artists is None:
                    bbox_artists = self.figure.get_default_bbox_extra_artists()
                # Expand the tight bbox to cover the extra artists,
                # respecting each artist's clipping.
                bbox_filtered = []
                for a in bbox_artists:
                    bbox = a.get_window_extent(renderer)
                    if a.get_clip_on():
                        clip_box = a.get_clip_box()
                        if clip_box is not None:
                            bbox = Bbox.intersection(bbox, clip_box)
                        clip_path = a.get_clip_path()
                        if clip_path is not None and bbox is not None:
                            clip_path = clip_path.get_fully_transformed_path()
                            bbox = Bbox.intersection(bbox,
                                                     clip_path.get_extents())
                    if bbox is not None and (bbox.width != 0 or
                                             bbox.height != 0):
                        bbox_filtered.append(bbox)
                if bbox_filtered:
                    _bbox = Bbox.union(bbox_filtered)
                    trans = Affine2D().scale(1.0 / self.figure.dpi)
                    bbox_extra = TransformedBbox(_bbox, trans)
                    bbox_inches = Bbox.union([bbox_inches, bbox_extra])
                pad = kwargs.pop("pad_inches", None)
                if pad is None:
                    pad = rcParams['savefig.pad_inches']
                bbox_inches = bbox_inches.padded(pad)
            restore_bbox = tight_bbox.adjust_bbox(self.figure, format,
                                                  bbox_inches)
            _bbox_inches_restore = (bbox_inches, restore_bbox)
        else:
            _bbox_inches_restore = None
        self._is_saving = True
        try:
            #result = getattr(self, method_name)(
            result = print_method(
                filename,
                dpi=dpi,
                facecolor=facecolor,
                edgecolor=edgecolor,
                orientation=orientation,
                bbox_inches_restore=_bbox_inches_restore,
                **kwargs)
        finally:
            # Always restore the figure to its pre-print state.
            if bbox_inches and restore_bbox:
                restore_bbox()
            self.figure.dpi = origDPI
            self.figure.set_facecolor(origfacecolor)
            self.figure.set_edgecolor(origedgecolor)
            self.figure.set_canvas(self)
            self._is_saving = False
            #self.figure.canvas.draw() ## seems superfluous
        return result
    def get_default_filetype(self):
        """
        Get the default savefig file format as specified in rcParam
        ``savefig.format``. Returned string excludes period. Overridden
        in backends that only support a single file type.
        """
        return rcParams['savefig.format']
    def get_window_title(self):
        """
        Get the title text of the window containing the figure.
        Return None if there is no window (eg, a PS backend).
        """
        if hasattr(self, "manager"):
            return self.manager.get_window_title()
    def set_window_title(self, title):
        """
        Set the title text of the window containing the figure.  Note that
        this has no effect if there is no window (eg, a PS backend).
        """
        if hasattr(self, "manager"):
            self.manager.set_window_title(title)
    def get_default_filename(self):
        """
        Return a string, which includes extension, suitable for use as
        a default filename.
        """
        default_filename = self.get_window_title() or 'image'
        default_filename = default_filename.lower().replace(' ', '_')
        return default_filename + '.' + self.get_default_filetype()
    def switch_backends(self, FigureCanvasClass):
        """
        Instantiate an instance of FigureCanvasClass
        This is used for backend switching, eg, to instantiate a
        FigureCanvasPS from a FigureCanvasGTK.  Note, deep copying is
        not done, so any changes to one of the instances (eg, setting
        figure size or line props), will be reflected in the other
        """
        newCanvas = FigureCanvasClass(self.figure)
        newCanvas._is_saving = self._is_saving
        return newCanvas
    def mpl_connect(self, s, func):
        """
        Connect event with string *s* to *func*.  The signature of *func* is::
            def func(event)
        where event is a :class:`matplotlib.backend_bases.Event`.  The
        following events are recognized
        - 'button_press_event'
        - 'button_release_event'
        - 'draw_event'
        - 'key_press_event'
        - 'key_release_event'
        - 'motion_notify_event'
        - 'pick_event'
        - 'resize_event'
        - 'scroll_event'
        - 'figure_enter_event',
        - 'figure_leave_event',
        - 'axes_enter_event',
        - 'axes_leave_event'
        - 'close_event'
        For the location events (button and key press/release), if the
        mouse is over the axes, the variable ``event.inaxes`` will be
        set to the :class:`~matplotlib.axes.Axes` the event occurs is
        over, and additionally, the variables ``event.xdata`` and
        ``event.ydata`` will be defined.  This is the mouse location
        in data coords.  See
        :class:`~matplotlib.backend_bases.KeyEvent` and
        :class:`~matplotlib.backend_bases.MouseEvent` for more info.
        Return value is a connection id that can be used with
        :meth:`~matplotlib.backend_bases.Event.mpl_disconnect`.
        Example usage::
            def on_press(event):
                print('you pressed', event.button, event.xdata, event.ydata)
            cid = canvas.mpl_connect('button_press_event', on_press)
        """
        return self.callbacks.connect(s, func)
    def mpl_disconnect(self, cid):
        """
        Disconnect callback id cid
        Example usage::
            cid = canvas.mpl_connect('button_press_event', on_press)
            #...later
            canvas.mpl_disconnect(cid)
        """
        return self.callbacks.disconnect(cid)
    def new_timer(self, *args, **kwargs):
        """
        Creates a new backend-specific subclass of
        :class:`backend_bases.Timer`. This is useful for getting periodic
        events through the backend's native event loop. Implemented only for
        backends with GUIs.
        optional arguments:
        *interval*
            Timer interval in milliseconds
        *callbacks*
            Sequence of (func, args, kwargs) where func(*args, **kwargs) will
            be executed by the timer every *interval*.
        """
        return TimerBase(*args, **kwargs)
    def flush_events(self):
        """
        Flush the GUI events for the figure. Implemented only for
        backends with GUIs.
        """
        raise NotImplementedError
    def start_event_loop(self, timeout):
        """
        Start an event loop.  This is used to start a blocking event
        loop so that interactive functions, such as ginput and
        waitforbuttonpress, can wait for events.  This should not be
        confused with the main GUI event loop, which is always running
        and has nothing to do with this.
        This is implemented only for backends with GUIs.
        """
        raise NotImplementedError
    def stop_event_loop(self):
        """
        Stop an event loop.  This is used to stop a blocking event
        loop so that interactive functions, such as ginput and
        waitforbuttonpress, can wait for events.
        This is implemented only for backends with GUIs.
        """
        raise NotImplementedError
    def start_event_loop_default(self, timeout=0):
        """
        Start an event loop.  This is used to start a blocking event
        loop so that interactive functions, such as ginput and
        waitforbuttonpress, can wait for events.  This should not be
        confused with the main GUI event loop, which is always running
        and has nothing to do with this.
        This function provides default event loop functionality based
        on time.sleep that is meant to be used until event loop
        functions for each of the GUI backends can be written.  As
        such, it throws a deprecated warning.
        Call signature::
            start_event_loop_default(self,timeout=0)
        This call blocks until a callback function triggers
        stop_event_loop() or *timeout* is reached.  If *timeout* is
        <=0, never timeout.
        """
        # NOTE(review): the local name 'str' shadows the builtin for the
        # rest of this method; kept unchanged in this doc-only pass.
        str = "Using default event loop until function specific"
        str += " to this GUI is implemented"
        warnings.warn(str, mplDeprecation)
        if timeout <= 0:
            timeout = np.inf
        timestep = 0.01
        counter = 0
        self._looping = True
        # Poll-and-sleep loop; stop_event_loop_default() clears _looping.
        while self._looping and counter * timestep < timeout:
            self.flush_events()
            time.sleep(timestep)
            counter += 1
    def stop_event_loop_default(self):
        """
        Stop an event loop. This is used to stop a blocking event
        loop so that interactive functions, such as ginput and
        waitforbuttonpress, can wait for events.
        Call signature::
            stop_event_loop_default(self)
        """
        self._looping = False
def key_press_handler(event, canvas, toolbar=None):
    """
    Implement the default mpl key bindings for the canvas and toolbar
    described at :ref:`key-event-handling`
    *event*
        a :class:`KeyEvent` instance
    *canvas*
        a :class:`FigureCanvasBase` instance
    *toolbar*
        a :class:`NavigationToolbar2` instance
    """
    # these bindings happen whether you are over an axes or not
    if event.key is None:
        return
    # Load key-mappings from your matplotlibrc file.
    fullscreen_keys = rcParams['keymap.fullscreen']
    home_keys = rcParams['keymap.home']
    back_keys = rcParams['keymap.back']
    forward_keys = rcParams['keymap.forward']
    pan_keys = rcParams['keymap.pan']
    zoom_keys = rcParams['keymap.zoom']
    save_keys = rcParams['keymap.save']
    quit_keys = rcParams['keymap.quit']
    grid_keys = rcParams['keymap.grid']
    toggle_yscale_keys = rcParams['keymap.yscale']
    toggle_xscale_keys = rcParams['keymap.xscale']
    # Renamed from 'all' to avoid shadowing the builtin.
    all_keys = rcParams['keymap.all_axes']
    # toggle fullscreen mode (default key 'f')
    if event.key in fullscreen_keys:
        canvas.manager.full_screen_toggle()
    # quit the figure (default key 'ctrl+w')
    if event.key in quit_keys:
        Gcf.destroy_fig(canvas.figure)
    if toolbar is not None:
        # home or reset mnemonic  (default key 'h', 'home' and 'r')
        if event.key in home_keys:
            toolbar.home()
        # forward / backward keys to enable left handed quick navigation
        # (default key for backward: 'left', 'backspace' and 'c')
        elif event.key in back_keys:
            toolbar.back()
        # (default key for forward: 'right' and 'v')
        elif event.key in forward_keys:
            toolbar.forward()
        # pan mnemonic (default key 'p')
        elif event.key in pan_keys:
            toolbar.pan()
        # zoom mnemonic (default key 'o')
        elif event.key in zoom_keys:
            toolbar.zoom()
        # saving current figure (default key 's')
        elif event.key in save_keys:
            toolbar.save_figure()
    if event.inaxes is None:
        return
    # these bindings require the mouse to be over an axes to trigger
    # switching on/off a grid in current axes (default key 'g')
    if event.key in grid_keys:
        event.inaxes.grid()
        canvas.draw()
    # toggle scaling of y-axes between 'log and 'linear' (default key 'l')
    elif event.key in toggle_yscale_keys:
        ax = event.inaxes
        scale = ax.get_yscale()
        if scale == 'log':
            ax.set_yscale('linear')
            ax.figure.canvas.draw()
        elif scale == 'linear':
            ax.set_yscale('log')
            ax.figure.canvas.draw()
    # toggle scaling of x-axes between 'log and 'linear' (default key 'k')
    elif event.key in toggle_xscale_keys:
        ax = event.inaxes
        scalex = ax.get_xscale()
        if scalex == 'log':
            ax.set_xscale('linear')
            ax.figure.canvas.draw()
        elif scalex == 'linear':
            ax.set_xscale('log')
            ax.figure.canvas.draw()
    elif (event.key.isdigit() and event.key != '0') or event.key in all_keys:
        # keys in list 'all_keys' enable all axes (default key 'a'),
        # otherwise if key is a number only enable this particular axes
        # if it was the axes, where the event was raised
        if event.key not in all_keys:
            n = int(event.key) - 1
        for i, a in enumerate(canvas.figure.get_axes()):
            # consider axes, in which the event was raised
            # FIXME: Why only this axes?
            if event.x is not None and event.y is not None \
                    and a.in_axes(event):
                if event.key in all_keys:
                    a.set_navigate(True)
                else:
                    a.set_navigate(i == n)
class NonGuiException(Exception):
    """Raised by :meth:`FigureManagerBase.show` when the backend has no GUI
    window to display; callers may catch it to emit a warning instead."""
    pass
class FigureManagerBase:
    """
    Helper class for pyplot mode; bundles a canvas together with the
    window bookkeeping that GUI backends need.

    Public attributes:

    *canvas*
        A :class:`FigureCanvasBase` instance

    *num*
        The figure number
    """
    def __init__(self, canvas, num):
        self.canvas = canvas
        # Give the canvas a back-reference to its manager.
        canvas.manager = self
        self.num = num
        # Hook up the default key bindings.  The returned connection id is
        # kept public so embedders can disable default key handling with::
        #
        #     manager, canvas = figure.canvas.manager, figure.canvas
        #     canvas.mpl_disconnect(manager.key_press_handler_id)
        self.key_press_handler_id = self.canvas.mpl_connect(
            'key_press_event', self.key_press)
    def show(self):
        """
        For GUI backends, show the figure window and redraw.
        For non-GUI backends, raise an exception to be caught by
        :meth:`~matplotlib.figure.Figure.show`, for an optional warning.
        """
        raise NonGuiException()
    def destroy(self):
        """Backend hook: tear down the figure window (no-op here)."""
        pass
    def full_screen_toggle(self):
        """Backend hook: toggle full-screen display (no-op here)."""
        pass
    def resize(self, w, h):
        """For gui backends, resize the window (in pixels); no-op here."""
        pass
    def key_press(self, event):
        """
        Implement the default mpl key bindings defined at
        :ref:`key-event-handling`
        """
        key_press_handler(event, self.canvas, self.canvas.toolbar)
    def show_popup(self, msg):
        """
        Display *msg* in a popup -- GUI only (no-op here).
        """
        pass
    def get_window_title(self):
        """
        Get the title text of the window containing the figure.
        Return None for non-GUI backends (eg, a PS backend).
        """
        return 'image'
    def set_window_title(self, title):
        """
        Set the title text of the window containing the figure.  Note that
        this has no effect for non-GUI backends (eg, a PS backend).
        """
        pass
class Cursors:
    """Simple namespace holding the integer cursor ids used by the toolbar."""
    # Four cursor shapes; the values are arbitrary but must stay distinct
    # and stable, since backends map them to native cursors.
    HAND = 0
    POINTER = 1
    SELECT_REGION = 2
    MOVE = 3
cursors = Cursors()
class NavigationToolbar2(object):
    """
    Base class for the navigation cursor, version 2
    backends must implement a canvas that handles connections for
    'button_press_event' and 'button_release_event'. See
    :meth:`FigureCanvasBase.mpl_connect` for more information
    They must also define
    :meth:`save_figure`
    save the current figure
    :meth:`set_cursor`
    if you want the pointer icon to change
    :meth:`_init_toolbar`
    create your toolbar widget
    :meth:`draw_rubberband` (optional)
    draw the zoom to rect "rubberband" rectangle
    :meth:`press` (optional)
    whenever a mouse button is pressed, you'll be notified with
    the event
    :meth:`release` (optional)
    whenever a mouse button is released, you'll be notified with
    the event
    :meth:`dynamic_update` (optional)
    dynamically update the window while navigating
    :meth:`set_message` (optional)
    display message
    :meth:`set_history_buttons` (optional)
    you can change the history back / forward buttons to
    indicate disabled / enabled state.
    That's it, we'll do the rest!
    """
    # list of toolitems to add to the toolbar, format is:
    # (
    #   text,            # the text of the button (often not visible to users)
    #   tooltip_text,    # the tooltip shown on hover (where possible)
    #   image_file,      # name of the image for the button (without the extension)
    #   name_of_method,  # name of the method in NavigationToolbar2 to call
    # )
    # A (None, None, None, None) entry is used by backends as a separator.
    toolitems = (
        ('Home', 'Reset original view', 'home', 'home'),
        ('Back', 'Back to previous view', 'back', 'back'),
        ('Forward', 'Forward to next view', 'forward', 'forward'),
        (None, None, None, None),
        ('Pan', 'Pan axes with left mouse, zoom with right', 'move', 'pan'),
        ('Zoom', 'Zoom to rectangle', 'zoom_to_rect', 'zoom'),
        (None, None, None, None),
        ('Subplots', 'Configure subplots', 'subplots', 'configure_subplots'),
        ('Save', 'Save the figure', 'filesave', 'save_figure'),
      )
    def __init__(self, canvas):
        self.canvas = canvas
        canvas.toolbar = self
        # a dict from axes index to a list of view limits
        self._views = cbook.Stack()
        self._positions = cbook.Stack()  # stack of subplot positions
        self._xypress = None  # the location and axis info at the time
                              # of the press
        # Connection ids for the button press/release handlers of whichever
        # tool (pan or zoom) is currently active; None when no tool is active.
        self._idPress = None
        self._idRelease = None
        self._active = None  # current tool: None, 'PAN' or 'ZOOM'
        self._lastCursor = None  # last cursor set, to avoid redundant updates
        self._init_toolbar()
        self._idDrag = self.canvas.mpl_connect(
            'motion_notify_event', self.mouse_move)
        self._ids_zoom = []       # (motion, key press, key release) ids while zooming
        self._zoom_mode = None    # 'x'/'y' to constrain zoom to one axis, else None
        self._button_pressed = None  # determined by the button pressed
                                     # at start
        self.mode = ''  # a mode string for the status bar
        self.set_history_buttons()
    def set_message(self, s):
        """Display a message on toolbar or in status bar"""
        pass
    def back(self, *args):
        """move back up the view lim stack"""
        self._views.back()
        self._positions.back()
        self.set_history_buttons()
        self._update_view()
    def dynamic_update(self):
        # Optional backend hook: refresh the window while dragging.
        pass
    def draw_rubberband(self, event, x0, y0, x1, y1):
        """Draw a rectangle rubberband to indicate zoom limits"""
        pass
    def forward(self, *args):
        """Move forward in the view lim stack"""
        self._views.forward()
        self._positions.forward()
        self.set_history_buttons()
        self._update_view()
    def home(self, *args):
        """Restore the original view"""
        self._views.home()
        self._positions.home()
        self.set_history_buttons()
        self._update_view()
    def _init_toolbar(self):
        """
        This is where you actually build the GUI widgets (called by
        __init__).  The icons ``home.xpm``, ``back.xpm``, ``forward.xpm``,
        ``hand.xpm``, ``zoom_to_rect.xpm`` and ``filesave.xpm`` are standard
        across backends (there are ppm versions in CVS also).
        You just need to set the callbacks
        home : self.home
        back : self.back
        forward : self.forward
        hand : self.pan
        zoom_to_rect : self.zoom
        filesave : self.save_figure
        You only need to define the last one - the others are in the base
        class implementation.
        """
        raise NotImplementedError
    def mouse_move(self, event):
        # Update the mouse cursor shape according to the active tool, and
        # echo the data coordinates under the pointer in the status message.
        if not event.inaxes or not self._active:
            # Outside any axes, or no tool active: plain pointer.
            if self._lastCursor != cursors.POINTER:
                self.set_cursor(cursors.POINTER)
                self._lastCursor = cursors.POINTER
        else:
            if self._active == 'ZOOM':
                if self._lastCursor != cursors.SELECT_REGION:
                    self.set_cursor(cursors.SELECT_REGION)
                    self._lastCursor = cursors.SELECT_REGION
            elif (self._active == 'PAN' and
                  self._lastCursor != cursors.MOVE):
                self.set_cursor(cursors.MOVE)
                self._lastCursor = cursors.MOVE
        if event.inaxes and event.inaxes.get_navigate():
            try:
                # May fail for exotic projections or out-of-range values.
                s = event.inaxes.format_coord(event.xdata, event.ydata)
            except (ValueError, OverflowError):
                pass
            else:
                # Prefix with the mode string ('pan/zoom' / 'zoom rect') if set.
                if len(self.mode):
                    self.set_message('%s, %s' % (self.mode, s))
                else:
                    self.set_message(s)
        else:
            self.set_message(self.mode)
    def pan(self, *args):
        """Activate the pan/zoom tool. pan with left button, zoom with right"""
        # set the pointer icon and button press funcs to the
        # appropriate callbacks
        # Calling pan() again toggles the tool off.
        if self._active == 'PAN':
            self._active = None
        else:
            self._active = 'PAN'
        # Always disconnect the previous tool's handlers first.
        if self._idPress is not None:
            self._idPress = self.canvas.mpl_disconnect(self._idPress)
            self.mode = ''
        if self._idRelease is not None:
            self._idRelease = self.canvas.mpl_disconnect(self._idRelease)
            self.mode = ''
        if self._active:
            self._idPress = self.canvas.mpl_connect(
                'button_press_event', self.press_pan)
            self._idRelease = self.canvas.mpl_connect(
                'button_release_event', self.release_pan)
            self.mode = 'pan/zoom'
            # Take the widget lock so other widgets don't also consume events.
            self.canvas.widgetlock(self)
        else:
            self.canvas.widgetlock.release(self)
        for a in self.canvas.figure.get_axes():
            a.set_navigate_mode(self._active)
        self.set_message(self.mode)
    def press(self, event):
        """Called whenver a mouse button is pressed."""
        pass
    def press_pan(self, event):
        """the press mouse button in pan/zoom mode callback"""
        # Only left (1) and right (3) buttons start a pan; ignore others.
        if event.button == 1:
            self._button_pressed = 1
        elif event.button == 3:
            self._button_pressed = 3
        else:
            self._button_pressed = None
            return
        x, y = event.x, event.y
        # push the current view to define home if stack is empty
        if self._views.empty():
            self.push_current()
        self._xypress = []
        for i, a in enumerate(self.canvas.figure.get_axes()):
            # Start a pan in every navigable axes under the cursor.
            if (x is not None and y is not None and a.in_axes(event) and
                    a.get_navigate() and a.can_pan()):
                a.start_pan(x, y, event.button)
                self._xypress.append((a, i))
                # While panning, route motion events to drag_pan instead of
                # the default mouse_move handler.
                self.canvas.mpl_disconnect(self._idDrag)
                self._idDrag = self.canvas.mpl_connect('motion_notify_event',
                                                       self.drag_pan)
        self.press(event)
    def press_zoom(self, event):
        """the press mouse button in zoom to rect mode callback"""
        # If we're already in the middle of a zoom, pressing another
        # button works to "cancel"
        if self._ids_zoom != []:
            for zoom_id in self._ids_zoom:
                self.canvas.mpl_disconnect(zoom_id)
            self.release(event)
            self.draw()
            self._xypress = None
            self._button_pressed = None
            self._ids_zoom = []
            return
        if event.button == 1:
            self._button_pressed = 1
        elif event.button == 3:
            self._button_pressed = 3
        else:
            self._button_pressed = None
            return
        x, y = event.x, event.y
        # push the current view to define home if stack is empty
        if self._views.empty():
            self.push_current()
        self._xypress = []
        for i, a in enumerate(self.canvas.figure.get_axes()):
            if (x is not None and y is not None and a.in_axes(event) and
                    a.get_navigate() and a.can_zoom()):
                # Remember press position plus frozen view limits/transform
                # so release_zoom can map pixels back to data coordinates.
                self._xypress.append((x, y, a, i, a.viewLim.frozen(),
                                      a.transData.frozen()))
        id1 = self.canvas.mpl_connect('motion_notify_event', self.drag_zoom)
        # 'x'/'y' key presses during the drag constrain the zoom direction.
        id2 = self.canvas.mpl_connect('key_press_event',
                                      self._switch_on_zoom_mode)
        id3 = self.canvas.mpl_connect('key_release_event',
                                      self._switch_off_zoom_mode)
        self._ids_zoom = id1, id2, id3
        self._zoom_mode = event.key
        self.press(event)
    def _switch_on_zoom_mode(self, event):
        # Key held down during a zoom drag ('x' or 'y') constrains the zoom.
        self._zoom_mode = event.key
        self.mouse_move(event)
    def _switch_off_zoom_mode(self, event):
        self._zoom_mode = None
        self.mouse_move(event)
    def push_current(self):
        """push the current view limits and position onto the stack"""
        lims = []
        pos = []
        for a in self.canvas.figure.get_axes():
            xmin, xmax = a.get_xlim()
            ymin, ymax = a.get_ylim()
            lims.append((xmin, xmax, ymin, ymax))
            # Store both the original and modified positions
            pos.append((
                a.get_position(True).frozen(),
                a.get_position().frozen()))
        self._views.push(lims)
        self._positions.push(pos)
        self.set_history_buttons()
    def release(self, event):
        """this will be called whenever mouse button is released"""
        pass
    def release_pan(self, event):
        """the release mouse button callback in pan/zoom mode"""
        if self._button_pressed is None:
            return
        # Restore the default motion handler that drag_pan replaced.
        self.canvas.mpl_disconnect(self._idDrag)
        self._idDrag = self.canvas.mpl_connect(
            'motion_notify_event', self.mouse_move)
        for a, ind in self._xypress:
            a.end_pan()
        if not self._xypress:
            return
        self._xypress = []
        self._button_pressed = None
        self.push_current()
        self.release(event)
        self.draw()
    def drag_pan(self, event):
        """the drag callback in pan/zoom mode"""
        for a, ind in self._xypress:
            #safer to use the recorded button at the press than current button:
            #multiple button can get pressed during motion...
            a.drag_pan(self._button_pressed, event.key, event.x, event.y)
        self.dynamic_update()
    def drag_zoom(self, event):
        """the drag callback in zoom mode"""
        if self._xypress:
            x, y = event.x, event.y
            lastx, lasty, a, ind, lim, trans = self._xypress[0]
            # adjust x, last, y, last
            # Clip the rubberband corners to the axes bounding box.
            x1, y1, x2, y2 = a.bbox.extents
            x, lastx = max(min(x, lastx), x1), min(max(x, lastx), x2)
            y, lasty = max(min(y, lasty), y1), min(max(y, lasty), y2)
            # When constrained to one axis, stretch the other dimension of
            # the rubberband to the full axes extent.
            if self._zoom_mode == "x":
                x1, y1, x2, y2 = a.bbox.extents
                y, lasty = y1, y2
            elif self._zoom_mode == "y":
                x1, y1, x2, y2 = a.bbox.extents
                x, lastx = x1, x2
            self.draw_rubberband(event, x, y, lastx, lasty)
    def release_zoom(self, event):
        """the release mouse button callback in zoom to rect mode"""
        for zoom_id in self._ids_zoom:
            self.canvas.mpl_disconnect(zoom_id)
        self._ids_zoom = []
        if not self._xypress:
            return
        last_a = []
        for cur_xypress in self._xypress:
            x, y = event.x, event.y
            lastx, lasty, a, ind, lim, trans = cur_xypress
            # ignore singular clicks - 5 pixels is a threshold
            # (a rectangle thinner than 5px in either dimension cancels)
            if abs(x - lastx) < 5 or abs(y - lasty) < 5:
                self._xypress = None
                self.release(event)
                self.draw()
                return
            x0, y0, x1, y1 = lim.extents
            # zoom to rect
            # Convert the pixel rectangle corners to data coordinates.
            inverse = a.transData.inverted()
            lastx, lasty = inverse.transform_point((lastx, lasty))
            x, y = inverse.transform_point((x, y))
            Xmin, Xmax = a.get_xlim()
            Ymin, Ymax = a.get_ylim()
            # detect twinx,y axes and avoid double zooming
            twinx, twiny = False, False
            if last_a:
                for la in last_a:
                    if a.get_shared_x_axes().joined(a, la):
                        twinx = True
                    if a.get_shared_y_axes().joined(a, la):
                        twiny = True
            last_a.append(a)
            if twinx:
                # x-axis is shared with an already-handled axes: keep limits.
                x0, x1 = Xmin, Xmax
            else:
                # Order the rectangle edges, honoring possibly inverted axes,
                # and clip them to the current view limits.
                if Xmin < Xmax:
                    if x < lastx:
                        x0, x1 = x, lastx
                    else:
                        x0, x1 = lastx, x
                    if x0 < Xmin:
                        x0 = Xmin
                    if x1 > Xmax:
                        x1 = Xmax
                else:
                    if x > lastx:
                        x0, x1 = x, lastx
                    else:
                        x0, x1 = lastx, x
                    if x0 > Xmin:
                        x0 = Xmin
                    if x1 < Xmax:
                        x1 = Xmax
            if twiny:
                y0, y1 = Ymin, Ymax
            else:
                if Ymin < Ymax:
                    if y < lasty:
                        y0, y1 = y, lasty
                    else:
                        y0, y1 = lasty, y
                    if y0 < Ymin:
                        y0 = Ymin
                    if y1 > Ymax:
                        y1 = Ymax
                else:
                    if y > lasty:
                        y0, y1 = y, lasty
                    else:
                        y0, y1 = lasty, y
                    if y0 > Ymin:
                        y0 = Ymin
                    if y1 < Ymax:
                        y1 = Ymax
            if self._button_pressed == 1:
                # Left button: zoom *in* to the drawn rectangle.
                if self._zoom_mode == "x":
                    a.set_xlim((x0, x1))
                elif self._zoom_mode == "y":
                    a.set_ylim((y0, y1))
                else:
                    a.set_xlim((x0, x1))
                    a.set_ylim((y0, y1))
            elif self._button_pressed == 3:
                # Right button: zoom *out* -- scale the current view so it
                # would fit inside the drawn rectangle (log-aware).
                if a.get_xscale() == 'log':
                    alpha = np.log(Xmax / Xmin) / np.log(x1 / x0)
                    rx1 = pow(Xmin / x0, alpha) * Xmin
                    rx2 = pow(Xmax / x0, alpha) * Xmin
                else:
                    alpha = (Xmax - Xmin) / (x1 - x0)
                    rx1 = alpha * (Xmin - x0) + Xmin
                    rx2 = alpha * (Xmax - x0) + Xmin
                if a.get_yscale() == 'log':
                    alpha = np.log(Ymax / Ymin) / np.log(y1 / y0)
                    ry1 = pow(Ymin / y0, alpha) * Ymin
                    ry2 = pow(Ymax / y0, alpha) * Ymin
                else:
                    alpha = (Ymax - Ymin) / (y1 - y0)
                    ry1 = alpha * (Ymin - y0) + Ymin
                    ry2 = alpha * (Ymax - y0) + Ymin
                if self._zoom_mode == "x":
                    a.set_xlim((rx1, rx2))
                elif self._zoom_mode == "y":
                    a.set_ylim((ry1, ry2))
                else:
                    a.set_xlim((rx1, rx2))
                    a.set_ylim((ry1, ry2))
        self.draw()
        self._xypress = None
        self._button_pressed = None
        self._zoom_mode = None
        self.push_current()
        self.release(event)
    def draw(self):
        """Redraw the canvases, update the locators"""
        for a in self.canvas.figure.get_axes():
            xaxis = getattr(a, 'xaxis', None)
            yaxis = getattr(a, 'yaxis', None)
            locators = []
            if xaxis is not None:
                locators.append(xaxis.get_major_locator())
                locators.append(xaxis.get_minor_locator())
            if yaxis is not None:
                locators.append(yaxis.get_major_locator())
                locators.append(yaxis.get_minor_locator())
            # Refresh tick locators so ticks match the new view limits.
            for loc in locators:
                loc.refresh()
        self.canvas.draw_idle()
    def _update_view(self):
        """Update the viewlim and position from the view and
        position stack for each axes
        """
        lims = self._views()
        if lims is None:
            return
        pos = self._positions()
        if pos is None:
            return
        for i, a in enumerate(self.canvas.figure.get_axes()):
            xmin, xmax, ymin, ymax = lims[i]
            a.set_xlim((xmin, xmax))
            a.set_ylim((ymin, ymax))
            # Restore both the original and modified positions
            a.set_position(pos[i][0], 'original')
            a.set_position(pos[i][1], 'active')
        self.canvas.draw_idle()
    def save_figure(self, *args):
        """Save the current figure"""
        raise NotImplementedError
    def set_cursor(self, cursor):
        """
        Set the current cursor to one of the :class:`Cursors`
        enums values
        """
        pass
    def update(self):
        """Reset the axes stack"""
        self._views.clear()
        self._positions.clear()
        self.set_history_buttons()
    def zoom(self, *args):
        """Activate zoom to rect mode"""
        # Calling zoom() again toggles the tool off (mirrors pan()).
        if self._active == 'ZOOM':
            self._active = None
        else:
            self._active = 'ZOOM'
        if self._idPress is not None:
            self._idPress = self.canvas.mpl_disconnect(self._idPress)
            self.mode = ''
        if self._idRelease is not None:
            self._idRelease = self.canvas.mpl_disconnect(self._idRelease)
            self.mode = ''
        if self._active:
            self._idPress = self.canvas.mpl_connect('button_press_event',
                                                    self.press_zoom)
            self._idRelease = self.canvas.mpl_connect('button_release_event',
                                                      self.release_zoom)
            self.mode = 'zoom rect'
            self.canvas.widgetlock(self)
        else:
            self.canvas.widgetlock.release(self)
        for a in self.canvas.figure.get_axes():
            a.set_navigate_mode(self._active)
        self.set_message(self.mode)
    def set_history_buttons(self):
        """Enable or disable back/forward button"""
        pass
| alephu5/Soundbyte | environment/lib/python3.3/site-packages/matplotlib/backend_bases.py | Python | gpl-3.0 | 106,921 |
from PyQt4.QtGui import *
from PyQt4.QtCore import *
import datetime
import decimal
import httplib
import json
import threading
import time
import re
from decimal import Decimal
from electrum_myr.plugins import BasePlugin, hook
from electrum_myr.i18n import _
from electrum_myr_gui.qt.util import *
from electrum_myr_gui.qt.amountedit import AmountEdit
# Exchange backends selectable in the plugin's settings dialog; each entry
# must have a matching update_* method in Exchanger.update_rate().
EXCHANGES = ["Cryptsy",
             "MintPal",
             "Prelude"]
# (exchange, currency) pairs with historical-rate support.
# NOTE(review): none of these exchanges appear in EXCHANGES above and this
# list is not referenced anywhere in this module -- presumably leftover from
# the upstream Electrum plugin this was forked from; verify before relying on it.
EXCH_SUPPORT_HIST = [("BitcoinVenezuela", "ARS"),
                     ("BitcoinVenezuela", "EUR"),
                     ("BitcoinVenezuela", "USD"),
                     ("BitcoinVenezuela", "VEF"),
                     ("Kraken", "EUR"),
                     ("Kraken", "USD")]
class Exchanger(threading.Thread):
    """Daemon thread that polls the configured exchange for the MYR price
    (quoted in BTC and derived units) roughly every 150 seconds.

    Results are published to ``self.quote_currencies`` (guarded by
    ``self.lock``) and pushed to the parent plugin via ``set_currencies``.
    """
    def __init__(self, parent):
        threading.Thread.__init__(self)
        self.daemon = True  # don't keep the process alive on exit
        self.parent = parent
        self.quote_currencies = None  # dict: unit name -> rate, None until first fetch
        self.lock = threading.Lock()
        # Setting this event wakes run() early, forcing an immediate refresh
        # (used when the user switches exchange in the settings dialog).
        self.query_rates = threading.Event()
        self.use_exchange = self.parent.config.get('use_exchange', "MintPal")
        self.parent.exchanges = EXCHANGES
        self.parent.win.emit(SIGNAL("refresh_exchanges_combo()"))
        self.parent.win.emit(SIGNAL("refresh_currencies_combo()"))
        self.is_running = False
    def get_json(self, site, get_string, http=False):
        """Fetch and decode a JSON document from *site* (HTTPS by default).

        NOTE(review): the bare ``raise`` statements below re-raise whatever
        exception is current -- on the 404 branch there is none, which would
        itself error; also the connection is never closed. Worth fixing.
        """
        try:
            if http:
                connection = httplib.HTTPConnection(site)
            else:
                connection = httplib.HTTPSConnection(site)
            connection.request("GET", get_string, headers={"User-Agent":"Electrum"})
        except Exception:
            raise
        resp = connection.getresponse()
        if resp.reason == httplib.responses[httplib.NOT_FOUND]:
            raise
        try:
            json_resp = json.loads(resp.read())
        except Exception:
            raise
        return json_resp
    def exchange(self, btc_amount, quote_currency):
        """Convert *btc_amount* (MYR amount) into *quote_currency*.

        Returns None when no rates are available yet or the currency
        is unknown.
        """
        with self.lock:
            if self.quote_currencies is None:
                return None
            quote_currencies = self.quote_currencies.copy()
        if quote_currency not in quote_currencies:
            return None
        return btc_amount * decimal.Decimal(str(quote_currencies[quote_currency]))
    def stop(self):
        # Ask run() to exit after its current wait expires.
        self.is_running = False
    def update_rate(self):
        """Dispatch to the fetcher for the currently configured exchange."""
        self.use_exchange = self.parent.config.get('use_exchange', "MintPal")
        update_rates = {
            "Cryptsy": self.update_c,
            "MintPal": self.update_mp,
            "Prelude": self.update_pl,
        }
        try:
            update_rates[self.use_exchange]()
        except KeyError:
            # Unknown exchange name in config: silently skip this cycle.
            return
    def run(self):
        # Poll loop: refresh immediately, then every 150s (or sooner when
        # query_rates is set by the settings dialog).
        self.is_running = True
        while self.is_running:
            self.query_rates.clear()
            self.update_rate()
            self.query_rates.wait(150)
    def update_mp(self):
        """Fetch the MYR/BTC last price from MintPal and derive sub-units."""
        quote_currencies = {"BTC": 0.0}
        for cur in quote_currencies:
            try:
                quote_currencies[cur] = Decimal(self.get_json('api.mintpal.com', "/v1/market/stats/MYR/BTC")[0]['last_price'])
            except Exception:
                # Network/parse failure: keep the 0.0 placeholder rate.
                pass
        quote_currencies['mBTC'] = quote_currencies['BTC'] * Decimal('1000.0')
        quote_currencies['uBTC'] = quote_currencies['mBTC'] * Decimal('1000.0')
        quote_currencies['sat'] = quote_currencies['uBTC'] * Decimal('100.0')
        with self.lock:
            self.quote_currencies = quote_currencies
        self.parent.set_currencies(quote_currencies)
    def update_pl(self):
        """Fetch the MYR last price from Prelude and derive sub-units."""
        quote_currencies = {"BTC": 0.0}
        try:
            jsonresp = self.get_json('api.prelude.io', "/last/MYR")
        except Exception:
            return
        try:
            btcprice = jsonresp["last"]
            quote_currencies["BTC"] = decimal.Decimal(str(btcprice))
            quote_currencies['mBTC'] = quote_currencies['BTC'] * Decimal('1000.0')
            quote_currencies['uBTC'] = quote_currencies['mBTC'] * Decimal('1000.0')
            quote_currencies['sat'] = quote_currencies['uBTC'] * Decimal('100.0')
            with self.lock:
                self.quote_currencies = quote_currencies
        except KeyError:
            pass
        # NOTE(review): called even when the KeyError branch was taken, so a
        # partially-filled dict may be published -- confirm this is intended.
        self.parent.set_currencies(quote_currencies)
    def update_c(self):
        """Fetch the MYR last trade price from Cryptsy (plain HTTP API)."""
        quote_currencies = {"BTC": 0.0}
        try:
            jsonresp = self.get_json('pubapi.cryptsy.com', "/api.php?method=singlemarketdata&marketid=200", http=True)['return']['markets']['MYR']
        except Exception:
            return
        try:
            btcprice = jsonresp['lasttradeprice']
            quote_currencies['BTC'] = decimal.Decimal(str(btcprice))
            quote_currencies['mBTC'] = quote_currencies['BTC'] * Decimal('1000.0')
            quote_currencies['uBTC'] = quote_currencies['mBTC'] * Decimal('1000.0')
            quote_currencies['sat'] = quote_currencies['uBTC'] * Decimal('100.0')
            with self.lock:
                self.quote_currencies = quote_currencies
        except KeyError:
            pass
        self.parent.set_currencies(quote_currencies)
class Plugin(BasePlugin):
    """Electrum GUI plugin: shows MYR amounts converted via the exchange
    rate fetched by :class:`Exchanger` (quote units are BTC/mBTC/uBTC/sat)."""
    def fullname(self):
        return "Exchange rates"
    def description(self):
        return """exchange rates, retrieved from MintPal"""
    def __init__(self,a,b):
        BasePlugin.__init__(self,a,b)
        # Start with just the configured unit/exchange; the Exchanger thread
        # repopulates both lists once rates arrive.
        self.currencies = [self.fiat_unit()]
        self.exchanges = [self.config.get('use_exchange', "MintPal")]
        self.exchanger = None
    @hook
    def init_qt(self, gui):
        self.gui = gui
        self.win = self.gui.main_window
        # Repaint the status bar whenever fresh rates arrive.
        self.win.connect(self.win, SIGNAL("refresh_currencies()"), self.win.update_status)
        self.btc_rate = Decimal("0.0")
        self.resp_hist = {}
        self.tx_list = {}
        if self.exchanger is None:
            # Do price discovery
            self.exchanger = Exchanger(self)
            self.exchanger.start()
            self.gui.exchanger = self.exchanger  # share the thread with the gui
            self.add_fiat_edit()
        self.win.update_status()
    def close(self):
        # Stop the poller and rebuild the Send tab without the fiat field.
        self.exchanger.stop()
        self.exchanger = None
        self.win.tabs.removeTab(1)
        self.win.tabs.insertTab(1, self.win.create_send_tab(), _('Send'))
        self.win.update_status()
    def set_currencies(self, currency_options):
        # Called from the Exchanger thread; hand the update to the Qt main
        # loop via signals.
        self.currencies = sorted(currency_options)
        self.win.emit(SIGNAL("refresh_currencies()"))
        self.win.emit(SIGNAL("refresh_currencies_combo()"))
    @hook
    def get_fiat_balance_text(self, btc_balance, r):
        # return balance as: 1.23 USD
        # (btc_balance is in satoshis, hence the 1e8 divisor)
        r[0] = self.create_fiat_balance_text(Decimal(btc_balance) / 100000000)
    def get_fiat_price_text(self, r):
        # return BTC price as: 123.45 USD
        r[0] = self.create_fiat_balance_text(1)
        quote = r[0]
        if quote:
            r[0] = "%s"%quote
    @hook
    def get_fiat_status_text(self, btc_balance, r2):
        # return status as: (1.23 USD) 1 BTC~123.45 USD
        text = ""
        r = {}
        self.get_fiat_price_text(r)
        quote = r.get(0)
        if quote:
            price_text = "1 MYR~%s"%quote
            fiat_currency = self.fiat_unit()
            btc_price = self.btc_rate
            fiat_balance = Decimal(btc_price) * (Decimal(btc_balance)/100000000)
            balance_text = "(%.2f %s)" % (fiat_balance,fiat_currency)
            text = " " + balance_text + " " + price_text + " "
        r2[0] = text
    def create_fiat_balance_text(self, btc_balance):
        """Format *btc_balance* (in MYR) in the configured quote unit.

        Also caches the current rate in ``self.btc_rate`` as a side effect.
        Returns "" while no rate is available yet.
        """
        quote_currency = self.fiat_unit()
        self.exchanger.use_exchange = self.config.get("use_exchange", "MintPal")
        cur_rate = self.exchanger.exchange(Decimal("1.0"), quote_currency)
        if cur_rate is None:
            quote_text = ""
        else:
            quote_balance = btc_balance * Decimal(cur_rate)
            self.btc_rate = cur_rate
            quote_text = "%.2f %s" % (quote_balance, quote_currency)
        return quote_text
    @hook
    def request_history_rates(self):
        # Historical rates are not supported for MYR; intentionally a no-op.
        return
    @hook
    def load_wallet(self, wallet):
        self.wallet = wallet
        tx_list = {}
        for item in self.wallet.get_tx_history(self.wallet.storage.get("current_account", None)):
            tx_hash, conf, is_mine, value, fee, balance, timestamp = item
            tx_list[tx_hash] = {'value': value, 'timestamp': timestamp, 'balance': balance}
        self.tx_list = tx_list
        # NOTE(review): default "BTC-e" disagrees with the "MintPal" default
        # used everywhere else in this module -- looks like an upstream
        # leftover; verify which default is intended.
        self.cur_exchange = self.config.get('use_exchange', "BTC-e")
        threading.Thread(target=self.request_history_rates, args=()).start()
    def requires_settings(self):
        return True
    def settings_widget(self, window):
        return EnterButton(_('Settings'), self.settings_dialog)
    def settings_dialog(self):
        """Modal dialog letting the user pick the exchange and quote unit."""
        d = QDialog()
        d.setWindowTitle("Settings")
        layout = QGridLayout(d)
        layout.addWidget(QLabel(_('Exchange rate API: ')), 0, 0)
        layout.addWidget(QLabel(_('Currency: ')), 1, 0)
        combo = QComboBox()
        combo_ex = QComboBox()
        ok_button = QPushButton(_("OK"))
        def on_change(x):
            # Currency combo changed: persist the new unit and refresh UI.
            try:
                cur_request = str(self.currencies[x])
            except Exception:
                return
            if cur_request != self.fiat_unit():
                self.config.set_key('currency', cur_request, True)
                cur_exchange = self.config.get('use_exchange', "MintPal")
                self.win.update_status()
                try:
                    self.fiat_button
                except:
                    pass
                else:
                    self.fiat_button.setText(cur_request)
        def on_change_ex(x):
            # Exchange combo changed: persist and force an immediate refetch.
            cur_request = str(self.exchanges[x])
            if cur_request != self.config.get('use_exchange', "MintPal"):
                self.config.set_key('use_exchange', cur_request, True)
                self.currencies = []
                combo.clear()
                self.exchanger.query_rates.set()
                cur_currency = self.fiat_unit()
                set_currencies(combo)
                self.win.update_status()
        def set_currencies(combo):
            # Repopulate the currency combo, keeping the current selection
            # when possible.
            try:
                combo.blockSignals(True)
                current_currency = self.fiat_unit()
                combo.clear()
            except Exception:
                return
            combo.addItems(self.currencies)
            try:
                index = self.currencies.index(current_currency)
            except Exception:
                index = 0
            if len(self.currencies):
                on_change(0)
            combo.blockSignals(False)
            combo.setCurrentIndex(index)
        def set_exchanges(combo_ex):
            try:
                combo_ex.clear()
            except Exception:
                return
            combo_ex.addItems(self.exchanges)
            try:
                index = self.exchanges.index(self.config.get('use_exchange', "MintPal"))
            except Exception:
                index = 0
            combo_ex.setCurrentIndex(index)
        def ok_clicked():
            # NOTE(review): "CoinDesk"/"itBit" are not in EXCHANGES for this
            # fork, so this branch can never trigger -- upstream leftover.
            if self.config.get('use_exchange', "BTC-e") in ["CoinDesk", "itBit"]:
                self.exchanger.query_rates.set()
            d.accept();
        set_exchanges(combo_ex)
        set_currencies(combo)
        combo.currentIndexChanged.connect(on_change)
        combo_ex.currentIndexChanged.connect(on_change_ex)
        combo.connect(self.win, SIGNAL('refresh_currencies_combo()'), lambda: set_currencies(combo))
        combo_ex.connect(d, SIGNAL('refresh_exchanges_combo()'), lambda: set_exchanges(combo_ex))
        ok_button.clicked.connect(lambda: ok_clicked())
        layout.addWidget(combo,1,1)
        layout.addWidget(combo_ex,0,1)
        layout.addWidget(ok_button,3,1)
        if d.exec_():
            return True
        else:
            return False
    def fiat_unit(self):
        # The configured quote unit; defaults to "BTC" since MYR is priced
        # in bitcoin units here rather than a government currency.
        return self.config.get("currency", "BTC")
    def add_fiat_edit(self):
        """Add a second amount field to the Send tab, kept in sync with the
        BTC amount field in both directions via the current rate."""
        self.fiat_e = AmountEdit(self.fiat_unit)
        self.btc_e = self.win.amount_e
        grid = self.btc_e.parent()
        def fiat_changed():
            # User edited the fiat field: recompute the MYR amount.
            try:
                fiat_amount = Decimal(str(self.fiat_e.text()))
            except:
                self.btc_e.setText("")
                return
            exchange_rate = self.exchanger.exchange(Decimal("1.0"), self.fiat_unit())
            if exchange_rate is not None:
                btc_amount = fiat_amount/exchange_rate
                self.btc_e.setAmount(int(btc_amount*Decimal(100000000)))
        self.fiat_e.textEdited.connect(fiat_changed)
        def btc_changed():
            # User edited the MYR field: recompute the fiat amount.
            btc_amount = self.btc_e.get_amount()
            if btc_amount is None:
                self.fiat_e.setText("")
                return
            fiat_amount = self.exchanger.exchange(Decimal(btc_amount)/Decimal(100000000), self.fiat_unit())
            if fiat_amount is not None:
                self.fiat_e.setText("%.2f"%fiat_amount)
        self.btc_e.textEdited.connect(btc_changed)
        self.btc_e.frozen.connect(lambda: self.fiat_e.setFrozen(self.btc_e.isReadOnly()))
        self.win.send_grid.addWidget(self.fiat_e, 4, 3, Qt.AlignHCenter)
| wozz/electrum-myr | plugins/exchange_rate.py | Python | gpl-3.0 | 13,200 |
# Copyright (c) 2019 SUSE Linux GmbH. All rights reserved.
#
# This file is part of kiwi.
#
# kiwi is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# kiwi is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with kiwi. If not, see <http://www.gnu.org/licenses/>
#
import os
import random
from textwrap import dedent
from typing import List
# project
from kiwi.storage.subformat.template.virtualbox_ovf import (
VirtualboxOvfTemplate
)
from kiwi.storage.subformat.vagrant_base import DiskFormatVagrantBase
from kiwi.storage.subformat.vmdk import DiskFormatVmdk
from kiwi.command import Command
class DiskFormatVagrantVirtualBox(DiskFormatVagrantBase):
    """
    **Create a vagrant box for the virtualbox provider**
    """
    def vagrant_post_init(self) -> None:
        # Identify this subformat: provider name and resulting file suffix.
        self.provider = 'virtualbox'
        self.image_format = 'vagrant.virtualbox.box'
    def get_additional_vagrant_config_settings(self) -> str:
        """
        Build the provider specific Vagrantfile snippet: pin the base MAC
        address and, when the box has no guest additions, fall back to
        rsync for the default shared folder.

        :return:
            ruby code to be evaluated as string

        :rtype: str
        """
        settings = dedent('''
            config.vm.base_mac = "{mac_address}"
        ''').strip().format(mac_address=self._random_mac())
        if not self.xml_state.get_vagrant_config_virtualbox_guest_additions():
            settings += os.linesep + dedent('''
                config.vm.synced_folder ".", "/vagrant", type: "rsync"
            ''').strip()
        return settings
    def create_box_img(self, temp_image_dir: str) -> List[str]:
        """
        Create the vmdk disk image and the accompanying ovf file for the
        Virtualbox vagrant provider. The ovf is rendered from
        :class:`VirtualboxOvfTemplate`.

        :param str temp_image_dir:
            Path to the temporary directory used to build the box image

        :return:
            A list of files relevant for the virtualbox box to be
            included in the vagrant box

        :rtype: list
        """
        disk_format = DiskFormatVmdk(
            self.xml_state, self.root_dir, self.target_dir
        )
        disk_format.create_image_format()
        # Move the produced vmdk into the box staging directory.
        vmdk_path = os.sep.join([temp_image_dir, 'box.vmdk'])
        Command.run(
            [
                'mv',
                self.get_target_file_path_for_format(disk_format.image_format),
                vmdk_path
            ]
        )
        # Render the ovf descriptor next to the disk image.
        ovf_path = os.sep.join([temp_image_dir, 'box.ovf'])
        ovf_template = VirtualboxOvfTemplate()
        disk_image_capacity = self.vagrantconfig.get_virtualsize() or 42
        xml_description_specification = self.xml_state \
            .get_description_section().specification
        with open(ovf_path, "w") as ovf_file:
            ovf_file.write(
                ovf_template.get_template().substitute(
                    {
                        'root_uuid': self.xml_state.get_root_filesystem_uuid(),
                        'vm_name': self.xml_state.xml_data.name,
                        'disk_image_capacity': disk_image_capacity,
                        'vm_description': xml_description_specification
                    }
                )
            )
        return [vmdk_path, ovf_path]
    @staticmethod
    def _random_mac():
        # First three octets are the fixed 00:16:3E vendor prefix; the last
        # three are random. Call order of randrange is kept stable so the
        # result is reproducible under a seeded random module.
        octets = (
            0x00, 0x16, 0x3e,
            random.randrange(0, 0x7e),
            random.randrange(0, 0xff),
            random.randrange(0, 0xff)
        )
        return '%02X%02X%02X%02X%02X%02X' % octets
| dirkmueller/kiwi | kiwi/storage/subformat/vagrant_virtualbox.py | Python | gpl-3.0 | 3,951 |
# -*- coding: utf-8 -*-
#
# SpamFighter, Copyright 2008, 2009 NetStream LLC (http://netstream.ru/, [email protected])
#
# This file is part of SpamFighter.
#
# SpamFighter is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SpamFighter is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SpamFighter. If not, see <http://www.gnu.org/licenses/>.
#
"""
Диспетчеризация команд по имени.
Т.е. отображаем имя команды (атрибут comandName из интерфейса L{spamfighter.core.commands.ICommand}) в
класс команды.
"""
from spamfighter.utils.registrator import registrator
from spamfighter.core.commands import errors
dispatch_map = {}
"""
Карта отображения команд, имеет вид: имя_команды -> класс_команды.
"""
@registrator
def install(command_class):
    """Variant of L{installCommand} that can be used inside a class definition.

    Usage example::

        from spamfighter.core.commands import install, Command

        class MyCommand(Command):
            install()
    """
    installCommand(command_class)
def installCommand(command_class):
    """Register a new command in the dispatch map.

    @param command_class: class derived from L{Command}
    """
    command_name = command_class.commandName
    # A duplicate registration would silently shadow a command.
    assert command_name not in dispatch_map
    dispatch_map[command_name] = command_class
def deinstallCommand(command_class):
    """Remove a previously registered command from the dispatch map.

    @param command_class: class derived from L{Command}
    """
    command_name = command_class.commandName
    # Removing an unregistered command indicates a programming error.
    assert command_name in dispatch_map
    del dispatch_map[command_name]
def dispatchCommand(commandName):
    """Instantiate the command class registered under the given name.

    @param commandName: command name
    @type commandName: C{str}
    @raise errors.CommandUnknownException: if no such command is registered
    @rtype: instance of a L{Command} subclass
    """
    try:
        command_class = dispatch_map[commandName]
    except KeyError:
        raise errors.CommandUnknownException(commandName)
    return command_class()
def listAllCommands():
    """
    Return the list of all registered command names.
    """
    return dispatch_map.keys()
| smira/spamfighter | spamfighter/core/commands/dispatcher.py | Python | gpl-3.0 | 2,984 |
from ..schema import default_radio_driver
from .base import OpenWrtConverter
class Radios(OpenWrtConverter):
    """Converts between the NetJSON ``radios`` list and OpenWrt UCI ``wifi-device`` sections."""
    netjson_key = 'radios'
    intermediate_key = 'wireless'
    _uci_types = ['wifi-device']
    def to_intermediate_loop(self, block, result, index=None):
        # NetJSON -> UCI: convert one radio and collect it under result['wireless']
        radio = self.__intermediate_radio(block)
        result.setdefault('wireless', [])
        result['wireless'].append(radio)
        return result
    def __intermediate_radio(self, radio):
        # NOTE: mutates the passed-in dict in place (keys renamed, UCI metadata added)
        radio.update({'.type': 'wifi-device', '.name': radio.pop('name')})
        # rename tx_power to txpower
        if 'tx_power' in radio:
            radio['txpower'] = radio.pop('tx_power')
        # rename driver to type
        radio['type'] = radio.pop('driver', default_radio_driver)
        # determine hwmode option
        radio['hwmode'] = self.__intermediate_hwmode(radio)
        # check if using channel 0, that means "auto"
        if radio['channel'] == 0:
            radio['channel'] = 'auto'
        # determine channel width
        if radio['type'] == 'mac80211':
            radio['htmode'] = self.__intermediate_htmode(radio)
        else:
            del radio['protocol']
        # ensure country is uppercase
        if 'country' in radio:
            radio['country'] = radio['country'].upper()
        return self.sorted_dict(radio)
    def __intermediate_hwmode(self, radio):
        """
        possible return values are: 11a, 11b, 11g
        """
        protocol = radio['protocol']
        if protocol in ['802.11a', '802.11b', '802.11g']:
            # return 11a, 11b or 11g
            return protocol[4:]
        if protocol == '802.11ac':
            # 802.11ac operates in the 5 GHz band, same hwmode as 11a
            return '11a'
        # determine hwmode depending on channel used
        if radio['channel'] == 0:
            # when using automatic channel selection, we need an
            # additional parameter to determine the frequency band
            return radio.get('hwmode')
        elif radio['channel'] <= 13:
            return '11g'
        else:
            return '11a'
    def __intermediate_htmode(self, radio):
        """
        only for mac80211 driver
        """
        # consumes (pops) 'protocol' and 'channel_width' from the dict
        protocol = radio.pop('protocol')
        channel_width = radio.pop('channel_width')
        # allow overriding htmode
        if 'htmode' in radio:
            return radio['htmode']
        if protocol == '802.11n':
            return 'HT{0}'.format(channel_width)
        elif protocol == '802.11ac':
            return 'VHT{0}'.format(channel_width)
        elif protocol == '802.11ax':
            return 'HE{0}'.format(channel_width)
        # disables n
        return 'NONE'
    def to_netjson_loop(self, block, result, index):
        # UCI -> NetJSON: convert one wifi-device section and collect it under result['radios']
        radio = self.__netjson_radio(block)
        result.setdefault('radios', [])
        result['radios'].append(radio)
        return result
    def __netjson_radio(self, radio):
        # NOTE: mutates the passed-in dict; call order below matters because the
        # helpers pop/delete keys ('htmode', 'hwmode') as they consume them
        del radio['.type']
        radio['name'] = radio.pop('.name')
        if 'txpower' in radio:
            radio['tx_power'] = int(radio.pop('txpower'))
        radio['driver'] = radio.pop('type')
        if 'disabled' in radio:
            radio['disabled'] = radio['disabled'] == '1'
        radio['protocol'] = self.__netjson_protocol(radio)
        radio['channel'] = self.__netjson_channel(radio)
        radio['channel_width'] = self.__netjson_channel_width(radio)
        return radio
    def __netjson_protocol(self, radio):
        """
        determines NetJSON protocol radio attribute
        """
        # NOTE(review): assumes 'htmode' is always present in UCI input;
        # htmode.startswith would raise AttributeError on None — confirm upstream guarantees
        htmode = radio.get('htmode')
        hwmode = radio.get('hwmode', None)
        if htmode.startswith('HT'):
            return '802.11n'
        elif htmode.startswith('VHT'):
            return '802.11ac'
        elif htmode.startswith('HE'):
            return '802.11ax'
        return '802.{0}'.format(hwmode)
    def __netjson_channel(self, radio):
        """
        determines NetJSON channel radio attribute
        """
        if radio['channel'] == 'auto':
            return 0
        # delete hwmode because is needed
        # only when channel is auto
        del radio['hwmode']
        return int(radio['channel'])
    def __netjson_channel_width(self, radio):
        """
        determines NetJSON channel_width radio attribute
        """
        htmode = radio.pop('htmode')
        if htmode == 'NONE':
            return 20
        channel_width = htmode.replace('VHT', '').replace('HT', '').replace('HE', '')
        # we need to override htmode
        if '+' in channel_width or '-' in channel_width:
            radio['htmode'] = htmode
            channel_width = channel_width[0:-1]
        return int(channel_width)
| openwisp/netjsonconfig | netjsonconfig/backends/openwrt/converters/radios.py | Python | gpl-3.0 | 4,636 |
# Copyright (C) 2012 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import unittest
from .urls import parse_bug_id, parse_attachment_id
class URLsTest(unittest.TestCase):
    """Unit tests for the bug/attachment URL-parsing helpers in urls.py."""

    def test_parse_bug_id(self):
        # assertEquals is a deprecated alias (removed in Python 3.12); use assertEqual
        # FIXME: These would be all better as doctests
        self.assertEqual(12345, parse_bug_id("http://webkit.org/b/12345"))
        self.assertEqual(12345, parse_bug_id("foo\n\nhttp://webkit.org/b/12345\nbar\n\n"))
        self.assertEqual(12345, parse_bug_id("http://bugs.webkit.org/show_bug.cgi?id=12345"))
        # Our url parser is super-fragile, but at least we're testing it.
        self.assertEqual(None, parse_bug_id("http://www.webkit.org/b/12345"))
        self.assertEqual(None, parse_bug_id("http://bugs.webkit.org/show_bug.cgi?ctype=xml&id=12345"))

    def test_parse_attachment_id(self):
        self.assertEqual(12345, parse_attachment_id("https://bugs.webkit.org/attachment.cgi?id=12345&action=review"))
        self.assertEqual(12345, parse_attachment_id("https://bugs.webkit.org/attachment.cgi?id=12345&action=edit"))
        self.assertEqual(12345, parse_attachment_id("https://bugs.webkit.org/attachment.cgi?id=12345&action=prettypatch"))
        self.assertEqual(12345, parse_attachment_id("https://bugs.webkit.org/attachment.cgi?id=12345&action=diff"))
        # Direct attachment links are hosted from per-bug subdomains:
        self.assertEqual(12345, parse_attachment_id("https://bug-23456-attachments.webkit.org/attachment.cgi?id=12345"))
        # Make sure secure attachment URLs work too.
        self.assertEqual(12345, parse_attachment_id("https://bug-23456-attachments.webkit.org/attachment.cgi?id=12345&t=Bqnsdkl9fs"))
| cs-au-dk/Artemis | WebKit/Tools/Scripts/webkitpy/common/config/urls_unittest.py | Python | gpl-3.0 | 3,141 |
from django import forms
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
from form_utils.forms import BetterForm
import form_utils.fields as bf_fields
from tao.widgets import ChoiceFieldWithOtherAttrs, SelectWithOtherAttrs, TwoSidedSelectWidget, SpinnerWidget
from tao import datasets
from tao.models import DataSetProperty
from tao.xml_util import module_xpath, module_xpath_iterate
def from_xml_2(cls, ui_holder, xml_root, prefix=None):
    """Rebuild a SQLJobForm instance from a schema version 2.0 parameter XML document."""
    sql_query = module_xpath(xml_root, '//sql/query')
    simulation = datasets.simulation_from_xml(module_xpath(xml_root, '//sql/simulation'))
    galaxy_model = datasets.galaxy_model_from_xml(module_xpath(xml_root, '//sql/galaxy-model'))
    data_set = datasets.dataset_find_from_xml(simulation, galaxy_model)
    if data_set is not None:
        # substitute the placeholder table name with the dataset's real database table
        sql_query = sql_query.replace('-table-', data_set.database)
    simulation_id = simulation.id if simulation is not None else None
    galaxy_model_id = galaxy_model.id if galaxy_model is not None else None
    params = {
        prefix + '-galaxy_model': galaxy_model_id,
        prefix + '-dark_matter_simulation': simulation_id,
        prefix + '-query': sql_query,
        prefix + '-output_properties': [dsp for dsp in SQLJobForm._map_elems(xml_root)],
    }
    return cls(ui_holder, params, prefix=prefix)
class SQLJobForm(BetterForm):
    """Form for submitting a raw SQL job against a chosen simulation and galaxy model."""
    SUMMARY_TEMPLATE = 'mock_galaxy_factory/sql_job_summary.html'
    simple_fields = ['dark_matter_simulation', 'galaxy_model']
    fieldsets = [
        ('primary', {
            'legend': 'Data Selection',
            'fields': simple_fields,
            'query': '',
        }),]
    def __init__(self, *args, **kwargs):
        # first positional argument is the UI holder; the remainder go to BetterForm
        self.ui_holder = args[0]
        super(SQLJobForm, self).__init__(*args[1:], **kwargs)
        # (removed unused local `is_int`)
        #self.fields['query'].widget.attrs['data-bind'] = ''
        self.fields['query'] = forms.CharField()
        self.fields['dark_matter_simulation'] = ChoiceFieldWithOtherAttrs(choices=[])
        self.fields['galaxy_model'] = ChoiceFieldWithOtherAttrs(choices=[])
        self.fields['output_properties'] = bf_fields.forms.MultipleChoiceField(required=False, choices=[], widget=TwoSidedSelectWidget)
        # knockout.js bindings driving the dynamic parts of the form
        self.fields['query'].widget.attrs['data-bind'] = 'value: query'
        self.fields['dark_matter_simulation'].widget.attrs['data-bind'] = 'options: dark_matter_simulations, value: dark_matter_simulation, optionsText: function(i) { return i.fields.name}, event: {change: function() { box_size(dark_matter_simulation().fields.box_size); }}'
        self.fields['galaxy_model'].widget.attrs['data-bind'] = 'options: galaxy_models, value: galaxy_model, optionsText: function(i) { return i.fields.name }'
        self.fields['output_properties'].widget.attrs['ko_data'] = {'widget':'output_properties_widget','value':'output_properties'}
    def clean(self):
        super(SQLJobForm, self).clean()
        return self.cleaned_data
    def to_json_dict(self):
        """Answer the json dictionary representation of the receiver.
        i.e. something that can easily be passed to json.dumps()"""
        json_dict = {}
        for fn in self.fields.keys():
            ffn = self.prefix + '-' + fn
            val = self.data.get(ffn)
            if val is not None:
                json_dict[ffn] = val
        return json_dict
    def to_xml(self, parent_xml_element):
        """Serialise this form into the version 2.0 XML schema."""
        to_xml_2(self, parent_xml_element)
    @classmethod
    def from_xml(cls, ui_holder, xml_root, prefix=None):
        """Rebuild a form from XML; unknown schema versions yield an empty form."""
        version = module_xpath(xml_root, '//workflow/schema-version')
        if version == '2.0':
            return from_xml_2(cls, ui_holder, xml_root, prefix=prefix)
        else:
            return cls(ui_holder, {}, prefix=prefix)
    @classmethod
    def _map_elems(cls, xml_root):
        """Yield a {'label', 'units', 'name'} dict for each selected output property."""
        for elem in module_xpath_iterate(xml_root, '//votable/fields/item', text=False):
            label = elem.get('label')
            units = elem.get('units')
            name = elem.text
            yield {'label': label, 'units': units, 'name': name}
| IntersectAustralia/asvo-tao | web/tao/sql_job_form.py | Python | gpl-3.0 | 4,293 |
import szurubooru.model.util
from szurubooru.model.base import Base
from szurubooru.model.comment import Comment, CommentScore
from szurubooru.model.pool import Pool, PoolName, PoolPost
from szurubooru.model.pool_category import PoolCategory
from szurubooru.model.post import (
Post,
PostFavorite,
PostFeature,
PostNote,
PostRelation,
PostScore,
PostSignature,
PostTag,
)
from szurubooru.model.snapshot import Snapshot
from szurubooru.model.tag import Tag, TagImplication, TagName, TagSuggestion
from szurubooru.model.tag_category import TagCategory
from szurubooru.model.user import User, UserToken
| rr-/szurubooru | server/szurubooru/model/__init__.py | Python | gpl-3.0 | 632 |
import curses
import datetime
import itertools
import sys
import time
from tabulate import tabulate
from Brick.sockserver import SockClient
def build_info(client):
    """Render the status of every known session as a psql-style text table."""
    columns = ["SID", "CONF", "BOOT", "TERM", "TIME", "STATE", "CPU%", "MEM%", "TASK", "NQ", "QUEUE"]
    rows = []
    for entry in client.get_status():
        sid, conf, boot_ts, term_ts, state, current_task, queue, cpu, memory = entry
        queue_len = len(queue)
        if queue_len > 5:
            # show only the first five queued items, with an ellipsis marker
            queue = str(queue[:5])[:-1] + "...]"
        boot = datetime.datetime.fromtimestamp(boot_ts)
        if term_ts:
            term = datetime.datetime.fromtimestamp(term_ts)
            elapsed = term - boot
        else:
            term = None
            elapsed = datetime.datetime.now() - boot
        # drop sub-second precision for display
        elapsed -= datetime.timedelta(microseconds=elapsed.microseconds)
        boot_str = boot.strftime("%Y-%m-%d %H:%M:%S")
        term_str = term.strftime("%Y-%m-%d %H:%M:%S") if term else "Running"
        rows.append([sid, conf, boot_str, term_str, elapsed, state, cpu, memory, current_task, queue_len, queue])
    # running sessions first, each group sorted by session id
    running = sorted((r for r in rows if r[3] == "Running"), key=lambda r: r[0])
    finished = sorted((r for r in rows if r[3] != "Running"), key=lambda r: r[0])
    return tabulate(running + finished, headers=columns, tablefmt="psql")
def list_status():
    """Print a one-shot status table for the server on the port given as argv[1]."""
    port = int(sys.argv[1])
    client = SockClient(("localhost", port))
    # print() call form: identical output under Python 2, valid under Python 3
    print(build_info(client))
def brick_top():
    """Continuously redraw the session status table in a curses screen (top-like view)."""
    client = SockClient(("localhost", int(sys.argv[1])))
    def refresh_loop(screen):
        # inherit the terminal's default colors instead of curses' defaults
        curses.use_default_colors()
        while True:
            screen.clear()
            screen.addstr(build_info(client))
            screen.refresh()
            time.sleep(1)
    curses.wrapper(refresh_loop)
| Tefx/Brick | Brick/tools.py | Python | gpl-3.0 | 1,716 |
from .conftest import decode_response
YARA_TEST_RULE = 'rule rulename {strings: $a = "foobar" condition: $a}'
def test_no_data(test_app):
    """POSTing no payload at all must be rejected by input validation."""
    response = decode_response(test_app.post('/rest/binary_search'))
    assert 'Input payload validation failed' in response['message']
    assert 'errors' in response
    assert 'is a required property' in response['errors']['rule_file']
def test_no_rule_file(test_app):
    """An empty JSON object must fail validation because rule_file is mandatory."""
    response = decode_response(test_app.post('/rest/binary_search', json={}))
    assert 'Input payload validation failed' in response['message']
    assert 'errors' in response
    assert '\'rule_file\' is a required property' in response['errors']['rule_file']
def test_wrong_rule_file_format(test_app):
    """A syntactically invalid YARA rule is rejected with a descriptive error."""
    payload = {'rule_file': 'not an actual rule file'}
    response = decode_response(test_app.post('/rest/binary_search', json=payload))
    assert 'Error in YARA rule file' in response['error_message']
def test_firmware_uid_not_found(test_app):
    """Searching against a non-existing firmware UID yields an error message."""
    payload = {'rule_file': YARA_TEST_RULE, 'uid': 'not found'}
    response = decode_response(test_app.post('/rest/binary_search', json=payload))
    assert 'not found in database' in response['error_message']
def test_start_binary_search(test_app):
    """A valid rule file starts a firmware-wide binary search."""
    response = decode_response(test_app.post('/rest/binary_search', json={'rule_file': YARA_TEST_RULE}))
    assert 'Started binary search' in response['message']
def test_start_binary_search_with_uid(test_app):
    """A valid rule file plus a known UID starts a search scoped to that firmware."""
    payload = {'rule_file': YARA_TEST_RULE, 'uid': 'uid_in_db'}
    response = decode_response(test_app.post('/rest/binary_search', json=payload))
    assert 'Started binary search' in response['message']
def test_get_result_without_search_id(test_app):
    """GET on the collection endpoint is not allowed (a search id is required)."""
    response = decode_response(test_app.get('/rest/binary_search'))
    assert 'The method is not allowed for the requested URL' in response['message']
def test_get_result_non_existent_id(test_app):
    """Polling an unknown search id reports that the result is not ready yet."""
    response = decode_response(test_app.get('/rest/binary_search/foobar'))
    assert 'result is not ready yet' in response['error_message']
| fkie-cad/FACT_core | src/test/unit/web_interface/rest/test_rest_binary_search.py | Python | gpl-3.0 | 1,968 |
# Author: medariox <[email protected]>,
# based on Antoine Bertin's <[email protected]> work
# and originally written by Nyaran <[email protected]>
# URL: https://github.com/SickRage/SickRage/
#
# This file is part of SickRage.
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
import os
import re
import datetime
import traceback
import subliminal
import subprocess
import pkg_resources
import sickbeard
from subliminal.api import provider_manager
from babelfish import Language, language_converters
from sickbeard import logger
from sickbeard import history
from sickbeard import db
from sickbeard import processTV
from sickrage.helper.common import media_extensions, dateTimeFormat
from sickrage.helper.encoding import ek
from sickrage.helper.exceptions import ex
from sickrage.show.Show import Show
# Register subliminal's providers/converters against a fake distribution so that
# pkg_resources resolves the entry points from this bundled copy of subliminal
# rather than requiring an installed package.
DISTRIBUTION = pkg_resources.Distribution(location=os.path.dirname(os.path.dirname(__file__)),
                                          project_name='fake_entry_points', version='1.0.0')
ENTRY_POINTS = {
    'subliminal.providers': [
        'addic7ed = subliminal.providers.addic7ed:Addic7edProvider',
        'legendastv = subliminal.providers.legendastv:LegendasTvProvider',
        'napiprojekt = subliminal.providers.napiprojekt:NapiProjektProvider',
        'opensubtitles = subliminal.providers.opensubtitles:OpenSubtitlesProvider',
        'podnapisi = subliminal.providers.podnapisi:PodnapisiProvider',
        'thesubdb = subliminal.providers.thesubdb:TheSubDBProvider',
        'tvsubtitles = subliminal.providers.tvsubtitles:TVsubtitlesProvider'
    ],
    'babelfish.language_converters': [
        'addic7ed = subliminal.converters.addic7ed:Addic7edConverter',
        'legendastv = subliminal.converters.legendastv:LegendasTvConverter',
        'thesubdb = subliminal.converters.thesubdb:TheSubDBConverter',
        'tvsubtitles = subliminal.converters.tvsubtitles:TVsubtitlesConverter'
    ]
}
# pylint: disable=protected-access
# Access to a protected member of a client class
DISTRIBUTION._ep_map = pkg_resources.EntryPoint.parse_map(ENTRY_POINTS, DISTRIBUTION)
pkg_resources.working_set.add(DISTRIBUTION)
# drop subliminal's cached provider entry points so they are re-read from the fake distribution
provider_manager.ENTRY_POINT_CACHE.pop('subliminal.providers')
# configure subliminal's dogpile cache region with an in-memory backend
subliminal.region.configure('dogpile.cache.memory')
# homepage of each known provider, used to build links in the settings UI
PROVIDER_URLS = {
    'addic7ed': 'http://www.addic7ed.com',
    'legendastv': 'http://www.legendas.tv',
    'napiprojekt': 'http://www.napiprojekt.pl',
    'opensubtitles': 'http://www.opensubtitles.org',
    'podnapisi': 'http://www.podnapisi.net',
    'thesubdb': 'http://www.thesubdb.com',
    'tvsubtitles': 'http://www.tvsubtitles.net'
}
def sorted_service_list():
    """Return every known subtitle service as a list of dicts, user-configured first.

    Services in sickbeard.SUBTITLES_SERVICES_LIST come first (keeping the user's
    order and enabled flags); any remaining providers known to subliminal are
    appended as disabled. Each entry has 'name', 'url', 'image' and 'enabled' keys.
    """
    new_list = []
    lmgtfy = 'http://lmgtfy.com/?q=%s'
    # first: the services the user has configured, in their chosen order
    for current_index, current_service in enumerate(sickbeard.SUBTITLES_SERVICES_LIST):
        if current_service in subliminal.provider_manager.names():
            new_list.append({'name': current_service,
                             'url': PROVIDER_URLS.get(current_service, lmgtfy % current_service),
                             'image': current_service + '.png',
                             'enabled': sickbeard.SUBTITLES_SERVICES_ENABLED[current_index] == 1
                             })
    # then: any provider subliminal knows about that was not configured yet
    seen_names = {service['name'] for service in new_list}
    for current_service in subliminal.provider_manager.names():
        if current_service not in seen_names:
            new_list.append({'name': current_service,
                             'url': PROVIDER_URLS.get(current_service, lmgtfy % current_service),
                             'image': current_service + '.png',
                             'enabled': False,
                             })
            seen_names.add(current_service)
    return new_list
def enabled_service_list():
    """Names of the subtitle services the user has enabled, in configured order."""
    return [entry['name'] for entry in sorted_service_list() if entry['enabled']]
def wanted_languages(sql_like=None):
    """Configured subtitle language codes, restricted to known 3-letter codes.

    When *sql_like* is truthy, return a SQL LIKE pattern instead of the set.
    """
    wanted = frozenset(sickbeard.SUBTITLES_LANGUAGES).intersection(subtitle_code_filter())
    if sql_like:
        return '%' + ','.join(wanted) + '%'
    return wanted
def get_needed_languages(subtitles):
    """Babelfish Language objects for every wanted language missing from *subtitles*."""
    missing = wanted_languages().difference(subtitles)
    return set(from_code(code) for code in missing)
def subtitle_code_filter():
    """All 3-letter opensubtitles language codes known to babelfish."""
    return set(code for code in language_converters['opensubtitles'].codes if len(code) == 3)
def needs_subtitles(subtitles):
    """Return True when at least one wanted subtitle language is still missing.

    @param subtitles: iterable of language codes, or a comma-separated string
    """
    if isinstance(subtitles, basestring):
        subtitles = {subtitle.strip() for subtitle in subtitles.split(',')}
    if sickbeard.SUBTITLES_MULTI:
        # multi-language mode: any wanted language still absent means we need more
        return bool(wanted_languages().difference(subtitles))
    # single-language mode: having any subtitle at all is enough
    return not subtitles
# Hack around this for now.
def from_code(language):
    """Map a 3-letter opensubtitles code to a babelfish Language.

    Unknown or unrecognised codes map to Language('und') (undetermined).
    """
    code = language.strip()
    if code in language_converters['opensubtitles'].codes:
        return Language.fromopensubtitles(code)  # pylint: disable=no-member
    return Language('und')
def name_from_code(code):
    """Return the human-readable language name for a 3-letter opensubtitles code."""
    return from_code(code).name
def code_from_code(code):
    """Normalize a language code to its canonical 3-letter opensubtitles form."""
    return from_code(code).opensubtitles
def download_subtitles(subtitles_info):
    """Search all enabled providers and download missing subtitles for one episode.

    @param subtitles_info: dict describing the episode; must contain at least
        'subtitles', 'season', 'episode', 'show_name', 'location', 'status'
        and 'show_indexerid'
    @return: tuple (all subtitle codes now present, set of newly downloaded codes
        or None when nothing was downloaded)
    """
    existing_subtitles = subtitles_info['subtitles']
    if not needs_subtitles(existing_subtitles):
        logger.log(u'Episode already has all needed subtitles, skipping episode %dx%d of show %s'
                   % (subtitles_info['season'], subtitles_info['episode'], subtitles_info['show_name']), logger.DEBUG)
        return (existing_subtitles, None)
    # Check if we really need subtitles
    languages = get_needed_languages(existing_subtitles)
    if not languages:
        logger.log(u'No subtitles needed for %s S%02dE%02d'
                   % (subtitles_info['show_name'], subtitles_info['season'],
                      subtitles_info['episode']), logger.DEBUG)
        return (existing_subtitles, None)
    subtitles_path = get_subtitles_path(subtitles_info['location']).encode(sickbeard.SYS_ENCODING)
    video_path = subtitles_info['location'].encode(sickbeard.SYS_ENCODING)
    video = get_video(video_path, subtitles_path=subtitles_path)
    if not video:
        logger.log(u'Exception caught in subliminal.scan_video for %s S%02dE%02d'
                   % (subtitles_info['show_name'], subtitles_info['season'],
                      subtitles_info['episode']), logger.DEBUG)
        return (existing_subtitles, None)
    providers = enabled_service_list()
    # per-provider credentials, taken from the user's configuration
    provider_configs = {'addic7ed': {'username': sickbeard.ADDIC7ED_USER,
                                     'password': sickbeard.ADDIC7ED_PASS},
                        'legendastv': {'username': sickbeard.LEGENDASTV_USER,
                                       'password': sickbeard.LEGENDASTV_PASS},
                        'opensubtitles': {'username': sickbeard.OPENSUBTITLES_USER,
                                          'password': sickbeard.OPENSUBTITLES_PASS}}
    pool = subliminal.api.ProviderPool(providers=providers, provider_configs=provider_configs)
    try:
        subtitles_list = pool.list_subtitles(video, languages)
        if not subtitles_list:
            logger.log(u'No subtitles found for %s S%02dE%02d on any provider'
                       % (subtitles_info['show_name'], subtitles_info['season'],
                          subtitles_info['episode']), logger.DEBUG)
            return (existing_subtitles, None)
        # log each candidate's score; 132 matches the min_score passed below
        for sub in subtitles_list:
            matches = sub.get_matches(video, hearing_impaired=False)
            score = subliminal.subtitle.compute_score(matches, video)
            logger.log(u"[%s] Subtitle score for %s is: %s (min=132)" % (sub.provider_name, sub.id, score), logger.DEBUG)
        found_subtitles = pool.download_best_subtitles(subtitles_list, video, languages=languages, min_score=132,
                                                       hearing_impaired=sickbeard.SUBTITLES_HEARING_IMPAIRED,
                                                       only_one=not sickbeard.SUBTITLES_MULTI)
        subliminal.save_subtitles(video, found_subtitles, directory=subtitles_path,
                                  single=not sickbeard.SUBTITLES_MULTI)
    except Exception:
        logger.log(u"Error occurred when downloading subtitles for: %s" % video_path)
        logger.log(traceback.format_exc(), logger.ERROR)
        return (existing_subtitles, None)
    # fix up ownership/permissions of each freshly written subtitle file
    for subtitle in found_subtitles:
        subtitle_path = subliminal.subtitle.get_subtitle_path(video.name,
                                                              None if not sickbeard.SUBTITLES_MULTI else
                                                              subtitle.language)
        if subtitles_path is not None:
            subtitle_path = ek(os.path.join, subtitles_path, ek(os.path.split, subtitle_path)[1])
        sickbeard.helpers.chmodAsParent(subtitle_path)
        sickbeard.helpers.fixSetGroupID(subtitle_path)
    if (not sickbeard.EMBEDDED_SUBTITLES_ALL and sickbeard.SUBTITLES_EXTRA_SCRIPTS and
            video_path.rsplit(".", 1)[1] in media_extensions):
        run_subs_extra_scripts(subtitles_info, found_subtitles, video, single=not sickbeard.SUBTITLES_MULTI)
    current_subtitles = [subtitle.language.opensubtitles for subtitle in found_subtitles]
    new_subtitles = frozenset(current_subtitles).difference(existing_subtitles)
    current_subtitles += existing_subtitles
    if sickbeard.SUBTITLES_HISTORY:
        for subtitle in found_subtitles:
            logger.log(u'history.logSubtitle %s, %s' %
                       (subtitle.provider_name, subtitle.language.opensubtitles), logger.DEBUG)
            history.logSubtitle(subtitles_info['show_indexerid'], subtitles_info['season'],
                                subtitles_info['episode'], subtitles_info['status'], subtitle)
    return (current_subtitles, new_subtitles)
def refresh_subtitles(episode_info, existing_subtitles):
    """Re-scan the episode's video file and return (subtitle codes, changed flag or None)."""
    video = get_video(episode_info['location'].encode(sickbeard.SYS_ENCODING))
    if not video:
        logger.log(u"Exception caught in subliminal.scan_video, subtitles couldn't be refreshed", logger.DEBUG)
        return (existing_subtitles, None)
    current_subtitles = get_subtitles(video)
    if existing_subtitles != current_subtitles:
        return (current_subtitles, True)
    logger.log(u'No changed subtitles for %s S%02dE%02d'
               % (episode_info['show_name'], episode_info['season'],
                  episode_info['episode']), logger.DEBUG)
    return (existing_subtitles, None)
def get_video(video_path, subtitles_path=None):
    """Scan *video_path* with subliminal; return None when scanning fails."""
    if not subtitles_path:
        subtitles_path = get_subtitles_path(video_path).encode(sickbeard.SYS_ENCODING)
    # only inspect embedded subtitle tracks for mkv files, and only when the
    # global "ignore embedded subtitles" option is off
    embedded = not sickbeard.EMBEDDED_SUBTITLES_ALL and video_path.endswith('.mkv')
    try:
        return subliminal.scan_video(video_path, subtitles=True, embedded_subtitles=embedded,
                                     subtitles_dir=subtitles_path)
    except Exception:
        # best-effort: callers treat None as "could not scan"
        return None
def get_subtitles_path(video_path):
    """Answer the directory where subtitles for *video_path* belong, creating it if needed."""
    if ek(os.path.isabs, sickbeard.SUBTITLES_DIR):
        # absolute custom subtitle directory: use it as-is
        subs_dir = sickbeard.SUBTITLES_DIR
    elif sickbeard.SUBTITLES_DIR:
        # relative custom directory: resolve next to the video file and create it
        subs_dir = ek(os.path.join, ek(os.path.dirname, video_path), sickbeard.SUBTITLES_DIR)
        if sickbeard.helpers.makeDir(subs_dir):
            sickbeard.helpers.chmodAsParent(subs_dir)
        else:
            logger.log(u'Unable to create subtitles folder ' + subs_dir, logger.ERROR)
    else:
        # no custom directory configured: store next to the video file
        subs_dir = ek(os.path.join, ek(os.path.dirname, video_path))
    return subs_dir
def get_subtitles(video):
    """Return a sorted list of detected subtitle language codes for the given video file.

    Only languages exposing a truthy ``opensubtitles`` code are included;
    an empty list is returned when no subtitle languages were detected.
    """
    if not video.subtitle_languages:
        return []
    return sorted(language.opensubtitles for language in video.subtitle_languages
                  if hasattr(language, 'opensubtitles') and language.opensubtitles)
class SubtitlesFinder(object):
    """
    The SubtitlesFinder will be executed every hour but will not necessarly search
    and download subtitles. Only if the defined rule is true
    """
    def __init__(self):
        # flag used by the scheduler/UI to know whether a search is in progress
        self.amActive = False
    @staticmethod
    def subtitles_download_in_pp(): # pylint: disable=too-many-locals
        """Scan the post-processing folder and fetch subtitles for waiting videos."""
        logger.log(u'Checking for needed subtitles in Post-Process folder', logger.INFO)
        providers = enabled_service_list()
        # per-provider credentials, taken from the user's configuration
        provider_configs = {'addic7ed': {'username': sickbeard.ADDIC7ED_USER,
                                         'password': sickbeard.ADDIC7ED_PASS},
                            'legendastv': {'username': sickbeard.LEGENDASTV_USER,
                                           'password': sickbeard.LEGENDASTV_PASS},
                            'opensubtitles': {'username': sickbeard.OPENSUBTITLES_USER,
                                              'password': sickbeard.OPENSUBTITLES_PASS}}
        pool = subliminal.api.ProviderPool(providers=providers, provider_configs=provider_configs)
        # Search for all wanted languages
        languages = {from_code(language) for language in wanted_languages()}
        if not languages:
            return
        run_post_process = False
        # Check if PP folder is set
        if sickbeard.TV_DOWNLOAD_DIR and ek(os.path.isdir, sickbeard.TV_DOWNLOAD_DIR):
            for root, _, files in ek(os.walk, sickbeard.TV_DOWNLOAD_DIR, topdown=False):
                for video_filename in sorted(files):
                    if video_filename.rsplit(".", 1)[1] in media_extensions:
                        try:
                            video = subliminal.scan_video(os.path.join(root, video_filename),
                                                          subtitles=False, embedded_subtitles=False)
                            subtitles_list = pool.list_subtitles(video, languages)
                            if not subtitles_list:
                                logger.log(u'No subtitles found for %s'
                                           % ek(os.path.join, root, video_filename), logger.DEBUG)
                                continue
                            hearing_impaired = sickbeard.SUBTITLES_HEARING_IMPAIRED
                            found_subtitles = pool.download_best_subtitles(subtitles_list, video, languages=languages,
                                                                           hearing_impaired=hearing_impaired, min_score=132,
                                                                           only_one=not sickbeard.SUBTITLES_MULTI)
                            # log each candidate's score; 132 matches min_score above
                            for sub in subtitles_list:
                                matches = sub.get_matches(video, hearing_impaired=False)
                                score = subliminal.subtitle.compute_score(matches, video)
                                logger.log(u"[%s] Subtitle score for %s is: %s (min=132)" % (sub.provider_name, sub.id, score), logger.DEBUG)
                            downloaded_languages = set()
                            for subtitle in found_subtitles:
                                logger.log(u"Found subtitle for %s in %s provider with language %s"
                                           % (os.path.join(root, video_filename), subtitle.provider_name,
                                              subtitle.language.opensubtitles), logger.DEBUG)
                                subliminal.save_subtitles(video, found_subtitles, directory=root,
                                                          single=not sickbeard.SUBTITLES_MULTI)
                                subtitles_multi = not sickbeard.SUBTITLES_MULTI
                                subtitle_path = subliminal.subtitle.get_subtitle_path(video.name,
                                                                                      None if subtitles_multi else
                                                                                      subtitle.language)
                                if root is not None:
                                    subtitle_path = ek(os.path.join, root, ek(os.path.split, subtitle_path)[1])
                                sickbeard.helpers.chmodAsParent(subtitle_path)
                                sickbeard.helpers.fixSetGroupID(subtitle_path)
                                downloaded_languages.add(subtitle.language.opensubtitles)
                            # Don't run post processor unless at least one file has all of the needed subtitles
                            if not needs_subtitles(downloaded_languages):
                                run_post_process = True
                        except Exception as error:
                            logger.log(u"Error occurred when downloading subtitles for: %s. Error: %r"
                                       % (os.path.join(root, video_filename), ex(error)))
        if run_post_process:
            logger.log(u"Starting post-process with default settings now that we found subtitles")
            processTV.processDir(sickbeard.TV_DOWNLOAD_DIR)
    def run(self, force=False): # pylint: disable=unused-argument
        """Scheduler entry point: find episodes needing subtitles and download them."""
        if not sickbeard.USE_SUBTITLES:
            return
        if len(sickbeard.subtitles.enabled_service_list()) < 1:
            logger.log(u'Not enough services selected. At least 1 service is required to '
                       'search subtitles in the background', logger.WARNING)
            return
        self.amActive = True
        if sickbeard.SUBTITLES_DOWNLOAD_IN_PP:
            self.subtitles_download_in_pp()
        logger.log(u'Checking for subtitles', logger.INFO)
        # get episodes on which we want subtitles
        # criteria is:
        #  - show subtitles = 1
        #  - episode subtitles != config wanted languages or 'und' (depends on config multi)
        #  - search count < 2 and diff(airdate, now) > 1 week : now -> 1d
        #  - search count < 7 and diff(airdate, now) <= 1 week : now -> 4h -> 8h -> 16h -> 1d -> 1d -> 1d
        today = datetime.date.today().toordinal()
        database = db.DBConnection()
        sql_results = database.select(
            'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.subtitles, ' +
            'e.subtitles_searchcount AS searchcount, e.subtitles_lastsearch AS lastsearch, e.location, '
            '(? - e.airdate) AS airdate_daydiff ' +
            'FROM tv_episodes AS e INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id) ' +
            'WHERE s.subtitles = 1 AND e.subtitles NOT LIKE (?) ' +
            'AND (e.subtitles_searchcount <= 2 OR (e.subtitles_searchcount <= 7 AND airdate_daydiff <= 7)) ' +
            'AND e.location != ""', [today, wanted_languages(True)])
        if len(sql_results) == 0:
            logger.log(u'No subtitles to download', logger.INFO)
            self.amActive = False
            return
        rules = self._get_rules()
        now = datetime.datetime.now()
        for ep_to_sub in sql_results:
            if not ek(os.path.isfile, ep_to_sub['location']):
                logger.log(u'Episode file does not exist, cannot download subtitles for episode %dx%d of show %s'
                           % (ep_to_sub['season'], ep_to_sub['episode'], ep_to_sub['show_name']), logger.DEBUG)
                continue
            if not needs_subtitles(ep_to_sub['subtitles']):
                logger.log(u'Episode already has all needed subtitles, skipping episode %dx%d of show %s'
                           % (ep_to_sub['season'], ep_to_sub['episode'], ep_to_sub['show_name']), logger.DEBUG)
                continue
            # http://bugs.python.org/issue7980#msg221094
            # I dont think this needs done here, but keeping to be safe (Recent shows rule)
            datetime.datetime.strptime('20110101', '%Y%m%d')
            # throttle re-searches: older episodes retry on the 'old' schedule,
            # recently aired episodes on the more aggressive 'new' schedule
            if ((ep_to_sub['airdate_daydiff'] > 7 and ep_to_sub['searchcount'] < 2 and
                 now - datetime.datetime.strptime(ep_to_sub['lastsearch'], dateTimeFormat) >
                 datetime.timedelta(hours=rules['old'][ep_to_sub['searchcount']])) or
                (ep_to_sub['airdate_daydiff'] <= 7 and ep_to_sub['searchcount'] < 7 and
                 now - datetime.datetime.strptime(ep_to_sub['lastsearch'], dateTimeFormat) >
                 datetime.timedelta(hours=rules['new'][ep_to_sub['searchcount']]))):
                logger.log(u'Downloading subtitles for episode %dx%d of show %s'
                           % (ep_to_sub['season'], ep_to_sub['episode'], ep_to_sub['show_name']), logger.DEBUG)
                show_object = Show.find(sickbeard.showList, int(ep_to_sub['showid']))
                if not show_object:
                    logger.log(u'Show not found', logger.DEBUG)
                    self.amActive = False
                    return
                episode_object = show_object.getEpisode(int(ep_to_sub["season"]), int(ep_to_sub["episode"]))
                if isinstance(episode_object, str):
                    logger.log(u'Episode not found', logger.DEBUG)
                    self.amActive = False
                    return
                existing_subtitles = episode_object.subtitles
                try:
                    episode_object.download_subtitles()
                except Exception as error:
                    logger.log(u'Unable to find subtitles', logger.DEBUG)
                    logger.log(str(error), logger.DEBUG)
                    self.amActive = False
                    return
                new_subtitles = frozenset(episode_object.subtitles).difference(existing_subtitles)
                if new_subtitles:
                    logger.log(u'Downloaded subtitles for S%02dE%02d in %s'
                               % (ep_to_sub["season"], ep_to_sub["episode"], ', '.join(new_subtitles)))
        self.amActive = False
    @staticmethod
    def _get_rules():
        """
        Define the hours to wait between 2 subtitles search depending on:
        - the episode: new or old
        - the number of searches done so far (searchcount), represented by the index of the list
        """
        return {'old': [0, 24], 'new': [0, 4, 8, 4, 16, 24, 24]}
def run_subs_extra_scripts(episode_object, found_subtitles, video, single=False):
    """Run each user-configured post-download script once per found subtitle.

    episode_object -- mapping holding show_name/season/episode/name/
        show_indexerid (indexed with [] below)
    found_subtitles -- subtitles just downloaded for *video*
    video -- subliminal video object; video.name is the media file path
    single -- when True a None language is passed, so the subtitle path
        presumably has no language suffix (TODO confirm against subliminal)
    """
    for script_name in sickbeard.SUBTITLES_EXTRA_SCRIPTS:
        # Split the configured command line on spaces while keeping
        # single- or double-quoted chunks intact.
        script_cmd = [piece for piece in re.split("( |\\\".*?\\\"|'.*?')", script_name) if piece.strip()]
        script_cmd[0] = ek(os.path.abspath, script_cmd[0])
        logger.log(u"Absolute path to script: " + script_cmd[0], logger.DEBUG)
        for subtitle in found_subtitles:
            # Where subliminal saved this subtitle on disk.
            subtitle_path = subliminal.subtitle.get_subtitle_path(video.name, None if single else subtitle.language)
            # Script arguments appended after the configured command.
            inner_cmd = script_cmd + [video.name, subtitle_path, subtitle.language.opensubtitles,
                                      episode_object['show_name'], str(episode_object['season']),
                                      str(episode_object['episode']), episode_object['name'],
                                      str(episode_object['show_indexerid'])]
            # use subprocess to run the command and capture output
            logger.log(u"Executing command: %s" % inner_cmd)
            try:
                process = subprocess.Popen(inner_cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE,
                                           stderr=subprocess.STDOUT, cwd=sickbeard.PROG_DIR)
                out, _ = process.communicate()  # @UnusedVariable
                logger.log(u"Script result: %s" % out, logger.DEBUG)
            except Exception as error:
                # A broken user script must not abort subtitle processing.
                logger.log(u"Unable to run subs_extra_script: " + ex(error))
| hernandito/SickRage | sickbeard/subtitles.py | Python | gpl-3.0 | 24,581 |
import numpy as np
import matplotlib.pyplot as plt
from asteroseismology_model import *
# Posterior samples, one row per saved iteration.  Column 0 holds the
# number of peaks; the remaining columns are model parameters -- assumed
# from how signal() and the loop below index the rows; TODO confirm
# against asteroseismology_model.
keep = np.loadtxt('keep.txt')
# Trace plot of the number of peaks
plt.plot(keep[:,0].astype('int'))
plt.xlabel('Iteration', fontsize=18)
plt.ylabel('Number of Peaks', fontsize=18)
plt.show()
# Histogram of the number of peaks
plt.hist(keep[:,0].astype('int'), 100)
plt.xlabel('Number of peaks', fontsize=18)
plt.ylabel('Number of Posterior Samples', fontsize=18)
plt.show()
def signal(params):
    """Return the model curve for one posterior sample.

    Mirrors the parameterization used in log_likelihood: params[0] is the
    number of peaks, params[1] the log-background, and each peak then
    contributes three consecutive parameters (amplitude, position, width).
    Uses the module-level N, x_min, x_range and data arrays.
    """
    num_peaks = int(params[0])
    background = np.exp(params[1])
    # Start from a flat background over all N frequency bins.
    model = background + np.zeros(N)
    for peak in range(num_peaks):
        base = 2 + 3*peak
        amplitude = -20.*np.log(1. - params[base])
        centre = x_min + x_range*params[base + 1]
        width = np.exp(np.log(1E-2*x_range) + np.log(1E2)*params[base + 2])
        # Add one Lorentzian peak to the model.
        model += amplitude/(1. + ((data[:,0] - centre)/width)**2)
    return model
# Plot a movie of the fits
# Only use the second half of the run.
# Also, accumulate all x-values (frequencies) and amplitudes
# in these arrays:
# NOTE(review): plt.hold() was removed in matplotlib 3.0, so this script
# requires an old matplotlib (or needs porting to use explicit axes).
all_x = np.array([])
all_A = np.array([])
plt.ion()
for i in range(keep.shape[0]//2, keep.shape[0]):
    # Plotting
    plt.hold(False)
    plt.plot(data[:,0], data[:,1], 'b.')
    mu = signal(keep[i, :])
    plt.hold(True)
    plt.plot(data[:,0], mu, 'r-', linewidth=2)
    plt.title('Model {i}/{n}'.format(i=(i+1), n=keep.shape[0]))
    plt.xlabel('Frequency')
    plt.ylabel('Power')
    plt.draw()
    # Accumulate
    # Peak amplitudes sit at columns 2, 5, 8, ... and positions at 3, 6, 9, ...
    # NOTE(review): the factors -10 and 10. here differ from signal(), which
    # uses -20. and x_min + x_range*... -- confirm which scaling is intended.
    num_peaks = keep[i, 0].astype('int')
    A = -10*np.log(1. - keep[i, 2::3][0:num_peaks])
    x = 10.*keep[i, 3::3][0:num_peaks]
    all_x = np.hstack([all_x, x])
    all_A = np.hstack([all_A, A])
plt.ioff()
plt.show()
plt.hist(all_x, 200)
plt.xlabel('Frequency')
plt.ylabel('Number of Posterior Samples')
plt.show()
plt.plot(all_x, all_A, 'b.', markersize=1)
plt.xlabel('$x$', fontsize=18)
plt.ylabel('$A$', fontsize=18)
plt.show()
| eggplantbren/WinterSchool | Code/asteroseismology_results.py | Python | gpl-3.0 | 2,064 |
#!/usr/bin/env python
from Media import Media
class Movie(Media):
    """A scheduled movie; all scheduling behavior lives in the Media base.

    Attributes:
        section_type: The type of library this is (i.e. "TV Shows")
        title: The title of the media item
        natural_start_time: The scheduled start time before any shifting happens.
        natural_end_time: The end time of the scheduled content.
        duration: The duration of the media item.
        day_of_week: When the content is scheduled to play
        is_strict_time: If strict time, then anchor to "natural_start_time"
    """
    def __init__(self, section_type, title, natural_start_time,
                 natural_end_time, duration, day_of_week, is_strict_time,
                 time_shift, overlap_max, plex_media_id,
                 custom_section_name):
        # Movies add no state of their own; every argument is forwarded
        # unchanged to the shared Media initializer.
        super(Movie, self).__init__(section_type, title, natural_start_time,
                                    natural_end_time, duration, day_of_week,
                                    is_strict_time, time_shift, overlap_max,
                                    plex_media_id, custom_section_name)
| justinemter/pseudo-channel | src/Movie.py | Python | gpl-3.0 | 1,306 |
#---LICENSE----------------------
'''
Copyright 2014 Travel Modelling Group, Department of Civil Engineering, University of Toronto
This file is part of the TMG Toolbox.
The TMG Toolbox is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
The TMG Toolbox is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with the TMG Toolbox. If not, see <http://www.gnu.org/licenses/>.
'''
#---METADATA---------------------
'''
Returb Boardings
Authors: pkucirek
Latest revision by: pkucirek
Returns a 'serialized' (e.g. string repr) of transit line boardings to XTMF.
See XTMF for documentation.
'''
#---VERSION HISTORY
'''
0.0.1 Created on 2014-02-05 by pkucirek
0.1.0 Upgraded to work with get_attribute_values (partial read)
'''
import inro.modeller as _m
import traceback as _traceback
from contextlib import contextmanager
from contextlib import nested
from json import loads
_MODELLER = _m.Modeller() #Instantiate Modeller once.
_util = _MODELLER.module('tmg.common.utilities')
_tmgTPB = _MODELLER.module('tmg.common.TMG_tool_page_builder')
##########################################################################################################
class ReturnBoardings(_m.Tool()):
version = '0.1.0'
tool_run_msg = ""
number_of_tasks = 1 # For progress reporting, enter the integer number of tasks here
# Tool Input Parameters
# Only those parameters necessary for Modeller and/or XTMF to dock with
# need to be placed here. Internal parameters (such as lists and dicts)
# get initialized during construction (__init__)
xtmf_ScenarioNumber = _m.Attribute(int) # parameter used by XTMF only
xtmf_LineAggregationFile = _m.Attribute(str)
xtmf_CheckAggregationFlag = _m.Attribute(bool)
def __init__(self):
#---Init internal variables
self.TRACKER = _util.ProgressTracker(self.number_of_tasks) #init the ProgressTracker
def page(self):
pb = _m.ToolPageBuilder(self, title="Return Boardings",
description="Cannot be called from Modeller.",
runnable=False,
branding_text="XTMF")
return pb.render()
##########################################################################################################
def __call__(self, xtmf_ScenarioNumber, xtmf_LineAggregationFile, xtmf_CheckAggregationFlag):
_m.logbook_write("Extracting boarding results")
#---1 Set up scenario
scenario = _m.Modeller().emmebank.scenario(xtmf_ScenarioNumber)
if (scenario is None):
raise Exception("Scenario %s was not found!" %xtmf_ScenarioNumber)
if not scenario.has_transit_results:
raise Exception("Scenario %s does not have transit assignment results" %xtmf_ScenarioNumber)
self.xtmf_LineAggregationFile = xtmf_LineAggregationFile
self.xtmf_CheckAggregationFlag = xtmf_CheckAggregationFlag
try:
return self._Execute(scenario)
except Exception as e:
msg = str(e) + "\n" + _traceback.format_exc()
raise Exception(msg)
##########################################################################################################
def _Execute(self, scenario):
lineAggregation = self._LoadLineAggregationFile()
lineBoardings = self._GetLineResults(scenario)
netSet = set([key for key in lineBoardings.iterkeys()])
if self.xtmf_CheckAggregationFlag:
self._CheckAggregationFile(netSet, lineAggregation)
self.TRACKER.completeTask()
results = {}
self.TRACKER.startProcess(len(lineBoardings))
for lineId, lineCount in lineBoardings.iteritems():
if not lineId in lineAggregation:
self.TRACKER.completeSubtask()
continue #Skip unmapped lines
lineGroupId = lineAggregation[lineId]
if lineGroupId in results:
results[lineGroupId] += lineCount
else:
results[lineGroupId] = lineCount
self.TRACKER.completeSubtask()
print "Extracted results from Emme"
return str(results)
def _LoadLineAggregationFile(self):
mapping = {}
with open(self.xtmf_LineAggregationFile) as reader:
reader.readline()
for line in reader:
cells = line.strip().split(',')
key = cells[0].strip()
val = cells[1].strip()
mapping[key] = val
return mapping
def _GetLineResults(self, scenario):
results = _util.fastLoadSummedSegmentAttributes(scenario, ['transit_boardings'])
retVal = {}
for lineId, attributes in results.iteritems():
id = str(lineId)
retVal[id] = attributes['transit_boardings']
return retVal
def _CheckAggregationFile(self, netSet, lineAggregation):
aggSet = set([key for key in lineAggregation.iterkeys()])
linesInNetworkButNotMapped = [id for id in (netSet - aggSet)]
linesMappedButNotInNetwork = [id for id in (aggSet - netSet)]
if len(linesMappedButNotInNetwork) > 0:
msg = "%s lines have been found in the network without a line grouping: " %len(linesInNetworkButNotMapped)
msg += ",".join(linesInNetworkButNotMapped[:10])
if len(linesInNetworkButNotMapped) > 10:
msg += "...(%s more)" %(len(linesInNetworkButNotMapped) - 10)
print msg
if len(linesMappedButNotInNetwork) > 0:
msg = "%s lines have been found in the aggregation file but do not exist in the network: " %len(linesMappedButNotInNetwork)
msg += ",".join(linesMappedButNotInNetwork[:10])
if len(linesMappedButNotInNetwork) > 10:
msg += "...(%s more)" %(len(linesMappedButNotInNetwork) - 10)
print msg
##########################################################################################################
@_m.method(return_type=_m.TupleType)
def percent_completed(self):
return self.TRACKER.getProgress()
@_m.method(return_type=unicode)
def tool_run_msg_status(self):
return self.tool_run_msg
| TravelModellingGroup/TMGToolbox | TMGToolbox/src/XTMF_internal/return_boardings.py | Python | gpl-3.0 | 6,999 |
class Solution(object):
    def intersection(self, nums1, nums2):
        """Return the unique values present in both lists.

        Order follows first appearance in nums1, matching the original
        implementation, but membership checks use sets so the runtime is
        O(len(nums1) + len(nums2)) instead of O(n*m) list scans.

        :type nums1: List[int]
        :type nums2: List[int]
        :rtype: List[int]
        """
        candidates = set(nums2)
        seen = set()
        result = []
        for value in nums1:
            if value in candidates and value not in seen:
                seen.add(value)
                result.append(value)
        return result
| CharlotteLock/LeetCode | 349. Intersection of Two Arrays.py | Python | gpl-3.0 | 324 |
#!/usr/bin/python
# BSD Licensed, Copyright (c) 2006-2008 MetaCarta, Inc.
import sys, os, traceback
import cgi as cgimod
from web_request.response import Response
import urllib
import StringIO
class ApplicationException(Exception):
    """Base class for application errors; subclass for specific failures."""
    # Defaults used when a subclass does not override the status.
    status_code = 500
    status_message = "Error"
    def get_error(self):
        """Return an HTTP status line such as '500 Error'."""
        return "{0} {1}".format(self.status_code, self.status_message)
def binary_print(binary_data):
    """Write binary_data to stdout with no newline or text translation.

    On Windows, Python translates output in text mode; this switches
    stdout to binary mode via msvcrt first, then writes directly with
    sys.stdout.write instead of print so nothing extra is emitted.
    """
    try:
        import msvcrt
        msvcrt.setmode(sys.__stdout__.fileno(), os.O_BINARY)
    except (ImportError, OSError):
        # ImportError: not on Windows, so no mode change is needed.
        # OSError: stdout has no usable fileno (e.g. redirected); keep the
        # original best-effort behavior and write anyway.
        pass
    sys.stdout.write(binary_data)
def mod_python (dispatch_function, apache_request):
    """mod_python handler.

    Builds base_path/path_info/params/post_data/accepts from the Apache
    request, calls dispatch_function, and writes back either a
    (format, data[, headers]) tuple or a web_request Response object.
    """
    from mod_python import apache, util
    try:
        # Prefer the proxy-supplied host so generated URLs point at the
        # externally visible server name.
        if apache_request.headers_in.has_key("X-Forwarded-Host"):
            base_path = "http://" + apache_request.headers_in["X-Forwarded-Host"]
        else:
            base_path = "http://" + apache_request.headers_in["Host"]
        # Strip the trailing path_info so base_path ends at the script mount.
        base_path += apache_request.uri[:-len(apache_request.path_info)]
        accepts = ""
        if apache_request.headers_in.has_key("Accept"):
            accepts = apache_request.headers_in["Accept"]
        elif apache_request.headers_in.has_key("Content-Type"):
            accepts = apache_request.headers_in["Content-Type"]
        post_data = apache_request.read()
        request_method = apache_request.method
        params = {}
        # POST bodies are passed through raw in post_data; only non-POST
        # requests get their query fields parsed into params.
        if request_method != "POST":
            fields = util.FieldStorage(apache_request)
            for key in fields.keys():
                params[key.lower()] = fields[key]
        #if post_data:
        #    for key, value in cgimod.parse_qsl(post_data, keep_blank_values=True):
        #        params[key.lower()] = value
        returned_data = dispatch_function(
            base_path = base_path,
            path_info = apache_request.path_info,
            params = params,
            request_method = request_method,
            post_data = post_data,
            accepts = accepts )
        if isinstance(returned_data, list) or isinstance(returned_data, tuple):
            # Tuple form: (content_type, body[, extra header dict]).
            format, data = returned_data[0:2]
            if len(returned_data) == 3:
                for key, value in returned_data[2].items():
                    apache_request.headers_out[key] = value
            apache_request.content_type = format
            apache_request.send_http_header()
            apache_request.write(data)
        else:
            # Otherwise a web_request Response object.
            obj = returned_data
            if obj.extra_headers:
                for key, value in obj.extra_headers.items():
                    apache_request.headers_out[key] = value
            # status_code is presumably a string like "500 Error"; keep only
            # the numeric part -- TODO confirm against the Response class.
            (status, message) = obj.status_code.split(" ")
            apache_request.status = int(status)
            apache_request.content_type = obj.content_type
            apache_request.send_http_header()
            apache_request.write(obj.getData())
    except ApplicationException, error:
        # Application errors carry their own HTTP status.
        apache_request.content_type = "text/html"
        apache_request.status = error.status_code
        apache_request.send_http_header()
        apache_request.write("<h4>An error occurred</h4><p>%s<p>" % (str(error)))
    except Exception, error:
        # Anything else becomes a 500 with a traceback for debugging.
        apache_request.content_type = "text/html"
        apache_request.status = apache.HTTP_INTERNAL_SERVER_ERROR
        apache_request.send_http_header()
        apache_request.write("<h4>An error occurred</h4><p>%s\n</p><p>Trace back: <pre>%s</pre></p>\n" % (
            str(error),
            "".join(traceback.format_tb(sys.exc_traceback))))
    return apache.OK
def wsgi (dispatch_function, environ, start_response):
    """handler for wsgiref simple_server.

    Adapts a WSGI environ to dispatch_function and converts the result
    (tuple or web_request Response object) into a WSGI response.
    """
    try:
        path_info = base_path = ""
        if "PATH_INFO" in environ:
            path_info = environ["PATH_INFO"]
        # Prefer the proxy-supplied host for externally visible URLs.
        if "HTTP_X_FORWARDED_HOST" in environ:
            base_path = "http://" + environ["HTTP_X_FORWARDED_HOST"]
        elif "HTTP_HOST" in environ:
            base_path = "http://" + environ["HTTP_HOST"]
        base_path += environ["SCRIPT_NAME"]
        accepts = None
        if environ.has_key("CONTENT_TYPE"):
            accepts = environ['CONTENT_TYPE']
        else:
            accepts = environ.get('HTTP_ACCEPT', '')
        request_method = environ["REQUEST_METHOD"]
        params = {}
        post_data = None
        # Read the raw request body; it is passed through unparsed.
        if environ.has_key('CONTENT_LENGTH') and environ['CONTENT_LENGTH']:
            post_data = environ['wsgi.input'].read(int(environ['CONTENT_LENGTH']))
        #if post_data:
        #    for key, value in cgimod.parse_qsl(post_data, keep_blank_values=True):
        #        params[key.lower()] = value
        # Query-string parameters are lower-cased into params.
        if environ.has_key('QUERY_STRING'):
            for key, value in cgimod.parse_qsl(environ['QUERY_STRING'], keep_blank_values=True):
                params[key.lower()] = value
        returned_data = dispatch_function(
            base_path = base_path,
            path_info = path_info,
            params = params,
            request_method = request_method,
            post_data = post_data,
            accepts = accepts )
        if isinstance(returned_data, list) or isinstance(returned_data, tuple):
            # Tuple form: (content_type, body[, extra header dict]).
            format, data = returned_data[0:2]
            headers = {'Content-Type': format}
            if len(returned_data) == 3:
                headers.update(returned_data[2])
            start_response("200 OK", headers.items())
            return [str(data)]
        else:
            # This is a web_request.Response.Response object
            headers = {'Content-Type': returned_data.content_type}
            if returned_data.extra_headers:
                headers.update(returned_data.extra_headers)
            start_response("%s Message" % returned_data.status_code,
                headers.items())
            return [returned_data.getData()]
    except ApplicationException, error:
        # Application errors provide their own status line via get_error().
        start_response(error.get_error(), [('Content-Type','text/plain')])
        return ["An error occurred: %s" % (str(error))]
    except Exception, error:
        # Unexpected errors become a 500 with a traceback for debugging.
        start_response("500 Internal Server Error", [('Content-Type','text/plain')])
        return ["An error occurred: %s\n%s\n" % (
            str(error),
            "".join(traceback.format_tb(sys.exc_traceback)))]
def cgi (dispatch_function):
    """CGI handler.

    Builds the request context from os.environ and stdin, calls
    dispatch_function, and prints the HTTP response (headers + body)
    to stdout as a CGI script must.
    """
    try:
        accepts = ""
        if "CONTENT_TYPE" in os.environ:
            accepts = os.environ['CONTENT_TYPE']
        elif "HTTP_ACCEPT" in os.environ:
            accepts = os.environ['HTTP_ACCEPT']
        request_method = os.environ["REQUEST_METHOD"]
        post_data = None
        params = {}
        # Methods with a body: read the raw body, then parse any query
        # string appended to the URL (FieldStorage.qs_on_post).
        if request_method != "GET" and request_method != "DELETE":
            post_data = sys.stdin.read()
            #if post_data:
            #    for key, value in cgimod.parse_qsl(post_data, keep_blank_values=True):
            #        params[key.lower()] = value
            fields = cgimod.FieldStorage()
            # NOTE(review): qs_on_post is an undocumented FieldStorage
            # attribute and may be None for non-form bodies -- verify.
            if fields <> None:
                for key, value in cgimod.parse_qsl(fields.qs_on_post, keep_blank_values=True):
                    params[key.lower()] = value
        else:
            fields = cgimod.FieldStorage()
            try:
                for key in fields.keys():
                    params[key.lower()] = urllib.unquote(fields[key].value)
            except TypeError:
                # Presumably raised by FieldStorage.keys() when there is no
                # form data -- TODO confirm.
                pass
        path_info = base_path = ""
        if "PATH_INFO" in os.environ:
            path_info = os.environ["PATH_INFO"]
        # Prefer the proxy-supplied host for externally visible URLs.
        if "HTTP_X_FORWARDED_HOST" in os.environ:
            base_path = "http://" + os.environ["HTTP_X_FORWARDED_HOST"]
        elif "HTTP_HOST" in os.environ:
            base_path = "http://" + os.environ["HTTP_HOST"]
        base_path += os.environ["SCRIPT_NAME"]
        returned_data = dispatch_function(
            base_path = base_path,
            path_info = path_info,
            params = params,
            request_method = request_method,
            post_data = post_data,
            accepts = accepts )
        if isinstance(returned_data, list) or isinstance(returned_data, tuple):
            # Tuple form: (content_type, body[, extra header dict]).
            format, data = returned_data[0:2]
            if len(returned_data) == 3:
                for (key, value) in returned_data[2].items():
                    print "%s: %s" % (key, value)
            print "Content-type: %s\n" % format
            # On Windows, write the body in binary mode to avoid newline
            # translation corrupting binary payloads.
            if sys.platform == "win32":
                binary_print(data)
            else:
                print data
        else:
            # Returned object is a 'response'
            obj = returned_data
            if obj.extra_headers:
                for (key, value) in obj.extra_headers.items():
                    print "%s: %s" % (key, value)
            print "Content-type: %s\n" % obj.content_type
            if sys.platform == "win32":
                binary_print(obj.getData())
            else:
                print obj.getData()
    except ApplicationException, error:
        print "Cache-Control: max-age=10, must-revalidate" # make the client reload
        print "Content-type: text/plain\n"
        print "An error occurred: %s\n" % (str(error))
    except Exception, error:
        # Unexpected errors: dump the traceback and params for debugging.
        print "Cache-Control: max-age=10, must-revalidate" # make the client reload
        print "Content-type: text/plain\n"
        print "An error occurred: %s\n%s\n" % (
            str(error),
            "".join(traceback.format_tb(sys.exc_traceback)))
        print params
| guolivar/totus-niwa | service/thirdparty/featureserver/web_request/handlers.py | Python | gpl-3.0 | 10,134 |
"""
Mopaq archive format found in Blizzard titles Diablo 1.0 and later
Implemented according to info from http://www.zezula.net
"""
import struct, hashlib, sys
from collections import namedtuple
# Signature ("magic") byte strings that prefix the various MPQ structures.
USERDATA_MAGIC = b'MPQ\x1A'
FILEHEADER_MAGIC = b'MPQ\x1B'
HET_MAGIC = b'HET\x1A'
BET_MAGIC = b'BET\x1A'
BITMAP_MAGIC = b'ptv3'
PATCH_MAGIC = b'BSDIFF40' # offset 0x0000
MD5_MAGIC = b'MD5_'
XFRM_MAGIC = b'XFRM'
# Header lengths in bytes for each MPQ format version.
V1LEN = 0x20
V2LEN = 0x2C
V3LEN = 0x44 # or greater
V4LEN = 0xD0
# Locale codes (Windows LCIDs) stored in hash-table entries.
L_N = 0 # neutral/American English
L_CNTW = 0x404 # Chinese (Taiwan)
L_CZ = 0x405 # Czech
L_DE = 0x407 # German
L_EN = 0x409 # English
L_ES = 0x40A # Spanish
L_FR = 0x40C # French
L_IT = 0x410 # Italian
L_JP = 0x411 # Japanese
L_KR = 0x412 # Korean
L_PL = 0x415 # Polish
L_PT = 0x416 # Portuguese
L_RU = 0x419 # Russian
L_ENUK = 0x809 # UK English
# Block flags (BF_*): how a file is stored within the archive.
BF_IMPL = 0x00000100 # File is compressed using PKWARE Data compression library
BF_COMP = 0x00000200 # File is compressed using combination of compression methods
BF_ENCR = 0x00010000 # File is encrypted
BF_FKEY = 0x00020000 # The decryption key for the file is altered according to the position of the file in the archive
BF_PTCH = 0x00100000 # The file contains incremental patch for an existing file in base MPQ
BF_SNGL = 0x01000000 # Instead of being divided to 0x1000-bytes blocks, the file is stored as single unit
BF_DMRK = 0x02000000 # File is a deletion marker, indicating that the file no longer exists. This is used to allow patch archives to delete files present in lower-priority archives in the search chain. The file usually has length of 0 or 1 byte and its name is a hash
BF_SCRC = 0x04000000 # File has checksums for each sector (explained in the File Data section). Ignored if file is not compressed or imploded.
BF_EXST = 0x80000000 # Set if file exists, reset when the file was deleted
# Archive state flags (AF_*), used at runtime while an MPQ is open.
AF_READ = 0x00000001 # MPQ opened read only
AF_CHNG = 0x00000002 # tables were changed
AF_PROT = 0x00000004 # protected MPQs like W3M maps
AF_CHKS = 0x00000008 # checking sector CRC when reading files
AF_FIXS = 0x00000010 # need fix size, used during archive open
AF_IVLF = 0x00000020 # (listfile) invalidated
AF_IVAT = 0x00000040 # (attributes) invalidated
# Content flags of the "(attributes)" metafile.
ATR_CRC32 = 0x00000001 # contains CRC32 for each file
ATR_FTIME = 0x00000002 # file time for each file
ATR_MD5 = 0x00000004 # MD5 for each file
ATR_PATCHBIT = 0x00000008 # patch bit for each file
ATR_ALL = 0x0000000F
# Compression method flags (CF_*) for file sectors.
CF_HUFF = 0x01 # Huffman compression, WAVE files only
CF_ZLIB = 0x02
CF_PKWR = 0x08 # PKWARE
CF_BZP2 = 0x10 # BZip2, added in Warcraft 3
CF_SPRS = 0x20 # Sparse, added in Starcraft 2
CF_MONO = 0x40 # IMA ADPCM (mono)
CF_STER = 0x80 # IMA ADPCM (stereo)
CF_LZMA = 0x12 # added in Starcraft 2, not a combination
CF_SAME = 0xFFFFFFFF # Same
# Precomputed decryption keys -- presumably the hashed names of the
# "(hash table)" and "(block table)" per the MPQ spec; TODO confirm.
K_HASH = 0xC3AF3770
K_BLCK = 0xEC83B3A3
# Patch payload type tags.
PTYPE1 = b'BSD0' # Blizzard modified version of BSDIFF40 incremental patch
PTYPE2 = b'BSDP'
PTYPE3 = b'COPY' # plain replace
PTYPE4 = b'COUP'
PTYPE5 = b'CPOG'
# Optional user-data block ('MPQ\x1A') that may precede the real header.
UserDataHeader = namedtuple('UserDataHeader', [
    'magic',
    'data_size',
    'header_offset',
    'header_size'])
# '<4s3L': 4-byte signature followed by three little-endian DWORDs.
UserDataHeader.format = '<4s3L'
# Archive header ('MPQ\x1B'), common to all format versions.
Header = namedtuple('Header', [
    'magic',
    'header_size',
    'archive_size', # archive size, deprecated in ver. 2, calced as length from beg. of archive to end of hash table/block table/ext. block table (whichever is largest)
    'version', # 0 = up to WoW:BC, 1 = WoW:BC-WoW:CT beta, 2/3 = WoW:CT beta and later
    'block_size',
    'hash_table_pos',
    'block_table_pos',
    'hash_table_size',
    'block_table_size'])
# '<4s2L2H4L': signature, two DWORDs, two WORDs, four DWORDs.
Header.format = '<4s2L2H4L'
# Header extension for format version 2 (64-bit table positions).
Header2 = namedtuple('Header2', [
    'hi_block_table_pos',
    'hash_table_pos_hi',
    'block_table_pos_hi'])
Header2.format = '<Q2H'
# Header extension for format version 3 (64-bit archive size and the
# positions of the BET/HET tables).
Header3 = namedtuple('Header3', [
    'archive_size64',
    'bet_table_pos',
    'het_table_pos'])
# Three unsigned 64-bit little-endian values.  The previous format '<Q3'
# was invalid struct syntax (a repeat count must precede its format
# character), so struct.calcsize/unpack raised an error.
Header3.format = '<3Q'
# Header extension for format version 4: five 64-bit table sizes plus the
# 32-bit raw chunk size used for MD5 checksumming.
Header4 = namedtuple('Header4', [
    'hash_table_size64',
    'block_table_size64',
    'hi_block_table_size',
    'het_table_size',
    'bet_table_size',
    'raw_chunk_size'])
# All five *_size fields are ULONGLONGs per the MoPaQ v4 layout;
# raw_chunk_size is a DWORD.  The old '<Q5L' decoded only the first field
# as 64-bit and misread the remaining five.
Header4.format = '<5QL'
# Hash table entry: dwName1, dwName2, locale (USHORT), platform (USHORT),
# block index.  The explicit '<' (little-endian) prefix in the struct
# format makes decoding independent of the host byte order, so the field
# order must be fixed: the old sys.byteorder branch swapped locale and
# platform on big-endian hosts, mislabeling the decoded values.
Hash = namedtuple('Hash', [
    'name1',
    'name2',
    'locale',
    'platform',
    'block_index'])
Hash.format = '<2L2HL'
# Block table entry: location, sizes and storage flags (BF_*) of one file.
Block = namedtuple('Block', [
    'file_pos',
    'comp_size',
    'uncomp_size',
    'flags'])
Block.format = '<4L'
# Patch info block preceding the data of an incremental-patch file.
PatchInfo = namedtuple('PatchInfo', [
    'length',
    'flags',
    'uncomp_size',
    'md5'])
PatchInfo.format = '<3L16s'
# Patch file header with the 'MD5_' and 'XFRM' sub-blocks flattened in.
PatchHeader = namedtuple('PatchHeader', [
    'header_magic',
    'size',
    'size_before_patch',
    'size_after_patch',
    'md5',
    'md5_block_size',
    'md5_before_patch',
    'md5_after_patch',
    'xfrm_magic',
    'xfrm_block_size',
    'type'])
PatchHeader.format = '<4s3L4sL16s16s4sL4s'
# Combined per-file record (hash, block and attribute data together).
FileEntry = namedtuple('FileEntry', [
    'byte_offset',
    'file_time',
    'bet_hash',
    'hash_index',
    'het_index',
    'file_size',
    'comp_size',
    'flags',
    'locale',
    'platform',
    'crc32',
    'md5'])
FileEntry.format = '<3Q5L2HL16s'
# Common header shared by the extended (HET/BET) tables.
ExtTable = namedtuple('ExtTable', [
    'magic',
    'version',
    'size'])
ExtTable.format = '<4s2L'
# File bitmap header ('ptv3'); presumably tracks which raw chunks of the
# archive are present -- TODO confirm against the MPQ spec.
Bitmap = namedtuple('Bitmap', [
    'magic',
    'unknown',
    'game_build_num',
    'map_offset_lo',
    'map_offset_hi',
    'block_size'])
Bitmap.format = '<4s5L'
# HET (hash entry table) header fields.
HashEntryTable = namedtuple('HashEntryTable', [
    'and_mask',
    'or_mask',
    'index_size_total',
    'index_size_extra',
    'index_size',
    'file_num',
    'hash_table_size',
    'hash_bit_size'])
HashEntryTable.format = '<2Q6L'
# BET (block entry table) header fields: bit offsets/widths of the packed
# columns in the table body.
BlockEntryTable = namedtuple('BlockEntryTable', [
    'table_entry_size',
    'bit_index_file_pos',
    'bit_index_file_size',
    'bit_index_comp_size',
    'bit_index_flag_index',
    'bit_index_unknown',
    'bit_count_file_pos',
    'bit_count_file_size',
    'bit_count_comp_size',
    'bit_count_flag_index',
    'bit_count_unknown',
    'bet_hash_size_total',
    'bet_hash_size_extra',
    'bet_hash_size',
    'file_num',
    'flag_num'])
BlockEntryTable.format = '<16L'
| Schala/format-scripts | mpq.py | Python | gpl-3.0 | 5,804 |
import os
import sys
# Make the bundled etc/ and lib/ directories importable, resolved relative
# to this file's location (the "./" segments are harmless in os.path.join).
sys.path.append(os.path.join(os.path.dirname(__file__), "./etc/"))
sys.path.append(os.path.join(os.path.dirname(__file__), "./lib/"))
# WSGI servers look for a module-level callable named "application".
from sheepsense.webapp import app as application
# Allow running directly; app.run() presumably starts a Flask-style dev
# server -- TODO confirm against sheepsense.webapp.
if __name__ == "__main__":
    application.run()
| basbloemsaat/sheepsense_v2 | wsgi.py | Python | gpl-3.0 | 255 |
# vim: ts=4:sw=4:expandtab
# BleachBit
# Copyright (C) 2008-2021 Andrew Ziem
# https://www.bleachbit.org
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Test case for module Worker
"""
from tests import TestCleaner, common
from bleachbit import CLI, Command
from bleachbit.Action import ActionProvider
from bleachbit.Worker import *
import os
import tempfile
import unittest
class AccessDeniedActionAction(ActionProvider):
    """Test action: one command that raises EACCES, then a real delete."""
    action_key = 'access.denied'
    def __init__(self, action_element):
        self.pathname = action_element.getAttribute('path')
    def get_commands(self):
        def accessdenied():
            import errno
            raise OSError(errno.EACCES, 'Permission denied: /foo/bar')
        # The access-denied failure must not stop the worker.
        yield Command.Function(None, accessdenied, 'Test access denied')
        # Deleting the real file must then succeed.
        yield Command.Delete(self.pathname)
class DoesNotExistAction(ActionProvider):
    """Test action: delete a missing path first, then a real file."""
    action_key = 'does.not.exist'
    def __init__(self, action_element):
        self.pathname = action_element.getAttribute('path')
    def get_commands(self):
        # The non-existent path fails; the real file must still be deleted.
        for target in ("doesnotexist", self.pathname):
            yield Command.Delete(target)
class FunctionGeneratorAction(ActionProvider):
    """Test action: a generator-style function command plus a real delete."""
    action_key = 'function.generator'
    def __init__(self, action_element):
        self.pathname = action_element.getAttribute('path')
    def get_commands(self):
        # Path-less function command whose generator yields a byte count.
        def funcgenerator():
            yield 10
        yield Command.Function(None, funcgenerator, 'funcgenerator')
        # Deleting the real file must succeed.
        yield Command.Delete(self.pathname)
class FunctionPathAction(ActionProvider):
    """Test action: a function command bound to a path, then a delete."""
    action_key = 'function.path'
    def __init__(self, action_element):
        self.pathname = action_element.getAttribute('path')
    def get_commands(self):
        # No-op callable; the worker sizes the path, so it must exist.
        def pathfunc(path):
            pass
        yield Command.Function(self.pathname, pathfunc, 'pathfunc')
        # Deleting the real file must succeed.
        yield Command.Delete(self.pathname)
class InvalidEncodingAction(ActionProvider):
    """Test action: delete a file whose name has a problematic suffix."""
    action_key = 'invalid.encoding'
    def __init__(self, action_element):
        self.pathname = action_element.getAttribute('path')
    def get_commands(self):
        # Create a scratch file with non-ASCII characters in its name.
        handle, scratch_path = tempfile.mkstemp('invalid-encoding-\xe4\xf6\xfc~')
        os.close(handle)
        yield Command.Delete(scratch_path)
        # Deleting the real file must succeed.
        yield Command.Delete(self.pathname)
class FunctionPlainAction(ActionProvider):
    """Test action: a plain function command, then a real delete."""
    action_key = 'function.plain'
    def __init__(self, action_element):
        self.pathname = action_element.getAttribute('path')
    def get_commands(self):
        # Path-less function returning a plain integer byte count.
        def intfunc():
            return int(5)
        yield Command.Function(None, intfunc, 'intfunc')
        # Deleting the real file must succeed.
        yield Command.Delete(self.pathname)
class LockedAction(ActionProvider):
    """Test action: exercise deleting a file held open (Windows locking)."""
    action_key = 'locked'
    def __init__(self, action_element):
        self.pathname = action_element.getAttribute('path')
    def get_commands(self):
        # Open the file with a non-exclusive lock, so the file should
        # be truncated and marked for deletion. This is checked just
        # on Windows.
        fd = os.open(self.pathname, os.O_RDWR)
        from bleachbit.FileUtilities import getsize
        # Without admin privileges, this delete fails.
        yield Command.Delete(self.pathname)
        # The file must survive the first (locked) delete attempt.
        assert(os.path.exists(self.pathname))
        fsize = getsize(self.pathname)
        if not fsize == 3:  # Contents is "123"
            raise RuntimeError('Locked file has size %dB (not 3B)' % fsize)
        os.close(fd)
        # Now that the file is not locked, admin privileges
        # are not required to delete it.
        yield Command.Delete(self.pathname)
class RuntimeErrorAction(ActionProvider):
    """Test action: one command raising RuntimeError, then a real delete."""
    action_key = 'runtime'
    def __init__(self, action_element):
        self.pathname = action_element.getAttribute('path')
    def get_commands(self):
        def runtime():
            raise RuntimeError('This is a test exception')
        # The runtime failure must not stop the worker.
        yield Command.Function(None, runtime, 'Test runtime exception')
        # Deleting the real file must then succeed.
        yield Command.Delete(self.pathname)
class TruncateTestAction(ActionProvider):
    """Test action: truncate a real file, then delete it."""
    action_key = 'truncate.test'
    def __init__(self, action_element):
        self.pathname = action_element.getAttribute('path')
    def get_commands(self):
        target = self.pathname
        # First empty the file in place, then remove it entirely.
        yield Command.Truncate(target)
        yield Command.Delete(target)
class WorkerTestCase(common.BleachbitTestCase):
"""Test case for module Worker"""
def action_test_helper(self, command, special_expected, errors_expected,
bytes_expected_posix, count_deleted_posix,
bytes_expected_nt, count_deleted_nt):
ui = CLI.CliCallback()
(fd, filename) = tempfile.mkstemp(
prefix='bleachbit-test-worker', dir=self.tempdir)
os.write(fd, b'123')
os.close(fd)
self.assertExists(filename)
astr = '<action command="%s" path="%s"/>' % (command, filename)
cleaner = TestCleaner.action_to_cleaner(astr)
backends['test'] = cleaner
operations = {'test': ['option1']}
worker = Worker(ui, True, operations)
run = worker.run()
while next(run):
pass
del backends['test']
self.assertNotExists(filename, "Path still exists '%s'" % filename)
self.assertEqual(worker.total_special, special_expected,
'For command %s expecting %s special operations but observed %d'
% (command, special_expected, worker.total_special))
self.assertEqual(worker.total_errors, errors_expected,
'For command %s expecting %d errors but observed %d'
% (command, errors_expected, worker.total_errors))
if 'posix' == os.name:
self.assertEqual(worker.total_bytes, bytes_expected_posix)
self.assertEqual(worker.total_deleted, count_deleted_posix)
elif 'nt' == os.name:
self.assertEqual(worker.total_bytes, bytes_expected_nt)
self.assertEqual(worker.total_deleted, count_deleted_nt)
def test_AccessDenied(self):
"""Test Worker using Action.AccessDeniedAction"""
self.action_test_helper('access.denied', 0, 1, 4096, 1, 3, 1)
def test_DoesNotExist(self):
"""Test Worker using Action.DoesNotExistAction"""
self.action_test_helper('does.not.exist', 0, 1, 4096, 1, 3, 1)
def test_FunctionGenerator(self):
"""Test Worker using Action.FunctionGenerator"""
self.action_test_helper('function.generator', 1,
0, 4096 + 10, 1, 3 + 10, 1)
def test_FunctionPath(self):
"""Test Worker using Action.FunctionPathAction"""
self.action_test_helper('function.path', 1, 0, 4096, 1, 3, 1)
def test_FunctionPlain(self):
"""Test Worker using Action.FunctionPlainAction"""
self.action_test_helper('function.plain', 1, 0, 4096 + 5, 1, 3 + 5, 1)
def test_InvalidEncoding(self):
"""Test Worker using Action.InvalidEncodingAction"""
self.action_test_helper('invalid.encoding', 0, 0, 4096, 2, 3, 2)
@common.skipUnlessWindows
def test_Locked(self):
"""Test Worker using Action.LockedAction"""
from win32com.shell import shell
if shell.IsUserAnAdmin():
# If an admin, the first attempt will mark for delete (3 bytes),
# and the second attempt will actually delete it (3 bytes).
errors_expected = 0
bytes_expected = 3 + 3
total_deleted = 2
else:
# If not an admin, the first attempt will fail, and the second wil succeed.
errors_expected = 1
bytes_expected = 3
total_deleted = 1
self.action_test_helper(
'locked', 0, errors_expected, None, None, bytes_expected, total_deleted)
    def test_RuntimeError(self):
        """Test Worker using Action.RuntimeErrorAction
        The Worker module handles these differently than
        access denied exceptions
        """
        # Still expects exactly one recorded error and one deletion.
        self.action_test_helper('runtime', 0, 1, 4096, 1, 3, 1)
    def test_Truncate(self):
        """Test Worker using Action.TruncateTestAction
        """
        # Truncation counts as two deletions with no errors.
        self.action_test_helper('truncate.test', 0, 0, 4096, 2, 3, 2)
    def test_deep_scan(self):
        """Test for deep scan"""
        # load cleaners from XML
        import bleachbit.CleanerML
        list(bleachbit.CleanerML.load_cleaners())
        # DeepScan itself is tested elsewhere, so replace it here
        # (monkeypatch the class, restore it at the end of the test).
        import bleachbit.DeepScan
        SaveDeepScan = bleachbit.DeepScan.DeepScan
        self.scanned = 0
        # Capture the TestCase instance for use inside the stub class.
        parent = self
        class MyDeepScan:
            # Stub that validates the search arguments instead of scanning.
            def __init__(self, searches):
                # NOTE(review): the loop variable `searches` shadows the
                # parameter of the same name -- intentional? confirm.
                for (path, searches) in searches.items():
                    parent.assertEqual(path, os.path.expanduser('~'))
                    for s in searches:
                        parent.assertIn(
                            s.regex, ['^Thumbs\\.db$', '^Thumbs\\.db:encryptable$'])
            def scan(self):
                # Count invocations so the test can assert exactly one scan.
                parent.scanned += 1
                yield True
        bleachbit.DeepScan.DeepScan = MyDeepScan
        # test
        operations = {'deepscan': ['thumbs_db']}
        ui = CLI.CliCallback()
        worker = Worker(ui, False, operations).run()
        # Drive the worker generator until it yields a falsy value.
        while next(worker):
            pass
        self.assertEqual(1, self.scanned)
        # clean up
        bleachbit.DeepScan.DeepScan = SaveDeepScan
def test_multiple_options(self):
"""Test one cleaner with two options"""
ui = CLI.CliCallback()
filename1 = self.mkstemp(prefix='bleachbit-test-worker')
filename2 = self.mkstemp(prefix='bleachbit-test-worker')
astr1 = '<action command="delete" search="file" path="%s"/>' % filename1
astr2 = '<action command="delete" search="file" path="%s"/>' % filename2
cleaner = TestCleaner.actions_to_cleaner([astr1, astr2])
backends['test'] = cleaner
operations = {'test': ['option1', 'option2']}
worker = Worker(ui, True, operations)
run = worker.run()
while next(run):
pass
del backends['test']
self.assertNotExists(filename1)
self.assertNotExists(filename2)
self.assertEqual(worker.total_special, 0)
self.assertEqual(worker.total_errors, 0)
self.assertEqual(worker.total_deleted, 2)
| bleachbit/bleachbit | tests/TestWorker.py | Python | gpl-3.0 | 11,675 |
from setuptools import setup, find_packages
# Read the package version without importing the package itself
# (avoids importing swk dependencies at build time).
version = {}
with open('swk_casp/version.py') as f:
    exec(f.read(), version)
url = 'https://github.com/trueneu/swiss-knife'
# Packaging metadata for the swk_casp plugin; registered with the base
# `swk` program through the 'swk_plugin' entry-point group below.
setup(name='swk_casp',
      version=version['__version__'],
      packages=find_packages(),
      install_requires=[
          'swk>=0.0.4a4',
          'requests>=2.9.1'
      ],
      description='Plugin for swk, enabling casp api',
      long_description='This is not a standalone program nor a library.'
                       ' You should use it in conjunction with base program, swk.'
                       ' For more information, please refer to documentation that can be found at {0}'.format(url),
      author="Pavel Gurkov",
      author_email="[email protected]",
      url=url,
      license='GPLv3',
      platforms='Posix; MacOS X',
      classifiers=[
          'Development Status :: 3 - Alpha',
          'Environment :: Console',
          'Intended Audience :: Developers',
          'Intended Audience :: System Administrators',
          'Intended Audience :: End Users/Desktop',
          'Programming Language :: Python',
          'Programming Language :: Python :: 2',
          'Programming Language :: Python :: 2.7',
          'Programming Language :: Python :: 3',
          'Programming Language :: Python :: 3.5',
          'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)',
          'Topic :: System :: Systems Administration',
          'Topic :: System :: Shells',
          'Topic :: Utilities'
      ],
      keywords='cli swiss-knife sysadmin casp',
      entry_points={
          'swk_plugin': [
              'swk_casp = swk_casp.swk_casp:main'
          ],
      },
      )
| trueneu/swiss-knife | swk_plugins/swk_casp/setup.py | Python | gpl-3.0 | 1,746 |
"""Experiment to test the correction of calculation of income from
agriculture and ecosystem services.
This experiment tests the influence of prefactors in income
calculation (r_bca and r_es) for two scenarios:
1) for income calculated as **mean** over income from cells
2) for income calculated as **sum** over income from cells
Explanation
-----------
Previously, the income from agriculture and ecosystem services for each city
was calculated as the mean of the income from the cells which it had under its
control.
This does not make sense, since the actual harvest is not the mean of the
harvest of different places, but obviously the sum of the harvest from
different places.
Therefore, I changed the calculation of these sources of income to calculate
the sum over different cells.
Then, to get reasonable results, one has to adjust the prefactors in the
calculation of total income, since they have been tailored to reproduce
stylized facts before (and therefore must be taylored to do so again, just
differently)
"""
from __future__ import print_function
try:
import cPickle as cp
except ImportError:
import pickle as cp
import getpass
import itertools as it
import numpy as np
import sys
import pandas as pd
from pymofa.experiment_handling import experiment_handling as eh
from mayasim.model.ModelCore import ModelCore as Model
from mayasim.model.ModelParameters import ModelParameters as Parameters
test = True
def run_function(r_bca=0.2, r_eco=0.0002, population_control=False,
                 N=30, crop_income_mode='sum',
                 kill_cropless=True, steps=350, filename='./'):
    """
    Set up the Model for different Parameters and determine
    which parts of the output are saved where.
    Output is saved in pickled dictionaries including the
    initial values and Parameters, as well as the time
    development of aggregated variables for each run.
    Parameters:
    -----------
    r_bca : float > 0
        the pre factor for income from agriculture
    r_eco : float > 0
        the pre factor for income from ecosystem services
    population_control : boolean
        determines whether the population grows
        unbounded or if population growth decreases
        with income per capita and population density.
    N : int > 0
        initial number of settlements on the map
    crop_income_mode : string
        defines the mode of crop income calculation.
        possible values are 'sum' and 'mean'
    kill_cropless: bool
        Switch to determine whether or not to kill cities
        without cropped cells.
    steps : int > 0
        number of model time steps to run (ignored in test mode,
        where only one step is run)
    filename: string
        path to save the results to.
    Returns:
    --------
    int
        1 on success, -1 if the result file could not be written.
    """
    # initialize the Model
    m = Model(N, output_data_location=filename, debug=test)
    # Only the first sample (s0) keeps the expensive geographic/settlement output.
    if not filename.endswith('s0.pkl'):
        m.output_geographic_data = False
        m.output_settlement_data = False
    m.population_control = population_control
    m.crop_income_mode = crop_income_mode
    m.r_bca_sum = r_bca
    m.r_es_sum = r_eco
    m.kill_cities_without_crops = kill_cropless
    # store initial conditions and Parameters
    res = {}
    res["initials"] = pd.DataFrame({"Settlement X Possitions":
                                    m.settlement_positions[0],
                                    "Settlement Y Possitions":
                                    m.settlement_positions[1],
                                    "Population": m.population})
    # NOTE(review): `not callable(key)` is always True because `key` is a
    # string; the intent was probably `not callable(getattr(Parameters, key))`
    # to exclude methods -- confirm before changing, as it alters saved data.
    res["Parameters"] = pd.Series({key:
                                   getattr(m, key)
                                   for key in dir(Parameters)
                                   if not key.startswith('__')
                                   and not callable(key)})
    # run Model
    if test:
        m.run(1)
    else:
        m.run(steps)
    # Retrieve results
    res["trajectory"] = m.get_trajectory()
    res["traders trajectory"] = m.get_traders_trajectory()
    try:
        with open(filename, 'wb') as dumpfile:
            cp.dump(res, dumpfile)
        return 1
    except IOError:
        # Signal failure to the experiment handler rather than raising.
        return -1
def run_experiment(argv):
    """
    Take argv input variables and run sub_experiment accordingly.
    This happens in five steps:
    1)  parse input arguments to set switches
        for [test],
    2)  set output folders according to switches,
    3)  generate parameter combinations,
    4)  define names and dictionaries of callables to apply to sub_experiment
        data for post processing,
    5)  run computation and/or post processing and/or plotting
        depending on execution on cluster or locally or depending on
        experimentation mode.
    Parameters
    ----------
    argv: list[N]
        List of parameters from terminal input
    Returns
    -------
    rt: int
        some return value to show whether sub_experiment succeeded
        return 1 if successful.
    """
    global test
    # Parse switches from input
    if len(argv) > 1:
        test = int(argv[1])
    # Generate paths according to switches and user name
    test_folder = ['', 'test_output/'][int(test)]
    experiment_folder = 'X2_eco_income/'
    raw = 'raw_data/'
    res = 'results/'
    # Hard-coded per-user paths for the cluster ("kolb") and a local
    # machine ("jakob"); everyone else writes into the current directory.
    if getpass.getuser() == "kolb":
        save_path_raw = "/p/tmp/kolb/Mayasim/output_data/{}{}{}".format(
            test_folder, experiment_folder, raw)
        save_path_res = "/home/kolb/Mayasim/output_data/{}{}{}".format(
            test_folder, experiment_folder, res)
    elif getpass.getuser() == "jakob":
        save_path_raw = "/home/jakob/Project_MayaSim/Python/" \
                        "output_data/{}{}{}".format(test_folder,
                                                    experiment_folder, raw)
        save_path_res = "/home/jakob/Project_MayaSim/Python/" \
                        "output_data/{}{}{}".format(test_folder,
                                                    experiment_folder, res)
    else:
        save_path_res = './{}'.format(res)
        save_path_raw = './{}'.format(raw)
    print(save_path_raw)
    # Generate parameter combinations
    # Maps position in each combination tuple to the run_function kwarg name.
    index = {0: "r_bca",
             1: "r_eco",
             2: "kill_cropless"}
    # NOTE(review): these are deliberately two plain `if`s, not `if/elif`:
    # after the first branch sets test=False, `test == 1` is still False,
    # so the logic holds -- but it is fragile; confirm before refactoring.
    if test == 0:
        r_bcas = [0.1, 0.15, 0.2, 0.25, 0.3]
        r_ecos = [0.0001, 0.00015, 0.0002, 0.00025]
        kill_cropless = [True, False]
        test=False
    if test == 1:
        r_bcas = [0.1, 0.3]
        r_ecos = [0.0001, 0.00025]
        kill_cropless = [True, False]
        test=True
    param_combs = list(it.product(r_bcas, r_ecos, kill_cropless))
    sample_size = 10 if not test else 2
    # Define names and callables for post processing
    # NOTE(review): result files are written with pickle (cp.dump);
    # np.load on them presumably relies on pickle support being enabled --
    # verify against the numpy version in use.
    name1 = "trajectory"
    estimators1 = {"mean_trajectories":
                   lambda fnames:
                   pd.concat([np.load(f)["trajectory"]
                              for f in fnames]).groupby(level=0).mean(),
                   "sigma_trajectories":
                   lambda fnames:
                   pd.concat([np.load(f)["trajectory"]
                              for f in fnames]).groupby(level=0).std()
                   }
    name2 = "traders_trajectory"
    estimators2 = {
                   "mean_trajectories":
                   lambda fnames:
                       pd.concat([np.load(f)["traders trajectory"]
                                  for f in fnames]).groupby(
                           level=0).mean(),
                   "sigma_trajectories":
                   lambda fnames:
                       pd.concat([np.load(f)["traders trajectory"]
                                  for f in fnames]).groupby(
                           level=0).std()
                   }
    # Run computation and post processing.
    if test:
        print('testing {}'.format(experiment_folder))
    handle = eh(sample_size=sample_size,
                parameter_combinations=param_combs,
                index=index,
                path_raw=save_path_raw,
                path_res=save_path_res,
                use_kwargs=True)
    handle.compute(run_func=run_function)
    handle.resave(eva=estimators1, name=name1)
    handle.resave(eva=estimators2, name=name2)
    if test:
        # Smoke-check the post-processed output in test mode.
        data = pd.read_pickle(save_path_res + name1)
        print(data.head())
        data = pd.read_pickle(save_path_res + name2)
        print(data.head())
    return 1
# Command-line entry point.
if __name__ == '__main__':
    run_experiment(sys.argv)
| jakobkolb/MayaSim | Experiments/mayasim_X2_scan_r_es_and_r_agg.py | Python | gpl-3.0 | 8,304 |
# -*- coding: utf-8 -*-
#
# forms_function.py - Function Flask Forms
#
from flask_babel import lazy_gettext
from flask_wtf import FlaskForm
from wtforms import BooleanField
from wtforms import DecimalField
from wtforms import SelectField
from wtforms import StringField
from wtforms import SubmitField
from wtforms import widgets
from wtforms.widgets import NumberInput
from mycodo.config_translations import TRANSLATIONS
from mycodo.mycodo_flask.utils.utils_general import generate_form_action_list
from mycodo.utils.function_actions import parse_function_action_information
class FunctionAdd(FlaskForm):
    """Form for adding a new Function (choices filled in by the view)."""
    function_type = SelectField()
    function_add = SubmitField(TRANSLATIONS['add']['title'])
class FunctionMod(FlaskForm):
    """Form for modifying an existing Function.

    The action choices are built once at import time from the parsed
    function-action information.
    """
    choices_actions = []
    dict_actions = parse_function_action_information()
    list_actions_sorted = generate_form_action_list(dict_actions)
    for each_action in list_actions_sorted:
        choices_actions.append((each_action, dict_actions[each_action]['name']))
    action_type = SelectField(
        choices=[('', TRANSLATIONS['select_one']['title'])] + choices_actions)
    # Hidden fields carrying the identity of the function being edited.
    function_id = StringField('Function ID', widget=widgets.HiddenInput())
    function_type = StringField('Function Type', widget=widgets.HiddenInput())
    name = StringField(TRANSLATIONS['name']['title'])
    log_level_debug = BooleanField(
        TRANSLATIONS['log_level_debug']['title'])
    add_action = SubmitField(lazy_gettext('Add Action'))
    execute_all_actions = SubmitField(lazy_gettext('Execute All Actions'))
    function_activate = SubmitField(TRANSLATIONS['activate']['title'])
    function_deactivate = SubmitField(TRANSLATIONS['deactivate']['title'])
    function_mod = SubmitField(TRANSLATIONS['save']['title'])
    function_delete = SubmitField(TRANSLATIONS['delete']['title'])
class Actions(FlaskForm):
    """Form for configuring a single Function Action.

    Which fields are rendered depends on the action type; hidden fields
    identify the function and action being edited.
    """
    function_type = StringField(
        'Function Type', widget=widgets.HiddenInput())
    function_id = StringField(
        'Function ID', widget=widgets.HiddenInput())
    function_action_id = StringField(
        'Function Action ID', widget=widgets.HiddenInput())
    pause_duration = DecimalField(
        lazy_gettext('Duration (seconds)'),
        widget=NumberInput(step='any'))
    do_action_string = StringField(lazy_gettext('Action String'))
    do_unique_id = StringField(lazy_gettext('Controller ID'))
    do_output_state = StringField(lazy_gettext('State'))
    do_output_amount = DecimalField(
        lazy_gettext('Amount'),
        widget=NumberInput(step='any'))
    do_output_duration = DecimalField(
        lazy_gettext('Duration (seconds)'),
        widget=NumberInput(step='any'))
    do_output_pwm = DecimalField(
        lazy_gettext('Duty Cycle'),
        widget=NumberInput(step='any'))
    do_output_pwm2 = DecimalField(
        lazy_gettext('Duty Cycle'),
        widget=NumberInput(step='any'))
    do_camera_duration = DecimalField(
        lazy_gettext('Duration (seconds)'),
        widget=NumberInput(step='any'))
    save_action = SubmitField(TRANSLATIONS['save']['title'])
    delete_action = SubmitField(TRANSLATIONS['delete']['title'])
| kizniche/Mycodo | mycodo/mycodo_flask/forms/forms_function.py | Python | gpl-3.0 | 3,125 |
#!/usr/bin/env python3
# encoding: utf-8
# Copyright (C) 2020 Space Science and Engineering Center (SSEC),
# University of Wisconsin-Madison.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This file is part of the polar2grid software package. Polar2grid takes
# satellite observation data, remaps it, and writes it to a file format for
# input into another program.
# Documentation: http://www.ssec.wisc.edu/software/polar2grid/
"""The VIIRS SDR Reader operates on Science Data Record (SDR) HDF5 files from
the Suomi National Polar-orbiting Partnership's (NPP) and/or the NOAA20
Visible/Infrared Imager Radiometer Suite (VIIRS) instrument. The VIIRS
SDR reader ignores filenames and uses internal file content to determine
the type of file being provided, but SDR are typically named as below
and have corresponding geolocation files::
SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5
The VIIRS SDR reader supports all instrument spectral bands, identified as
the products shown below. It supports terrain corrected or non-terrain corrected
navigation files. Geolocation files must be included when specifying filepaths to
readers and ``polar2grid.sh``. The VIIRS reader can be specified to the ``polar2grid.sh`` script
with the reader name ``viirs_sdr``.
This reader's default remapping algorithm is ``ewa`` for Elliptical Weighted
Averaging resampling. The ``--fornav-D`` parameter is set to 40 and the
``--fornav-d`` parameter is set to 2.
+---------------------------+-----------------------------------------------------+
| Product Name | Description |
+===========================+=====================================================+
| i01 | I01 Reflectance Band |
+---------------------------+-----------------------------------------------------+
| i02 | I02 Reflectance Band |
+---------------------------+-----------------------------------------------------+
| i03 | I03 Reflectance Band |
+---------------------------+-----------------------------------------------------+
| i04 | I04 Brightness Temperature Band |
+---------------------------+-----------------------------------------------------+
| i05 | I05 Brightness Temperature Band |
+---------------------------+-----------------------------------------------------+
| i01_rad | I01 Radiance Band |
+---------------------------+-----------------------------------------------------+
| i02_rad | I02 Radiance Band |
+---------------------------+-----------------------------------------------------+
| i03_rad | I03 Radiance Band |
+---------------------------+-----------------------------------------------------+
| i04_rad | I04 Radiance Band |
+---------------------------+-----------------------------------------------------+
| i05_rad | I05 Radiance Band |
+---------------------------+-----------------------------------------------------+
| m01 | M01 Reflectance Band |
+---------------------------+-----------------------------------------------------+
| m02 | M02 Reflectance Band |
+---------------------------+-----------------------------------------------------+
| m03 | M03 Reflectance Band |
+---------------------------+-----------------------------------------------------+
| m04 | M04 Reflectance Band |
+---------------------------+-----------------------------------------------------+
| m05 | M05 Reflectance Band |
+---------------------------+-----------------------------------------------------+
| m06 | M06 Reflectance Band |
+---------------------------+-----------------------------------------------------+
| m07 | M07 Reflectance Band |
+---------------------------+-----------------------------------------------------+
| m08 | M08 Reflectance Band |
+---------------------------+-----------------------------------------------------+
| m09 | M09 Reflectance Band |
+---------------------------+-----------------------------------------------------+
| m10 | M10 Reflectance Band |
+---------------------------+-----------------------------------------------------+
| m11 | M11 Reflectance Band |
+---------------------------+-----------------------------------------------------+
| m12 | M12 Brightness Temperature Band |
+---------------------------+-----------------------------------------------------+
| m13 | M13 Brightness Temperature Band |
+---------------------------+-----------------------------------------------------+
| m14 | M14 Brightness Temperature Band |
+---------------------------+-----------------------------------------------------+
| m15 | M15 Brightness Temperature Band |
+---------------------------+-----------------------------------------------------+
| m16 | M16 Brightness Temperature Band |
+---------------------------+-----------------------------------------------------+
| m01_rad | M01 Radiance Band |
+---------------------------+-----------------------------------------------------+
| m02_rad | M02 Radiance Band |
+---------------------------+-----------------------------------------------------+
| m03_rad | M03 Radiance Band |
+---------------------------+-----------------------------------------------------+
| m04_rad | M04 Radiance Band |
+---------------------------+-----------------------------------------------------+
| m05_rad | M05 Radiance Band |
+---------------------------+-----------------------------------------------------+
| m06_rad | M06 Radiance Band |
+---------------------------+-----------------------------------------------------+
| m07_rad | M07 Radiance Band |
+---------------------------+-----------------------------------------------------+
| m08_rad | M08 Radiance Band |
+---------------------------+-----------------------------------------------------+
| m09_rad | M09 Radiance Band |
+---------------------------+-----------------------------------------------------+
| m10_rad | M10 Radiance Band |
+---------------------------+-----------------------------------------------------+
| m11_rad | M11 Radiance Band |
+---------------------------+-----------------------------------------------------+
| m12_rad | M12 Radiance Band |
+---------------------------+-----------------------------------------------------+
| m13_rad | M13 Radiance Band |
+---------------------------+-----------------------------------------------------+
| m14_rad | M14 Radiance Band |
+---------------------------+-----------------------------------------------------+
| m15_rad | M15 Radiance Band |
+---------------------------+-----------------------------------------------------+
| m16_rad | M16 Radiance Band |
+---------------------------+-----------------------------------------------------+
| dnb | Raw DNB Band (not useful for images) |
+---------------------------+-----------------------------------------------------+
| histogram_dnb | Histogram Equalized DNB Band |
+---------------------------+-----------------------------------------------------+
| adaptive_dnb | Adaptive Histogram Equalized DNB Band |
+---------------------------+-----------------------------------------------------+
| dynamic_dnb | Dynamic DNB Band from Steve Miller and |
| | Curtis Seaman. Uses erf to scale the data. |
+---------------------------+-----------------------------------------------------+
| hncc_dnb | Simplified High and Near-Constant Contrast |
| | Approach from Stephan Zinke |
+---------------------------+-----------------------------------------------------+
| ifog | Temperature difference between I05 and I04 |
+---------------------------+-----------------------------------------------------+
| i_solar_zenith_angle | I Band Solar Zenith Angle |
+---------------------------+-----------------------------------------------------+
| i_solar_azimuth_angle | I Band Solar Azimuth Angle |
+---------------------------+-----------------------------------------------------+
| i_sat_zenith_angle | I Band Satellite Zenith Angle |
+---------------------------+-----------------------------------------------------+
| i_sat_azimuth_angle | I Band Satellite Azimuth Angle |
+---------------------------+-----------------------------------------------------+
| m_solar_zenith_angle | M Band Solar Zenith Angle |
+---------------------------+-----------------------------------------------------+
| m_solar_azimuth_angle | M Band Solar Azimuth Angle |
+---------------------------+-----------------------------------------------------+
| m_sat_zenith_angle | M Band Satellite Zenith Angle |
+---------------------------+-----------------------------------------------------+
| m_sat_azimuth_angle | M Band Satellite Azimuth Angle |
+---------------------------+-----------------------------------------------------+
| dnb_solar_zenith_angle | DNB Band Solar Zenith Angle |
+---------------------------+-----------------------------------------------------+
| dnb_solar_azimuth_angle | DNB Band Solar Azimuth Angle |
+---------------------------+-----------------------------------------------------+
| dnb_sat_zenith_angle | DNB Band Satellite Zenith Angle |
+---------------------------+-----------------------------------------------------+
| dnb_sat_azimuth_angle | DNB Band Satellite Azimuth Angle |
+---------------------------+-----------------------------------------------------+
| dnb_lunar_zenith_angle | DNB Band Lunar Zenith Angle |
+---------------------------+-----------------------------------------------------+
| dnb_lunar_azimuth_angle | DNB Band Lunar Azimuth Angle |
+---------------------------+-----------------------------------------------------+
| true_color | Ratio sharpened rayleigh corrected true color |
+---------------------------+-----------------------------------------------------+
| false_color | Ratio sharpened rayleigh corrected false color |
+---------------------------+-----------------------------------------------------+
"""
from __future__ import annotations
from argparse import ArgumentParser, _ArgumentGroup
from typing import Optional
from satpy import DataQuery, Scene
from polar2grid.core.script_utils import ExtendConstAction
from ._base import ReaderProxyBase
# Satpy names for the five VIIRS imagery-resolution (I) bands.
I_PRODUCTS = [
    "I01",
    "I02",
    "I03",
    "I04",
    "I05",
]
# Polar2Grid names for I-band geolocation angle products.
I_ANGLE_PRODUCTS = [
    "i_solar_zenith_angle",
    "i_solar_azimuth_angle",
    "i_sat_zenith_angle",
    "i_sat_azimuth_angle",
]
# Satpy names for the sixteen moderate-resolution (M) bands.
M_PRODUCTS = [
    "M01",
    "M02",
    "M03",
    "M04",
    "M05",
    "M06",
    "M07",
    "M08",
    "M09",
    "M10",
    "M11",
    "M12",
    "M13",
    "M14",
    "M15",
    "M16",
]
# Polar2Grid names for M-band geolocation angle products.
M_ANGLE_PRODUCTS = [
    "m_solar_zenith_angle",
    "m_solar_azimuth_angle",
    "m_sat_zenith_angle",
    "m_sat_azimuth_angle",
]
# Day/Night Band derivatives (first entry is excluded from defaults below).
DNB_PRODUCTS = [
    "histogram_dnb",
    "adaptive_dnb",
    "dynamic_dnb",
    "hncc_dnb",
]
# DNB geolocation angle products, including lunar angles.
DNB_ANGLE_PRODUCTS = [
    "dnb_solar_zenith_angle",
    "dnb_solar_azimuth_angle",
    "dnb_sat_zenith_angle",
    "dnb_sat_azimuth_angle",
    "dnb_lunar_zenith_angle",
    "dnb_lunar_azimuth_angle",
]
# RGB composite product names.
TRUE_COLOR_PRODUCTS = ["true_color"]
FALSE_COLOR_PRODUCTS = ["false_color"]
# Other derived composites.
OTHER_COMPS = [
    "ifog",
]
# Mapping of Polar2Grid product name -> Satpy name or DataQuery;
# populated below and by _process_legacy_and_rad_products().
PRODUCT_ALIASES = {}
def _process_legacy_and_rad_products(satpy_names, band_aliases, rad_aliases):
    """Map all lowercase band names to uppercase names and add radiance product.

    For each Satpy band name (uppercase), register a lowercase alias in the
    module-level ``PRODUCT_ALIASES`` table and append it to *band_aliases*;
    also register a ``<band>_rad`` alias resolving to the radiance
    calibration of the same band and append it to *rad_aliases*.
    """
    for satpy_name in satpy_names:
        # Polar2Grid product names are lowercase; Satpy's are uppercase.
        p2g_name = satpy_name.lower()
        PRODUCT_ALIASES[p2g_name] = satpy_name
        band_aliases.append(p2g_name)
        # Matching radiance product for each M and I band.
        rad_alias = f"{p2g_name}_rad"
        PRODUCT_ALIASES[rad_alias] = DataQuery(name=satpy_name, calibration="radiance")
        rad_aliases.append(rad_alias)
# Lowercase aliases and radiance product names, filled in for I and M bands.
I_ALIASES = []
I_RAD_PRODUCTS = []
_process_legacy_and_rad_products(I_PRODUCTS, I_ALIASES, I_RAD_PRODUCTS)
M_ALIASES = []
M_RAD_PRODUCTS = []
_process_legacy_and_rad_products(M_PRODUCTS, M_ALIASES, M_RAD_PRODUCTS)
# CREFL component products loaded individually for AWIPS composites.
_AWIPS_TRUE_COLOR = ["viirs_crefl08", "viirs_crefl04", "viirs_crefl03"]
_AWIPS_FALSE_COLOR = ["viirs_crefl07", "viirs_crefl09", "viirs_crefl08"]
# Angle-product aliases; M bands are 742 m resolution, I bands 371 m.
PRODUCT_ALIASES["dnb_solar_zenith_angle"] = DataQuery(name="dnb_solar_zenith_angle")
PRODUCT_ALIASES["dnb_solar_azimuth_angle"] = DataQuery(name="dnb_solar_azimuth_angle")
PRODUCT_ALIASES["dnb_sat_zenith_angle"] = DataQuery(name="dnb_satellite_zenith_angle")
PRODUCT_ALIASES["dnb_sat_azimuth_angle"] = DataQuery(name="dnb_satellite_azimuth_angle")
PRODUCT_ALIASES["dnb_lunar_zenith_angle"] = DataQuery(name="dnb_lunar_zenith_angle")
PRODUCT_ALIASES["dnb_lunar_azimuth_angle"] = DataQuery(name="dnb_lunar_azimuth_angle")
PRODUCT_ALIASES["m_solar_zenith_angle"] = DataQuery(name="solar_zenith_angle", resolution=742)
PRODUCT_ALIASES["m_solar_azimuth_angle"] = DataQuery(name="solar_azimuth_angle", resolution=742)
PRODUCT_ALIASES["m_sat_zenith_angle"] = DataQuery(name="satellite_zenith_angle", resolution=742)
PRODUCT_ALIASES["m_sat_azimuth_angle"] = DataQuery(name="satellite_azimuth_angle", resolution=742)
PRODUCT_ALIASES["i_solar_zenith_angle"] = DataQuery(name="solar_zenith_angle", resolution=371)
PRODUCT_ALIASES["i_solar_azimuth_angle"] = DataQuery(name="solar_azimuth_angle", resolution=371)
PRODUCT_ALIASES["i_sat_zenith_angle"] = DataQuery(name="satellite_zenith_angle", resolution=371)
PRODUCT_ALIASES["i_sat_azimuth_angle"] = DataQuery(name="satellite_azimuth_angle", resolution=371)
# Products loaded when the user specifies none (raw DNB is excluded via [1:]).
DEFAULT_PRODUCTS = I_ALIASES + M_ALIASES + DNB_PRODUCTS[1:] + TRUE_COLOR_PRODUCTS + FALSE_COLOR_PRODUCTS + OTHER_COMPS
# All Polar2Grid products this reader can load.
P2G_PRODUCTS = I_ALIASES + M_ALIASES + DNB_PRODUCTS + I_RAD_PRODUCTS + M_RAD_PRODUCTS
P2G_PRODUCTS += I_ANGLE_PRODUCTS + M_ANGLE_PRODUCTS + DNB_ANGLE_PRODUCTS + OTHER_COMPS
P2G_PRODUCTS += TRUE_COLOR_PRODUCTS + FALSE_COLOR_PRODUCTS
# Day/night filtering: products are kept only when their standard_name
# matches the scene's lighting condition.
FILTERS = {
    "day_only": {
        "standard_name": [
            "toa_bidirectional_reflectance",
            "true_color",
            "false_color",
            "natural_color",
            "corrected_reflectance",
        ],
    },
    "night_only": {
        "standard_name": ["temperature_difference"],
    },
}
class ReaderProxy(ReaderProxyBase):
    """Provide Polar2Grid-specific information about this reader's products."""
    is_polar2grid_reader = True
    def __init__(self, scn: Scene, user_products: list[str]):
        self.scn = scn
        # Copy so the saturation-correction tweak below never mutates the
        # module-level PRODUCT_ALIASES table.
        self._modified_aliases = PRODUCT_ALIASES.copy()
        if "dynamic_dnb_saturation" in user_products:
            # they specified --dnb-saturation-correction
            # let's modify the aliases so dynamic_dnb points to this product
            user_products.remove("dynamic_dnb_saturation")
            user_products.append("dynamic_dnb")
            self._modified_aliases["dynamic_dnb"] = DataQuery(name="dynamic_dnb_saturation")
        self._orig_user_products = user_products
    def get_default_products(self) -> list[str]:
        """Get products to load if the user hasn't specified any others."""
        return DEFAULT_PRODUCTS
    def get_all_products(self):
        """Get all polar2grid products that could be loaded."""
        return P2G_PRODUCTS
    @property
    def _aliases(self):
        # Alias table used by the base class to resolve product names.
        return self._modified_aliases
def add_reader_argument_groups(
    parser: ArgumentParser, group: Optional[_ArgumentGroup] = None
) -> tuple[Optional[_ArgumentGroup], Optional[_ArgumentGroup]]:
    """Add reader-specific command line arguments to an existing argument parser.
    If ``group`` is provided then arguments are added to this group. If not,
    a new group is added to the parser and arguments added to this new group.
    Returns a two-tuple of (reader group, None); the second slot exists
    for readers that also expose a "load" argument group.
    """
    if group is None:
        group = parser.add_argument_group(title="VIIRS SDR Reader")
    # Each flag appends a predefined product list to the shared
    # ``products`` destination via ExtendConstAction.
    group.add_argument(
        "--i-bands",
        dest="products",
        action=ExtendConstAction,
        const=I_ALIASES,
        help="Add all I-band raw products to list of products",
    )
    group.add_argument(
        "--m-bands",
        dest="products",
        action=ExtendConstAction,
        const=M_ALIASES,
        help="Add all M-band raw products to list of products",
    )
    group.add_argument(
        "--dnb-angle-products",
        dest="products",
        action=ExtendConstAction,
        const=DNB_ANGLE_PRODUCTS,
        help="Add DNB-band geolocation 'angle' products to list of products",
    )
    group.add_argument(
        "--dnb-saturation-correction",
        dest="products",
        action=ExtendConstAction,
        const=["dynamic_dnb_saturation"],
        help="Enable dynamic DNB saturation correction (normally used for aurora scenes)",
    )
    group.add_argument(
        "--i-angle-products",
        dest="products",
        action=ExtendConstAction,
        const=I_ANGLE_PRODUCTS,
        help="Add I-band geolocation 'angle' products to list of products",
    )
    group.add_argument(
        "--m-angle-products",
        dest="products",
        action=ExtendConstAction,
        const=M_ANGLE_PRODUCTS,
        help="Add M-band geolocation 'angle' products to list of products",
    )
    group.add_argument(
        "--m-rad-products",
        dest="products",
        action=ExtendConstAction,
        const=M_RAD_PRODUCTS,
        help="Add M-band geolocation radiance products to list of products",
    )
    group.add_argument(
        "--i-rad-products",
        dest="products",
        action=ExtendConstAction,
        const=I_RAD_PRODUCTS,
        help="Add I-band geolocation radiance products to list of products",
    )
    group.add_argument(
        "--awips-true-color",
        dest="products",
        action=ExtendConstAction,
        const=_AWIPS_TRUE_COLOR,
        help="Add individual CREFL corrected products to create " "the 'true_color' composite in AWIPS.",
    )
    group.add_argument(
        "--awips-false-color",
        dest="products",
        action=ExtendConstAction,
        const=_AWIPS_FALSE_COLOR,
        help="Add individual CREFL corrected products to create " "the 'false_color' composite in AWIPS.",
    )
    return group, None
| davidh-ssec/polar2grid | polar2grid/readers/viirs_sdr.py | Python | gpl-3.0 | 21,308 |
# -*- coding: utf-8 -*-
from django.db.models import Q
from django.conf import settings
from django.contrib.auth.models import Permission, Group
from django.utils.html import escape
from ajax_select import register, LookupChannel
from .models import (
Attribute,
ServerAttribute,
Package,
Property,
DeviceLogical,
Computer,
UserProfile,
)
@register('user_profile')
class UserProfileLookup(LookupChannel):
    """ajax_select lookup channel for ``UserProfile`` records.

    Matches the query against username, first name and last name.
    """
    model = UserProfile
    def can_add(self, user, model):
        # User profiles are never created from the autocomplete widget.
        return False
    def get_query(self, q, request):
        return self.model.objects.filter(
            Q(username__icontains=q) | Q(first_name__icontains=q) | Q(last_name__icontains=q)
        ).order_by('username')
    def format_item_display(self, obj):
        # HTML shown in the "deck" of selected items.
        if obj.first_name or obj.last_name:
            return '{} ({})'.format(
                obj.link(),
                ' '.join(filter(None, [obj.first_name, obj.last_name]))
            )
        return obj.link()
    def format_match(self, obj):
        # Escaped plain text shown in the dropdown.  Uses str() formatting
        # (consistent with format_item_display) instead of calling the
        # __str__ dunder directly with %-formatting.
        return escape('{} ({})'.format(obj, ' '.join(filter(None, [obj.first_name, obj.last_name]))))
@register('domain_admin')
class DomainAdminLookup(UserProfileLookup):
    """Same lookup as UserProfileLookup, restricted to "Domain Admin" members."""
    def get_query(self, q, request):
        # Raises Group.DoesNotExist if the group is missing -- presumably
        # guaranteed to exist by the application's fixtures; confirm.
        domain_admin = Group.objects.get(name="Domain Admin")
        return self.model.objects.filter(
            Q(username__icontains=q) | Q(first_name__icontains=q) | Q(last_name__icontains=q),
            groups__in=[domain_admin]
        ).order_by('username')
@register('permission')
class PermissionLookup(LookupChannel):
    """ajax_select lookup channel for Django auth ``Permission`` records."""
    model = Permission
    def get_query(self, q, request):
        return self.model.objects.filter(
            Q(name__icontains=q) | Q(codename__icontains=q)
        ).order_by('name')
    def format_match(self, obj):
        # Escaped plain text for the dropdown; str() instead of calling
        # the __str__ dunder directly.
        return escape(str(obj))
    def format_item_display(self, obj):
        # Text shown for a selected item.
        return str(obj)
    def get_objects(self, ids):
        return self.model.objects.filter(pk__in=ids).order_by('name')
@register('attribute')
class AttributeLookup(LookupChannel):
    """ajax_select channel for attributes.

    Two query syntaxes are supported: an explicit property prefix followed
    by a separator and a value fragment ('PRE-value'), or a free-text
    search over value, description and property prefix.
    """

    model = Attribute

    def get_query(self, q, request):
        properties = Property.objects.values_list('prefix', flat=True)
        # Prefix syntax: the first PREFIX_LEN chars name a known property
        # (case-insensitive) and at least one search char follows it.
        if q[0:Property.PREFIX_LEN].upper() \
        in (item.upper() for item in properties) \
        and len(q) > (Property.PREFIX_LEN + 1):
            queryset = self.model.objects.scope(request.user.userprofile).filter(
                property_att__prefix__icontains=q[0:Property.PREFIX_LEN],
                value__icontains=q[Property.PREFIX_LEN + 1:],
                property_att__enabled=True
            )
        else:
            # Free-text search across the attribute fields.
            queryset = self.model.objects.scope(request.user.userprofile).filter(
                Q(value__icontains=q) |
                Q(description__icontains=q) |
                Q(property_att__prefix__icontains=q)
            ).filter(property_att__enabled=True)

        # exclude available and unsubscribed computers (inactive)
        inactive_computers = [
            str(x) for x in Computer.inactive.values_list('id', flat=True)
        ]
        queryset = queryset.exclude(
            property_att__prefix='CID',
            value__in=inactive_computers
        ).order_by('value')

        return queryset

    def format_match(self, obj):
        return escape(obj.__str__())

    def format_item_display(self, obj):
        return obj.link()

    def can_add(self, user, model):
        # Attributes cannot be created from the lookup widget.
        return False

    def get_objects(self, ids):
        # When computers are searched by a name-like field, order CID
        # (computer) attributes by description and all others by
        # property/value; otherwise a single ordering suffices.
        if settings.MIGASFREE_COMPUTER_SEARCH_FIELDS[0] != "id":
            return self.model.objects.filter(
                pk__in=ids
            ).filter(
                ~Q(property_att__prefix='CID')
            ).order_by(
                'property_att',
                'value'
            ) | self.model.objects.filter(
                pk__in=ids,
                property_att__prefix='CID'
            ).order_by(
                'description'
            )
        else:
            return self.model.objects.filter(
                pk__in=ids
            ).order_by(
                'property_att',
                'value'
            )
@register('package')
class PackageLookup(LookupChannel):
    """ajax_select channel for packages, optionally narrowed to one project."""

    model = Package

    def can_add(self, user, model):
        # Packages are uploaded elsewhere; the widget only selects them.
        return False

    def get_query(self, q, request):
        results = self.model.objects.scope(
            request.user.userprofile
        ).filter(name__icontains=q).order_by('name')
        project_id = request.GET.get('project_id', None)
        if project_id:
            results = results.filter(project__id=project_id)
        return results

    def format_match(self, obj):
        return escape(obj.name)

    def format_item_display(self, obj):
        return obj.link()

    def get_objects(self, ids):
        return self.model.objects.filter(pk__in=ids).order_by('name')
@register('tag')
class TagLookup(LookupChannel):
    """ajax_select channel for server attributes ('tags')."""

    model = ServerAttribute

    def can_add(self, user, model):
        # Tags are managed elsewhere; the widget only selects existing ones.
        return False

    def get_query(self, q, request):
        text_match = (
            Q(value__icontains=q)
            | Q(description__icontains=q)
            | Q(property_att__prefix__icontains=q)
        )
        scoped = self.model.objects.scope(request.user.userprofile)
        return scoped.filter(
            property_att__enabled=True,
            property_att__sort='server'
        ).filter(text_match).order_by('value')

    def format_match(self, obj):
        return '{}-{} {}'.format(
            escape(obj.property_att.prefix),
            escape(obj.value),
            escape(obj.description)
        )

    def format_item_display(self, obj):
        return obj.link()

    def get_objects(self, ids):
        return self.model.objects.filter(
            pk__in=ids
        ).order_by('property_att', 'value')
@register('devicelogical')
class DeviceLogicalLookup(LookupChannel):
    """ajax_select channel for logical devices, searched by device name."""

    model = DeviceLogical

    def can_add(self, user, model):
        # Logical devices are created through the device admin, not here.
        return False

    def get_query(self, q, request):
        return self.model.objects.filter(device__name__icontains=q)

    def format_match(self, obj):
        return escape(str(obj))

    def format_item_display(self, obj):
        return obj.link()
@register('computer')
class ComputerLookup(LookupChannel):
    """ajax_select channel for computers.

    Honours the requesting user's scope.  For name-based configurations it
    also excludes inactive ('available'/'unsubscribed') machines.
    """

    model = Computer

    def can_add(self, user, model):
        # Computers register themselves; never created from the widget.
        return False

    def get_query(self, q, request):
        scoped = self.model.objects.scope(request.user.userprofile)
        if settings.MIGASFREE_COMPUTER_SEARCH_FIELDS[0] == "id":
            return scoped.filter(id__exact=q)

        # BUGFIX: a conditional expression binds looser than `|`, so the
        # original `Q(id__exact=q) if isinstance(q, int) else Q() | Q(...)`
        # parsed as `... else (Q() | Q(...))` and silently dropped the
        # icontains clause whenever q was an int.  Parenthesize to express
        # the intended "exact id match OR search-field match".
        id_match = Q(id__exact=q) if isinstance(q, int) else Q()
        field_match = Q(**{
            '{}__icontains'.format(settings.MIGASFREE_COMPUTER_SEARCH_FIELDS[0]): q
        })
        return scoped.filter(id_match | field_match).filter(
            ~Q(status__in=['available', 'unsubscribed'])
        )

    def format_match(self, obj):
        return obj.__str__()

    def format_item_display(self, obj):
        return obj.link()

    def reorder(self, ids):
        """Return *ids* sorted by the configured computer search field."""
        return [row.id for row in Computer.objects.filter(
            pk__in=ids
        ).order_by(settings.MIGASFREE_COMPUTER_SEARCH_FIELDS[0])]

    def get_objects(self, ids):
        """Resolve *ids* to instances, preserving the expected ordering."""
        things = self.model.objects.in_bulk(ids)
        if settings.MIGASFREE_COMPUTER_SEARCH_FIELDS[0] == "id":
            return [things[aid] for aid in ids if aid in things]
        return [things[aid] for aid in self.reorder(ids) if aid in things]
| migasfree/migasfree | migasfree/server/lookups.py | Python | gpl-3.0 | 7,455 |
# Maked by Mr. Have fun! Version 0.2
# Shadow Weapon Coupons contributed by BiTi for the Official L2J Datapack Project
# Visit http://forum.l2jdp.com for more details
import sys
from com.l2scoria.gameserver.model.quest import State
from com.l2scoria.gameserver.model.quest import QuestState
from com.l2scoria.gameserver.model.quest.jython import QuestJython as JQuest
# Quest name used to look up the player's QuestState.
qn = "408_PathToElvenwizard"

# Quest item template IDs.
ROGELLIAS_LETTER = 1218
RED_DOWN = 1219
MAGICAL_POWERS_RUBY = 1220
PURE_AQUAMARINE = 1221
APPETIZING_APPLE = 1222
GOLD_LEAVES = 1223
IMMORTAL_LOVE = 1224
AMETHYST = 1225
NOBILITY_AMETHYST = 1226
FERTILITY_PERIDOT = 1229
ETERNITY_DIAMOND = 1230
CHARM_OF_GRAIN = 1272
SAP_OF_WORLD_TREE = 1273
LUCKY_POTPOURI = 1274
class Quest (JQuest) :
    """Quest 408 'Path To Elvenwizard'.

    The player collects three gems through three fetch sub-quests
    (ruby via Red Down, aquamarine via Gold Leaves, amethyst via
    Amethysts) and trades them for the Eternity Diamond.
    State flow: CREATED -> STARTING -> STARTED -> COMPLETED.
    """

    def __init__(self,id,name,descr): JQuest.__init__(self,id,name,descr)

    def onEvent (self,event,st) :
        # Dialogue-link handler; `event` is the bypass string from the HTML
        # page and doubles as the fallback htmltext.
        htmltext = event
        player = st.getPlayer()
        if event == "1" :
            st.set("id","0")
            # Accepting the quest: requires class id 0x19 and level >= 19,
            # and the reward must not already be owned.
            if player.getClassId().getId() != 0x19 :
                if player.getClassId().getId() == 0x1a :
                    htmltext = "30414-02a.htm"
                else:
                    htmltext = "30414-03.htm"
            else:
                if player.getLevel()<19 :
                    htmltext = "30414-04.htm"
                else:
                    if st.getQuestItemsCount(ETERNITY_DIAMOND) != 0 :
                        htmltext = "30414-05.htm"
                    else:
                        st.set("cond","1")
                        st.setState(STARTED)
                        st.playSound("ItemSound.quest_accept")
                        if st.getQuestItemsCount(FERTILITY_PERIDOT) == 0 :
                            st.giveItems(FERTILITY_PERIDOT,1)
                        htmltext = "30414-06.htm"
        elif event == "408_1" :
            # Start the Rogellia's Letter sub-quest (leads to the ruby).
            if st.getInt("cond") != 0 and st.getQuestItemsCount(MAGICAL_POWERS_RUBY) != 0 :
                htmltext = "30414-10.htm"
            elif st.getInt("cond") != 0 and st.getQuestItemsCount(MAGICAL_POWERS_RUBY) == 0 and st.getQuestItemsCount(FERTILITY_PERIDOT) != 0 :
                if st.getQuestItemsCount(ROGELLIAS_LETTER) == 0 :
                    st.giveItems(ROGELLIAS_LETTER,1)
                htmltext = "30414-07.htm"
                st.set("cond","2")
        elif event == "408_4" :
            # Trade the letter for the Charm of Grain (Red Down collection).
            if st.getInt("cond") != 0 and st.getQuestItemsCount(ROGELLIAS_LETTER) != 0 :
                st.takeItems(ROGELLIAS_LETTER,st.getQuestItemsCount(ROGELLIAS_LETTER))
                if st.getQuestItemsCount(CHARM_OF_GRAIN) == 0 :
                    st.giveItems(CHARM_OF_GRAIN,1)
                htmltext = "30157-02.htm"
        elif event == "408_2" :
            # Start the Appetizing Apple sub-quest (leads to the aquamarine).
            if st.getInt("cond") != 0 and st.getQuestItemsCount(PURE_AQUAMARINE) != 0 :
                htmltext = "30414-13.htm"
            elif st.getInt("cond") != 0 and st.getQuestItemsCount(PURE_AQUAMARINE) == 0 and st.getQuestItemsCount(FERTILITY_PERIDOT) != 0 :
                if st.getQuestItemsCount(APPETIZING_APPLE) == 0 :
                    st.giveItems(APPETIZING_APPLE,1)
                htmltext = "30414-14.htm"
        elif event == "408_5" :
            # Trade the apple for the Sap of World Tree (Gold Leaves collection).
            if st.getInt("cond") != 0 and st.getQuestItemsCount(APPETIZING_APPLE) != 0 :
                st.takeItems(APPETIZING_APPLE,st.getQuestItemsCount(APPETIZING_APPLE))
                if st.getQuestItemsCount(SAP_OF_WORLD_TREE) == 0 :
                    st.giveItems(SAP_OF_WORLD_TREE,1)
                htmltext = "30371-02.htm"
        elif event == "408_3" :
            # Start the Immortal Love sub-quest (leads to the amethyst).
            if st.getInt("cond") != 0 and st.getQuestItemsCount(NOBILITY_AMETHYST) != 0 :
                htmltext = "30414-17.htm"
            elif st.getInt("cond") != 0 and st.getQuestItemsCount(NOBILITY_AMETHYST) == 0 and st.getQuestItemsCount(FERTILITY_PERIDOT) != 0 :
                if st.getQuestItemsCount(IMMORTAL_LOVE) == 0 :
                    st.giveItems(IMMORTAL_LOVE,1)
                htmltext = "30414-18.htm"
        return htmltext

    def onTalk (self,npc,player):
        # NPC-talk handler: dispatches on NPC id and current inventory to
        # pick the right dialogue page and advance the sub-quests.
        htmltext = "<html><body>You are either not carrying out your quest or don't meet the criteria.</body></html>"
        st = player.getQuestState(qn)
        if not st : return htmltext

        npcId = npc.getNpcId()
        # NOTE(review): `id` shadows the builtin; kept as-is (quest state).
        id = st.getState()
        if npcId != 30414 and id != STARTED : return htmltext

        if id == CREATED :
            st.setState(STARTING)
            st.set("cond","0")
            st.set("onlyone","0")
            st.set("id","0")
        # NOTE(review): both branches below yield the same page; the
        # cond<15 test looks vestigial but is preserved verbatim.
        if npcId == 30414 and st.getInt("cond")==0 :
            if st.getInt("cond")<15 :
                htmltext = "30414-01.htm"
            else:
                htmltext = "30414-01.htm"
        # Quest in progress but no sub-quest item held: remind the player.
        elif npcId == 30414 and st.getInt("cond")!=0 and st.getQuestItemsCount(ROGELLIAS_LETTER)==0 and st.getQuestItemsCount(APPETIZING_APPLE)==0 and st.getQuestItemsCount(IMMORTAL_LOVE)==0 and st.getQuestItemsCount(CHARM_OF_GRAIN)==0 and st.getQuestItemsCount(SAP_OF_WORLD_TREE)==0 and st.getQuestItemsCount(LUCKY_POTPOURI)==0 and st.getQuestItemsCount(FERTILITY_PERIDOT)!=0 and (st.getQuestItemsCount(MAGICAL_POWERS_RUBY)==0 or st.getQuestItemsCount(NOBILITY_AMETHYST)==0 or st.getQuestItemsCount(PURE_AQUAMARINE)==0) :
            htmltext = "30414-11.htm"
        elif npcId == 30414 and st.getInt("cond")!=0 and st.getQuestItemsCount(ROGELLIAS_LETTER)!=0 :
            htmltext = "30414-08.htm"
        elif npcId == 30157 and st.getInt("cond")!=0 and st.getQuestItemsCount(ROGELLIAS_LETTER)!=0 :
            htmltext = "30157-01.htm"
        elif npcId == 30157 and st.getInt("cond")!=0 and st.getQuestItemsCount(CHARM_OF_GRAIN)!=0 and st.getQuestItemsCount(RED_DOWN)<5 :
            htmltext = "30157-03.htm"
        # 5 Red Down collected: trade them for the ruby.
        elif npcId == 30157 and st.getInt("cond")!=0 and st.getQuestItemsCount(CHARM_OF_GRAIN)!=0 and st.getQuestItemsCount(RED_DOWN)>=5 :
            st.takeItems(RED_DOWN,st.getQuestItemsCount(RED_DOWN))
            st.takeItems(CHARM_OF_GRAIN,st.getQuestItemsCount(CHARM_OF_GRAIN))
            if st.getQuestItemsCount(MAGICAL_POWERS_RUBY) == 0 :
                st.giveItems(MAGICAL_POWERS_RUBY,1)
            htmltext = "30157-04.htm"
        elif npcId == 30414 and st.getInt("cond")!=0 and st.getQuestItemsCount(CHARM_OF_GRAIN)!=0 and st.getQuestItemsCount(RED_DOWN)<5 :
            htmltext = "30414-09.htm"
        elif npcId == 30414 and st.getInt("cond")!=0 and st.getQuestItemsCount(CHARM_OF_GRAIN)!=0 and st.getQuestItemsCount(RED_DOWN)>=5 :
            htmltext = "30414-25.htm"
        elif npcId == 30414 and st.getInt("cond")!=0 and st.getQuestItemsCount(APPETIZING_APPLE)!=0 :
            htmltext = "30414-15.htm"
        elif npcId == 30371 and st.getInt("cond")!=0 and st.getQuestItemsCount(APPETIZING_APPLE)!=0 :
            htmltext = "30371-01.htm"
        elif npcId == 30371 and st.getInt("cond")!=0 and st.getQuestItemsCount(SAP_OF_WORLD_TREE)!=0 and st.getQuestItemsCount(GOLD_LEAVES)<5 :
            htmltext = "30371-03.htm"
        # 5 Gold Leaves collected: trade them for the aquamarine.
        elif npcId == 30371 and st.getInt("cond")!=0 and st.getQuestItemsCount(SAP_OF_WORLD_TREE)!=0 and st.getQuestItemsCount(GOLD_LEAVES)>=5 :
            st.takeItems(GOLD_LEAVES,st.getQuestItemsCount(GOLD_LEAVES))
            st.takeItems(SAP_OF_WORLD_TREE,st.getQuestItemsCount(SAP_OF_WORLD_TREE))
            if st.getQuestItemsCount(PURE_AQUAMARINE) == 0 :
                st.giveItems(PURE_AQUAMARINE,1)
            htmltext = "30371-04.htm"
        elif npcId == 30414 and st.getInt("cond")!=0 and st.getQuestItemsCount(SAP_OF_WORLD_TREE)!=0 and st.getQuestItemsCount(GOLD_LEAVES)<5 :
            htmltext = "30414-16.htm"
        # NOTE(review): this branch tests CHARM_OF_GRAIN while its siblings
        # test SAP_OF_WORLD_TREE -- looks like a copy/paste slip in the
        # original data pack; preserved verbatim. TODO confirm.
        elif npcId == 30414 and st.getInt("cond")!=0 and st.getQuestItemsCount(CHARM_OF_GRAIN)!=0 and st.getQuestItemsCount(GOLD_LEAVES)>=5 :
            htmltext = "30414-26.htm"
        elif npcId == 30414 and st.getInt("cond")!=0 and st.getQuestItemsCount(IMMORTAL_LOVE)!=0 :
            htmltext = "30414-19.htm"
        elif npcId == 30423 and st.getInt("cond")!=0 and st.getQuestItemsCount(IMMORTAL_LOVE)!=0 :
            st.takeItems(IMMORTAL_LOVE,st.getQuestItemsCount(IMMORTAL_LOVE))
            if st.getQuestItemsCount(LUCKY_POTPOURI) == 0 :
                st.giveItems(LUCKY_POTPOURI,1)
            htmltext = "30423-01.htm"
        elif npcId == 30423 and st.getInt("cond")!=0 and st.getQuestItemsCount(LUCKY_POTPOURI)!=0 and st.getQuestItemsCount(AMETHYST)<2 :
            htmltext = "30423-02.htm"
        # 2 Amethysts collected: trade them for the nobility amethyst.
        elif npcId == 30423 and st.getInt("cond")!=0 and st.getQuestItemsCount(LUCKY_POTPOURI)!=0 and st.getQuestItemsCount(AMETHYST)>=2 :
            st.takeItems(AMETHYST,st.getQuestItemsCount(AMETHYST))
            st.takeItems(LUCKY_POTPOURI,st.getQuestItemsCount(LUCKY_POTPOURI))
            if st.getQuestItemsCount(NOBILITY_AMETHYST) == 0 :
                st.giveItems(NOBILITY_AMETHYST,1)
            htmltext = "30423-03.htm"
        elif npcId == 30414 and st.getInt("cond")!=0 and st.getQuestItemsCount(LUCKY_POTPOURI)!=0 and st.getQuestItemsCount(AMETHYST)<2 :
            htmltext = "30414-20.htm"
        elif npcId == 30414 and st.getInt("cond")!=0 and st.getQuestItemsCount(LUCKY_POTPOURI)!=0 and st.getQuestItemsCount(AMETHYST)>=2 :
            htmltext = "30414-27.htm"
        # All three gems held: finish the quest and hand out the reward.
        elif npcId == 30414 and st.getInt("cond")!=0 and st.getQuestItemsCount(ROGELLIAS_LETTER)==0 and st.getQuestItemsCount(APPETIZING_APPLE)==0 and st.getQuestItemsCount(IMMORTAL_LOVE)==0 and st.getQuestItemsCount(CHARM_OF_GRAIN)==0 and st.getQuestItemsCount(SAP_OF_WORLD_TREE)==0 and st.getQuestItemsCount(LUCKY_POTPOURI)==0 and st.getQuestItemsCount(FERTILITY_PERIDOT)!=0 and st.getQuestItemsCount(MAGICAL_POWERS_RUBY)!=0 and st.getQuestItemsCount(NOBILITY_AMETHYST)!=0 and st.getQuestItemsCount(PURE_AQUAMARINE)!=0 :
            st.takeItems(MAGICAL_POWERS_RUBY,st.getQuestItemsCount(MAGICAL_POWERS_RUBY))
            st.takeItems(PURE_AQUAMARINE,st.getQuestItemsCount(PURE_AQUAMARINE))
            st.takeItems(NOBILITY_AMETHYST,st.getQuestItemsCount(NOBILITY_AMETHYST))
            st.takeItems(FERTILITY_PERIDOT,st.getQuestItemsCount(FERTILITY_PERIDOT))
            st.set("cond","0")
            st.setState(COMPLETED)
            st.playSound("ItemSound.quest_finish")
            if st.getQuestItemsCount(ETERNITY_DIAMOND) == 0 :
                st.giveItems(ETERNITY_DIAMOND,1)
            htmltext = "30414-24.htm"
        return htmltext

    def onKill(self,npc,player,isPet):
        # Mob-kill handler: chance-based drops for the three collections.
        st = player.getQuestState(qn)
        if not st : return
        if st.getState() != STARTED : return

        npcId = npc.getNpcId()
        if npcId == 20466 :
            # Red Down: 70% chance while holding the Charm of Grain, cap 5.
            st.set("id","0")
            if st.getInt("cond") != 0 and st.getQuestItemsCount(CHARM_OF_GRAIN) != 0 and st.getQuestItemsCount(RED_DOWN)<5 and st.getRandom(100)<70 :
                st.giveItems(RED_DOWN,1)
                if st.getQuestItemsCount(RED_DOWN) == 5 :
                    st.playSound("ItemSound.quest_middle")
                else:
                    st.playSound("ItemSound.quest_itemget")
        elif npcId == 20019 :
            # Gold Leaves: 40% chance while holding the Sap, cap 5.
            st.set("id","0")
            if st.getInt("cond") != 0 and st.getQuestItemsCount(SAP_OF_WORLD_TREE) != 0 and st.getQuestItemsCount(GOLD_LEAVES)<5 and st.getRandom(100)<40 :
                st.giveItems(GOLD_LEAVES,1)
                if st.getQuestItemsCount(GOLD_LEAVES) == 5 :
                    st.playSound("ItemSound.quest_middle")
                else:
                    st.playSound("ItemSound.quest_itemget")
        elif npcId == 20047 :
            # Amethyst: 40% chance while holding the Potpourri, cap 2.
            st.set("id","0")
            if st.getInt("cond") != 0 and st.getQuestItemsCount(LUCKY_POTPOURI) != 0 and st.getQuestItemsCount(AMETHYST)<2 and st.getRandom(100)<40 :
                st.giveItems(AMETHYST,1)
                if st.getQuestItemsCount(AMETHYST) == 2 :
                    st.playSound("ItemSound.quest_middle")
                else:
                    st.playSound("ItemSound.quest_itemget")
        return
# Quest bootstrap: create the quest object and its four states.
QUEST = Quest(408,qn,"Path To Elvenwizard")
CREATED = State('Start', QUEST)
STARTING = State('Starting', QUEST)
STARTED = State('Started', QUEST)
COMPLETED = State('Completed', QUEST)

QUEST.setInitialState(CREATED)

# Dialogue NPCs: 30414 starts the quest; the others run the sub-quests.
QUEST.addStartNpc(30414)
QUEST.addTalkId(30414)
QUEST.addTalkId(30157)
QUEST.addTalkId(30371)
QUEST.addTalkId(30423)

# Monsters that drop the collectible quest items (see onKill).
QUEST.addKillId(20019)
QUEST.addKillId(20466)
QUEST.addKillId(20047)

# Items removed from the inventory when the quest is abandoned in STARTED.
STARTED.addQuestDrop(30414,ROGELLIAS_LETTER,1)
STARTED.addQuestDrop(20466,RED_DOWN,1)
STARTED.addQuestDrop(30157,CHARM_OF_GRAIN,1)
STARTED.addQuestDrop(30414,APPETIZING_APPLE,1)
STARTED.addQuestDrop(20019,GOLD_LEAVES,1)
STARTED.addQuestDrop(30371,SAP_OF_WORLD_TREE,1)
STARTED.addQuestDrop(30414,IMMORTAL_LOVE,1)
STARTED.addQuestDrop(20047,AMETHYST,1)
STARTED.addQuestDrop(30423,LUCKY_POTPOURI,1)
STARTED.addQuestDrop(30157,MAGICAL_POWERS_RUBY,1)
STARTED.addQuestDrop(30371,PURE_AQUAMARINE,1)
STARTED.addQuestDrop(30423,NOBILITY_AMETHYST,1)
STARTED.addQuestDrop(30414,FERTILITY_PERIDOT,1) | zenn1989/scoria-interlude | L2Jscoria-Game/data/scripts/quests/408_PathToElvenwizard/__init__.py | Python | gpl-3.0 | 12,096 |
class Foo(object):
    """Base class holding a per-instance ``frotz`` mapping."""

    def __init__(self):
        # Fresh dict per instance, so subclasses may mutate it safely.
        self.frotz = {'ping': 'pong'}
class Bar(Foo):
    """Foo subclass that extends ``frotz`` and records ``frazzle``.

    ``frob`` is accepted but unused, matching the original signature.
    """

    def __init__(self, frob, frizzle):
        super(Bar, self).__init__()
        self.frotz['foo'] = 'bar'
        self.frazzle = frizzle
# Smoke test: Bar should inherit frotz from Foo and extend it in place.
bar = Bar(1,2)
print("frotz:", bar.frotz)
print("frazzle:", bar.frazzle) | ozzmeister00/textAdventureA | sandbox.py | Python | gpl-3.0 | 312 |
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from mock import patch
from django.test import TestCase as DjangoTestCase
from tastypie.test import ResourceTestCase as TastypieResourceTestCase
from baobab.utils.mock import MockSN, MockLOG
class TestCase(DjangoTestCase):
    """Django TestCase that swaps the social-network layer for mocks.

    ``self.log`` exposes the MockLOG instance; ``self._mock`` keeps the
    active patchers so tearDown can undo them.
    """

    def setUp(self, *args, **kwargs):
        super(TestCase, self).setUp(*args, **kwargs)
        log_patcher = patch('baobab.socialnetwork.base.LOG',
                            new_callable=MockLOG)
        subclasses_patcher = patch(
            'baobab.socialnetwork.base.SocialNetworkBase.__subclasses__',
            return_value=[MockSN, ])
        self._mock = [log_patcher, subclasses_patcher]
        self.log = log_patcher.start()
        subclasses_patcher.start()

    def tearDown(self, *args, **kwargs):
        super(TestCase, self).tearDown(*args, **kwargs)
        for patcher in self._mock:
            patcher.stop()
class ResourceTestCase(TastypieResourceTestCase):
    """Tastypie ResourceTestCase with the same mock setup as TestCase."""

    def setUp(self, *args, **kwargs):
        super(ResourceTestCase, self).setUp(*args, **kwargs)
        log_patcher = patch('baobab.socialnetwork.base.LOG',
                            new_callable=MockLOG)
        subclasses_patcher = patch(
            'baobab.socialnetwork.base.SocialNetworkBase.__subclasses__',
            return_value=[MockSN, ])
        self._mock = [log_patcher, subclasses_patcher]
        self.log = log_patcher.start()
        subclasses_patcher.start()

    def tearDown(self, *args, **kwargs):
        super(ResourceTestCase, self).tearDown(*args, **kwargs)
        for patcher in self._mock:
            patcher.stop()
| Gandi/baobab | baobab/utils/test.py | Python | gpl-3.0 | 1,573 |
# -*- encoding: utf-8 -*-
"""Implements System Groups UI"""
from robottelo.ui.base import Base
from robottelo.ui.locators import common_locators, locators
class SystemGroup(Base):
    """Page object for the System Groups UI (Selenium-driven)."""

    def create(self, name, description=None, limit=None):
        """Creates new System Group from UI.

        :param name: group name (required)
        :param description: optional description text
        :param limit: optional content-host limit; when omitted the group
            stays unlimited (the UI default)
        """
        if self.wait_until_element(locators['system-groups.new']):
            # new
            self.click(locators['system-groups.new'])
            self.wait_until_element(locators['system-groups.name'])
            # fill name
            self.field_update('system-groups.name', name)
            # fill description
            if description:
                self.field_update('system-groups.description', description)
            # set limit (unlimited by default)
            if limit:
                # Unchecking 'unlimited' reveals the numeric limit field.
                self.click(locators['system-groups.unlimited'])
                self.field_update('system-groups.limit', limit)
            # create
            self.click(common_locators['create'])

    def update(self, name, new_name=None, new_description=None, limit=None):
        """Updates existing System Group from UI.

        Only the attributes passed as non-None are touched.
        """
        system_group = self.search(name)
        self.wait_for_ajax()
        if system_group:
            system_group.click()
            self.wait_for_ajax()
            if new_name:  # update name
                self.edit_entity(locators["system-groups.update_name"],
                                 locators["system-groups.update_name_field"],
                                 new_name,
                                 locators["system-groups.update_name_save"])
            if new_description:  # update description
                self.edit_entity(
                    locators["system-groups.update_description"],
                    locators["system-groups.update_description_field"],
                    new_description,
                    locators["system-groups.update_description_save"]
                )
            if limit:  # update limit
                self.click(locators["system-groups.update_limit"])
                checkbox = self.wait_until_element(
                    locators["system-groups.update_limit_checkbox"])
                # uncheck checkbox when needed
                if checkbox.get_attribute('checked'):
                    checkbox.click()
                    self.wait_for_ajax()
                # update field and save
                self.field_update("system-groups.update_limit_field", limit)
                self.click(locators["system-groups.update_limit_save"])

    def remove(self, name):
        """Removes existing System Group from UI (with confirmation)."""
        system_group = self.search(name)
        self.wait_for_ajax()
        if system_group:
            system_group.click()
            self.wait_for_ajax()
            self.click(locators["system-groups.remove"])
            self.click(locators["system-groups.confirm_remove"])

    def search(self, name):
        """Searches existing System Group from UI; returns the row element."""
        return self.search_entity(
            name, locators['system-groups.search'], katello=True)
| abalakh/robottelo | robottelo/ui/systemgroup.py | Python | gpl-3.0 | 3,072 |
# -*- coding: utf-8 -*-
# This package and all its sub-packages are part of django-wiki,
# except where otherwise stated.
#
# django-wiki is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# django-wiki is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with django-wiki. If not, see <http://www.gnu.org/licenses/>.
VERSION = "0.0.17"
| Attorney-Fee/django-wiki | wiki/__init__.py | Python | gpl-3.0 | 773 |
from django.conf.urls import url
from . import views
# Account routes: session management plus the three-step registration
# flow (form -> confirmation mail -> activation).
urlpatterns = [
    url(r'^login/$', views.login, name='login'),
    url(r'^logout/$', views.logout, name='logout'),
    url(r'^register/$', views.register, name='register'),
    url(r'^register/mail/$', views.register_mail, name='register_mail'),
    url(r'^register/activate/$', views.register_activate, name='register_activate'),
]
| enfancemill/baobaocloud | app/account/urls.py | Python | gpl-3.0 | 390 |
import os
os.environ["DJANGO_SETTINGS_MODULE"] = "settings"
from collections import namedtuple
from django.conf import settings
from django.contrib.auth.models import User
from django.db import connection
from djsopnet.control.assembly import \
generate_assembly_equivalences, \
generate_compatible_assemblies_between_cores, \
_map_assembly_equivalence_to_skeleton
from djsopnet.models import SegmentationConfiguration
from djsopnet.control.block import _blockcursor_to_namedtuple
from tests.testsopnet import SopnetTest
PARALLEL_JOBS = getattr(settings, 'SOPNET_TEST_PARALLEL_JOBS', {'ASSEMBLY': False})['ASSEMBLY']
if PARALLEL_JOBS:
from joblib import Parallel, delayed
# Threshold for number of segments an assembly equivalence must have to be
# mapped to a skeleton.
MAPPING_SEGMENTS_THRESHOLD = 20
# Script body: rebuild assembly relationships for the Membrane stack,
# link compatible assemblies between neighbouring solved cores, group
# them into equivalences and map large equivalences to skeletons.
st = SopnetTest()
sc = SegmentationConfiguration.objects.get(pk=st.segmentation_configuration_id)
segstack = sc.segmentationstack_set.get(type='Membrane')
# Keep slices/segments/solutions; only assembly relationships are rebuilt.
segstack.clear_schema(delete_slices=False,
                      delete_segments=False,
                      delete_solutions=False,
                      delete_assembly_relationships=True)
bi = sc.block_info
block_size = bi.size_for_unit('block')

jobs = []

# Generate assembly compatibility edges for all (6-)neighboring, solved cores.
def core_compatibility(i, j, k):
    """Link assemblies of the solved core at grid position (i, j, k) with
    those of each solved 6-neighbor core."""
    cursor = connection.cursor()
    # NOTE(review): SQL built with %-interpolation; i/j/k and segstack.id
    # are internal ints here, but parameterized queries would be safer.
    cursor.execute('''
        SELECT * FROM segstack_%s.core
        WHERE coordinate_x = %s
        AND coordinate_y = %s
        AND coordinate_z = %s
        ''' % (segstack.id, i, j, k))
    c = _blockcursor_to_namedtuple(cursor, block_size)[0]
    if c.solution_set_flag:
        print 'Generating compatibility for core %s (%s, %s, %s)' % (c.id, i, j, k)
        for (ni, nj, nk) in bi.core_neighbor_range((i, j, k)):
            cursor.execute('''
                SELECT * FROM segstack_%s.core
                WHERE coordinate_x = %s
                AND coordinate_y = %s
                AND coordinate_z = %s
                ''' % (segstack.id, ni, nj, nk))
            nbr = _blockcursor_to_namedtuple(cursor, block_size)[0]
            if nbr.solution_set_flag:
                generate_compatible_assemblies_between_cores(segstack.id, c.id, nbr.id)

for core_coord in bi.core_range():
    if PARALLEL_JOBS:
        # joblib workers need their own DB connections.
        connection.close()
        jobs.append(delayed(core_compatibility)(*core_coord))
    else:
        core_compatibility(*core_coord)

if PARALLEL_JOBS:
    Parallel(n_jobs=PARALLEL_JOBS)(jobs)

# Generate assembly equivalences.
print 'Generating assembly equivalences...'
generate_assembly_equivalences(segstack.id)

# For each assembly equivalence, map to a skeleton.
Reqfake = namedtuple('Reqfake', ['user', 'project_id'])
# NOTE(review): hard-coded test user for the fake request object.
u = User.objects.get(username='drew')
request = Reqfake(user=u, project_id=sc.project_id)

def map_skeleton(equivalence_id):
    """Best-effort mapping of one equivalence to a CATMAID skeleton;
    failures are reported but do not abort the batch."""
    print 'Mapping assembly equivalence %s' % equivalence_id
    try:
        _map_assembly_equivalence_to_skeleton(request, segstack.id, equivalence_id)
    except Exception as e:
        print '...error'
        print str(e)

# Select unmapped equivalences with more segments than the threshold.
global_cursor = connection.cursor()
global_cursor.execute('''
    SELECT
      e.id,
      COUNT(aseg.segment_id)
    FROM segstack_%(segstack_id)s.assembly_equivalence e
    JOIN segstack_%(segstack_id)s.assembly a
      ON a.equivalence_id = e.id
    JOIN segstack_%(segstack_id)s.assembly_segment aseg
      ON aseg.assembly_id = a.id
    WHERE e.skeleton_id IS NULL
    GROUP BY e.id
    HAVING COUNT(aseg.segment_id) > %(segments_threshold)s
    ''' % {'segstack_id': segstack.id, 'segments_threshold': MAPPING_SEGMENTS_THRESHOLD})
equivalence_ids = [r[0] for r in global_cursor.fetchall()]

jobs = []
for equivalence_id in equivalence_ids:
    if PARALLEL_JOBS:
        connection.close()
        jobs.append(delayed(map_skeleton)(equivalence_id))
    else:
        map_skeleton(equivalence_id)

if PARALLEL_JOBS:
    Parallel(n_jobs=PARALLEL_JOBS)(jobs)
"""
Transducer predicate class and parser
has input and output predicate and defines operations with them
Copyright (c) 2017 Michaela Bielikova <[email protected]>
"""
import abc
import random
from predicate_interface import PredicateInterface
class TransPred(PredicateInterface):
    """
    Transducer predicate class
    represents a transducer label with an input and an output predicate

    Attributes:
        input       input predicate
        output      output predicate
        identity    flag if the label represents identity
    """
    # NOTE: the original decorated these concrete implementations with
    # @abc.abstractmethod, which would make the class uninstantiable under
    # ABCMeta even though the module instantiates it; the decorators were
    # no-ops and have been removed.

    def __init__(self):
        self.input = None
        self.output = None
        self.identity = False

    def __str__(self):
        if self.identity:
            return "@" + str(self.input) + "/@" + str(self.output)
        else:
            return str(self.input) + "/" + str(self.output)

    def __repr__(self):
        if self.identity:
            return "@" + str(self.input) + "/@" + str(self.output)
        else:
            return str(self.input) + "/" + str(self.output)

    def __eq__(self, other):
        return (self.identity, self.input, self.output) == (other.identity, other.input, other.output)

    def __hash__(self):
        return hash((self.identity, str(self.input), str(self.output)))

    def complement(self):
        """
        Predicate negation
        :return: negation of given predicate
        """
        result = TransPred()
        result.identity = self.identity
        result.input = self.input.complement()
        result.output = self.output.complement()
        return result

    def conjunction(self, predicate):
        """
        Predicate conjunction
        :param predicate: second predicate
        :return: conjunction of two predicates
        """
        result = TransPred()
        # The result is an identity label if either operand is one.
        if self.identity or predicate.identity:
            result.identity = True
        else:
            result.identity = False
        if result.identity:
            # Identity labels must translate each symbol to itself, so
            # input and output share one combined predicate.
            identic_input = self.input.conjunction(predicate.input)
            identic_output = self.output.conjunction(predicate.output)
            identic = identic_input.conjunction(identic_output)
            result.input = identic
            result.output = identic
        else:
            result.input = self.input.conjunction(predicate.input)
            result.output = self.output.conjunction(predicate.output)
        return result

    def disjunction(self, predicate):
        """
        Predicate disjunction
        :param predicate: second predicate
        :return: disjunction of two predicates
        """
        result = TransPred()
        if self.identity or predicate.identity:
            result.identity = True
        else:
            result.identity = False
        if result.identity:
            identic_input = self.input.disjunction(predicate.input)
            identic_output = self.output.disjunction(predicate.output)
            identic = identic_input.conjunction(identic_output)
            result.input = identic
            result.output = identic
        else:
            result.input = self.input.disjunction(predicate.input)
            result.output = self.output.disjunction(predicate.output)
        return result

    def is_equal(self, predicate):
        """
        Checks whether the given predicates are equal
        :param predicate: second predicate
        :return: bool
        """
        if self.identity != predicate.identity:
            # if every predicate has exactly one symbol, they can be equal
            # even if their .identity flags differ
            if len(self.input) != 1 or len(self.output) != 1 or len(predicate.input) != 1 or len(predicate.output) != 1:
                return False
        if not self.input.is_equal(predicate.input):
            return False
        if not self.output.is_equal(predicate.output):
            return False
        return True

    def is_subset(self, predicate):
        """
        Checks whether the given predicate represents a subset of the second one
        :param predicate: second predicate
        :return: bool
        """
        if self.identity != predicate.identity:
            if predicate.identity and not self.is_equal(predicate):
                return False
        if not self.input.is_subset(predicate.input):
            return False
        if not self.output.is_subset(predicate.output):
            return False
        return True

    def is_satisfiable(self):
        """
        Checks whether the given predicate is satisfiable
        :return: bool
        """
        if not self.input.is_satisfiable():
            return False
        if not self.output.is_satisfiable():
            return False
        return True

    def combine(self, other):
        """
        Creates composition of two given labels
        :param other: the second predicate
        :return: composed predicate
        """
        result = TransPred()
        # BUGFIX: the original tested `self.identity or result.identity`,
        # but `result` was just created so its flag is always False -- the
        # second operand's identity was ignored.  Mirror conjunction /
        # disjunction and test `other.identity`.
        if self.identity or other.identity:
            result.identity = True
            identic = self.input.conjunction(other.output)
            result.input = identic
            result.output = identic
        else:
            result.identity = False
            result.input = self.input
            result.output = other.output
        return result

    def translates(self, a, b):
        """
        Checks whether the predicate translates symbol a to symbol b
        :param a: the input symbol
        :param b: the output symbol
        :return: bool
        """
        if self.identity:
            if self.input.has_letter(a) and a == b:
                return True
        else:
            if self.input.has_letter(a) and self.output.has_letter(b):
                return True
        return False

    def translate(self, a, alphabet):
        """
        Translates symbol a to another symbol
        :param a: the input symbol
        :param alphabet: alphabet of the automaton
        :return: translation of the symbol, or False if a is not accepted
        """
        if self.input.has_letter(a):
            if self.identity:
                return a
            else:
                # Return the first alphabet symbol accepted by the output
                # predicate (None if no such symbol exists).
                for symbol in alphabet:
                    if self.output.has_letter(symbol):
                        return symbol
        else:
            return False
def parsePredicate(pred, automaton_type):
    """
    Parses given predicate

    :param pred: predicate string of the form "input/output"; a leading
        "@" marks an identity label (all "@" characters are stripped)
    :param automaton_type: type of the automaton ("INT" or "LT")
    :return: predicate object
    """
    result = TransPred()
    if pred[0] == "@":
        result.identity = True
        pred = pred.replace("@", "")
    pred_parts = pred.split("/")
    # Pick the concrete parser for the component predicates.
    if automaton_type == "INT":
        from in_notin_parser import parsePredicate as parsePr
    elif automaton_type == "LT":
        from letter_parser import parsePredicate as parsePr
    else:
        # Unknown automaton type: report and abort the whole program.
        print("Unsupported transducer type.")
        exit(-1)
    result.input = parsePr(pred_parts[0])
    result.output = parsePr(pred_parts[1])
    return result
| Miskaaa/symboliclib | symboliclib/transducer_predicate.py | Python | gpl-3.0 | 7,134 |
from django.db import models
from pygments.lexers import get_all_lexers
from pygments.styles import get_all_styles
# Choice lists derived from pygments: (value, label) pairs for every
# lexer alias and every style name.
LEXERS=[item for item in get_all_lexers() if item[1]]
LANGUAGE_CHOICES=sorted([(item[1][0],item[0]) for item in LEXERS])
STYLE_CHOICES=sorted((item,item) for item in get_all_styles())

# Create your models here.
class Interest(models.Model):
    # Creation timestamp, set once when the row is first saved.
    created=models.DateTimeField(auto_now_add=True)
    name = models.CharField(max_length=100,blank=True,default='')
    # NOTE(review): '^$' defaults look like regex patterns matching the
    # empty string -- presumably consumed as patterns downstream; confirm.
    description=models.TextField(default='^$')
    category=models.TextField(default='^$')
    subcategory=models.TextField(default='^$')
    linenos = models.BooleanField(default=False)
    # Pygments lexer and style used when rendering this interest.
    language = models.CharField(choices=LANGUAGE_CHOICES, default='python', max_length=100)
    style = models.CharField(choices=STYLE_CHOICES, default='friendly', max_length=100)
class Meta:
ordering=('created',) | DataDrivenExperiences/Server | interest/models.py | Python | mpl-2.0 | 891 |
from configparser import SafeConfigParser
import os
import sys
from collections import OrderedDict
here = os.path.dirname(__file__)
class ConfigDict(dict):
    """dict whose path-valued entries resolve relative to a base directory."""

    def __init__(self, base_path, *args, **kwargs):
        self.base_path = base_path
        dict.__init__(self, *args, **kwargs)

    def get_path(self, key, default=None):
        """Return the value of *key* as an absolute path.

        Relative values are resolved against ``base_path``; ``~`` is
        expanded.  *default* is returned unchanged when *key* is absent.
        """
        if key not in self:
            return default
        # BUGFIX: os.path.expanduser returns the expanded string -- the
        # original discarded its result, so '~' was never expanded.
        path = os.path.expanduser(self[key])
        return os.path.abspath(os.path.join(self.base_path, path))
def read(config_path):
    """Parse the ini file at *config_path*.

    Returns an OrderedDict mapping section name -> ConfigDict of options,
    with ``%(pwd)s`` interpolation available in values.

    :raises AssertionError: if the file could not be read.
    """
    config_path = os.path.abspath(config_path)
    config_root = os.path.dirname(config_path)
    # SafeConfigParser is a deprecated alias of ConfigParser and was
    # removed in Python 3.12; import the canonical name locally.
    from configparser import ConfigParser
    parser = ConfigParser()
    success = parser.read(config_path)
    assert config_path in success, success

    subns = {"pwd": os.path.abspath(os.path.curdir)}

    rv = OrderedDict()
    for section in parser.sections():
        rv[section] = ConfigDict(config_root)
        for key in parser.options(section):
            rv[section][key] = parser.get(section, key, raw=False, vars=subns)

    return rv
def path(argv=None):
    """Locate the wptrunner config file.

    A ``--config PATH`` or ``--config=PATH`` argument in *argv* wins
    (first occurrence); otherwise fall back to ``wptrunner.ini`` in the
    working directory, then to the bundled ``wptrunner.default.ini``.
    """
    args = [] if argv is None else argv
    config = None
    for i, arg in enumerate(args):
        if arg == "--config" and i + 1 < len(args):
            config = args[i + 1]
        elif arg.startswith("--config="):
            config = arg.split("=", 1)[1]
        if config is not None:
            break

    if config is None:
        if os.path.exists("wptrunner.ini"):
            config = os.path.abspath("wptrunner.ini")
        else:
            config = os.path.join(here, "..", "wptrunner.default.ini")

    return os.path.abspath(config)
def load():
    """Read the active wptrunner config, located via the process command line."""
    config_file = path(sys.argv)
    return read(config_file)
| KiChjang/servo | tests/wpt/web-platform-tests/tools/wptrunner/wptrunner/config.py | Python | mpl-2.0 | 1,667 |
# Generated by Django 3.1.6 on 2021-02-02 19:51
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated schema migration: both altered vehicle foreign keys use
    # on_delete=SET_NULL, so deleting the referenced row clears the field
    # rather than deleting the vehicle.  Generated code — normally left as-is.

    dependencies = [
        ('busstops', '0006_auto_20201225_0004'),
        ('vehicles', '0009_livery_text_colour'),
    ]

    operations = [
        migrations.AlterField(
            model_name='vehicle',
            name='latest_location',
            field=models.OneToOneField(blank=True, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, to='vehicles.vehiclelocation'),
        ),
        migrations.AlterField(
            model_name='vehicle',
            name='source',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='busstops.datasource'),
        ),
    ]
| jclgoodwin/bustimes.org.uk | vehicles/migrations/0010_auto_20210202_1951.py | Python | mpl-2.0 | 819 |
import datetime
from dataclasses import dataclass
from typing import List, Dict, Optional
from everyclass.server.entity import domain
from everyclass.server.utils import JSONSerializable
from everyclass.server.utils.encryption import encrypt, RTYPE_STUDENT, RTYPE_TEACHER
@dataclass
class Event(JSONSerializable):
    # One timetable slot: the course name and the room it is held in.
    name: str
    room: str

    def __json_encode__(self):
        """Serialize to the JSON shape consumed by clients."""
        return {'name': self.name, 'room': self.room}
@dataclass
class People(JSONSerializable):
    # A person reference: display name plus the encrypted resource id
    # (note the serialized key is 'id', not 'id_encoded').
    name: str
    id_encoded: str

    def __json_encode__(self):
        """Serialize to the JSON shape consumed by clients."""
        return {'name': self.name, 'id': self.id_encoded}
@dataclass
class MultiPeopleSchedule(JSONSerializable):
    # Day schedules for several people, plus the partition of the requested
    # identifiers into accessible and inaccessible people.
    schedules: List[Dict[str, Optional[Event]]]
    accessible_people: List[People]
    inaccessible_people: List[People]

    def __json_encode__(self):
        """Serialize to the JSON shape consumed by clients."""
        return {'schedules': self.schedules,
                'inaccessible_people': self.inaccessible_people,
                'accessible_people': self.accessible_people}

    # NOTE(review): because __init__ is defined explicitly, @dataclass keeps
    # it (dataclasses never overwrite methods defined in the class body).
    def __init__(self, people: List[str], date: datetime.date, current_user: str):
        """Multi-person schedule: given a list of student/staff identifiers
        and a date, build each accessible person's schedule for that day."""
        # Local imports, presumably to avoid circular imports at module load.
        from everyclass.server import logger
        from everyclass.server.entity import service
        from everyclass.server.user import service as user_service
        from everyclass.server.entity import service as entity_service

        accessible_people_ids = []
        accessible_people = []
        inaccessible_people = []

        for identifier in people:
            if user_service.has_access(identifier, current_user)[0]:
                accessible_people_ids.append(identifier)
            else:
                # NOTE(review): assumes every inaccessible identifier is a
                # student (get_student + RTYPE_STUDENT); a teacher id here
                # would be mishandled — confirm with callers.
                inaccessible_people.append(People(entity_service.get_student(identifier).name, encrypt(RTYPE_STUDENT, identifier)))
        self.schedules = list()

        for identifier in accessible_people_ids:
            is_student, people_info = service.get_people_info(identifier)
            accessible_people.append(
                People(people_info.name, encrypt(RTYPE_STUDENT, identifier) if is_student else encrypt(RTYPE_TEACHER, identifier)))

            semester, week, day = domain.get_semester_date(date)
            if is_student:
                cards = service.get_student_timetable(identifier, semester).cards
            else:
                cards = service.get_teacher_timetable(identifier, semester).cards

            # Keep only cards for the week number and weekday the date falls in.
            cards = filter(lambda c: week in c.weeks and c.lesson[0] == str(day), cards)

            event_dict = {}
            for card in cards:
                time = card.lesson[1:5]  # "10102" -> "0102" (period pair)
                if time not in event_dict:
                    event_dict[time] = Event(name=card.name, room=card.room)
                else:
                    # Overlapping lessons in the same slot: keep the first,
                    # log the duplicates.
                    logger.warning("time of card overlapped", extra={'people_identifier': identifier,
                                                                    'date': date})

            # Fill slots without a class with None (period pairs 0102..0910).
            for i in range(1, 10, 2):
                key = f"{i:02}{i + 1:02}"
                if key not in event_dict:
                    event_dict[key] = None

            self.schedules.append(event_dict)

        self.inaccessible_people = inaccessible_people
        self.accessible_people = accessible_people
@dataclass
class SearchResultItem(JSONSerializable):
    # One entry of a people-search result.
    name: str
    description: str
    people_type: str
    id_encoded: str
    has_access: bool
    # NOTE(review): annotated Optional[bool], but the name suggests a textual
    # reason — confirm against producers before changing the annotation.
    forbid_reason: Optional[bool]

    def __json_encode__(self):
        """Serialize to the JSON shape consumed by clients."""
        return {'name': self.name, 'description': self.description,
                'people_type': self.people_type, 'id_encoded': self.id_encoded,
                'has_access': self.has_access, 'forbid_reason': self.forbid_reason} | fr0der1c/EveryClass-server | everyclass/server/entity/model/multi_people_schedule.py | Python | mpl-2.0 | 3,857 |
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, you can obtain one at http://mozilla.org/MPL/2.0/.
from io import BytesIO
from gzip import GzipFile
import pytest
from botocore.exceptions import ClientError
from requests.exceptions import ContentDecodingError
from requests.packages.urllib3.response import HTTPResponse
from tecken.storage import StorageBucket
from tecken.base.symboldownloader import (
SymbolDownloader,
SymbolNotFound,
iter_lines,
exists_in_source,
)
def test_exists_in_source(botomock, settings):
    # exists_in_source() issues one ListObjectsV2 call per key and caches the
    # result: repeating the same lookups must not add API calls.
    mock_api_calls = []

    def mock_api_call(self, operation_name, api_params):
        mock_api_calls.append(api_params)
        assert operation_name == "ListObjectsV2"
        if api_params["Prefix"].endswith("xxx.sym"):
            return {}
        return {"Contents": [{"Key": api_params["Prefix"]}]}

    bucket = StorageBucket("https://s3.example.com/private")
    with botomock(mock_api_call):
        assert not exists_in_source(bucket, "xxx.sym")
        assert exists_in_source(bucket, "xul.sym")
        assert len(mock_api_calls) == 2

        # again — served from cache, so the call count must not grow
        assert not exists_in_source(bucket, "xxx.sym")
        assert exists_in_source(bucket, "xul.sym")
        assert len(mock_api_calls) == 2


def test_iter_lines():
    # iter_lines() splits a chunked character stream into lines, with and
    # without a trailing newline, regardless of chunk size.
    class Stream:
        def __init__(self, content):
            self.left = content

        def read(self, size):
            # NOTE(review): this mock signals EOF by raising StopIteration
            # instead of returning "" — it relies on how iter_lines consumes
            # the stream; confirm against the implementation.
            if not self.left:
                raise StopIteration
            chunk = self.left[:size]
            self.left = self.left[size:]
            return chunk

    lines = "Line 1\n" "Line 2\n" "Line 3\n"
    stream = Stream(lines)
    output = list(iter_lines(stream))
    assert output == ["Line 1", "Line 2", "Line 3"]

    # Create it again because our little stream mock doesn't rewind
    stream = Stream(lines)
    output = list(iter_lines(stream, chunk_size=5))
    assert output == ["Line 1", "Line 2", "Line 3"]

    stream = Stream(lines.strip())  # no trailing linebreak
    output = list(iter_lines(stream))
    assert output == ["Line 1", "Line 2", "Line 3"]

    stream = Stream(lines.strip())  # no trailing linebreak
    output = list(iter_lines(stream, chunk_size=3))
    assert output == ["Line 1", "Line 2", "Line 3"]
def test_has_public(requestsmock):
    # Public buckets are probed with a plain HTTP HEAD: 200 -> found,
    # 404 -> not found.
    requestsmock.head(
        "https://s3.example.com/public/prefix/v0/xul.pdb/"
        "44E4EC8C2F41492B9369D6B9A059577C2/xul.sym",
        text="",
    )
    requestsmock.head(
        "https://s3.example.com/public/prefix/v0/xxx.pdb/"
        "44E4EC8C2F41492B9369D6B9A059577C2/xxx.sym",
        text="Page Not Found",
        status_code=404,
    )
    urls = ("https://s3.example.com/public/prefix/?access=public",)
    downloader = SymbolDownloader(urls, file_prefix="v0")
    assert downloader.has_symbol(
        "xul.pdb", "44E4EC8C2F41492B9369D6B9A059577C2", "xul.sym"
    )
    assert not downloader.has_symbol(
        "xxx.pdb", "44E4EC8C2F41492B9369D6B9A059577C2", "xxx.sym"
    )


def test_has_private_bubble_other_clienterrors(botomock):
    # Non-"not found" S3 errors (e.g. 403) must propagate as ClientError
    # rather than being swallowed as "symbol not found".
    def mock_api_call(self, operation_name, api_params):
        parsed_response = {"Error": {"Code": "403", "Message": "Not found"}}
        raise ClientError(parsed_response, operation_name)

    urls = ("https://s3.example.com/private/prefix/",)
    downloader = SymbolDownloader(urls)

    # Expect this to raise a ClientError because the bucket ('private')
    # doesn't exist. So boto3 would normally trigger a ClientError
    # with a code 'Forbidden'.
    with botomock(mock_api_call):
        with pytest.raises(ClientError):
            downloader.has_symbol(
                "xul.pdb", "44E4EC8C2F41492B9369D6B9A059577C2", "xul.sym"
            )


def test_has_private(botomock):
    # Private buckets are probed via ListObjectsV2; also checks that the
    # time_took instrumentation is populated after each lookup.
    def mock_api_call(self, operation_name, api_params):
        assert operation_name == "ListObjectsV2"
        if api_params["Prefix"].endswith("xxx.sym"):
            return {}
        return {"Contents": [{"Key": api_params["Prefix"]}]}

    urls = ("https://s3.example.com/private/prefix/",)
    downloader = SymbolDownloader(urls)
    with botomock(mock_api_call):
        assert downloader.has_symbol(
            "xul.pdb", "44E4EC8C2F41492B9369D6B9A059577C2", "xul.sym"
        )
        assert downloader.time_took > 0.0
        assert not downloader.has_symbol(
            "xxx.pdb", "44E4EC8C2F41492B9369D6B9A059577C2", "xxx.sym"
        )
        assert downloader.time_took > 0.0
def test_has_private_caching_and_invalidation(botomock):
    # has_symbol() results are cached per key; invalidate_cache() forces a
    # fresh S3 lookup, and invalidating unknown keys is a no-op.
    mock_calls = []

    def mock_api_call(self, operation_name, api_params):
        assert operation_name == "ListObjectsV2"
        mock_calls.append(api_params["Prefix"])
        return {"Contents": [{"Key": api_params["Prefix"]}]}

    urls = ("https://s3.example.com/private/prefix/",)
    downloader = SymbolDownloader(urls)
    with botomock(mock_api_call):
        assert downloader.has_symbol(
            "xul.pdb", "44E4EC8C2F41492B9369D6B9A059577C2", "xul.sym"
        )
        assert len(mock_calls) == 1
        assert downloader.has_symbol(
            "xul.pdb", "44E4EC8C2F41492B9369D6B9A059577C2", "xul.sym"
        )
        # This should be cached
        assert len(mock_calls) == 1

        # Now invalidate it
        downloader.invalidate_cache(
            "xul.pdb", "44E4EC8C2F41492B9369D6B9A059577C2", "xul.sym"
        )
        assert downloader.has_symbol(
            "xul.pdb", "44E4EC8C2F41492B9369D6B9A059577C2", "xul.sym"
        )
        assert len(mock_calls) == 2

        # Invalidating unrecognized keys shouldn't break anything
        downloader.invalidate_cache(
            "never", "44E4EC8C2F41492B9369D6B9A059577C2", "heardof"
        )


def test_get_url_private_caching_and_invalidation(botomock):
    # Same caching/invalidation semantics, but via get_symbol_url().
    mock_calls = []

    def mock_api_call(self, operation_name, api_params):
        assert operation_name == "ListObjectsV2"
        mock_calls.append(api_params["Prefix"])
        return {"Contents": [{"Key": api_params["Prefix"]}]}

    urls = ("https://s3.example.com/private/prefix/",)
    downloader = SymbolDownloader(urls)
    with botomock(mock_api_call):
        assert downloader.get_symbol_url(
            "xul.pdb", "44E4EC8C2F41492B9369D6B9A059577C2", "xul.sym"
        )
        assert len(mock_calls) == 1
        assert downloader.get_symbol_url(
            "xul.pdb", "44E4EC8C2F41492B9369D6B9A059577C2", "xul.sym"
        )
        # This should be cached
        assert len(mock_calls) == 1

        # Now invalidate it
        downloader.invalidate_cache(
            "xul.pdb", "44E4EC8C2F41492B9369D6B9A059577C2", "xul.sym"
        )
        assert downloader.get_symbol_url(
            "xul.pdb", "44E4EC8C2F41492B9369D6B9A059577C2", "xul.sym"
        )
        assert len(mock_calls) == 2
def test_has_private_without_prefix(botomock):
    # A private bucket URL without a path prefix still resolves lookups.
    def mock_api_call(self, operation_name, api_params):
        assert operation_name == "ListObjectsV2"
        if api_params["Prefix"].endswith("xul.sym"):
            # found
            return {"Contents": [{"Key": api_params["Prefix"]}]}
        elif api_params["Prefix"].endswith("xxx.sym"):
            # not found
            return {}
        raise NotImplementedError(api_params)

    urls = ("https://s3.example.com/private",)
    downloader = SymbolDownloader(urls)
    with botomock(mock_api_call):
        assert downloader.has_symbol(
            "xul.pdb", "44E4EC8C2F41492B9369D6B9A059577C2", "xul.sym"
        )
        assert not downloader.has_symbol(
            "xxx.pdb", "44E4EC8C2F41492B9369D6B9A059577C2", "xxx.sym"
        )


def test_get_url_public(requestsmock):
    # Public buckets yield the plain, unsigned HTTPS URL, or None when the
    # symbol doesn't exist.
    requestsmock.head(
        "https://s3.example.com/public/prefix/v0/xul.pdb/"
        "44E4EC8C2F41492B9369D6B9A059577C2/xul.sym",
        text="",
    )
    requestsmock.head(
        "https://s3.example.com/public/prefix/v0/xxx.pdb/"
        "44E4EC8C2F41492B9369D6B9A059577C2/xxx.sym",
        text="Page Not Found",
        status_code=404,
    )
    urls = ("https://s3.example.com/public/prefix/?access=public",)
    downloader = SymbolDownloader(urls)
    url = downloader.get_symbol_url(
        "xul.pdb", "44E4EC8C2F41492B9369D6B9A059577C2", "xul.sym"
    )
    assert url == (
        "https://s3.example.com/public/prefix/v0/xul.pdb/"
        "44E4EC8C2F41492B9369D6B9A059577C2/xul.sym"
    )
    url = downloader.get_symbol_url(
        "xxx.pdb", "44E4EC8C2F41492B9369D6B9A059577C2", "xxx.sym"
    )
    assert url is None


def test_get_url_private(botomock):
    # Private buckets yield a pre-signed URL carrying Expires /
    # AWSAccessKeyId / Signature query parameters.
    def mock_api_call(self, operation_name, api_params):
        assert operation_name == "ListObjectsV2"
        if api_params["Prefix"].endswith("xxx.sym"):
            # not found
            return {}
        return {"Contents": [{"Key": api_params["Prefix"]}]}

    urls = ("https://s3.example.com/private/prefix/",)
    downloader = SymbolDownloader(urls)
    with botomock(mock_api_call):
        url = downloader.get_symbol_url(
            "xul.pdb", "44E4EC8C2F41492B9369D6B9A059577C2", "xul.sym"
        )
        # The bucket gets put in the top-domain.
        assert url.startswith("https://s3.example.com/")
        assert (
            "/private/prefix/v0/xul.pdb/" "44E4EC8C2F41492B9369D6B9A059577C2/xul.sym?"
        ) in url
        assert "Expires=" in url
        assert "AWSAccessKeyId=" in url
        assert "Signature=" in url

        url = downloader.get_symbol_url(
            "xxx.pdb", "44E4EC8C2F41492B9369D6B9A059577C2", "xxx.sym"
        )
        assert url is None
        assert len(botomock.calls) == 2
def test_public_default_file_prefix(requestsmock, settings):
    """The idea with settings.SYMBOL_FILE_PREFIX is to make it easier
    to specify the settings.SYMBOL_URLS. That settings.SYMBOL_FILE_PREFIX
    is *always* used when uploading symbols. So it's *always* useful to
    query for symbols with a prefix. However, it's an easy mistake to make
    that you just focus on the bucket name to say where symbols come from.
    In those cases, the code should "protect" you and make sure we actually
    use the prefix.
    However, we don't want to lose the flexibility to actually override
    it on a *per URL* basis.
    """
    # settings.SYMBOL_FILE_PREFIX = 'myprfx'
    # All three bucket styles below must be queried with the 'myprfx'
    # file prefix inserted after the per-URL path.
    requestsmock.head(
        "https://s3.example.com/public/start/myprfx/xxx.pdb/"
        "44E4EC8C2F41492B9369D6B9A059577C2/xxx.sym",
        text="Page Not Found",
        status_code=404,
    )
    requestsmock.head(
        "https://s3.example.com/also-public/prrffxx/myprfx/xxx.pdb/"
        "44E4EC8C2F41492B9369D6B9A059577C2/xxx.sym",
        text="Page Not Found",
        status_code=404,
    )
    requestsmock.head(
        "https://s3.example.com/special/myprfx/xxx.pdb/"
        "44E4EC8C2F41492B9369D6B9A059577C2/xxx.sym",
        text="Page Not Found",
        status_code=404,
    )
    urls = (
        "https://s3.example.com/public/start/?access=public",
        # No trailing / in the path part
        "https://s3.example.com/also-public/prrffxx?access=public",
        # No prefix!
        "https://s3.example.com/special?access=public",
    )
    downloader = SymbolDownloader(urls, file_prefix="myprfx")
    assert not downloader.has_symbol(
        "xxx.pdb", "44E4EC8C2F41492B9369D6B9A059577C2", "xxx.sym"
    )
    requestsmock.get(
        "https://s3.example.com/public/start/myprfx/xxx.pdb/"
        "44E4EC8C2F41492B9369D6B9A059577C2/xxx.sym",
        text="Page Not Found",
        status_code=404,
    )
    requestsmock.get(
        "https://s3.example.com/also-public/prrffxx/myprfx/xxx.pdb/"
        "44E4EC8C2F41492B9369D6B9A059577C2/xxx.sym",
        text="Page Not Found",
        status_code=404,
    )
    requestsmock.get(
        "https://s3.example.com/special/myprfx/xxx.pdb/"
        "44E4EC8C2F41492B9369D6B9A059577C2/xxx.sym",
        text="Page Not Found",
        status_code=404,
    )
    stream = downloader.get_symbol_stream(
        "xxx.pdb", "44E4EC8C2F41492B9369D6B9A059577C2", "xxx.sym"
    )
    # Now try to stream it
    with pytest.raises(SymbolNotFound):
        list(stream)


def test_private_default_file_prefix(botomock, settings):
    """See doc string in test_public_default_file_prefix"""
    all_mock_calls = []

    def mock_api_call(self, operation_name, api_params):
        if operation_name == "ListObjectsV2":
            # the has_symbol() was called
            all_mock_calls.append(api_params["Prefix"])
            # pretend it doesn't exist
            return {}
        elif operation_name == "GetObject":
            # someone wants a stream
            all_mock_calls.append(api_params["Key"])
            parsed_response = {"Error": {"Code": "NoSuchKey", "Message": "Not found"}}
            raise ClientError(parsed_response, operation_name)
        else:
            raise NotImplementedError(operation_name)

    urls = (
        # Private URL with prefix and trailing /
        "https://s3.example.com/priv-bucket/borje/",
        # No trailing /
        "https://s3.example.com/also-priv-bucket/prrffxx",
        # No prefix
        "https://s3.example.com/some-bucket",
    )
    downloader = SymbolDownloader(urls, file_prefix="myprfx")
    with botomock(mock_api_call):
        assert not downloader.has_symbol(
            "xxx.pdb", "44E4EC8C2F41492B9369D6B9A059577C2", "xxx.sym"
        )
        assert len(all_mock_calls) == 3
        assert all_mock_calls[0].startswith("borje/myprfx/xxx.pdb")
        assert all_mock_calls[1].startswith("prrffxx/myprfx/xxx.pdb")
        assert all_mock_calls[2].startswith("myprfx/xxx.pdb")

        # reset the mutable recorder
        all_mock_calls = []
        stream = downloader.get_symbol_stream(
            "xxx.pdb", "44E4EC8C2F41492B9369D6B9A059577C2", "xxx.sym"
        )
        with pytest.raises(SymbolNotFound):
            next(stream)
        assert len(all_mock_calls) == 3
        assert all_mock_calls[0].startswith("borje/myprfx/xxx.pdb")
        assert all_mock_calls[1].startswith("prrffxx/myprfx/xxx.pdb")
        assert all_mock_calls[2].startswith("myprfx/xxx.pdb")
def test_get_url_private_dotted_name(botomock):
    # A dotted bucket name ("com.example.private") stays in the URL path.
    def mock_api_call(self, operation_name, api_params):
        assert operation_name == "ListObjectsV2"
        if api_params["Prefix"].endswith("xxx.sym"):
            # not found
            return {}
        return {"Contents": [{"Key": api_params["Prefix"]}]}

    urls = ("https://s3.example.com/com.example.private/prefix/",)
    downloader = SymbolDownloader(urls)
    with botomock(mock_api_call):
        url = downloader.get_symbol_url(
            "xul.pdb", "44E4EC8C2F41492B9369D6B9A059577C2", "xul.sym"
        )
        assert (
            "/com.example.private/prefix/v0/xul.pdb/"
            "44E4EC8C2F41492B9369D6B9A059577C2/xul.sym?"
        ) in url
        url = downloader.get_symbol_url(
            "xxx.pdb", "44E4EC8C2F41492B9369D6B9A059577C2", "xxx.sym"
        )
        assert url is None
        assert len(botomock.calls) == 2


def test_get_stream_public(requestsmock):
    # Public streaming yields the URL first, then the file's lines.
    requestsmock.get(
        "https://s3.example.com/public/prefix/v0/xul.pdb/"
        "44E4EC8C2F41492B9369D6B9A059577C2/xul.sym",
        content=b"LINE ONE\nLINE TWO\n",
    )
    requestsmock.get(
        "https://s3.example.com/public/prefix/v0/xxx.pdb/"
        "44E4EC8C2F41492B9369D6B9A059577C2/xxx.sym",
        content=b"Page Not Found",
        status_code=404,
    )
    urls = ("https://s3.example.com/public/prefix/?access=public",)
    downloader = SymbolDownloader(urls)
    stream = downloader.get_symbol_stream(
        "xul.pdb", "44E4EC8C2F41492B9369D6B9A059577C2", "xul.sym"
    )
    url = next(stream)
    assert url == (
        "https://s3.example.com/public/prefix/v0/xul.pdb/"
        "44E4EC8C2F41492B9369D6B9A059577C2/xul.sym"
    )
    lines = list(stream)
    assert lines == ["LINE ONE", "LINE TWO"]
    stream = downloader.get_symbol_stream(
        "xxx.pdb", "44E4EC8C2F41492B9369D6B9A059577C2", "xxx.sym"
    )
    with pytest.raises(SymbolNotFound):
        list(stream)


def test_get_stream_private(botomock):
    # Private streaming yields (bucket, key) first, then decoded lines.
    # Exercises \r\n line endings and a line longer than the read chunk.
    long_line = "x" * 600

    def mock_api_call(self, operation_name, api_params):
        assert operation_name == "GetObject"
        if api_params["Key"].endswith("xxx.sym"):
            parsed_response = {"Error": {"Code": "NoSuchKey", "Message": "Not found"}}
            raise ClientError(parsed_response, operation_name)
        return {"Body": BytesIO(bytes(f"line 1\r\nline 2\r\n{long_line}\r\n", "utf-8"))}

    urls = ("https://s3.example.com/private/prefix/",)
    downloader = SymbolDownloader(urls)
    with botomock(mock_api_call):
        stream = downloader.get_symbol_stream(
            "xul.pdb", "44E4EC8C2F41492B9369D6B9A059577C2", "xul.sym"
        )
        bucket_name, key = next(stream)
        assert bucket_name == "private"
        assert key == ("prefix/v0/xul.pdb/44E4EC8C2F41492B9369D6B9A059577C2/xul.sym")
        lines = list(stream)
        assert lines == ["line 1", "line 2", long_line]
        stream = downloader.get_symbol_stream(
            "xxx.pdb", "44E4EC8C2F41492B9369D6B9A059577C2", "xxx.sym"
        )
        with pytest.raises(SymbolNotFound):
            next(stream)


def test_get_stream_gzipped(botomock):
    # A ContentEncoding: gzip body is transparently decompressed.
    def mock_api_call(self, operation_name, api_params):
        payload = b"line 1\n" b"line 2\n" b"line 3\n"
        buffer_ = BytesIO()
        with GzipFile(fileobj=buffer_, mode="w") as f:
            f.write(payload)
        payload_gz = buffer_.getvalue()
        return {"ContentEncoding": "gzip", "Body": BytesIO(payload_gz)}

    urls = ("https://s3.example.com/private/prefix/",)
    downloader = SymbolDownloader(urls)
    with botomock(mock_api_call):
        stream = downloader.get_symbol_stream(
            "xul.pdb", "44E4EC8C2F41492B9369D6B9A059577C2", "xul.sym"
        )
        bucket_name, key = next(stream)
        assert bucket_name == "private"
        assert key == ("prefix/v0/xul.pdb/44E4EC8C2F41492B9369D6B9A059577C2/xul.sym")
        lines = list(stream)
        assert lines == ["line 1", "line 2", "line 3"]


def test_get_stream_gzipped_but_not_gzipped(botomock):
    # A body falsely labelled gzip is skipped as if the symbol didn't exist.
    def mock_api_call(self, operation_name, api_params):
        payload = b"line 1\n" b"line 2\n" b"line 3\n"
        return {
            "ContentEncoding": "gzip",  # <-- note!
            "Body": BytesIO(payload),  # but it's not gzipped!
        }

    urls = ("https://s3.example.com/private/prefix/",)
    downloader = SymbolDownloader(urls)
    with botomock(mock_api_call):
        stream = downloader.get_symbol_stream(
            "xul.pdb", "44E4EC8C2F41492B9369D6B9A059577C2", "xul.sym"
        )
        bucket_name, key = next(stream)
        assert bucket_name == "private"
        assert key == ("prefix/v0/xul.pdb/44E4EC8C2F41492B9369D6B9A059577C2/xul.sym")
        # But when you start to stream it will realize that the file is not
        # actually gzipped and SymbolDownloader will automatically just skip
        # that file as if it doesn't exist.
        with pytest.raises(SymbolNotFound):
            next(stream)
def test_get_stream_private_other_clienterrors(botomock):
    # Non-NoSuchKey errors while streaming must bubble up as ClientError.
    def mock_api_call(self, operation_name, api_params):
        assert operation_name == "GetObject"
        parsed_response = {"Error": {"Code": "403", "Message": "Forbidden"}}
        raise ClientError(parsed_response, operation_name)

    urls = ("https://s3.example.com/private/prefix/",)
    downloader = SymbolDownloader(urls)
    with botomock(mock_api_call):
        stream = downloader.get_symbol_stream(
            "xul.pdb", "44E4EC8C2F41492B9369D6B9A059577C2", "xul.sym"
        )
        with pytest.raises(ClientError):
            next(stream)


def test_multiple_urls_public_then_private(requestsmock, botomock):
    # With a public URL listed before a private one, lookups consult both
    # sources in order.
    def mock_api_call(self, operation_name, api_params):
        assert operation_name == "ListObjectsV2"
        if api_params["Prefix"].endswith("xxx.sym"):
            # not found
            return {}
        # found
        return {"Contents": [{"Key": api_params["Prefix"]}]}

    requestsmock.head(
        "https://s3.example.com/public/prefix/v0/xul.pdb/"
        "44E4EC8C2F41492B9369D6B9A059577C2/xul.sym",
        text="",
    )
    requestsmock.head(
        "https://s3.example.com/public/prefix/v0/xxx.pdb/"
        "44E4EC8C2F41492B9369D6B9A059577C2/xxx.sym",
        text="Page Not Found",
        status_code=404,
    )
    urls = (
        "https://s3.example.com/public/prefix/?access=public",
        "https://s3.example.com/private/prefix/",
    )
    downloader = SymbolDownloader(urls)
    with botomock(mock_api_call):
        assert downloader.has_symbol(
            "xul.pdb", "44E4EC8C2F41492B9369D6B9A059577C2", "xul.sym"
        )
        assert not downloader.has_symbol(
            "xxx.pdb", "44E4EC8C2F41492B9369D6B9A059577C2", "xxx.sym"
        )


def test_multiple_urls_private_then_public(requestsmock, botomock):
    # Same as above with the private URL first.
    def mock_api_call(self, operation_name, api_params):
        assert operation_name == "ListObjectsV2"
        if api_params["Prefix"].endswith("xxx.sym"):
            # not found
            return {}
        # found
        return {"Contents": [{"Key": api_params["Prefix"]}]}

    requestsmock.head(
        "https://s3.example.com/public/prefix/v0/xul.pdb/"
        "44E4EC8C2F41492B9369D6B9A059577C2/xul.sym",
        text="",
    )
    requestsmock.head(
        "https://s3.example.com/public/prefix/v0/xxx.pdb/"
        "44E4EC8C2F41492B9369D6B9A059577C2/xxx.sym",
        text="Page Not Found",
        status_code=404,
    )
    urls = (
        "https://s3.example.com/private/prefix/",
        "https://s3.example.com/public/prefix/?access=public",
    )
    downloader = SymbolDownloader(urls)
    with botomock(mock_api_call):
        assert downloader.has_symbol(
            "xul.pdb", "44E4EC8C2F41492B9369D6B9A059577C2", "xul.sym"
        )
        assert not downloader.has_symbol(
            "xxx.pdb", "44E4EC8C2F41492B9369D6B9A059577C2", "xxx.sym"
        )


def test_has_public_case_insensitive_debugid(requestsmock):
    # Lower-case debug IDs must match the upper-cased URL form.
    requestsmock.head(
        "https://s3.example.com/public/prefix/v0/xul.pdb/"
        "44E4EC8C2F41492B9369D6B9A059577C2/xul.sym",
        text="",
    )
    urls = ("https://s3.example.com/public/prefix/?access=public",)
    downloader = SymbolDownloader(urls)
    assert downloader.has_symbol(
        "xul.pdb", "44e4ec8c2f41492b9369d6b9a059577c2", "xul.sym"
    )


def test_has_private_case_insensitive_debugid(botomock):
    # Same upper-casing for private lookups (asserted inside the mock).
    def mock_api_call(self, operation_name, api_params):
        assert operation_name == "ListObjectsV2"
        assert "44E4EC8C2F41492B9369D6B9A059577C2" in api_params["Prefix"]
        # found
        return {"Contents": [{"Key": api_params["Prefix"]}]}

    urls = ("https://s3.example.com/private/prefix/",)
    downloader = SymbolDownloader(urls)
    with botomock(mock_api_call):
        assert downloader.has_symbol(
            "xul.pdb", "44e4ec8c2f41492b9369d6b9a059577c2", "xul.sym"
        )


def test_get_stream_public_content_encode_error(requestsmock):
    # A ContentDecodingError while streaming is treated as "not found".
    class BreakingStreamHTTPResponse(HTTPResponse):
        def stream(self, *a, **kwargs):
            raise ContentDecodingError("something terrible!")

    requestsmock.get(
        "https://s3.example.com/public/prefix/v0/xul.pdb/"
        "44E4EC8C2F41492B9369D6B9A059577C2/xul.sym",
        raw=BreakingStreamHTTPResponse(status=200),
    )
    urls = ("https://s3.example.com/public/prefix/?access=public",)
    downloader = SymbolDownloader(urls)
    stream = downloader.get_symbol_stream(
        "xul.pdb", "44e4ec8c2f41492b9369d6b9a059577c2", "xul.sym"
    )
    # The URL exists (200 OK), but as soon as streaming starts the content
    # encoding turns out to be broken; the downloader treats that the same
    # as the file not existing.  With every source exhausted, it finally
    # raises SymbolNotFound.
    with pytest.raises(SymbolNotFound):
        list(stream)
| mozilla-services/tecken | tecken/tests/test_symboldownloader.py | Python | mpl-2.0 | 24,201 |
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import absolute_import
from __future__ import unicode_literals
from .models import Tree, Repository, Locale, Forest
from django.contrib import admin
class RepositoryAdmin(admin.ModelAdmin):
    # Changelist shows name and primary key; the changesets field is
    # excluded from the edit form; admin search matches on name.
    list_display = ('name', 'id',)
    exclude = ('changesets',)
    search_fields = ('name',)


# Register the life models; only Repository gets a customized admin.
admin.site.register(Locale)
admin.site.register(Repository, RepositoryAdmin)
admin.site.register(Tree)
admin.site.register(Forest)
| mozilla/elmo | apps/life/admin.py | Python | mpl-2.0 | 636 |
from Tkinter import *
class Interface:
    """Celsius-to-Fahrenheit converter window.

    Builds the widget grid inside *contenedor* and wires the "Convertir"
    button to the conversion callback.
    """

    def __init__(self, contenedor):
        # Holds the Fahrenheit result; bound to the output label (e6) below.
        self.textoE3 = StringVar()
        # NOTE(review): "Farenheit" is a typo for "Fahrenheit" in the UI
        # strings; left untouched so the visible text is unchanged.
        self.e1 = Label(contenedor, text = "Convertir Celsius a Farenheit", fg = "black")
        self.e2 = Label(contenedor, text = "Celsius", fg = "black")
        self.e3 = Label(contenedor, text = "Farenheit", fg = "black")
        self.e4 = Button(contenedor, text = "Convertir", fg = "black", bg = "cyan", command=self.hacerConversion)
        self.e5 = Entry(contenedor, fg = "black", bg = "white")
        self.e6 = Label(contenedor, text ="", fg = "black", textvariable=self.textoE3)
        self.e1.grid(column=0,row=0,columnspan=2)
        self.e2.grid(column=0,row=1)
        self.e3.grid(column=0,row=2)
        self.e4.grid(column=1,row=3,columnspan=2)
        self.e5.grid(column=1,row=1)
        self.e6.grid(column=1,row=2)

    def hacerConversion(self):
        """Convert the Celsius entry to Fahrenheit and display the result.

        Fix: a non-numeric entry used to raise an uncaught ValueError in the
        Tkinter callback; it now displays "Error" instead.
        """
        try:
            cel = float(self.e5.get())
        except ValueError:
            self.textoE3.set("Error")
            return
        far = (cel*1.8)+32
        self.textoE3.set(far)
# Build the root window, attach the converter UI, and enter the Tk event loop.
ventana = Tk()
miInterface = Interface(ventana)
ventana.mainloop()
| heliogabalo/The-side-of-the-source | Codigo/Python/ventana5.py | Python | mpl-2.0 | 1,053 |
from __future__ import unicode_literals
import pytest
from django.core.exceptions import MiddlewareNotUsed
from django.http import HttpResponse
from django.test import RequestFactory
from mock import MagicMock, patch
from ..middleware import (
ForceAnonymousSessionMiddleware,
RestrictedEndpointsMiddleware,
RestrictedWhiteNoiseMiddleware,
SetRemoteAddrFromForwardedFor,
WaffleWithCookieDomainMiddleware,
WhiteNoiseMiddleware,
)
@pytest.mark.parametrize('path', ('/missing_url', '/missing_url/'))
def test_slash_middleware_keep_404(client, db, path):
    '''The SlashMiddleware retains 404s.'''
    response = client.get(path)
    assert response.status_code == 404


def test_slash_middleware_removes_slash(client, db):
    '''The SlashMiddleware fixes a URL that shouldn't have a trailing slash.'''
    response = client.get('/contribute.json/')
    assert response.status_code == 301
    assert response['Location'].endswith('/contribute.json')


@pytest.mark.parametrize('path', ('/admin', '/en-US'))
def test_slash_middleware_adds_slash(path, client, db):
    '''The SlashMiddleware fixes a URL that should have a trailing slash.'''
    response = client.get(path)
    assert response.status_code == 301
    assert response['Location'].endswith(path + '/')


def test_slash_middleware_retains_querystring(client, db):
    '''The SlashMiddleware handles encoded querystrings.'''
    response = client.get('/contribute.json/?xxx=%C3%83')
    assert response.status_code == 301
    assert response['Location'].endswith('/contribute.json?xxx=%C3%83')


@pytest.mark.parametrize(
    'forwarded_for,remote_addr',
    (('1.1.1.1', '1.1.1.1'),
     ('2.2.2.2', '2.2.2.2'),
     ('3.3.3.3, 4.4.4.4', '3.3.3.3')))
def test_set_remote_addr_from_forwarded_for(rf, forwarded_for, remote_addr):
    '''SetRemoteAddrFromForwardedFor parses the X-Forwarded-For Header.'''
    # NOTE(review): the rebinding below shadows the rf fixture argument;
    # harmless, but the fixture could be used directly.
    rf = RequestFactory()
    middleware = SetRemoteAddrFromForwardedFor(lambda req: None)
    request = rf.get('/', HTTP_X_FORWARDED_FOR=forwarded_for)
    middleware(request)
    assert request.META['REMOTE_ADDR'] == remote_addr


def test_force_anonymous_session_middleware(rf, settings):
    # A pre-existing session cookie must still yield an anonymous, key-less
    # session, and the wrapped response must never be touched.
    request = rf.get('/foo')
    request.COOKIES[settings.SESSION_COOKIE_NAME] = 'totallyfake'
    mock_response = MagicMock()
    middleware = ForceAnonymousSessionMiddleware(lambda req: mock_response)
    response = middleware(request)
    assert request.session
    assert request.session.session_key is None
    assert not response.method_calls
@pytest.mark.parametrize(
    'host,key,expected',
    (('beta', 'BETA_HOST', 'kuma.urls_beta'),
     ('beta-origin', 'BETA_ORIGIN', 'kuma.urls_beta'),
     ('demos', 'ATTACHMENT_HOST', 'kuma.urls_untrusted'),
     ('demos-origin', 'ATTACHMENT_ORIGIN', 'kuma.urls_untrusted')),
    ids=('beta', 'beta-origin', 'attachment', 'attachment-origin')
)
def test_restricted_endpoints_middleware(rf, settings, host, key, expected):
    # Requests to a special host get a restricted urlconf; normal hosts
    # keep the default (no request.urlconf set).
    setattr(settings, key, host)
    settings.ENABLE_RESTRICTIONS_BY_HOST = True
    settings.ALLOWED_HOSTS.append(host)
    middleware = RestrictedEndpointsMiddleware(lambda req: None)
    request = rf.get('/foo', HTTP_HOST=host)
    middleware(request)
    assert request.urlconf == expected

    request = rf.get('/foo', HTTP_HOST='testserver')
    middleware(request)
    assert not hasattr(request, 'urlconf')


def test_restricted_endpoints_middleware_when_disabled(settings):
    # The middleware removes itself when restrictions are disabled.
    settings.ENABLE_RESTRICTIONS_BY_HOST = False
    with pytest.raises(MiddlewareNotUsed):
        RestrictedEndpointsMiddleware(lambda req: None)


def test_restricted_whitenoise_middleware(rf, settings):
    # WhiteNoise static serving is skipped on the attachment host while
    # restrictions are on, and active everywhere otherwise.
    settings.ATTACHMENT_HOST = 'demos'
    settings.ENABLE_RESTRICTIONS_BY_HOST = True
    settings.ALLOWED_HOSTS.append('demos')
    middleware = RestrictedWhiteNoiseMiddleware(lambda req: None)
    sentinel = object()

    with patch.object(WhiteNoiseMiddleware, 'process_request',
                      return_value=sentinel):
        request = rf.get('/foo', HTTP_HOST='demos')
        assert middleware(request) is None

        request = rf.get('/foo', HTTP_HOST='testserver')
        assert middleware(request) is sentinel

        settings.ENABLE_RESTRICTIONS_BY_HOST = False
        request = rf.get('/foo', HTTP_HOST='demos')
        assert middleware(request) is sentinel


def test_waffle_cookie_domain_middleware(rf, settings):
    # Waffle flag cookies receive WAFFLE_COOKIE_DOMAIN; cookies set by the
    # wrapped response keep their own domain.
    settings.WAFFLE_COOKIE = 'dwf_%s'
    settings.WAFFLE_COOKIE_DOMAIN = 'mdn.dev'
    resp = HttpResponse()
    resp.set_cookie('some_key', 'some_value', domain=None)
    resp.set_cookie('another_key', 'another_value', domain='another.domain')
    middleware = WaffleWithCookieDomainMiddleware(lambda req: resp)
    request = rf.get('/foo')
    request.waffles = {
        'contrib_beta': (True, False),
        'developer_needs': (True, False),
    }
    response = middleware(request)
    assert response.cookies['some_key']['domain'] == ''
    assert response.cookies['another_key']['domain'] == 'another.domain'
    assert response.cookies['dwf_contrib_beta']['domain'] == 'mdn.dev'
    assert response.cookies['dwf_developer_needs']['domain'] == 'mdn.dev'
| SphinxKnight/kuma | kuma/core/tests/test_middleware.py | Python | mpl-2.0 | 5,154 |
# coding: utf-8
"""
MIT License
Copyright (c) 2019 Claude SIMON (https://q37.info/s/rmnmqd49)
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
import workshop._.z_2a as workshop
import workshop.fr._ as _
from workshop.fr.turtle import *
def go():
    """Entry point: launch this exercise through the shared workshop runner."""
    title = _.DEFAULT_TITLE
    workshop.main(None, title)
| epeios-q37/epeios | other/exercises/basics/workshop/fr/z_2a.py | Python | agpl-3.0 | 1,301 |
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-Today INECO LTD,. PART. (<http://www.ineco.co.th>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import product
import wizard
import sale
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| jeffery9/mixprint_addons | ineco_sale_make_purchase/__init__.py | Python | agpl-3.0 | 1,105 |
# The Hazard Library
# Copyright (C) 2012-2016 GEM Foundation
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Module :mod:`openquake.hazardlib.source.base` defines a base class for
seismic sources.
"""
import abc
from openquake.baselib.slots import with_slots
from openquake.baselib.python3compat import with_metaclass
@with_slots
class BaseSeismicSource(with_metaclass(abc.ABCMeta)):
    """
    Base class representing a seismic source, that is a structure generating
    earthquake ruptures.
    :param source_id:
        Some (numeric or literal) source identifier. Supposed to be unique
        within the source model.
    :param name:
        String, a human-readable name of the source.
    :param tectonic_region_type:
        Source's tectonic regime. See :class:`openquake.hazardlib.const.TRT`.
    """
    _slots_ = ['source_id', 'name', 'tectonic_region_type',
               'src_group_id', 'num_ruptures', 'seed', 'id']
    # Names of the parameter modifications a subclass supports; every name
    # ``m`` listed must have a matching ``modify_m()`` method (see `modify`).
    MODIFICATIONS = abc.abstractproperty()
    RUPTURE_WEIGHT = 1.  # overridden in PointSource
    @property
    def weight(self):
        """
        Determine the source weight from the number of ruptures, by
        multiplying with the scale factor RUPTURE_WEIGHT
        """
        return self.num_ruptures * self.RUPTURE_WEIGHT
    def __init__(self, source_id, name, tectonic_region_type):
        self.source_id = source_id
        self.name = name
        self.tectonic_region_type = tectonic_region_type
        self.src_group_id = None  # set by the engine
        self.num_ruptures = 0  # set by the engine
        self.seed = None  # set by the engine
        self.id = None  # set by the engine
    @abc.abstractmethod
    def iter_ruptures(self):
        """
        Get a generator object that yields probabilistic ruptures the source
        consists of.
        :returns:
            Generator of instances of sublclass of :class:
            `~openquake.hazardlib.source.rupture.BaseProbabilisticRupture`.
        """
    @abc.abstractmethod
    def count_ruptures(self):
        """
        Return the number of ruptures that will be generated by the source.
        """
    @abc.abstractmethod
    def get_min_max_mag(self):
        """
        Return minimum and maximum magnitudes of the ruptures generated
        by the source.
        """
    @abc.abstractmethod
    def get_rupture_enclosing_polygon(self, dilation=0):
        """
        Get a polygon which encloses all the ruptures generated by the source.
        The rupture enclosing polygon is meant to be used in all hazard
        calculators to filter out sources whose ruptures the user wants
        to be neglected because they are too far from the locations
        of interest.
        For performance reasons, the ``get_rupture_enclosing_polygon()``
        should compute the polygon, without creating all the ruptures.
        The rupture enclosing polygon may not be necessarily the *minimum*
        enclosing polygon, but must guarantee that all ruptures are within
        the polygon.
        This method must be implemented by subclasses.
        :param dilation:
            A buffer distance in km to extend the polygon borders to.
        :returns:
            Instance of :class:`openquake.hazardlib.geo.polygon.Polygon`.
        """
    def filter_sites_by_distance_to_source(self, integration_distance, sites):
        """
        Filter out sites from the collection that are further from the source
        than some arbitrary threshold.
        :param integration_distance:
            Distance in km representing a threshold: sites that are further
            than that distance from the closest rupture produced by the source
            should be excluded.
        :param sites:
            Instance of :class:`openquake.hazardlib.site.SiteCollection`
            to filter.
        :returns:
            Filtered :class:`~openquake.hazardlib.site.SiteCollection`.
            Method can be overridden by subclasses in order to achieve
            higher performance for a specific typology. Base class method calls
            :meth:`get_rupture_enclosing_polygon` with ``integration_distance``
            as a dilation value and then filters site collection by checking
            :meth:
            `containment <openquake.hazardlib.geo.polygon.Polygon.intersects>`
            of site locations.
        The main criteria for this method to decide whether a site should be
        filtered out or not is the minimum distance between the site and all
        the ruptures produced by the source. If at least one rupture is closer
        (in terms of great circle distance between surface projections) than
        integration distance to a site, it should not be filtered out. However,
        it is important not to make this method too computationally intensive.
        If short-circuits are taken, false positives are generally better than
        false negatives (it's better not to filter a site out if there is some
        uncertainty about its distance).
        """
        # Dilate the enclosing polygon by the integration distance and keep
        # only the sites whose mesh points fall inside it.
        rup_enc_poly = self.get_rupture_enclosing_polygon(integration_distance)
        return sites.filter(rup_enc_poly.intersects(sites.mesh))
    def modify(self, modification, parameters):
        """
        Apply a single modificaton to the source parameters
        Reflects the modification method and calls it passing ``parameters``
        as keyword arguments.
        Modifications can be applied one on top of another. The logic
        of stacking modifications is up to a specific source implementation.
        :param modification:
            String name representing the type of modification.
        :param parameters:
            Dictionary of parameters needed for modification.
        :raises ValueError:
            If ``modification`` is missing from the attribute `MODIFICATIONS`.
        """
        if modification not in self.MODIFICATIONS:
            raise ValueError('Modification %s is not supported by %s' %
                             (modification, type(self).__name__))
        # Dispatch by name: 'xyz' -> self.modify_xyz(**parameters)
        meth = getattr(self, 'modify_%s' % modification)
        meth(**parameters)
@with_slots
class ParametricSeismicSource(with_metaclass(abc.ABCMeta, BaseSeismicSource)):
    """
    Parametric Seismic Source generates earthquake ruptures from source
    parameters, and associated probabilities of occurrence are defined through
    a magnitude frequency distribution and a temporal occurrence model.
    :param mfd:
        Magnitude-Frequency distribution for the source.
        See :mod:`openquake.hazardlib.mfd`.
    :param rupture_mesh_spacing:
        The desired distance between two adjacent points in source's
        ruptures' mesh, in km. Mainly this parameter allows to balance
        the trade-off between time needed to compute the :meth:`distance
        <openquake.hazardlib.geo.surface.base.BaseQuadrilateralSurface.get_min_distance>`
        between the rupture surface and a site and the precision of that
        computation.
    :param magnitude_scaling_relationship:
        Instance of subclass of
        :class:`openquake.hazardlib.scalerel.base.BaseMSR` to
        describe how does the area of the rupture depend on magnitude and rake.
    :param rupture_aspect_ratio:
        Float number representing how much source's ruptures are more wide
        than tall. Aspect ratio of 1 means ruptures have square shape,
        value below 1 means ruptures stretch vertically more than horizontally
        and vice versa.
    :param temporal_occurrence_model:
        Instance of
        :class:`openquake.hazardlib.tom.PoissonTOM` defining temporal occurrence
        model for calculating rupture occurrence probabilities
    :raises ValueError:
        If either rupture aspect ratio or rupture mesh spacing is not positive
        (if not None).
    """
    _slots_ = BaseSeismicSource._slots_ + '''mfd rupture_mesh_spacing
    magnitude_scaling_relationship rupture_aspect_ratio
    temporal_occurrence_model'''.split()
    def __init__(self, source_id, name, tectonic_region_type, mfd,
                 rupture_mesh_spacing, magnitude_scaling_relationship,
                 rupture_aspect_ratio, temporal_occurrence_model):
        super(ParametricSeismicSource, self). \
            __init__(source_id, name, tectonic_region_type)
        # Validation accepts None (meaning "unspecified") but rejects
        # zero/negative values for both geometric parameters.
        if rupture_mesh_spacing is not None and not rupture_mesh_spacing > 0:
            raise ValueError('rupture mesh spacing must be positive')
        if rupture_aspect_ratio is not None and not rupture_aspect_ratio > 0:
            raise ValueError('rupture aspect ratio must be positive')
        self.mfd = mfd
        self.rupture_mesh_spacing = rupture_mesh_spacing
        self.magnitude_scaling_relationship = magnitude_scaling_relationship
        self.rupture_aspect_ratio = rupture_aspect_ratio
        self.temporal_occurrence_model = temporal_occurrence_model
    def get_annual_occurrence_rates(self, min_rate=0):
        """
        Get a list of pairs "magnitude -- annual occurrence rate".
        The list is taken from assigned MFD object
        (see :meth:`openquake.hazardlib.mfd.base.BaseMFD.get_annual_occurrence_rates`)
        with simple filtering by rate applied.
        :param min_rate:
            A non-negative value to filter magnitudes by minimum annual
            occurrence rate. Only magnitudes with rates greater than that
            are included in the result list.
        :returns:
            A list of two-item tuples -- magnitudes and occurrence rates.
        """
        # NOTE: with the default min_rate=0, magnitudes whose rate is exactly
        # zero are dropped; pass min_rate=None to disable filtering entirely.
        return [(mag, occ_rate)
                for (mag, occ_rate) in self.mfd.get_annual_occurrence_rates()
                if min_rate is None or occ_rate > min_rate]
    def get_min_max_mag(self):
        """
        Get the minimum and maximum magnitudes of the ruptures generated
        by the source from the underlying MFD.
        """
        return self.mfd.get_min_max_mag()
    def __repr__(self):
        """
        String representation of a source, displaying the source class name
        and the source id.
        """
        return '<%s %s>' % (self.__class__.__name__, self.source_id)
| vup1120/oq-hazardlib | openquake/hazardlib/source/base.py | Python | agpl-3.0 | 10,755 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import logging
from datetime import timedelta
from functools import partial
import psycopg2
import pytz
from odoo import api, fields, models, tools, _
from odoo.tools import float_is_zero
from odoo.exceptions import UserError
from odoo.http import request
from odoo.addons import decimal_precision as dp
_logger = logging.getLogger(__name__)
class PosOrder(models.Model):
    # One record per POS receipt: holds order lines, payment statement lines,
    # and the logic to turn them into invoices / accounting entries.
    _name = "pos.order"
    _description = "Point of Sale Orders"
    _order = "id desc"  # newest orders first in list views
@api.model
def _amount_line_tax(self, line, fiscal_position_id):
taxes = line.tax_ids.filtered(lambda t: t.company_id.id == line.order_id.company_id.id)
if fiscal_position_id:
taxes = fiscal_position_id.map_tax(taxes, line.product_id, line.order_id.partner_id)
price = line.price_unit * (1 - (line.discount or 0.0) / 100.0)
taxes = taxes.compute_all(price, line.order_id.pricelist_id.currency_id, line.qty, product=line.product_id, partner=line.order_id.partner_id or False)['taxes']
return sum(tax.get('amount', 0.0) for tax in taxes)
@api.model
def _order_fields(self, ui_order):
process_line = partial(self.env['pos.order.line']._order_line_fields, session_id=ui_order['pos_session_id'])
return {
'name': ui_order['name'],
'user_id': ui_order['user_id'] or False,
'session_id': ui_order['pos_session_id'],
'lines': [process_line(l) for l in ui_order['lines']] if ui_order['lines'] else False,
'pos_reference': ui_order['name'],
'partner_id': ui_order['partner_id'] or False,
'date_order': ui_order['creation_date'],
'fiscal_position_id': ui_order['fiscal_position_id'],
'pricelist_id': ui_order['pricelist_id'],
}
def _payment_fields(self, ui_paymentline):
payment_date = ui_paymentline['name']
payment_date = fields.Date.context_today(self, fields.Datetime.from_string(payment_date))
return {
'amount': ui_paymentline['amount'] or 0.0,
'payment_date': payment_date,
'statement_id': ui_paymentline['statement_id'],
'payment_name': ui_paymentline.get('note', False),
'journal': ui_paymentline['journal_id'],
}
    # This deals with orders that belong to a closed session. In order
    # to recover from this situation we create a new rescue session,
    # making it obvious that something went wrong.
    # A new, separate, rescue session is preferred for every such recovery,
    # to avoid adding unrelated orders to live sessions.
    def _get_valid_session(self, order):
        """Return an open session in which ``order`` (a frontend export dict)
        can be saved, creating a dedicated rescue session when needed.

        :param order: dict from the POS frontend; reads ``pos_session_id``,
            ``name`` and ``amount_total``.
        :return: an open ``pos.session`` record.
        """
        PosSession = self.env['pos.session']
        closed_session = PosSession.browse(order['pos_session_id'])
        _logger.warning('session %s (ID: %s) was closed but received order %s (total: %s) belonging to it',
                        closed_session.name,
                        closed_session.id,
                        order['name'],
                        order['amount_total'])
        # Reuse an already-open rescue session for the same config, if any.
        rescue_session = PosSession.search([
            ('state', 'not in', ('closed', 'closing_control')),
            ('rescue', '=', True),
            ('config_id', '=', closed_session.config_id.id),
        ], limit=1)
        if rescue_session:
            _logger.warning('reusing recovery session %s for saving order %s', rescue_session.name, order['name'])
            return rescue_session
        _logger.warning('attempting to create recovery session for saving order %s', order['name'])
        new_session = PosSession.create({
            'config_id': closed_session.config_id.id,
            'name': _('(RESCUE FOR %(session)s)') % {'session': closed_session.name},
            'rescue': True,  # avoid conflict with live sessions
        })
        # bypass opening_control (necessary when using cash control)
        new_session.action_pos_session_open()
        return new_session
    def _match_payment_to_invoice(self, order):
        """Trim the payment lines of a frontend ``order`` dict (in place) so
        that they sum to exactly the order total, and clear the change amount.

        Used when invoicing: the invoice must not be over-paid, so the
        payment that crosses the total is capped and any later ones dropped.
        """
        pricelist_id = self.env['product.pricelist'].browse(order.get('pricelist_id'))
        account_precision = pricelist_id.currency_id.decimal_places
        # ignore orders with an amount_paid of 0 because those are returns through the POS
        if not float_is_zero(order['amount_return'], account_precision) and not float_is_zero(order['amount_paid'], account_precision):
            cur_amount_paid = 0
            payments_to_keep = []
            for payment in order.get('statement_ids'):
                # payment is an ORM (0, 0, vals) triple; vals at index 2.
                if cur_amount_paid + payment[2]['amount'] > order['amount_total']:
                    # Cap this payment at the remaining amount and stop.
                    payment[2]['amount'] = order['amount_total'] - cur_amount_paid
                    payments_to_keep.append(payment)
                    break
                cur_amount_paid += payment[2]['amount']
                payments_to_keep.append(payment)
            order['statement_ids'] = payments_to_keep
            order['amount_return'] = 0
    @api.model
    def _process_order(self, pos_order):
        """Create a ``pos.order`` from a frontend export dict, register its
        payments (including change returned in cash) and keep the session's
        sequence counter in sync.

        :param pos_order: dict exported by the POS frontend.
        :return: the newly created ``pos.order`` record.
        """
        pos_session = self.env['pos.session'].browse(pos_order['pos_session_id'])
        # Orders for an already-closed session are rerouted to a rescue session.
        if pos_session.state == 'closing_control' or pos_session.state == 'closed':
            pos_order['pos_session_id'] = self._get_valid_session(pos_order).id
        order = self.create(self._order_fields(pos_order))
        prec_acc = order.pricelist_id.currency_id.decimal_places
        journal_ids = set()
        for payments in pos_order['statement_ids']:
            # Skip zero-amount payment lines; remember journals used.
            if not float_is_zero(payments[2]['amount'], precision_digits=prec_acc):
                order.add_payment(self._payment_fields(payments[2]))
                journal_ids.add(payments[2]['journal_id'])
        if pos_session.sequence_number <= pos_order['sequence_number']:
            pos_session.write({'sequence_number': pos_order['sequence_number'] + 1})
            pos_session.refresh()
        # Record the change given back to the customer as a negative payment.
        if not float_is_zero(pos_order['amount_return'], prec_acc):
            cash_journal_id = pos_session.cash_journal_id.id
            if not cash_journal_id:
                # Select for change one of the cash journals used in this
                # payment
                cash_journal = self.env['account.journal'].search([
                    ('type', '=', 'cash'),
                    ('id', 'in', list(journal_ids)),
                ], limit=1)
                if not cash_journal:
                    # If none, select for change one of the cash journals of the POS
                    # This is used for example when a customer pays by credit card
                    # an amount higher than total amount of the order and gets cash back
                    cash_journal = [statement.journal_id for statement in pos_session.statement_ids if statement.journal_id.type == 'cash']
                    if not cash_journal:
                        raise UserError(_("No cash statement found for this session. Unable to record returned cash."))
                cash_journal_id = cash_journal[0].id
            order.add_payment({
                'amount': -pos_order['amount_return'],
                'payment_date': fields.Datetime.now(),
                'payment_name': _('return'),
                'journal': cash_journal_id,
            })
        return order
def _prepare_analytic_account(self, line):
'''This method is designed to be inherited in a custom module'''
return False
def _create_account_move(self, dt, ref, journal_id, company_id):
date_tz_user = fields.Datetime.context_timestamp(self, fields.Datetime.from_string(dt))
date_tz_user = fields.Date.to_string(date_tz_user)
return self.env['account.move'].sudo().create({'ref': ref, 'journal_id': journal_id, 'date': date_tz_user})
def _prepare_invoice(self):
"""
Prepare the dict of values to create the new invoice for a pos order.
"""
invoice_type = 'out_invoice' if self.amount_total >= 0 else 'out_refund'
return {
'name': self.name,
'origin': self.name,
'account_id': self.partner_id.property_account_receivable_id.id,
'journal_id': self.session_id.config_id.invoice_journal_id.id,
'company_id': self.company_id.id,
'type': invoice_type,
'reference': self.name,
'partner_id': self.partner_id.id,
'comment': self.note or '',
# considering partner's sale pricelist's currency
'currency_id': self.pricelist_id.currency_id.id,
'user_id': self.env.uid,
}
@api.model
def _get_account_move_line_group_data_type_key(self, data_type, values):
"""
Return a tuple which will be used as a key for grouping account
move lines in _create_account_move_line method.
:param data_type: 'product', 'tax', ....
:param values: account move line values
:return: tuple() representing the data_type key
"""
if data_type == 'product':
return ('product',
values['partner_id'],
(values['product_id'], tuple(values['tax_ids'][0][2]), values['name']),
values['analytic_account_id'],
values['debit'] > 0)
elif data_type == 'tax':
return ('tax',
values['partner_id'],
values['tax_line_id'],
values['debit'] > 0)
elif data_type == 'counter_part':
return ('counter_part',
values['partner_id'],
values['account_id'],
values['debit'] > 0)
return False
    def _action_create_invoice_line(self, line=False, invoice_id=False):
        """Create one ``account.invoice.line`` for POS order line ``line``.

        The line is first instantiated in memory with ``new()`` so that
        ``_onchange_product_id`` fills accounts and taxes exactly as the
        invoice form would, then converted back to create() values.
        """
        InvoiceLine = self.env['account.invoice.line']
        inv_name = line.product_id.name_get()[0][1]
        inv_line = {
            'invoice_id': invoice_id,
            # A refund order (negative total) invoices negative quantities.
            'quantity': line.qty if self.amount_total >= 0 else -line.qty,
            'product_id': line.product_id.id,
            'account_analytic_id': self._prepare_analytic_account(line),
            'name': inv_name,
        }
        # new() trick: build a virtual record so the onchange can run.
        invoice_line = InvoiceLine.sudo().new(inv_line)
        invoice_line._onchange_product_id()
        invoice_line.invoice_line_tax_ids = invoice_line.invoice_line_tax_ids.filtered(lambda t: t.company_id.id == line.order_id.company_id.id).ids
        fiscal_position_id = line.order_id.fiscal_position_id
        if fiscal_position_id:
            invoice_line.invoice_line_tax_ids = fiscal_position_id.map_tax(invoice_line.invoice_line_tax_ids, line.product_id, line.order_id.partner_id)
        invoice_line.invoice_line_tax_ids = invoice_line.invoice_line_tax_ids.ids
        # We convert a new id object back to a dictionary to write to
        # bridge between old and new api
        inv_line = invoice_line._convert_to_write({name: invoice_line[name] for name in invoice_line._cache})
        inv_line.update(price_unit=line.price_unit, discount=line.discount, name=inv_name)
        return InvoiceLine.sudo().create(inv_line)
    def _create_account_move_line(self, session=None, move=None):
        """Create the journal items for the orders in ``self``, grouped (or
        not, depending on the POS config) by product, tax and counterpart.

        :param session: optional ``pos.session``; when given, every order must
            belong to it and its config drives grouping/rounding behaviour.
        :param move: optional ``account.move`` to append to; when None, a
            move is created on the first order processed.
        """
        def _flatten_tax_and_children(taxes, group_done=None):
            # Recursively expand 'group' taxes into the flat recordset of
            # their children; each group is visited at most once.
            children = self.env['account.tax']
            if group_done is None:
                group_done = set()
            for tax in taxes.filtered(lambda t: t.amount_type == 'group'):
                if tax.id not in group_done:
                    group_done.add(tax.id)
                    children |= _flatten_tax_and_children(tax.children_tax_ids, group_done)
            return taxes + children
        # Tricky, via the workflow, we only have one id in the ids variable
        """Create a account move line of order grouped by products or not."""
        IrProperty = self.env['ir.property']
        ResPartner = self.env['res.partner']
        if session and not all(session.id == order.session_id.id for order in self):
            raise UserError(_('Selected orders do not have the same session!'))
        grouped_data = {}
        have_to_group_by = session and session.config_id.group_by or False
        rounding_method = session and session.config_id.company_id.tax_calculation_rounding_method
        def add_anglosaxon_lines(grouped_data):
            # NOTE(review): relies on the 'order' variable of the enclosing
            # for-loop, so it must be called from inside/after that loop.
            Product = self.env['product.product']
            Analytic = self.env['account.analytic.account']
            for product_key in list(grouped_data.keys()):
                if product_key[0] == "product":
                    line = grouped_data[product_key][0]
                    product = Product.browse(line['product_id'])
                    # In the SO part, the entries will be inverted by function compute_invoice_totals
                    price_unit = self._get_pos_anglo_saxon_price_unit(product, line['partner_id'], line['quantity'])
                    account_analytic = Analytic.browse(line.get('analytic_account_id'))
                    res = Product._anglo_saxon_sale_move_lines(
                        line['name'], product, product.uom_id, line['quantity'], price_unit,
                        fiscal_position=order.fiscal_position_id,
                        account_analytic=account_analytic)
                    if res:
                        line1, line2 = res
                        line1 = Product._convert_prepared_anglosaxon_line(line1, order.partner_id)
                        insert_data('counter_part', {
                            'name': line1['name'],
                            'account_id': line1['account_id'],
                            'credit': line1['credit'] or 0.0,
                            'debit': line1['debit'] or 0.0,
                            'partner_id': line1['partner_id']
                        })
                        line2 = Product._convert_prepared_anglosaxon_line(line2, order.partner_id)
                        insert_data('counter_part', {
                            'name': line2['name'],
                            'account_id': line2['account_id'],
                            'credit': line2['credit'] or 0.0,
                            'debit': line2['debit'] or 0.0,
                            'partner_id': line2['partner_id']
                        })
        for order in self.filtered(lambda o: not o.account_move or o.state == 'paid'):
            current_company = order.sale_journal.company_id
            account_def = IrProperty.get(
                'property_account_receivable_id', 'res.partner')
            order_account = order.partner_id.property_account_receivable_id.id or account_def and account_def.id
            partner_id = ResPartner._find_accounting_partner(order.partner_id).id or False
            if move is None:
                # Create an entry for the sale
                journal_id = self.env['ir.config_parameter'].sudo().get_param(
                    'pos.closing.journal_id_%s' % current_company.id, default=order.sale_journal.id)
                move = self._create_account_move(
                    order.session_id.start_at, order.name, int(journal_id), order.company_id.id)
            def insert_data(data_type, values):
                # if have_to_group_by:
                values.update({
                    'partner_id': partner_id,
                    'move_id': move.id,
                })
                key = self._get_account_move_line_group_data_type_key(data_type, values)
                if not key:
                    return
                grouped_data.setdefault(key, [])
                if have_to_group_by:
                    # Accumulate quantity/credit/debit into the first entry.
                    if not grouped_data[key]:
                        grouped_data[key].append(values)
                    else:
                        current_value = grouped_data[key][0]
                        current_value['quantity'] = current_value.get('quantity', 0.0) + values.get('quantity', 0.0)
                        current_value['credit'] = current_value.get('credit', 0.0) + values.get('credit', 0.0)
                        current_value['debit'] = current_value.get('debit', 0.0) + values.get('debit', 0.0)
                else:
                    grouped_data[key].append(values)
            # because of the weird way the pos order is written, we need to make sure there is at least one line,
            # because just after the 'for' loop there are references to 'line' and 'income_account' variables (that
            # are set inside the for loop)
            # TOFIX: a deep refactoring of this method (and class!) is needed
            # in order to get rid of this stupid hack
            assert order.lines, _('The POS order must have lines when calling this method')
            # Create an move for each order line
            cur = order.pricelist_id.currency_id
            for line in order.lines:
                amount = line.price_subtotal
                # Search for the income account
                if line.product_id.property_account_income_id.id:
                    income_account = line.product_id.property_account_income_id.id
                elif line.product_id.categ_id.property_account_income_categ_id.id:
                    income_account = line.product_id.categ_id.property_account_income_categ_id.id
                else:
                    raise UserError(_('Please define income '
                                      'account for this product: "%s" (id:%d).')
                                    % (line.product_id.name, line.product_id.id))
                name = line.product_id.name
                if line.notice:
                    # add discount reason in move
                    name = name + ' (' + line.notice + ')'
                # Create a move for the line for the order line
                # Just like for invoices, a group of taxes must be present on this base line
                # As well as its children
                base_line_tax_ids = _flatten_tax_and_children(line.tax_ids_after_fiscal_position).filtered(lambda tax: tax.type_tax_use in ['sale', 'none'])
                insert_data('product', {
                    'name': name,
                    'quantity': line.qty,
                    'product_id': line.product_id.id,
                    'account_id': income_account,
                    'analytic_account_id': self._prepare_analytic_account(line),
                    'credit': ((amount > 0) and amount) or 0.0,
                    'debit': ((amount < 0) and -amount) or 0.0,
                    'tax_ids': [(6, 0, base_line_tax_ids.ids)],
                    'partner_id': partner_id
                })
                # Create the tax lines
                taxes = line.tax_ids_after_fiscal_position.filtered(lambda t: t.company_id.id == current_company.id)
                if not taxes:
                    continue
                price = line.price_unit * (1 - (line.discount or 0.0) / 100.0)
                for tax in taxes.compute_all(price, cur, line.qty)['taxes']:
                    insert_data('tax', {
                        'name': _('Tax') + ' ' + tax['name'],
                        'product_id': line.product_id.id,
                        'quantity': line.qty,
                        'account_id': tax['account_id'] or income_account,
                        'credit': ((tax['amount'] > 0) and tax['amount']) or 0.0,
                        'debit': ((tax['amount'] < 0) and -tax['amount']) or 0.0,
                        'tax_line_id': tax['id'],
                        'partner_id': partner_id
                    })
            # round tax lines per order
            if rounding_method == 'round_globally':
                for group_key, group_value in grouped_data.items():
                    if group_key[0] == 'tax':
                        for line in group_value:
                            line['credit'] = cur.round(line['credit'])
                            line['debit'] = cur.round(line['debit'])
            # counterpart
            insert_data('counter_part', {
                'name': _("Trade Receivables"),  # order.name,
                'account_id': order_account,
                'credit': ((order.amount_total < 0) and -order.amount_total) or 0.0,
                'debit': ((order.amount_total > 0) and order.amount_total) or 0.0,
                'partner_id': partner_id
            })
            order.write({'state': 'done', 'account_move': move.id})
        # NOTE(review): deliberately outside the loop in the original code;
        # it uses the last processed 'order' for the anglo-saxon lines.
        if self and order.company_id.anglo_saxon_accounting:
            add_anglosaxon_lines(grouped_data)
        all_lines = []
        for group_key, group_data in grouped_data.items():
            for value in group_data:
                all_lines.append((0, 0, value),)
        if move:  # In case no order was changed
            move.sudo().write({'line_ids': all_lines})
            move.sudo().post()
        return True
def _get_pos_anglo_saxon_price_unit(self, product, partner_id, quantity):
price_unit = product._get_anglo_saxon_price_unit()
if product._get_invoice_policy() == "delivery":
moves = self.filtered(lambda o: o.partner_id.id == partner_id).mapped('picking_id.move_lines').filtered(lambda m: m.product_id.id == product.id)
moves.sorted(lambda x: x.date)
average_price_unit = product._compute_average_price(0, quantity, moves)
price_unit = average_price_unit or price_unit
# In the SO part, the entries will be inverted by function compute_invoice_totals
return - price_unit
    def _reconcile_payments(self):
        """Reconcile each order's receivable move lines (payment statement
        entries, the POS journal entry and any invoice) against one another.

        Failures are logged but never raised: reconciliation here is a
        convenience, not an accounting requirement.
        """
        for order in self:
            aml = order.statement_ids.mapped('journal_entry_ids') | order.account_move.line_ids | order.invoice_id.move_id.line_ids
            # Keep only unreconciled receivable lines of this customer.
            aml = aml.filtered(lambda r: not r.reconciled and r.account_id.internal_type == 'receivable' and r.partner_id == order.partner_id.commercial_partner_id)
            # Reconcile returns first
            # to avoid mixing up the credit of a payment and the credit of a return
            # in the receivable account
            aml_returns = aml.filtered(lambda l: (l.journal_id.type == 'sale' and l.credit) or (l.journal_id.type != 'sale' and l.debit))
            try:
                aml_returns.reconcile()
                (aml - aml_returns).reconcile()
            except Exception:
                # There might be unexpected situations where the automatic reconciliation won't
                # work. We don't want the user to be blocked because of this, since the automatic
                # reconciliation is introduced for convenience, not for mandatory accounting
                # reasons.
                # It may be interesting to have the Traceback logged anyway
                # for debugging and support purposes
                _logger.exception('Reconciliation did not work for order %s', order.name)
def _default_session(self):
return self.env['pos.session'].search([('state', '=', 'opened'), ('user_id', '=', self.env.uid)], limit=1)
def _default_pricelist(self):
return self._default_session().config_id.pricelist_id
name = fields.Char(string='Order Ref', required=True, readonly=True, copy=False, default='/')
company_id = fields.Many2one('res.company', string='Company', required=True, readonly=True, default=lambda self: self.env.user.company_id)
date_order = fields.Datetime(string='Order Date', readonly=True, index=True, default=fields.Datetime.now)
user_id = fields.Many2one(
comodel_name='res.users', string='Salesman',
help="Person who uses the cash register. It can be a reliever, a student or an interim employee.",
default=lambda self: self.env.uid,
states={'done': [('readonly', True)], 'invoiced': [('readonly', True)]},
)
amount_tax = fields.Float(compute='_compute_amount_all', string='Taxes', digits=0)
amount_total = fields.Float(compute='_compute_amount_all', string='Total', digits=0)
amount_paid = fields.Float(compute='_compute_amount_all', string='Paid', states={'draft': [('readonly', False)]}, readonly=True, digits=0)
amount_return = fields.Float(compute='_compute_amount_all', string='Returned', digits=0)
lines = fields.One2many('pos.order.line', 'order_id', string='Order Lines', states={'draft': [('readonly', False)]}, readonly=True, copy=True)
statement_ids = fields.One2many('account.bank.statement.line', 'pos_statement_id', string='Payments', states={'draft': [('readonly', False)]}, readonly=True)
pricelist_id = fields.Many2one('product.pricelist', string='Pricelist', required=True, states={
'draft': [('readonly', False)]}, readonly=True, default=_default_pricelist)
partner_id = fields.Many2one('res.partner', string='Customer', change_default=True, index=True, states={'draft': [('readonly', False)], 'paid': [('readonly', False)]})
sequence_number = fields.Integer(string='Sequence Number', help='A session-unique sequence number for the order', default=1)
session_id = fields.Many2one(
'pos.session', string='Session', required=True, index=True,
domain="[('state', '=', 'opened')]", states={'draft': [('readonly', False)]},
readonly=True, default=_default_session)
config_id = fields.Many2one('pos.config', related='session_id.config_id', string="Point of Sale")
state = fields.Selection(
[('draft', 'New'), ('cancel', 'Cancelled'), ('paid', 'Paid'), ('done', 'Posted'), ('invoiced', 'Invoiced')],
'Status', readonly=True, copy=False, default='draft')
invoice_id = fields.Many2one('account.invoice', string='Invoice', copy=False)
account_move = fields.Many2one('account.move', string='Journal Entry', readonly=True, copy=False)
picking_id = fields.Many2one('stock.picking', string='Picking', readonly=True, copy=False)
picking_type_id = fields.Many2one('stock.picking.type', related='session_id.config_id.picking_type_id', string="Operation Type")
location_id = fields.Many2one(
comodel_name='stock.location',
related='session_id.config_id.stock_location_id',
string="Location", store=True,
readonly=True,
)
note = fields.Text(string='Internal Notes')
nb_print = fields.Integer(string='Number of Print', readonly=True, copy=False, default=0)
pos_reference = fields.Char(string='Receipt Ref', readonly=True, copy=False)
sale_journal = fields.Many2one('account.journal', related='session_id.config_id.journal_id', string='Sales Journal', store=True, readonly=True)
fiscal_position_id = fields.Many2one(
comodel_name='account.fiscal.position', string='Fiscal Position',
default=lambda self: self._default_session().config_id.default_fiscal_position_id,
readonly=True,
states={'draft': [('readonly', False)]},
)
@api.depends('statement_ids', 'lines.price_subtotal_incl', 'lines.discount')
def _compute_amount_all(self):
for order in self:
order.amount_paid = order.amount_return = order.amount_tax = 0.0
currency = order.pricelist_id.currency_id
order.amount_paid = sum(payment.amount for payment in order.statement_ids)
order.amount_return = sum(payment.amount < 0 and payment.amount or 0 for payment in order.statement_ids)
order.amount_tax = currency.round(sum(self._amount_line_tax(line, order.fiscal_position_id) for line in order.lines))
amount_untaxed = currency.round(sum(line.price_subtotal for line in order.lines))
order.amount_total = order.amount_tax + amount_untaxed
@api.onchange('partner_id')
def _onchange_partner_id(self):
if self.partner_id:
self.pricelist = self.partner_id.property_product_pricelist.id
    @api.multi
    def write(self, vals):
        """Standard write, plus partner propagation to payment lines.

        When ``partner_id`` changes, the partner of the associated bank
        statement lines is updated as well.  Changing the partner of an
        already-invoiced order is forbidden.

        NOTE(review): the invoice check runs *after* super().write() has
        already applied the change; correctness relies on the exception
        rolling back the transaction -- confirm this is intended.
        """
        res = super(PosOrder, self).write(vals)
        Partner = self.env['res.partner']
        # If you change the partner of the PoS order, change also the partner of the associated bank statement lines
        if 'partner_id' in vals:
            for order in self:
                partner_id = False
                if order.invoice_id:
                    raise UserError(_("You cannot change the partner of a POS order for which an invoice has already been issued."))
                if vals['partner_id']:
                    partner = Partner.browse(vals['partner_id'])
                    # Payments are booked on the accounting partner, which may
                    # differ from the contact selected on the order.
                    partner_id = Partner._find_accounting_partner(partner).id
                order.statement_ids.write({'partner_id': partner_id})
        return res
@api.multi
def unlink(self):
for pos_order in self.filtered(lambda pos_order: pos_order.state not in ['draft', 'cancel']):
raise UserError(_('In order to delete a sale, it must be new or cancelled.'))
return super(PosOrder, self).unlink()
    @api.model
    def create(self, values):
        """Create an order, assigning its name from the session's sequence.

        When a session is given, the order name comes from the sequence
        configured on the session's pos.config and the pricelist defaults to
        the config's pricelist; otherwise a generic 'pos.order' sequence is
        used.
        """
        if values.get('session_id'):
            # set name based on the sequence specified on the config
            session = self.env['pos.session'].browse(values['session_id'])
            values['name'] = session.config_id.sequence_id._next()
            values.setdefault('pricelist_id', session.config_id.pricelist_id.id)
        else:
            # fallback on any pos.order sequence
            values['name'] = self.env['ir.sequence'].next_by_code('pos.order')
        return super(PosOrder, self).create(values)
@api.multi
def action_view_invoice(self):
return {
'name': _('Customer Invoice'),
'view_mode': 'form',
'view_id': self.env.ref('account.invoice_form').id,
'res_model': 'account.invoice',
'context': "{'type':'out_invoice'}",
'type': 'ir.actions.act_window',
'res_id': self.invoice_id.id,
}
@api.multi
def action_pos_order_paid(self):
if not self.test_paid():
raise UserError(_("Order is not paid."))
self.write({'state': 'paid'})
return self.create_picking()
    @api.multi
    def action_pos_order_invoice(self):
        """Create (or reuse) a customer invoice for each order.

        For every order without an invoice: build a draft invoice from the
        order, post a traceability message, create its lines, compute taxes
        and flag the order 'invoiced'.  Returns a window action on the first
        invoice, or {} when nothing was invoiced.

        :raises UserError: if an order has no partner set.
        """
        Invoice = self.env['account.invoice']
        for order in self:
            # Force company for all SUPERUSER_ID action
            local_context = dict(self.env.context, force_company=order.company_id.id, company_id=order.company_id.id)
            if order.invoice_id:
                # Already invoiced: just collect the existing invoice.
                Invoice += order.invoice_id
                continue
            if not order.partner_id:
                raise UserError(_('Please provide a partner for the sale.'))
            # Build the invoice in cache first so onchanges can fill in the
            # partner-dependent values, then convert to create() values.
            invoice = Invoice.new(order._prepare_invoice())
            invoice._onchange_partner_id()
            invoice.fiscal_position_id = order.fiscal_position_id
            inv = invoice._convert_to_write({name: invoice[name] for name in invoice._cache})
            new_invoice = Invoice.with_context(local_context).sudo().create(inv)
            message = _("This invoice has been created from the point of sale session: <a href=# data-oe-model=pos.order data-oe-id=%d>%s</a>") % (order.id, order.name)
            new_invoice.message_post(body=message)
            order.write({'invoice_id': new_invoice.id, 'state': 'invoiced'})
            Invoice += new_invoice
            for line in order.lines:
                self.with_context(local_context)._action_create_invoice_line(line, new_invoice.id)
            new_invoice.with_context(local_context).sudo().compute_taxes()
            # NOTE(review): 'state' was already set to 'invoiced' above; this
            # second write only adds sudo() -- possibly redundant, confirm.
            order.sudo().write({'state': 'invoiced'})
        if not Invoice:
            return {}
        return {
            'name': _('Customer Invoice'),
            'view_type': 'form',
            'view_mode': 'form',
            'view_id': self.env.ref('account.invoice_form').id,
            'res_model': 'account.invoice',
            'context': "{'type':'out_invoice'}",
            'type': 'ir.actions.act_window',
            'nodestroy': True,
            'target': 'current',
            'res_id': Invoice and Invoice.ids[0] or False,
        }
    # this method is unused, and so is the state 'cancel'
    @api.multi
    def action_pos_order_cancel(self):
        """Move the orders to the 'cancel' state (kept for compatibility)."""
        return self.write({'state': 'cancel'})
    @api.multi
    def action_pos_order_done(self):
        """Post the orders to accounting.

        Delegates to _create_account_move_line() (defined elsewhere in this
        model), which presumably also sets the 'done' state -- confirm there.
        """
        return self._create_account_move_line()
    @api.model
    def create_from_ui(self, orders):
        """Persist orders submitted by the POS frontend.

        :param orders: list of {'data': ..., 'to_invoice': bool} dicts as
            serialized by the JS client; ``data['name']`` is the receipt
            reference used for deduplication.
        :return: list of ids of the newly created pos.order records.
        """
        # Keep only new orders
        submitted_references = [o['data']['name'] for o in orders]
        pos_order = self.search([('pos_reference', 'in', submitted_references)])
        existing_orders = pos_order.read(['pos_reference'])
        existing_references = set([o['pos_reference'] for o in existing_orders])
        orders_to_save = [o for o in orders if o['data']['name'] not in existing_references]
        order_ids = []
        for tmp_order in orders_to_save:
            to_invoice = tmp_order['to_invoice']
            order = tmp_order['data']
            if to_invoice:
                self._match_payment_to_invoice(order)
            pos_order = self._process_order(order)
            order_ids.append(pos_order.id)
            try:
                pos_order.action_pos_order_paid()
            except psycopg2.OperationalError:
                # do not hide transactional errors, the order(s) won't be saved!
                raise
            except Exception as e:
                # Best-effort: keep the order even if payment/picking failed;
                # the problem is logged for later manual resolution.
                _logger.error('Could not fully process the POS Order: %s', tools.ustr(e))
            if to_invoice:
                pos_order.action_pos_order_invoice()
                pos_order.invoice_id.sudo().action_invoice_open()
                pos_order.account_move = pos_order.invoice_id.move_id
        return order_ids
def test_paid(self):
"""A Point of Sale is paid when the sum
@return: True
"""
for order in self:
if order.lines and not order.amount_total:
continue
if (not order.lines) or (not order.statement_ids) or (abs(order.amount_total - order.amount_paid) > 0.00001):
return False
return True
    def create_picking(self):
        """Create a picking for each order and validate it.

        For orders containing stockable/consumable lines, creates an outgoing
        picking for sold quantities and/or a return picking for negative
        quantities, creates one stock move per non-zero line, and forces the
        pickings to done.  When the pos.config has no picking type, only bare
        moves are created and processed.  Always returns True.
        """
        Picking = self.env['stock.picking']
        Move = self.env['stock.move']
        StockWarehouse = self.env['stock.warehouse']
        for order in self:
            if not order.lines.filtered(lambda l: l.product_id.type in ['product', 'consu']):
                # Pure-service order: nothing to move.
                continue
            address = order.partner_id.address_get(['delivery']) or {}
            picking_type = order.picking_type_id
            return_pick_type = order.picking_type_id.return_picking_type_id or order.picking_type_id
            # Empty recordsets double as "no picking yet" placeholders.
            order_picking = Picking
            return_picking = Picking
            moves = Move
            location_id = order.location_id.id
            # Destination: customer location, or the picking type's default,
            # or the warehouse's generic customer location as a last resort.
            if order.partner_id:
                destination_id = order.partner_id.property_stock_customer.id
            else:
                if (not picking_type) or (not picking_type.default_location_dest_id):
                    customerloc, supplierloc = StockWarehouse._get_partner_locations()
                    destination_id = customerloc.id
                else:
                    destination_id = picking_type.default_location_dest_id.id
            if picking_type:
                message = _("This transfer has been created from the point of sale session: <a href=# data-oe-model=pos.order data-oe-id=%d>%s</a>") % (order.id, order.name)
                picking_vals = {
                    'origin': order.name,
                    'partner_id': address.get('delivery', False),
                    'date_done': order.date_order,
                    'picking_type_id': picking_type.id,
                    'company_id': order.company_id.id,
                    'move_type': 'direct',
                    'note': order.note or "",
                    'location_id': location_id,
                    'location_dest_id': destination_id,
                }
                # Positive quantities go on the delivery picking, negative
                # ones (refunded products) on a separate return picking.
                pos_qty = any([x.qty > 0 for x in order.lines if x.product_id.type in ['product', 'consu']])
                if pos_qty:
                    order_picking = Picking.create(picking_vals.copy())
                    order_picking.message_post(body=message)
                neg_qty = any([x.qty < 0 for x in order.lines if x.product_id.type in ['product', 'consu']])
                if neg_qty:
                    return_vals = picking_vals.copy()
                    return_vals.update({
                        'location_id': destination_id,
                        'location_dest_id': return_pick_type != picking_type and return_pick_type.default_location_dest_id.id or location_id,
                        'picking_type_id': return_pick_type.id
                    })
                    return_picking = Picking.create(return_vals)
                    return_picking.message_post(body=message)
            # One move per non-zero stockable/consumable line, attached to
            # the delivery or return picking depending on the sign of qty.
            for line in order.lines.filtered(lambda l: l.product_id.type in ['product', 'consu'] and not float_is_zero(l.qty, precision_rounding=l.product_id.uom_id.rounding)):
                moves |= Move.create({
                    'name': line.name,
                    'product_uom': line.product_id.uom_id.id,
                    'picking_id': order_picking.id if line.qty >= 0 else return_picking.id,
                    'picking_type_id': picking_type.id if line.qty >= 0 else return_pick_type.id,
                    'product_id': line.product_id.id,
                    'product_uom_qty': abs(line.qty),
                    'state': 'draft',
                    'location_id': location_id if line.qty >= 0 else destination_id,
                    'location_dest_id': destination_id if line.qty >= 0 else return_pick_type != picking_type and return_pick_type.default_location_dest_id.id or location_id,
                })
            # prefer associating the regular order picking, not the return
            order.write({'picking_id': order_picking.id or return_picking.id})
            if return_picking:
                order._force_picking_done(return_picking)
            if order_picking:
                order._force_picking_done(order_picking)
            # when the pos.config has no picking_type_id set only the moves will be created
            if moves and not return_picking and not order_picking:
                moves._action_assign()
                moves.filtered(lambda m: m.state in ['confirmed', 'waiting'])._force_assign()
                moves.filtered(lambda m: m.product_id.tracking == 'none')._action_done()
        return True
    def _force_picking_done(self, picking):
        """Force picking in order to be set as done.

        Reserves (and force-assigns) the picking, copies the POS lot/serial
        numbers onto its move lines, then validates it -- unless some lot
        could not be matched, in which case the picking is left open for
        manual processing.
        """
        self.ensure_one()
        picking.action_assign()
        picking.force_assign()
        wrong_lots = self.set_pack_operation_lot(picking)
        if not wrong_lots:
            picking.action_done()
    def set_pack_operation_lot(self, picking=None):
        """Set Serial/Lot number in pack operations to mark the pack operation done.

        For each move of the (given or order's own) picking, creates
        stock.move.line records carrying the lot numbers captured at the POS,
        or sets quantity_done directly for untracked products.

        :return: True when at least one captured lot did not match an
            existing stock.production.lot (caller then skips validation).

        NOTE(review): the loop iterates ``self`` but reads ``self.picking_id``
        (not ``order.picking_id``) as the fallback -- only safe for singleton
        recordsets; confirm callers always pass one order.
        """
        StockProductionLot = self.env['stock.production.lot']
        PosPackOperationLot = self.env['pos.pack.operation.lot']
        has_wrong_lots = False
        for order in self:
            for move in (picking or self.picking_id).move_lines:
                picking_type = (picking or self.picking_id).picking_type_id
                lots_necessary = True
                if picking_type:
                    lots_necessary = picking_type and picking_type.use_existing_lots
                qty = 0
                qty_done = 0
                pack_lots = []
                pos_pack_lots = PosPackOperationLot.search([('order_id', '=', order.id), ('product_id', '=', move.product_id.id)])
                pack_lot_names = [pos_pack.lot_name for pos_pack in pos_pack_lots]
                if pack_lot_names and lots_necessary:
                    for lot_name in list(set(pack_lot_names)):
                        stock_production_lot = StockProductionLot.search([('name', '=', lot_name), ('product_id', '=', move.product_id.id)])
                        if stock_production_lot:
                            if stock_production_lot.product_id.tracking == 'lot':
                                # if a lot nr is set through the frontend it will refer to the full quantity
                                qty = move.product_uom_qty
                            else:  # serial numbers
                                qty = 1.0
                            qty_done += qty
                            pack_lots.append({'lot_id': stock_production_lot.id, 'qty': qty})
                        else:
                            # Lot typed at the POS does not exist in stock.
                            has_wrong_lots = True
                elif move.product_id.tracking == 'none' or not lots_necessary:
                    # Untracked product: the whole move quantity is done.
                    qty_done = move.product_uom_qty
                else:
                    has_wrong_lots = True
                for pack_lot in pack_lots:
                    lot_id, qty = pack_lot['lot_id'], pack_lot['qty']
                    self.env['stock.move.line'].create({
                        'move_id': move.id,
                        'product_id': move.product_id.id,
                        'product_uom_id': move.product_uom.id,
                        'qty_done': qty,
                        'location_id': move.location_id.id,
                        'location_dest_id': move.location_dest_id.id,
                        'lot_id': lot_id,
                    })
                if not pack_lots and not float_is_zero(qty_done, precision_rounding=move.product_uom.rounding):
                    move.quantity_done = qty_done
        return has_wrong_lots
    def _prepare_bank_statement_line_payment_values(self, data):
        """Build the values of a bank statement line paying this order.

        :param data: dict with 'amount' (required) and optional
            'payment_date', 'payment_name', and one of 'journal' /
            'statement_id' to locate the session cashbox.
        :return: dict ready for account.bank.statement.line.create().
        :raises UserError: when no receivable account can be resolved or no
            matching open statement exists in the session.
        """
        args = {
            'amount': data['amount'],
            'date': data.get('payment_date', fields.Date.context_today(self)),
            'name': self.name + ': ' + (data.get('payment_name', '') or ''),
            'partner_id': self.env["res.partner"]._find_accounting_partner(self.partner_id).id or False,
        }
        journal_id = data.get('journal', False)
        statement_id = data.get('statement_id', False)
        assert journal_id or statement_id, "No statement_id or journal_id passed to the method!"
        journal = self.env['account.journal'].browse(journal_id)
        # use the company of the journal and not of the current user
        company_cxt = dict(self.env.context, force_company=journal.company_id.id)
        account_def = self.env['ir.property'].with_context(company_cxt).get('property_account_receivable_id', 'res.partner')
        # Partner-specific receivable account wins over the company default.
        args['account_id'] = (self.partner_id.property_account_receivable_id.id) or (account_def and account_def.id) or False
        if not args['account_id']:
            if not args['partner_id']:
                msg = _('There is no receivable account defined to make payment.')
            else:
                msg = _('There is no receivable account defined to make payment for the partner: "%s" (id:%d).') % (
                    self.partner_id.name, self.partner_id.id,)
            raise UserError(msg)
        context = dict(self.env.context)
        context.pop('pos_session_id', False)
        # Resolve the statement/journal pair against the session's open
        # statements: a statement id fixes the journal, and vice versa.
        for statement in self.session_id.statement_ids:
            if statement.id == statement_id:
                journal_id = statement.journal_id.id
                break
            elif statement.journal_id.id == journal_id:
                statement_id = statement.id
                break
        if not statement_id:
            raise UserError(_('You have to open at least one cashbox.'))
        args.update({
            'statement_id': statement_id,
            'pos_statement_id': self.id,
            'journal_id': journal_id,
            'ref': self.session_id.name,
        })
        return args
def add_payment(self, data):
"""Create a new payment for the order"""
args = self._prepare_bank_statement_line_payment_values(data)
context = dict(self.env.context)
context.pop('pos_session_id', False)
self.env['account.bank.statement.line'].with_context(context).create(args)
return args.get('statement_id', False)
    @api.multi
    def refund(self):
        """Create a copy of order for refund order.

        Each order is cloned into the user's current open session with all
        line quantities negated, and a window action opening the first clone
        is returned.

        :raises UserError: if the user has no open POS session.
        """
        PosOrder = self.env['pos.order']
        current_session = self.env['pos.session'].search([('state', '!=', 'closed'), ('user_id', '=', self.env.uid)], limit=1)
        if not current_session:
            raise UserError(_('To return product(s), you need to open a session that will be used to register the refund.'))
        for order in self:
            clone = order.copy({
                # not used, name forced by create
                'name': order.name + _(' REFUND'),
                'session_id': current_session.id,
                'date_order': fields.Datetime.now(),
                'pos_reference': order.pos_reference,
                'lines': False,
            })
            # Copy each line with the quantity negated to reverse the sale.
            for line in order.lines:
                clone_line = line.copy({
                    # required=True, copy=False
                    'name': line.name + _(' REFUND'),
                    'order_id': clone.id,
                    'qty': -line.qty,
                })
            PosOrder += clone
        return {
            'name': _('Return Products'),
            'view_type': 'form',
            'view_mode': 'form',
            'res_model': 'pos.order',
            'res_id': PosOrder.ids[0],
            'view_id': False,
            'context': self.env.context,
            'type': 'ir.actions.act_window',
            'target': 'current',
        }
class PosOrderLine(models.Model):
    """A single sellable line of a POS order: product, qty, price, taxes."""
    _name = "pos.order.line"
    _description = "Lines of Point of Sale Orders"
    _rec_name = "product_id"
    def _order_line_fields(self, line, session_id=None):
        """Complete a UI line command tuple with a name and default taxes.

        ``line`` is presumably an ORM (0, 0, vals) command coming from the
        frontend -- confirm against callers.  Missing 'name' is filled from
        the session's line sequence (or a generic one); missing 'tax_ids'
        defaults to the product's sale taxes.
        """
        if line and 'name' not in line[2]:
            session = self.env['pos.session'].browse(session_id).exists() if session_id else None
            if session and session.config_id.sequence_line_id:
                # set name based on the sequence specified on the config
                line[2]['name'] = session.config_id.sequence_line_id._next()
            else:
                # fallback on any pos.order.line sequence
                line[2]['name'] = self.env['ir.sequence'].next_by_code('pos.order.line')
        if line and 'tax_ids' not in line[2]:
            product = self.env['product.product'].browse(line[2]['product_id'])
            line[2]['tax_ids'] = [(6, 0, [x.id for x in product.taxes_id])]
        return line
    company_id = fields.Many2one('res.company', string='Company', required=True, default=lambda self: self.env.user.company_id)
    name = fields.Char(string='Line No', required=True, copy=False)
    notice = fields.Char(string='Discount Notice')
    product_id = fields.Many2one('product.product', string='Product', domain=[('sale_ok', '=', True)], required=True, change_default=True)
    price_unit = fields.Float(string='Unit Price', digits=0)
    qty = fields.Float('Quantity', digits=dp.get_precision('Product Unit of Measure'), default=1)
    # Computed subtotals; see _compute_amount_line_all().
    price_subtotal = fields.Float(compute='_compute_amount_line_all', digits=0, string='Subtotal w/o Tax')
    price_subtotal_incl = fields.Float(compute='_compute_amount_line_all', digits=0, string='Subtotal')
    discount = fields.Float(string='Discount (%)', digits=0, default=0.0)
    order_id = fields.Many2one('pos.order', string='Order Ref', ondelete='cascade')
    create_date = fields.Datetime(string='Creation Date', readonly=True)
    tax_ids = fields.Many2many('account.tax', string='Taxes', readonly=True)
    tax_ids_after_fiscal_position = fields.Many2many('account.tax', compute='_get_tax_ids_after_fiscal_position', string='Taxes to Apply')
    pack_lot_ids = fields.One2many('pos.pack.operation.lot', 'pos_order_line_id', string='Lot/serial Number')
    @api.model
    def create(self, values):
        """Create a line, deriving its name from the config's line sequence.

        Falls back to the generic 'pos.order.line' sequence when the
        config-bound sequence cannot be located.
        """
        if values.get('order_id') and not values.get('name'):
            # set name based on the sequence specified on the config
            config_id = self.order_id.browse(values['order_id']).session_id.config_id.id
            # HACK: sequence created in the same transaction as the config
            # cf TODO master is pos.config create
            # remove me saas-15
            self.env.cr.execute("""
                SELECT s.id
                FROM ir_sequence s
                JOIN pos_config c
                ON s.create_date=c.create_date
                WHERE c.id = %s
                AND s.code = 'pos.order.line'
                LIMIT 1
                """, (config_id,))
            sequence = self.env.cr.fetchone()
            if sequence:
                values['name'] = self.env['ir.sequence'].browse(sequence[0])._next()
        if not values.get('name'):
            # fallback on any pos.order sequence
            values['name'] = self.env['ir.sequence'].next_by_code('pos.order.line')
        return super(PosOrderLine, self).create(values)
    @api.depends('price_unit', 'tax_ids', 'qty', 'discount', 'product_id')
    def _compute_amount_line_all(self):
        """Compute tax-inclusive and tax-exclusive subtotals per line.

        Taxes are first mapped through the order's fiscal position, then
        applied to the discounted unit price in the pricelist currency.
        """
        for line in self:
            fpos = line.order_id.fiscal_position_id
            tax_ids_after_fiscal_position = fpos.map_tax(line.tax_ids, line.product_id, line.order_id.partner_id) if fpos else line.tax_ids
            price = line.price_unit * (1 - (line.discount or 0.0) / 100.0)
            taxes = tax_ids_after_fiscal_position.compute_all(price, line.order_id.pricelist_id.currency_id, line.qty, product=line.product_id, partner=line.order_id.partner_id)
            line.update({
                'price_subtotal_incl': taxes['total_included'],
                'price_subtotal': taxes['total_excluded'],
            })
    @api.onchange('product_id')
    def _onchange_product_id(self):
        """Refresh price, subtotals and taxes when the product changes."""
        if self.product_id:
            if not self.order_id.pricelist_id:
                raise UserError(
                    _('You have to select a pricelist in the sale form !\n'
                      'Please set one before choosing a product.'))
            price = self.order_id.pricelist_id.get_product_price(
                self.product_id, self.qty or 1.0, self.order_id.partner_id)
            self._onchange_qty()
            self.tax_ids = self.product_id.taxes_id.filtered(lambda r: not self.company_id or r.company_id == self.company_id)
            fpos = self.order_id.fiscal_position_id
            tax_ids_after_fiscal_position = fpos.map_tax(self.tax_ids, self.product_id, self.order_id.partner_id) if fpos else self.tax_ids
            # Adjust the displayed unit price for tax-included pricing.
            self.price_unit = self.env['account.tax']._fix_tax_included_price_company(price, self.product_id.taxes_id, tax_ids_after_fiscal_position, self.company_id)
    @api.onchange('qty', 'discount', 'price_unit', 'tax_ids')
    def _onchange_qty(self):
        """Recompute the line subtotals when qty/discount/price/taxes change."""
        if self.product_id:
            if not self.order_id.pricelist_id:
                raise UserError(_('You have to select a pricelist in the sale form !'))
            price = self.price_unit * (1 - (self.discount or 0.0) / 100.0)
            self.price_subtotal = self.price_subtotal_incl = price * self.qty
            if (self.product_id.taxes_id):
                taxes = self.product_id.taxes_id.compute_all(price, self.order_id.pricelist_id.currency_id, self.qty, product=self.product_id, partner=False)
                self.price_subtotal = taxes['total_excluded']
                self.price_subtotal_incl = taxes['total_included']
    @api.multi
    def _get_tax_ids_after_fiscal_position(self):
        """Compute the taxes actually applied, after fiscal-position mapping."""
        for line in self:
            line.tax_ids_after_fiscal_position = line.order_id.fiscal_position_id.map_tax(line.tax_ids, line.product_id, line.order_id.partner_id)
class PosOrderLineLot(models.Model):
    """Lot/serial number captured at the POS for one order line."""
    _name = "pos.pack.operation.lot"
    _description = "Specify product lot/serial number in pos order line"
    pos_order_line_id = fields.Many2one('pos.order.line')
    order_id = fields.Many2one('pos.order', related="pos_order_line_id.order_id")
    # Free-text lot name as typed at the POS; matched against
    # stock.production.lot in PosOrder.set_pack_operation_lot().
    lot_name = fields.Char('Lot Name')
    product_id = fields.Many2one('product.product', related='pos_order_line_id.product_id')
class ReportSaleDetails(models.AbstractModel):
    """Data provider for the POS 'sale details' QWeb report."""
    _name = 'report.point_of_sale.report_saledetails'
    @api.model
    def get_sale_details(self, date_start=False, date_stop=False, configs=False):
        """ Serialise the orders of the day information

        params: date_start, date_stop string representing the datetime of order

        Defaults to today (user timezone) when no range is given, and to all
        POS configs when ``configs`` is falsy.  Returns a dict with the total
        paid, payments grouped by journal, taxes, and sold products.
        """
        if not configs:
            configs = self.env['pos.config'].search([])
        # Compute "today at 00:00 user time" expressed in UTC, since
        # date_order is stored in UTC.
        user_tz = pytz.timezone(self.env.context.get('tz') or self.env.user.tz or 'UTC')
        today = user_tz.localize(fields.Datetime.from_string(fields.Date.context_today(self)))
        today = today.astimezone(pytz.timezone('UTC'))
        if date_start:
            date_start = fields.Datetime.from_string(date_start)
        else:
            # start by default today 00:00:00
            date_start = today
        if date_stop:
            # set time to 23:59:59
            date_stop = fields.Datetime.from_string(date_stop)
        else:
            # stop by default today 23:59:59
            date_stop = today + timedelta(days=1, seconds=-1)
        # avoid a date_stop smaller than date_start
        date_stop = max(date_stop, date_start)
        date_start = fields.Datetime.to_string(date_start)
        date_stop = fields.Datetime.to_string(date_stop)
        orders = self.env['pos.order'].search([
            ('date_order', '>=', date_start),
            ('date_order', '<=', date_stop),
            ('state', 'in', ['paid','invoiced','done']),
            ('config_id', 'in', configs.ids)])
        user_currency = self.env.user.company_id.currency_id
        total = 0.0
        products_sold = {}
        taxes = {}
        for order in orders:
            # Convert each order total into the company currency.
            if user_currency != order.pricelist_id.currency_id:
                total += order.pricelist_id.currency_id._convert(
                    order.amount_total, user_currency, order.company_id, order.date_order or fields.Date.today())
            else:
                total += order.amount_total
            currency = order.session_id.currency_id
            for line in order.lines:
                # Group quantities by (product, unit price, discount).
                key = (line.product_id, line.price_unit, line.discount)
                products_sold.setdefault(key, 0.0)
                products_sold[key] += line.qty
                if line.tax_ids_after_fiscal_position:
                    line_taxes = line.tax_ids_after_fiscal_position.compute_all(line.price_unit * (1-(line.discount or 0.0)/100.0), currency, line.qty, product=line.product_id, partner=line.order_id.partner_id or False)
                    for tax in line_taxes['taxes']:
                        taxes.setdefault(tax['id'], {'name': tax['name'], 'tax_amount':0.0, 'base_amount':0.0})
                        taxes[tax['id']]['tax_amount'] += tax['amount']
                        taxes[tax['id']]['base_amount'] += tax['base']
                else:
                    # Untaxed lines are reported under a pseudo tax id 0.
                    taxes.setdefault(0, {'name': _('No Taxes'), 'tax_amount':0.0, 'base_amount':0.0})
                    taxes[0]['base_amount'] += line.price_subtotal_incl
        # Payments per journal, aggregated in SQL over the statement lines
        # attached to the selected orders.
        st_line_ids = self.env["account.bank.statement.line"].search([('pos_statement_id', 'in', orders.ids)]).ids
        if st_line_ids:
            self.env.cr.execute("""
                SELECT aj.name, sum(amount) total
                FROM account_bank_statement_line AS absl,
                    account_bank_statement AS abs,
                    account_journal AS aj
                WHERE absl.statement_id = abs.id
                AND abs.journal_id = aj.id
                AND absl.id IN %s
                GROUP BY aj.name
                """, (tuple(st_line_ids),))
            payments = self.env.cr.dictfetchall()
        else:
            payments = []
        return {
            'currency_precision': user_currency.decimal_places,
            'total_paid': user_currency.round(total),
            'payments': payments,
            'company_name': self.env.user.company_id.name,
            'taxes': list(taxes.values()),
            'products': sorted([{
                'product_id': product.id,
                'product_name': product.name,
                'code': product.default_code,
                'quantity': qty,
                'price_unit': price_unit,
                'discount': discount,
                'uom': product.uom_id.name
            } for (product, price_unit, discount), qty in products_sold.items()], key=lambda l: l['product_name'])
        }
    @api.multi
    def get_report_values(self, docids, data=None):
        """Entry point used by the report engine: merge sale details into data."""
        data = dict(data or {})
        configs = self.env['pos.config'].browse(data['config_ids'])
        data.update(self.get_sale_details(data['date_start'], data['date_stop'], configs))
        return data
| maxive/erp | addons/point_of_sale/models/pos_order.py | Python | agpl-3.0 | 56,888 |
# -*- coding: utf-8 -*-
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright (C) 2014-2022 GEM Foundation
#
# OpenQuake is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# OpenQuake is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with OpenQuake. If not, see <http://www.gnu.org/licenses/>.
"""
Implements the set of tests for the Ghofrani & Atkinson (2014) Subduction
Interface GMPE
Test data are generated from tables supplied by Gail Atkinson
(2015, personal communication)
"""
from openquake.hazardlib.gsim.ghofrani_atkinson_2014 import (
GhofraniAtkinson2014,
GhofraniAtkinson2014Cascadia,
GhofraniAtkinson2014Upper,
GhofraniAtkinson2014Lower,
GhofraniAtkinson2014CascadiaUpper,
GhofraniAtkinson2014CascadiaLower)
from openquake.hazardlib.tests.gsim.utils import BaseGSIMTestCase
# Discrepancy percentages (tolerances) to be applied to all tests
MEAN_DISCREP = 0.1
STDDEV_DISCREP = 0.1
class GhofraniAtkinson2014TestCase(BaseGSIMTestCase):
    """
    Implements the test case for the Ghofrani & Atkinson (2014) GMPE for the
    default condition
    """
    GSIM_CLASS = GhofraniAtkinson2014
    # File for the mean results
    MEAN_FILE = "GA2014/GA2014_MEAN.csv"
    # File for the total standard deviation
    STD_FILE = "GA2014/GA2014_TOTAL.csv"
    # File for the inter-event standard deviation
    INTER_FILE = "GA2014/GA2014_INTER.csv"
    # File for the intra-event standard deviation
    INTRA_FILE = "GA2014/GA2014_INTRA.csv"
    def test_all(self):
        # Verify mean and all standard-deviation components against the
        # expected-value tables within the module-level tolerances.
        self.check(self.MEAN_FILE,
                   self.STD_FILE,
                   self.INTER_FILE,
                   self.INTRA_FILE,
                   max_discrep_percentage=MEAN_DISCREP,
                   std_discrep_percentage=STDDEV_DISCREP)
class GhofraniAtkinson2014CascadiaTestCase(GhofraniAtkinson2014TestCase):
    """
    Implements the test case for the Ghofrani & Atkinson (2014) GMPE with
    the adjustment for Cascadia
    """
    GSIM_CLASS = GhofraniAtkinson2014Cascadia
    # Expected-value tables for the Cascadia-adjusted model; the inherited
    # test_all() does the checking.
    MEAN_FILE = "GA2014/GA2014_CASCADIA_MEAN.csv"
    STD_FILE = "GA2014/GA2014_CASCADIA_TOTAL.csv"
    INTER_FILE = "GA2014/GA2014_CASCADIA_INTER.csv"
    INTRA_FILE = "GA2014/GA2014_CASCADIA_INTRA.csv"
class GhofraniAtkinson2014UpperTestCase(GhofraniAtkinson2014TestCase):
    """
    Implements the test case for the Ghofrani & Atkinson (2014) GMPE for the
    "upper" epistemic uncertainty case
    """
    GSIM_CLASS = GhofraniAtkinson2014Upper
    # Expected-value tables for the upper-branch model.
    MEAN_FILE = "GA2014/GA2014_UPPER_MEAN.csv"
    STD_FILE = "GA2014/GA2014_UPPER_TOTAL.csv"
    INTER_FILE = "GA2014/GA2014_UPPER_INTER.csv"
    INTRA_FILE = "GA2014/GA2014_UPPER_INTRA.csv"
class GhofraniAtkinson2014LowerTestCase(GhofraniAtkinson2014TestCase):
    """
    Implements the test case for the Ghofrani & Atkinson (2014) GMPE for the
    "lower" epistemic uncertainty case
    """
    GSIM_CLASS = GhofraniAtkinson2014Lower
    # Expected-value tables for the lower-branch model.
    MEAN_FILE = "GA2014/GA2014_LOWER_MEAN.csv"
    STD_FILE = "GA2014/GA2014_LOWER_TOTAL.csv"
    INTER_FILE = "GA2014/GA2014_LOWER_INTER.csv"
    INTRA_FILE = "GA2014/GA2014_LOWER_INTRA.csv"
class GhofraniAtkinson2014CascadiaUpperTestCase(GhofraniAtkinson2014TestCase):
    """
    Implements the test case for the Ghofrani & Atkinson (2014) GMPE with the
    adjustment for Cascadia and the "upper" epistemic uncertainty case
    """
    GSIM_CLASS = GhofraniAtkinson2014CascadiaUpper
    # Expected-value tables for the Cascadia-adjusted upper-branch model.
    MEAN_FILE = "GA2014/GA2014_CASCADIA_UPPER_MEAN.csv"
    STD_FILE = "GA2014/GA2014_CASCADIA_UPPER_TOTAL.csv"
    INTER_FILE = "GA2014/GA2014_CASCADIA_UPPER_INTER.csv"
    INTRA_FILE = "GA2014/GA2014_CASCADIA_UPPER_INTRA.csv"
class GhofraniAtkinson2014CascadiaLowerTestCase(GhofraniAtkinson2014TestCase):
    """
    Implements the test case for the Ghofrani & Atkinson (2014) GMPE with the
    adjustment for Cascadia and the "lower" epistemic uncertainty case
    """
    GSIM_CLASS = GhofraniAtkinson2014CascadiaLower
    # Expected-value tables for the Cascadia-adjusted lower-branch model.
    MEAN_FILE = "GA2014/GA2014_CASCADIA_LOWER_MEAN.csv"
    STD_FILE = "GA2014/GA2014_CASCADIA_LOWER_TOTAL.csv"
    INTER_FILE = "GA2014/GA2014_CASCADIA_LOWER_INTER.csv"
    INTRA_FILE = "GA2014/GA2014_CASCADIA_LOWER_INTRA.csv"
| gem/oq-engine | openquake/hazardlib/tests/gsim/ghofrani_atkinson_test.py | Python | agpl-3.0 | 4,595 |
"""add column automatic_crawling to the user table
Revision ID: 8bf5694c0b9e
Revises: 5553a6c05fa7
Create Date: 2016-10-06 13:47:32.784711
"""
# revision identifiers, used by Alembic.
revision = '8bf5694c0b9e'  # id of this migration
down_revision = '5553a6c05fa7'  # previous migration in the chain
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Apply: add the boolean ``automatic_crawling`` column to ``user``.

    Note the ``default=True`` is a client-side (ORM) default, not a server
    default, so existing rows are left NULL at the database level.
    """
    automatic_crawling = sa.Column('automatic_crawling', sa.Boolean(),
                                   default=True)
    op.add_column('user', automatic_crawling)
def downgrade():
    """Revert: drop the ``automatic_crawling`` column from ``user``."""
    op.drop_column('user', 'automatic_crawling')
| JARR/JARR | migrations/versions/8bf5694c0b9e_add_column_automatic_crawling_to_the_.py | Python | agpl-3.0 | 543 |
# -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2016, Shoop Commerce Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
import pytest
from django.test import override_settings
from shuup.core.models import get_person_contact, MutableAddress, OrderLineType
from shuup.core.order_creator import OrderCreator
from shuup.testing.factories import (
get_default_payment_method, get_default_product,
get_default_shipping_method, get_default_shop, get_default_supplier,
get_initial_order_status
)
from shuup_tests.utils.basketish_order_source import BasketishOrderSource
def get_order_and_source(admin_user, product, language, language_fallback):
    """Build an order for a contact speaking ``language`` and check fallbacks.

    Sets ``language`` on the admin's person contact, builds a two-line order
    source (one product line, one 'other' line requiring verification),
    asserts that the source falls back to ``language_fallback`` (since no
    language is assigned to the source directly), creates the order and
    asserts the order inherits the source language.

    :return: (order, source) tuple for further assertions by callers.
    """
    # create original source to tamper with
    contact = get_person_contact(admin_user)
    contact.language = language
    contact.save()
    assert contact.language == language  # contact language is naive
    source = BasketishOrderSource(get_default_shop())
    source.status = get_initial_order_status()
    source.billing_address = MutableAddress.objects.create(name="Original Billing")
    source.shipping_address = MutableAddress.objects.create(name="Original Shipping")
    source.customer = contact
    source.payment_method = get_default_payment_method()
    source.shipping_method = get_default_shipping_method()
    source.add_line(
        type=OrderLineType.PRODUCT,
        product=product,
        supplier=get_default_supplier(),
        quantity=1,
        base_unit_price=source.create_price(10),
    )
    source.add_line(
        type=OrderLineType.OTHER,
        quantity=1,
        base_unit_price=source.create_price(10),
        require_verification=True,
    )
    assert len(source.get_lines()) == 2
    source.creator = admin_user
    # The source language was never assigned directly, so reading it must
    # resolve through the fallback chain.
    assert not source._language  # is None because it was not directly assigned
    assert source.language == language_fallback
    creator = OrderCreator()
    order = creator.create_order(source)
    assert order.language == source.language
    return order, source
@pytest.mark.django_db
@pytest.mark.parametrize("lang_code", ["en", "fi", "sv", "ja", "zh-hans", "pt-br", "it"])
def test_order_language_fallbacks(rf, admin_user, lang_code):
    """Orders keep the contact language when known, else fall back to
    the active ``LANGUAGE_CODE`` setting."""
    product = get_default_product()
    with override_settings(LANGUAGE_CODE=lang_code):
        # (contact language, expected order language) pairs; exotic or
        # region-qualified codes fall back to the configured lang_code.
        cases = [
            ("en", "en"),           # English
            ("fi", "fi"),           # Finnish
            ("bew", lang_code),     # Betawi
            ("bss", lang_code),     # Akoose
            ("en_US", lang_code),   # American English
            ("is", "is"),           # Icelandic
            ("es_419", lang_code),  # Latin American Spanish
            ("nds_NL", lang_code),  # Low Saxon
            ("arn", lang_code),     # Mapuche
            ("sv", "sv"),           # Swedish
        ]
        for language, fallback in cases:
            get_order_and_source(
                admin_user=admin_user, product=product,
                language=language, language_fallback=fallback)
| suutari/shoop | shuup_tests/core/test_order_languages.py | Python | agpl-3.0 | 3,168 |
Subsets and Splits